# HG changeset patch # User Denis Laxalde # Date 1452948531 -3600 # Node ID 0b59724cb3f21e2414b86fc37f759dd41da83330 # Parent 058bb3dc685f4692882a0c47646184713ecd3be4 Reorganize source tree to have a "cubicweb" top-level package Basically: mkdir cubicweb hg mv *.py -X setup.py cubicweb hg mv dataimport devtools entities etwist ext hooks i18n misc schemas server skeleton sobjects test web wsgi cubicweb Other changes: * adjust path to cubicweb-ctl in devtools tests * update setup.py to avoid importing __pkginfo__ (exec it instead), replace os.path.walk by os.walk and prepend `modname` here and there * update tox.ini to account for new test locations * update doc/conf.py so that it still finds __pkginfo__.py and CWDIR in doc/Makefile diff -r 058bb3dc685f -r 0b59724cb3f2 MANIFEST.in --- a/MANIFEST.in Mon Jan 04 18:40:30 2016 +0100 +++ b/MANIFEST.in Sat Jan 16 13:48:51 2016 +0100 @@ -19,33 +19,33 @@ recursive-include doc/images *.png *.svg include doc/conf.py -recursive-include misc *.py *.png *.display +recursive-include cubicweb/misc *.py *.png *.display -include web/views/*.pt -recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf *.svg *.woff *.eot -recursive-include web/wdoc *.rst *.png *.xml ChangeLog* -recursive-include devtools/data *.js *.css *.sh +include cubicweb/web/views/*.pt +recursive-include cubicweb/web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf *.svg *.woff *.eot +recursive-include cubicweb/web/wdoc *.rst *.png *.xml ChangeLog* +recursive-include cubicweb/devtools/data *.js *.css *.sh -recursive-include i18n *.pot *.po -recursive-include schemas *.py *.sql +recursive-include cubicweb/i18n *.pot *.po +recursive-include cubicweb/schemas *.py *.sql -recursive-include test/data bootstrap_cubes *.py *.sql -recursive-include entities/test/data bootstrap_cubes *.py -recursive-include sobjects/test/data bootstrap_cubes *.py -recursive-include hooks/test/data bootstrap_cubes *.py -recursive-include server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif -recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js *.po.ref -recursive-include web/test/data bootstrap_cubes pouet.css *.py -recursive-include etwist/test/data *.py +recursive-include cubicweb/test/data bootstrap_cubes *.py *.sql +recursive-include cubicweb/entities/test/data bootstrap_cubes *.py +recursive-include cubicweb/sobjects/test/data bootstrap_cubes *.py +recursive-include cubicweb/hooks/test/data bootstrap_cubes *.py +recursive-include cubicweb/server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif +recursive-include cubicweb/devtools/test/data bootstrap_cubes *.py *.txt *.js *.po.ref +recursive-include cubicweb/web/test/data bootstrap_cubes pouet.css *.py +recursive-include cubicweb/etwist/test/data *.py -recursive-include web/test/jstests *.js *.html *.css *.json -recursive-include web/test/windmill *.py +recursive-include cubicweb/web/test/jstests *.js *.html *.css *.json +recursive-include cubicweb/web/test/windmill *.py -recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl rules +recursive-include cubicweb/skeleton *.py *.css *.js *.po compat *.in *.tmpl rules prune doc/book/en/.static prune doc/book/fr/.static prune doc/html/_sources/ -prune misc/cwfs +prune cubicweb/misc/cwfs prune doc/js_api global-exclude *.pyc diff -r 058bb3dc685f -r 0b59724cb3f2 __init__.py --- a/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,265 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""CubicWeb is a generic framework to quickly build applications which describes -relations between entitites. -""" -__docformat__ = "restructuredtext en" - -# ignore the pygments UserWarnings -import warnings -import zlib -warnings.filterwarnings('ignore', category=UserWarning, - message='.*was already imported', - module='.*pygments') - - -from six import PY2, binary_type, text_type -from six.moves import builtins - -CW_SOFTWARE_ROOT = __path__[0] - -import sys, os, logging -from io import BytesIO - -from six.moves import cPickle as pickle - -from logilab.common.deprecation import deprecated -from logilab.common.logging_ext import set_log_methods -from yams.constraints import BASE_CONVERTERS, BASE_CHECKERS - -# pre python 2.7.2 safety -logging.basicConfig() - -from cubicweb.__pkginfo__ import version as __version__ - - -set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb')) - -# make all exceptions accessible from the package -from cubicweb._exceptions import * -from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound - - -# '_' is available to mark internationalized string but should not be used to -# do the actual translation -_ = text_type -if not hasattr(builtins, '_'): - builtins._ = deprecated("[3.22] Use 'from cubicweb import _'")(_) - - -# convert eid to the right type, raise ValueError if it's not a valid eid -@deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.') -def typed_eid(eid): - return int(eid) - -#def log_thread(f, w, a): -# print f.f_code.co_filename, f.f_code.co_name -#import threading -#threading.settrace(log_thread) - -class Binary(BytesIO): - """class to hold binary data. 
Use BytesIO to prevent use of unicode data""" - _allowed_types = (binary_type, bytearray, buffer if PY2 else memoryview) - - def __init__(self, buf=b''): - assert isinstance(buf, self._allowed_types), \ - "Binary objects must use bytes/buffer objects, not %s" % buf.__class__ - super(Binary, self).__init__(buf) - - def write(self, data): - assert isinstance(data, self._allowed_types), \ - "Binary objects must use bytes/buffer objects, not %s" % data.__class__ - super(Binary, self).write(data) - - def to_file(self, fobj): - """write a binary to disk - - the writing is performed in a safe way for files stored on - Windows SMB shares - """ - pos = self.tell() - self.seek(0) - if sys.platform == 'win32': - while True: - # the 16kB chunksize comes from the shutil module - # in stdlib - chunk = self.read(16*1024) - if not chunk: - break - fobj.write(chunk) - else: - fobj.write(self.read()) - self.seek(pos) - - @staticmethod - def from_file(filename): - """read a file and returns its contents in a Binary - - the reading is performed in a safe way for files stored on - Windows SMB shares - """ - binary = Binary() - with open(filename, 'rb') as fobj: - if sys.platform == 'win32': - while True: - # the 16kB chunksize comes from the shutil module - # in stdlib - chunk = fobj.read(16*1024) - if not chunk: - break - binary.write(chunk) - else: - binary.write(fobj.read()) - binary.seek(0) - return binary - - def __eq__(self, other): - if not isinstance(other, Binary): - return False - return self.getvalue() == other.getvalue() - - - # Binary helpers to store/fetch python objects - - @classmethod - def zpickle(cls, obj): - """ return a Binary containing a gzipped pickle of obj """ - retval = cls() - retval.write(zlib.compress(pickle.dumps(obj, protocol=2))) - return retval - - def unzpickle(self): - """ decompress and loads the stream before returning it """ - return pickle.loads(zlib.decompress(self.getvalue())) - - -def check_password(eschema, value): - return isinstance(value, (binary_type, Binary)) -BASE_CHECKERS['Password'] = check_password - -def str_or_binary(value): - if isinstance(value, Binary): - return value - return binary_type(value) -BASE_CONVERTERS['Password'] = str_or_binary - - -# use this dictionary to rename entity types while keeping bw compat -ETYPE_NAME_MAP = {} - -# XXX cubic web cube migration map. See if it's worth keeping this mecanism -# to help in cube renaming -CW_MIGRATION_MAP = {} - -def neg_role(role): - if role == 'subject': - return 'object' - return 'subject' - -def role(obj): - try: - return obj.role - except AttributeError: - return neg_role(obj.target) - -def target(obj): - try: - return obj.target - except AttributeError: - return neg_role(obj.role) - - -class CubicWebEventManager(object): - """simple event / callback manager. 
- - Typical usage to register a callback:: - - >>> from cubicweb import CW_EVENT_MANAGER - >>> CW_EVENT_MANAGER.bind('after-registry-reload', mycallback) - - Typical usage to emit an event:: - - >>> from cubicweb import CW_EVENT_MANAGER - >>> CW_EVENT_MANAGER.emit('after-registry-reload') - - emit() accepts an additional context parameter that will be passed - to the callback if specified (and only in that case) - """ - def __init__(self): - self.callbacks = {} - - def bind(self, event, callback, *args, **kwargs): - self.callbacks.setdefault(event, []).append( (callback, args, kwargs) ) - - def emit(self, event, context=None): - for callback, args, kwargs in self.callbacks.get(event, ()): - if context is None: - callback(*args, **kwargs) - else: - callback(context, *args, **kwargs) - -CW_EVENT_MANAGER = CubicWebEventManager() - -def onevent(event, *args, **kwargs): - """decorator to ease event / callback binding - - >>> from cubicweb import onevent - >>> @onevent('before-registry-reload') - ... def mycallback(): - ... print 'hello' - ... - >>> - """ - def _decorator(func): - CW_EVENT_MANAGER.bind(event, func, *args, **kwargs) - return func - return _decorator - - -from yams.schema import role_name as rname - -def validation_error(entity, errors, substitutions=None, i18nvalues=None): - """easy way to retrieve a :class:`cubicweb.ValidationError` for an entity or eid. - - You may also have 2-tuple as error keys, :func:`yams.role_name` will be - called automatically for them. - - Messages in errors **should not be translated yet**, though marked for - internationalization. You may give an additional substition dictionary that - will be used for interpolation after the translation. - """ - if substitutions is None: - # set empty dict else translation won't be done for backward - # compatibility reason (see ValidationError.translate method) - substitutions = {} - for key in list(errors): - if isinstance(key, tuple): - errors[rname(*key)] = errors.pop(key) - return ValidationError(getattr(entity, 'eid', entity), errors, - substitutions, i18nvalues) - - -# exceptions ################################################################## - -class ProgrammingError(Exception): #DatabaseError): - """Exception raised for errors that are related to the database's operation - and not necessarily under the control of the programmer, e.g. an unexpected - disconnect occurs, the data source name is not found, a transaction could - not be processed, a memory allocation error occurred during processing, - etc. - """ diff -r 058bb3dc685f -r 0b59724cb3f2 __pkginfo__.py --- a/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,138 +0,0 @@ -# pylint: disable=W0622,C0103 -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""cubicweb global packaging information for the cubicweb knowledge management -software -""" - -modname = distname = "cubicweb" - -numversion = (3, 22, 0) -version = '.'.join(str(num) for num in numversion) - -description = "a repository of entities / relations for knowledge management" -author = "Logilab" -author_email = "contact@logilab.fr" -web = 'http://www.cubicweb.org' -license = 'LGPL' - -classifiers = [ - 'Environment :: Web Environment', - 'Framework :: CubicWeb', - 'Programming Language :: Python', - 'Programming Language :: JavaScript', -] - -__depends__ = { - 'six': '>= 1.4.0', - 'logilab-common': '>= 0.63.1', - 'logilab-mtconverter': '>= 0.8.0', - 'rql': '>= 0.34.0', - 'yams': '>= 0.42.0', - #gettext # for xgettext, msgcat, etc... - # web dependencies - 'lxml': '', - # XXX graphviz - # server dependencies - 'logilab-database': '>= 1.15.0', - 'passlib': '', - 'pytz': '', - 'Markdown': '' - } - -__recommends__ = { - 'docutils': '>= 0.6', - 'Pillow': '', # for captcha - 'pycrypto': '', # for crypto extensions - 'fyzz': '>= 0.1.0', # for sparql - 'vobject': '>= 0.6.0', # for ical view - 'rdflib': None, # - 'pyzmq': None, - 'Twisted': '', - #'Products.FCKeditor':'', - #'SimpleTAL':'>= 4.1.6', - } - -import sys -from os import listdir, environ -from os.path import join, isdir -import glob - -scripts = [s for s in glob.glob(join('bin', 'cubicweb-*')) - if not s.endswith('.bat')] -include_dirs = [join('test', 'data'), - join('server', 'test', 'data'), - join('hooks', 'test', 'data'), - join('web', 'test', 'data'), - join('devtools', 'data'), - join('devtools', 'test', 'data'), - 'schemas', 'skeleton'] - - -_server_migration_dir = join('misc', 'migration') -_data_dir = join('web', 'data') -_wdoc_dir = join('web', 'wdoc') -_wdocimages_dir = join(_wdoc_dir, 'images') -_views_dir = join('web', 'views') -_i18n_dir = 'i18n' - -_pyversion = '.'.join(str(num) for num in sys.version_info[0:2]) -if '--home' in sys.argv: - # --home install - pydir = 'python' + _pyversion -else: - pydir = join('python' + _pyversion, 'site-packages') - -# data files that shall be copied into the main package directory -package_data = { - 'cubicweb.web.views':['*.pt'], - } - -try: - # data files that shall be copied outside the main package directory - data_files = [ - # server data - [join('share', 'cubicweb', 'schemas'), - glob.glob(join('schemas', '*.sql'))], - [join('share', 'cubicweb', 'migration'), - [join(_server_migration_dir, filename) - for filename in listdir(_server_migration_dir)]], - # web data - [join('share', 'cubicweb', 'cubes', 'shared', 'data'), - [join(_data_dir, fname) for fname in listdir(_data_dir) - if not isdir(join(_data_dir, fname))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'), - [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview'), - [join(_data_dir, 'jquery-treeview', fname) for fname in listdir(join(_data_dir, 'jquery-treeview')) - if not isdir(join(_data_dir, 'jquery-treeview', fname))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview', 'images'), - [join(_data_dir, 'jquery-treeview', 'images', fname) - for fname in listdir(join(_data_dir, 'jquery-treeview', 'images'))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'), - [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir) - if not isdir(join(_wdoc_dir, fname))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'), - [join(_wdocimages_dir, fname) for 
fname in listdir(_wdocimages_dir)]], - [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), - glob.glob(join(_i18n_dir, '*.po'))], - # skeleton - ] -except OSError: - # we are in an installed directory, don't care about this - pass diff -r 058bb3dc685f -r 0b59724cb3f2 _exceptions.py --- a/_exceptions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,209 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Exceptions shared by different cubicweb packages.""" - -__docformat__ = "restructuredtext en" - -from warnings import warn - -from six import PY3, text_type - -from logilab.common.decorators import cachedproperty - -from yams import ValidationError - -# abstract exceptions ######################################################### - -class CubicWebException(Exception): - """base class for cubicweb server exception""" - msg = "" - def __unicode__(self): - if self.msg: - if self.args: - return self.msg % tuple(self.args) - else: - return self.msg - else: - return u' '.join(text_type(arg) for arg in self.args) - __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') - -class ConfigurationError(CubicWebException): - """a misconfiguration error""" - -class InternalError(CubicWebException): - """base class for exceptions which should not occur""" - -class SecurityError(CubicWebException): - """base class for cubicweb server security exceptions""" - -class RepositoryError(CubicWebException): - """base class for repository exceptions""" - -class SourceException(CubicWebException): - """base class for source exceptions""" - -class CubicWebRuntimeError(CubicWebException): - """base class for runtime exceptions""" - -# repository exceptions ####################################################### - -class ConnectionError(RepositoryError): - """raised when a bad connection id is given or when an attempt to establish - a connection failed - """ - -class AuthenticationError(ConnectionError): - """raised when an attempt to establish a connection failed due to wrong - connection information (login / password or other authentication token) - """ - -class BadConnectionId(ConnectionError): - """raised when a bad connection id is given""" - -class UnknownEid(RepositoryError): - """the eid is not defined in the system tables""" - msg = 'No entity with eid %s in the repository' - -class UniqueTogetherError(RepositoryError): - """raised when a unique_together constraint caused an IntegrityError""" - def __init__(self, session, **kwargs): - self.session = session - assert 'rtypes' in kwargs or 'cstrname' in kwargs - self.kwargs = kwargs - # fill cache while the session is open - self.rtypes - - @cachedproperty - def rtypes(self): - if 'rtypes' in self.kwargs: - return self.kwargs['rtypes'] - cstrname = 
unicode(self.kwargs['cstrname']) - cstr = self.session.find('CWUniqueTogetherConstraint', name=cstrname).one() - return sorted(rtype.name for rtype in cstr.relations) - - @cachedproperty - def args(self): - warn('[3.18] UniqueTogetherError.args is deprecated, just use ' - 'the .rtypes accessor.', - DeprecationWarning) - # the first argument, etype, is never used and was never garanteed anyway - return None, self.rtypes - - -class ViolatedConstraint(RepositoryError): - def __init__(self, cnx, cstrname): - self.cnx = cnx - self.cstrname = cstrname - - -# security exceptions ######################################################### - -class Unauthorized(SecurityError): - """raised when a user tries to perform an action without sufficient - credentials - """ - msg = 'You are not allowed to perform this operation' - msg1 = 'You are not allowed to perform %s operation on %s' - var = None - - def __str__(self): - try: - if self.args and len(self.args) == 2: - return self.msg1 % self.args - if self.args: - return ' '.join(self.args) - return self.msg - except Exception as ex: - return str(ex) - -class Forbidden(SecurityError): - """raised when a user tries to perform a forbidden action - """ - -# source exceptions ########################################################### - -class EidNotInSource(SourceException): - """trying to access an object with a particular eid from a particular - source has failed - """ - msg = 'No entity with eid %s in %s' - - -# registry exceptions ######################################################### - -# pre 3.15 bw compat -from logilab.common.registry import RegistryException, ObjectNotFound, NoSelectableObject - -class UnknownProperty(RegistryException): - """property found in database but unknown in registry""" - -# query exception ############################################################# - -class QueryError(CubicWebRuntimeError): - """a query try to do something it shouldn't""" - -class NotAnEntity(CubicWebRuntimeError): - """raised when get_entity is called for a column which doesn't contain - a non final entity - """ - -class MultipleResultsError(CubicWebRuntimeError): - """raised when ResultSet.one() is called on a resultset with multiple rows - of multiple columns. - """ - -class NoResultError(CubicWebRuntimeError): - """raised when no result is found but at least one is expected. - """ - -class UndoTransactionException(QueryError): - """Raised when undoing a transaction could not be performed completely. - - Note that : - 1) the partial undo operation might be acceptable - depending upon the final application - - 2) the undo operation can also fail with a `ValidationError` in - cases where the undoing breaks integrity constraints checked - immediately. - - 3) It might be that neither of those exception is raised but a - subsequent `commit` might raise a `ValidationError` in cases - where the undoing breaks integrity constraints checked at - commit time. 
- - :type txuuix: int - :param txuuid: Unique identifier of the partially undone transaction - - :type errors: list - :param errors: List of errors occurred during undoing - """ - msg = u"The following error(s) occurred while undoing transaction #%d : %s" - - def __init__(self, txuuid, errors): - super(UndoTransactionException, self).__init__(txuuid, errors) - self.txuuid = txuuid - self.errors = errors - -# tools exceptions ############################################################ - -class ExecutionError(Exception): - """server execution control error (already started, not running...)""" - -# pylint: disable=W0611 -from logilab.common.clcommands import BadCommandUsage diff -r 058bb3dc685f -r 0b59724cb3f2 _gcdebug.py --- a/_gcdebug.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,112 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from __future__ import print_function - -import gc, types, weakref - -from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema -try: - from cubicweb.web.request import _NeedAuthAccessMock -except ImportError: - _NeedAuthAccessMock = None - -listiterator = type(iter([])) - -IGNORE_CLASSES = ( - type, tuple, dict, list, set, frozenset, type(len), - weakref.ref, weakref.WeakKeyDictionary, - listiterator, - property, classmethod, - types.ModuleType, types.FunctionType, types.MethodType, - types.MemberDescriptorType, types.GetSetDescriptorType, - ) -if _NeedAuthAccessMock is not None: - IGNORE_CLASSES = IGNORE_CLASSES + (_NeedAuthAccessMock,) - -def _get_counted_class(obj, classes): - for cls in classes: - if isinstance(obj, cls): - return cls - raise AssertionError() - -def gc_info(countclasses, - ignoreclasses=IGNORE_CLASSES, - viewreferrersclasses=(), showobjs=False, maxlevel=1): - gc.collect() - gc.collect() - counters = {} - ocounters = {} - for obj in gc.get_objects(): - if isinstance(obj, countclasses): - cls = _get_counted_class(obj, countclasses) - try: - counters[cls.__name__] += 1 - except KeyError: - counters[cls.__name__] = 1 - elif not isinstance(obj, ignoreclasses): - try: - key = '%s.%s' % (obj.__class__.__module__, - obj.__class__.__name__) - except AttributeError: - key = str(obj) - try: - ocounters[key] += 1 - except KeyError: - ocounters[key] = 1 - if isinstance(obj, viewreferrersclasses): - print(' ', obj, referrers(obj, showobjs, maxlevel)) - garbage = [repr(obj) for obj in gc.garbage] - return counters, ocounters, garbage - - -def referrers(obj, showobj=False, maxlevel=1): - objreferrers = _referrers(obj, maxlevel) - try: - return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x))) - for x in objreferrers)) - except TypeError: - s = set() - unhashable = [] - for x in objreferrers: - try: - s.add(x) - except TypeError: 
- unhashable.append(x) - return sorted(s) + unhashable - -def _referrers(obj, maxlevel, _seen=None, _level=0): - interesting = [] - if _seen is None: - _seen = set() - for x in gc.get_referrers(obj): - if id(x) in _seen: - continue - _seen.add(id(x)) - if isinstance(x, types.FrameType): - continue - if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)): - continue - if isinstance(x, (list, tuple, set, dict, listiterator)): - if _level >= maxlevel: - pass - #interesting.append(x) - else: - interesting += _referrers(x, maxlevel, _seen, _level+1) - else: - interesting.append(x) - return interesting diff -r 058bb3dc685f -r 0b59724cb3f2 appobject.py --- a/appobject.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,161 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -The `AppObject` class ---------------------- - -The AppObject class is the base class for all dynamically loaded objects -(application objects) accessible through the vregistry. - -We can find a certain number of attributes and methods defined in this class and -common to all the application objects. - -""" -__docformat__ = "restructuredtext en" - -from logging import getLogger - -from logilab.common.deprecation import deprecated, class_renamed -from logilab.common.logging_ext import set_log_methods - -# first line imports for bw compat -from logilab.common.registry import (objectify_predicate, traced_selection, Predicate, - RegistrableObject, yes) - - -objectify_selector = deprecated('[3.15] objectify_selector has been ' - 'renamed to objectify_predicates in ' - 'logilab.common.registry')(objectify_predicate) -traced_selection = deprecated('[3.15] traced_selection has been ' - 'moved to logilab.common.registry')(traced_selection) -Selector = class_renamed('Selector', Predicate, - '[3.15] Selector has been renamed to Predicate ' - 'in logilab.common.registry') - -@deprecated('[3.15] lltrace decorator can now be removed') -def lltrace(func): - return func - -# the base class for all appobjects ############################################ - -class AppObject(RegistrableObject): - """This is the base class for CubicWeb application objects which are - selected in a request context. 
- - The following attributes should be set on concrete appobject classes: - - At selection time, the following attributes are set on the instance: - - :attr:`_cw` - current request - :attr:`cw_extra_kwargs` - other received arguments - - And also the following, only if `rset` is found in arguments (in which case - rset/row/col will be removed from `cwextra_kwargs`): - - :attr:`cw_rset` - context result set or None - - :attr:`cw_row` - if a result set is set and the context is about a particular cell in the - result set, and not the result set as a whole, specify the row number we - are interested in, else None - - :attr:`cw_col` - if a result set is set and the context is about a particular cell in the - result set, and not the result set as a whole, specify the col number we - are interested in, else None - - - .. Note:: - - * do not inherit directly from this class but from a more specific class - such as `AnyEntity`, `EntityView`, `AnyRsetView`, `Action`... - - """ - __select__ = yes() - - @classmethod - def __registered__(cls, registry): - """called by the registry when the appobject has been registered. - - It must return the object that will be actually registered (this may be - the right hook to create an instance for example). By default the - appobject is returned without any transformation. - """ - pdefs = getattr(cls, 'cw_property_defs', {}) - for propid, pdef in pdefs.items(): - pdef = pdef.copy() # may be shared - pdef['default'] = getattr(cls, propid, pdef['default']) - pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide')) - registry.vreg.register_property(cls._cwpropkey(propid), **pdef) - assert callable(cls.__select__), cls - return cls - - def __init__(self, req, **extra): - super(AppObject, self).__init__() - self._cw = req - try: - self.cw_rset = extra.pop('rset') - self.cw_row = extra.pop('row', None) - self.cw_col = extra.pop('col', None) - except KeyError: - pass - self.cw_extra_kwargs = extra - - # persistent class properties ############################################## - # - # optional `cw_property_defs` dict on a class defines available persistent - # properties for this class: - # - # * key: id of the property (the actual CWProperty key is build using - # .. - # * value: tuple (property type, vocabfunc, default value, property description) - # possible types are those used by `logilab.common.configuration` - # - # notice that when it exists multiple objects with the same id (adaptation, - # overriding) only the first encountered definition is considered, so those - # objects can't try to have different default values for instance. - # - # you can then access to a property value using self.cw_propval, where self - # is an instance of class - - @classmethod - def _cwpropkey(cls, propid): - """return cw property key for the property of the given id for this - class - """ - return '%s.%s.%s' % (cls.__registry__, cls.__regid__, propid) - - def cw_propval(self, propid): - """return cw property value associated to key - - .. 
- """ - return self._cw.property_value(self._cwpropkey(propid)) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -set_log_methods(AppObject, getLogger('cubicweb.appobject')) - -# defined here to avoid warning on usage on the AppObject class -yes = deprecated('[3.15] yes has been moved to logilab.common.registry')(yes) diff -r 058bb3dc685f -r 0b59724cb3f2 crypto.py --- a/crypto.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Simple cryptographic routines, based on python-crypto.""" -__docformat__ = "restructuredtext en" - -from base64 import b64encode, b64decode - -from six.moves import cPickle as pickle - -from Crypto.Cipher import Blowfish - - -_CYPHERERS = {} -def _cypherer(seed): - try: - return _CYPHERERS[seed] - except KeyError: - _CYPHERERS[seed] = Blowfish.new(seed, Blowfish.MODE_ECB) - return _CYPHERERS[seed] - - -def encrypt(data, seed): - string = pickle.dumps(data) - string = string + '*' * (8 - len(string) % 8) - string = b64encode(_cypherer(seed).encrypt(string)) - return unicode(string) - - -def decrypt(string, seed): - # pickle ignores trailing characters so we do not need to strip them off - string = _cypherer(seed).decrypt(b64decode(string)) - return pickle.loads(string) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,265 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""CubicWeb is a generic framework to quickly build applications which describes +relations between entitites. 
+""" +__docformat__ = "restructuredtext en" + +# ignore the pygments UserWarnings +import warnings +import zlib +warnings.filterwarnings('ignore', category=UserWarning, + message='.*was already imported', + module='.*pygments') + + +from six import PY2, binary_type, text_type +from six.moves import builtins + +CW_SOFTWARE_ROOT = __path__[0] + +import sys, os, logging +from io import BytesIO + +from six.moves import cPickle as pickle + +from logilab.common.deprecation import deprecated +from logilab.common.logging_ext import set_log_methods +from yams.constraints import BASE_CONVERTERS, BASE_CHECKERS + +# pre python 2.7.2 safety +logging.basicConfig() + +from cubicweb.__pkginfo__ import version as __version__ + + +set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb')) + +# make all exceptions accessible from the package +from cubicweb._exceptions import * +from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound + + +# '_' is available to mark internationalized string but should not be used to +# do the actual translation +_ = text_type +if not hasattr(builtins, '_'): + builtins._ = deprecated("[3.22] Use 'from cubicweb import _'")(_) + + +# convert eid to the right type, raise ValueError if it's not a valid eid +@deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.') +def typed_eid(eid): + return int(eid) + +#def log_thread(f, w, a): +# print f.f_code.co_filename, f.f_code.co_name +#import threading +#threading.settrace(log_thread) + +class Binary(BytesIO): + """class to hold binary data. Use BytesIO to prevent use of unicode data""" + _allowed_types = (binary_type, bytearray, buffer if PY2 else memoryview) + + def __init__(self, buf=b''): + assert isinstance(buf, self._allowed_types), \ + "Binary objects must use bytes/buffer objects, not %s" % buf.__class__ + super(Binary, self).__init__(buf) + + def write(self, data): + assert isinstance(data, self._allowed_types), \ + "Binary objects must use bytes/buffer objects, not %s" % data.__class__ + super(Binary, self).write(data) + + def to_file(self, fobj): + """write a binary to disk + + the writing is performed in a safe way for files stored on + Windows SMB shares + """ + pos = self.tell() + self.seek(0) + if sys.platform == 'win32': + while True: + # the 16kB chunksize comes from the shutil module + # in stdlib + chunk = self.read(16*1024) + if not chunk: + break + fobj.write(chunk) + else: + fobj.write(self.read()) + self.seek(pos) + + @staticmethod + def from_file(filename): + """read a file and returns its contents in a Binary + + the reading is performed in a safe way for files stored on + Windows SMB shares + """ + binary = Binary() + with open(filename, 'rb') as fobj: + if sys.platform == 'win32': + while True: + # the 16kB chunksize comes from the shutil module + # in stdlib + chunk = fobj.read(16*1024) + if not chunk: + break + binary.write(chunk) + else: + binary.write(fobj.read()) + binary.seek(0) + return binary + + def __eq__(self, other): + if not isinstance(other, Binary): + return False + return self.getvalue() == other.getvalue() + + + # Binary helpers to store/fetch python objects + + @classmethod + def zpickle(cls, obj): + """ return a Binary containing a gzipped pickle of obj """ + retval = cls() + retval.write(zlib.compress(pickle.dumps(obj, protocol=2))) + return retval + + def unzpickle(self): + """ decompress and loads the stream before returning it """ + return pickle.loads(zlib.decompress(self.getvalue())) + + +def check_password(eschema, 
value): + return isinstance(value, (binary_type, Binary)) +BASE_CHECKERS['Password'] = check_password + +def str_or_binary(value): + if isinstance(value, Binary): + return value + return binary_type(value) +BASE_CONVERTERS['Password'] = str_or_binary + + +# use this dictionary to rename entity types while keeping bw compat +ETYPE_NAME_MAP = {} + +# XXX cubic web cube migration map. See if it's worth keeping this mecanism +# to help in cube renaming +CW_MIGRATION_MAP = {} + +def neg_role(role): + if role == 'subject': + return 'object' + return 'subject' + +def role(obj): + try: + return obj.role + except AttributeError: + return neg_role(obj.target) + +def target(obj): + try: + return obj.target + except AttributeError: + return neg_role(obj.role) + + +class CubicWebEventManager(object): + """simple event / callback manager. + + Typical usage to register a callback:: + + >>> from cubicweb import CW_EVENT_MANAGER + >>> CW_EVENT_MANAGER.bind('after-registry-reload', mycallback) + + Typical usage to emit an event:: + + >>> from cubicweb import CW_EVENT_MANAGER + >>> CW_EVENT_MANAGER.emit('after-registry-reload') + + emit() accepts an additional context parameter that will be passed + to the callback if specified (and only in that case) + """ + def __init__(self): + self.callbacks = {} + + def bind(self, event, callback, *args, **kwargs): + self.callbacks.setdefault(event, []).append( (callback, args, kwargs) ) + + def emit(self, event, context=None): + for callback, args, kwargs in self.callbacks.get(event, ()): + if context is None: + callback(*args, **kwargs) + else: + callback(context, *args, **kwargs) + +CW_EVENT_MANAGER = CubicWebEventManager() + +def onevent(event, *args, **kwargs): + """decorator to ease event / callback binding + + >>> from cubicweb import onevent + >>> @onevent('before-registry-reload') + ... def mycallback(): + ... print 'hello' + ... + >>> + """ + def _decorator(func): + CW_EVENT_MANAGER.bind(event, func, *args, **kwargs) + return func + return _decorator + + +from yams.schema import role_name as rname + +def validation_error(entity, errors, substitutions=None, i18nvalues=None): + """easy way to retrieve a :class:`cubicweb.ValidationError` for an entity or eid. + + You may also have 2-tuple as error keys, :func:`yams.role_name` will be + called automatically for them. + + Messages in errors **should not be translated yet**, though marked for + internationalization. You may give an additional substition dictionary that + will be used for interpolation after the translation. + """ + if substitutions is None: + # set empty dict else translation won't be done for backward + # compatibility reason (see ValidationError.translate method) + substitutions = {} + for key in list(errors): + if isinstance(key, tuple): + errors[rname(*key)] = errors.pop(key) + return ValidationError(getattr(entity, 'eid', entity), errors, + substitutions, i18nvalues) + + +# exceptions ################################################################## + +class ProgrammingError(Exception): #DatabaseError): + """Exception raised for errors that are related to the database's operation + and not necessarily under the control of the programmer, e.g. an unexpected + disconnect occurs, the data source name is not found, a transaction could + not be processed, a memory allocation error occurred during processing, + etc. 
+ """ diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,138 @@ +# pylint: disable=W0622,C0103 +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb global packaging information for the cubicweb knowledge management +software +""" + +modname = distname = "cubicweb" + +numversion = (3, 22, 0) +version = '.'.join(str(num) for num in numversion) + +description = "a repository of entities / relations for knowledge management" +author = "Logilab" +author_email = "contact@logilab.fr" +web = 'http://www.cubicweb.org' +license = 'LGPL' + +classifiers = [ + 'Environment :: Web Environment', + 'Framework :: CubicWeb', + 'Programming Language :: Python', + 'Programming Language :: JavaScript', +] + +__depends__ = { + 'six': '>= 1.4.0', + 'logilab-common': '>= 0.63.1', + 'logilab-mtconverter': '>= 0.8.0', + 'rql': '>= 0.34.0', + 'yams': '>= 0.42.0', + #gettext # for xgettext, msgcat, etc... 
+ # web dependencies + 'lxml': '', + # XXX graphviz + # server dependencies + 'logilab-database': '>= 1.15.0', + 'passlib': '', + 'pytz': '', + 'Markdown': '' + } + +__recommends__ = { + 'docutils': '>= 0.6', + 'Pillow': '', # for captcha + 'pycrypto': '', # for crypto extensions + 'fyzz': '>= 0.1.0', # for sparql + 'vobject': '>= 0.6.0', # for ical view + 'rdflib': None, # + 'pyzmq': None, + 'Twisted': '', + #'Products.FCKeditor':'', + #'SimpleTAL':'>= 4.1.6', + } + +import sys +from os import listdir, environ +from os.path import join, isdir +import glob + +scripts = [s for s in glob.glob(join('bin', 'cubicweb-*')) + if not s.endswith('.bat')] +include_dirs = [join('test', 'data'), + join('server', 'test', 'data'), + join('hooks', 'test', 'data'), + join('web', 'test', 'data'), + join('devtools', 'data'), + join('devtools', 'test', 'data'), + 'schemas', 'skeleton'] + + +_server_migration_dir = join(modname, 'misc', 'migration') +_data_dir = join(modname, 'web', 'data') +_wdoc_dir = join(modname, 'web', 'wdoc') +_wdocimages_dir = join(_wdoc_dir, 'images') +_views_dir = join(modname, 'web', 'views') +_i18n_dir = join(modname, 'i18n') + +_pyversion = '.'.join(str(num) for num in sys.version_info[0:2]) +if '--home' in sys.argv: + # --home install + pydir = 'python' + _pyversion +else: + pydir = join('python' + _pyversion, 'site-packages') + +# data files that shall be copied into the main package directory +package_data = { + 'cubicweb.web.views':['*.pt'], + } + +try: + # data files that shall be copied outside the main package directory + data_files = [ + # server data + [join('share', 'cubicweb', 'schemas'), + glob.glob(join(modname, 'schemas', '*.sql'))], + [join('share', 'cubicweb', 'migration'), + [join(_server_migration_dir, filename) + for filename in listdir(_server_migration_dir)]], + # web data + [join('share', 'cubicweb', 'cubes', 'shared', 'data'), + [join(_data_dir, fname) for fname in listdir(_data_dir) + if not isdir(join(_data_dir, fname))]], + [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'), + [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]], + [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview'), + [join(_data_dir, 'jquery-treeview', fname) for fname in listdir(join(_data_dir, 'jquery-treeview')) + if not isdir(join(_data_dir, 'jquery-treeview', fname))]], + [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview', 'images'), + [join(_data_dir, 'jquery-treeview', 'images', fname) + for fname in listdir(join(_data_dir, 'jquery-treeview', 'images'))]], + [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'), + [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir) + if not isdir(join(_wdoc_dir, fname))]], + [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'), + [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]], + [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), + glob.glob(join(_i18n_dir, '*.po'))], + # skeleton + ] +except OSError: + # we are in an installed directory, don't care about this + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/_exceptions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/_exceptions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,209 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Exceptions shared by different cubicweb packages.""" + +__docformat__ = "restructuredtext en" + +from warnings import warn + +from six import PY3, text_type + +from logilab.common.decorators import cachedproperty + +from yams import ValidationError + +# abstract exceptions ######################################################### + +class CubicWebException(Exception): + """base class for cubicweb server exception""" + msg = "" + def __unicode__(self): + if self.msg: + if self.args: + return self.msg % tuple(self.args) + else: + return self.msg + else: + return u' '.join(text_type(arg) for arg in self.args) + __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') + +class ConfigurationError(CubicWebException): + """a misconfiguration error""" + +class InternalError(CubicWebException): + """base class for exceptions which should not occur""" + +class SecurityError(CubicWebException): + """base class for cubicweb server security exceptions""" + +class RepositoryError(CubicWebException): + """base class for repository exceptions""" + +class SourceException(CubicWebException): + """base class for source exceptions""" + +class CubicWebRuntimeError(CubicWebException): + """base class for runtime exceptions""" + +# repository exceptions ####################################################### + +class ConnectionError(RepositoryError): + """raised when a bad connection id is given or when an attempt to establish + a connection failed + """ + +class AuthenticationError(ConnectionError): + """raised when an attempt to establish a connection failed due to wrong + connection information (login / password or other authentication token) + """ + +class BadConnectionId(ConnectionError): + """raised when a bad connection id is given""" + +class UnknownEid(RepositoryError): + """the eid is not defined in the system tables""" + msg = 'No entity with eid %s in the repository' + +class UniqueTogetherError(RepositoryError): + """raised when a unique_together constraint caused an IntegrityError""" + def __init__(self, session, **kwargs): + self.session = session + assert 'rtypes' in kwargs or 'cstrname' in kwargs + self.kwargs = kwargs + # fill cache while the session is open + self.rtypes + + @cachedproperty + def rtypes(self): + if 'rtypes' in self.kwargs: + return self.kwargs['rtypes'] + cstrname = unicode(self.kwargs['cstrname']) + cstr = self.session.find('CWUniqueTogetherConstraint', name=cstrname).one() + return sorted(rtype.name for rtype in cstr.relations) + + @cachedproperty + def args(self): + warn('[3.18] UniqueTogetherError.args is deprecated, just use ' + 'the .rtypes accessor.', + DeprecationWarning) + # the first argument, etype, is never used and was never garanteed anyway + return None, self.rtypes + + +class ViolatedConstraint(RepositoryError): + def __init__(self, cnx, cstrname): + self.cnx = cnx + self.cstrname = cstrname + + +# security 
exceptions ######################################################### + +class Unauthorized(SecurityError): + """raised when a user tries to perform an action without sufficient + credentials + """ + msg = 'You are not allowed to perform this operation' + msg1 = 'You are not allowed to perform %s operation on %s' + var = None + + def __str__(self): + try: + if self.args and len(self.args) == 2: + return self.msg1 % self.args + if self.args: + return ' '.join(self.args) + return self.msg + except Exception as ex: + return str(ex) + +class Forbidden(SecurityError): + """raised when a user tries to perform a forbidden action + """ + +# source exceptions ########################################################### + +class EidNotInSource(SourceException): + """trying to access an object with a particular eid from a particular + source has failed + """ + msg = 'No entity with eid %s in %s' + + +# registry exceptions ######################################################### + +# pre 3.15 bw compat +from logilab.common.registry import RegistryException, ObjectNotFound, NoSelectableObject + +class UnknownProperty(RegistryException): + """property found in database but unknown in registry""" + +# query exception ############################################################# + +class QueryError(CubicWebRuntimeError): + """a query try to do something it shouldn't""" + +class NotAnEntity(CubicWebRuntimeError): + """raised when get_entity is called for a column which doesn't contain + a non final entity + """ + +class MultipleResultsError(CubicWebRuntimeError): + """raised when ResultSet.one() is called on a resultset with multiple rows + of multiple columns. + """ + +class NoResultError(CubicWebRuntimeError): + """raised when no result is found but at least one is expected. + """ + +class UndoTransactionException(QueryError): + """Raised when undoing a transaction could not be performed completely. + + Note that : + 1) the partial undo operation might be acceptable + depending upon the final application + + 2) the undo operation can also fail with a `ValidationError` in + cases where the undoing breaks integrity constraints checked + immediately. + + 3) It might be that neither of those exception is raised but a + subsequent `commit` might raise a `ValidationError` in cases + where the undoing breaks integrity constraints checked at + commit time. + + :type txuuix: int + :param txuuid: Unique identifier of the partially undone transaction + + :type errors: list + :param errors: List of errors occurred during undoing + """ + msg = u"The following error(s) occurred while undoing transaction #%d : %s" + + def __init__(self, txuuid, errors): + super(UndoTransactionException, self).__init__(txuuid, errors) + self.txuuid = txuuid + self.errors = errors + +# tools exceptions ############################################################ + +class ExecutionError(Exception): + """server execution control error (already started, not running...)""" + +# pylint: disable=W0611 +from logilab.common.clcommands import BadCommandUsage diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/_gcdebug.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/_gcdebug.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,112 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from __future__ import print_function + +import gc, types, weakref + +from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema +try: + from cubicweb.web.request import _NeedAuthAccessMock +except ImportError: + _NeedAuthAccessMock = None + +listiterator = type(iter([])) + +IGNORE_CLASSES = ( + type, tuple, dict, list, set, frozenset, type(len), + weakref.ref, weakref.WeakKeyDictionary, + listiterator, + property, classmethod, + types.ModuleType, types.FunctionType, types.MethodType, + types.MemberDescriptorType, types.GetSetDescriptorType, + ) +if _NeedAuthAccessMock is not None: + IGNORE_CLASSES = IGNORE_CLASSES + (_NeedAuthAccessMock,) + +def _get_counted_class(obj, classes): + for cls in classes: + if isinstance(obj, cls): + return cls + raise AssertionError() + +def gc_info(countclasses, + ignoreclasses=IGNORE_CLASSES, + viewreferrersclasses=(), showobjs=False, maxlevel=1): + gc.collect() + gc.collect() + counters = {} + ocounters = {} + for obj in gc.get_objects(): + if isinstance(obj, countclasses): + cls = _get_counted_class(obj, countclasses) + try: + counters[cls.__name__] += 1 + except KeyError: + counters[cls.__name__] = 1 + elif not isinstance(obj, ignoreclasses): + try: + key = '%s.%s' % (obj.__class__.__module__, + obj.__class__.__name__) + except AttributeError: + key = str(obj) + try: + ocounters[key] += 1 + except KeyError: + ocounters[key] = 1 + if isinstance(obj, viewreferrersclasses): + print(' ', obj, referrers(obj, showobjs, maxlevel)) + garbage = [repr(obj) for obj in gc.garbage] + return counters, ocounters, garbage + + +def referrers(obj, showobj=False, maxlevel=1): + objreferrers = _referrers(obj, maxlevel) + try: + return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x))) + for x in objreferrers)) + except TypeError: + s = set() + unhashable = [] + for x in objreferrers: + try: + s.add(x) + except TypeError: + unhashable.append(x) + return sorted(s) + unhashable + +def _referrers(obj, maxlevel, _seen=None, _level=0): + interesting = [] + if _seen is None: + _seen = set() + for x in gc.get_referrers(obj): + if id(x) in _seen: + continue + _seen.add(id(x)) + if isinstance(x, types.FrameType): + continue + if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)): + continue + if isinstance(x, (list, tuple, set, dict, listiterator)): + if _level >= maxlevel: + pass + #interesting.append(x) + else: + interesting += _referrers(x, maxlevel, _seen, _level+1) + else: + interesting.append(x) + return interesting diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/appobject.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/appobject.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,161 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +The `AppObject` class +--------------------- + +The AppObject class is the base class for all dynamically loaded objects +(application objects) accessible through the vregistry. + +We can find a certain number of attributes and methods defined in this class and +common to all the application objects. + +""" +__docformat__ = "restructuredtext en" + +from logging import getLogger + +from logilab.common.deprecation import deprecated, class_renamed +from logilab.common.logging_ext import set_log_methods + +# first line imports for bw compat +from logilab.common.registry import (objectify_predicate, traced_selection, Predicate, + RegistrableObject, yes) + + +objectify_selector = deprecated('[3.15] objectify_selector has been ' + 'renamed to objectify_predicates in ' + 'logilab.common.registry')(objectify_predicate) +traced_selection = deprecated('[3.15] traced_selection has been ' + 'moved to logilab.common.registry')(traced_selection) +Selector = class_renamed('Selector', Predicate, + '[3.15] Selector has been renamed to Predicate ' + 'in logilab.common.registry') + +@deprecated('[3.15] lltrace decorator can now be removed') +def lltrace(func): + return func + +# the base class for all appobjects ############################################ + +class AppObject(RegistrableObject): + """This is the base class for CubicWeb application objects which are + selected in a request context. + + The following attributes should be set on concrete appobject classes: + + At selection time, the following attributes are set on the instance: + + :attr:`_cw` + current request + :attr:`cw_extra_kwargs` + other received arguments + + And also the following, only if `rset` is found in arguments (in which case + rset/row/col will be removed from `cwextra_kwargs`): + + :attr:`cw_rset` + context result set or None + + :attr:`cw_row` + if a result set is set and the context is about a particular cell in the + result set, and not the result set as a whole, specify the row number we + are interested in, else None + + :attr:`cw_col` + if a result set is set and the context is about a particular cell in the + result set, and not the result set as a whole, specify the col number we + are interested in, else None + + + .. Note:: + + * do not inherit directly from this class but from a more specific class + such as `AnyEntity`, `EntityView`, `AnyRsetView`, `Action`... + + """ + __select__ = yes() + + @classmethod + def __registered__(cls, registry): + """called by the registry when the appobject has been registered. + + It must return the object that will be actually registered (this may be + the right hook to create an instance for example). By default the + appobject is returned without any transformation. 
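
Concrete application objects combine `__registry__`/`__regid__` identifiers with a `__select__` predicate, and the deprecation shims above show that predicates now live in `logilab.common.registry`. A minimal sketch of a custom predicate combined with the base selector; the registry name, identifier and scoring criterion are purely illustrative::

    from logilab.common.registry import objectify_predicate
    from cubicweb.appobject import AppObject

    @objectify_predicate
    def request_has_language(cls, req, **kwargs):
        # score 1 when the request carries a language, 0 otherwise (hypothetical criterion)
        return 1 if getattr(req, 'lang', None) else 0

    class MyAppObject(AppObject):
        __registry__ = 'mystuff'                      # hypothetical registry name
        __regid__ = 'myobject'
        __select__ = AppObject.__select__ & request_has_language()
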
+ """ + pdefs = getattr(cls, 'cw_property_defs', {}) + for propid, pdef in pdefs.items(): + pdef = pdef.copy() # may be shared + pdef['default'] = getattr(cls, propid, pdef['default']) + pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide')) + registry.vreg.register_property(cls._cwpropkey(propid), **pdef) + assert callable(cls.__select__), cls + return cls + + def __init__(self, req, **extra): + super(AppObject, self).__init__() + self._cw = req + try: + self.cw_rset = extra.pop('rset') + self.cw_row = extra.pop('row', None) + self.cw_col = extra.pop('col', None) + except KeyError: + pass + self.cw_extra_kwargs = extra + + # persistent class properties ############################################## + # + # optional `cw_property_defs` dict on a class defines available persistent + # properties for this class: + # + # * key: id of the property (the actual CWProperty key is build using + # .. + # * value: tuple (property type, vocabfunc, default value, property description) + # possible types are those used by `logilab.common.configuration` + # + # notice that when it exists multiple objects with the same id (adaptation, + # overriding) only the first encountered definition is considered, so those + # objects can't try to have different default values for instance. + # + # you can then access to a property value using self.cw_propval, where self + # is an instance of class + + @classmethod + def _cwpropkey(cls, propid): + """return cw property key for the property of the given id for this + class + """ + return '%s.%s.%s' % (cls.__registry__, cls.__regid__, propid) + + def cw_propval(self, propid): + """return cw property value associated to key + + .. + """ + return self._cw.property_value(self._cwpropkey(propid)) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +set_log_methods(AppObject, getLogger('cubicweb.appobject')) + +# defined here to avoid warning on usage on the AppObject class +yes = deprecated('[3.15] yes has been moved to logilab.common.registry')(yes) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/crypto.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/crypto.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,47 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""Simple cryptographic routines, based on python-crypto.""" +__docformat__ = "restructuredtext en" + +from base64 import b64encode, b64decode + +from six.moves import cPickle as pickle + +from Crypto.Cipher import Blowfish + + +_CYPHERERS = {} +def _cypherer(seed): + try: + return _CYPHERERS[seed] + except KeyError: + _CYPHERERS[seed] = Blowfish.new(seed, Blowfish.MODE_ECB) + return _CYPHERERS[seed] + + +def encrypt(data, seed): + string = pickle.dumps(data) + string = string + '*' * (8 - len(string) % 8) + string = b64encode(_cypherer(seed).encrypt(string)) + return unicode(string) + + +def decrypt(string, seed): + # pickle ignores trailing characters so we do not need to strip them off + string = _cypherer(seed).decrypt(b64decode(string)) + return pickle.loads(string) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/cwconfig.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/cwconfig.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1346 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +.. _ResourceMode: + +Resource mode +------------- + +Standard resource mode +``````````````````````````` + +A resource *mode* is a predefined set of settings for various resources +directories, such as cubes, instances, etc. to ease development with the +framework. There are two running modes with *CubicWeb*: + +* **system**: resources are searched / created in the system directories (eg + usually requiring root access): + + - instances are stored in :file:`/etc/cubicweb.d` + - temporary files (such as pid file) in :file:`/var/run/cubicweb` + + where `` is the detected installation prefix ('/usr/local' for + instance). + +* **user**: resources are searched / created in the user home directory: + + - instances are stored in :file:`~/etc/cubicweb.d` + - temporary files (such as pid file) in :file:`/tmp` + + + + +.. _CubicwebWithinVirtualEnv: + +Within virtual environment +``````````````````````````` + +If you are not administrator of you machine or if you need to play with some +specific version of |cubicweb| you can use `virtualenv`_ a tool to create +isolated Python environments. + +- instances are stored in :file:`/etc/cubicweb.d` +- temporary files (such as pid file) in :file:`/var/run/cubicweb` + +.. _`virtualenv`: http://pypi.python.org/pypi/virtualenv + +Custom resource location +```````````````````````````````` + +Notice that each resource path may be explicitly set using an environment +variable if the default doesn't suit your needs. 
Here are the default resource +directories that are affected according to mode: + +* **system**: :: + + CW_INSTANCES_DIR = /etc/cubicweb.d/ + CW_INSTANCES_DATA_DIR = /var/lib/cubicweb/instances/ + CW_RUNTIME_DIR = /var/run/cubicweb/ + +* **user**: :: + + CW_INSTANCES_DIR = ~/etc/cubicweb.d/ + CW_INSTANCES_DATA_DIR = ~/etc/cubicweb.d/ + CW_RUNTIME_DIR = /tmp + +Cubes search path is also affected, see the :ref:`Cube` section. + +Setting Cubicweb Mode +````````````````````` + +By default, the mode is set to 'system' for standard installation. The mode is +set to 'user' if `cubicweb is used from a mercurial repository`_. You can force +this by setting the :envvar:`CW_MODE` environment variable to either 'user' or +'system' so you can easily: + +* use system wide installation but user specific instances and all, without root + privileges on the system (`export CW_MODE=user`) + +* use local checkout of cubicweb on system wide instances (requires root + privileges on the system (`export CW_MODE=system`) + +If you've a doubt about the mode you're currently running, check the first line +outputed by the :command:`cubicweb-ctl list` command. + +.. _`cubicweb is used from a mercurial repository`: CubicwebDevelopmentMod_ + +.. _CubicwebDevelopmentMod: + +Development Mode +````````````````````` +If :file:`.hg` directory is found into the cubicweb package, there are specific resource rules. + +`` is the source checkout's ``cubicweb`` directory: + +* main cubes directory is `/../../cubes`. You can specify + another one with :envvar:`CW_INSTANCES_DIR` environment variable or simply + add some other directories by using :envvar:`CW_CUBES_PATH` + +* cubicweb migration files are searched in `/misc/migration` + instead of `/share/cubicweb/migration/`. + + +.. _ConfigurationEnv: + +Environment configuration +------------------------- + +Python +`````` + +If you installed *CubicWeb* by cloning the Mercurial shell repository or from source +distribution, then you will need to update the environment variable PYTHONPATH by +adding the path to `cubicweb`: + +Add the following lines to either :file:`.bashrc` or :file:`.bash_profile` to +configure your development environment :: + + export PYTHONPATH=/full/path/to/grshell-cubicweb + +If you installed *CubicWeb* with packages, no configuration is required and your +new cubes will be placed in `/usr/share/cubicweb/cubes` and your instances will +be placed in `/etc/cubicweb.d`. + + +CubicWeb +```````` + +Here are all environment variables that may be used to configure *CubicWeb*: + +.. envvar:: CW_MODE + + Resource mode: user or system, as explained in :ref:`ResourceMode`. + +.. envvar:: CW_CUBES_PATH + + Augments the default search path for cubes. You may specify several + directories using ':' as separator (';' under windows environment). + +.. envvar:: CW_INSTANCES_DIR + + Directory where cubicweb instances will be found. + +.. envvar:: CW_INSTANCES_DATA_DIR + + Directory where cubicweb instances data will be written (backup file...) + +.. 
envvar:: CW_RUNTIME_DIR + + Directory where pid files will be written +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import os +import stat +import logging +import logging.config +from smtplib import SMTP +from threading import Lock +from os.path import (exists, join, expanduser, abspath, normpath, + basename, isdir, dirname, splitext) +from warnings import warn, filterwarnings + +from six import text_type + +from logilab.common.decorators import cached, classproperty +from logilab.common.deprecation import deprecated +from logilab.common.logging_ext import set_log_methods, init_log +from logilab.common.configuration import (Configuration, Method, + ConfigurationMixIn, merge_options) + +from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, + ConfigurationError, Binary, _) +from cubicweb.toolsutils import create_dir + +CONFIGURATIONS = [] + +SMTP_LOCK = Lock() + + +def configuration_cls(name): + """return the configuration class registered with the given name""" + try: + return [c for c in CONFIGURATIONS if c.name == name][0] + except IndexError: + raise ConfigurationError('no such config %r (check it exists with "cubicweb-ctl list")' % name) + +def possible_configurations(directory): + """return a list of installed configurations in a directory + according to \*-ctl files + """ + return [name for name in ('repository', 'all-in-one') + if exists(join(directory, '%s.conf' % name))] + +def guess_configuration(directory): + """try to guess the configuration to use for a directory. If multiple + configurations are found, ConfigurationError is raised + """ + modes = possible_configurations(directory) + if len(modes) != 1: + raise ConfigurationError('unable to guess configuration from %r %s' + % (directory, modes)) + return modes[0] + +def _find_prefix(start_path=CW_SOFTWARE_ROOT): + """Runs along the parent directories of *start_path* (default to cubicweb source directory) + looking for one containing a 'share/cubicweb' directory. + The first matching directory is assumed as the prefix installation of cubicweb + + Returns the matching prefix or None. 
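
Some of these variables (`CW_MODE`, `CW_CUBES_PATH`) are read when `cwconfig` is imported, so overrides must be in place before the import; `CW_INSTANCES_DIR` and `CW_INSTANCES_DATA_DIR` are read at call time. A small sketch, with illustrative paths::

    import os
    os.environ['CW_MODE'] = 'user'                        # must be 'user' or 'system'
    os.environ['CW_CUBES_PATH'] = '/srv/extra-cubes'      # hypothetical extra cubes directory
    os.environ['CW_INSTANCES_DIR'] = os.path.expanduser('~/etc/cubicweb.d')

    from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
    print(cwcfg.instances_dir())        # -> absolute path of ~/etc/cubicweb.d
    print(cwcfg.cubes_search_path())    # includes /srv/extra-cubes if that directory exists
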
+ """ + prefix = start_path + old_prefix = None + if not isdir(start_path): + prefix = dirname(start_path) + while (not isdir(join(prefix, 'share', 'cubicweb')) + or prefix.endswith('.egg')) and prefix != old_prefix: + old_prefix = prefix + prefix = dirname(prefix) + if isdir(join(prefix, 'share', 'cubicweb')): + return prefix + return sys.prefix + +# persistent options definition +PERSISTENT_OPTIONS = ( + ('encoding', + {'type' : 'string', + 'default': 'UTF-8', + 'help': _('user interface encoding'), + 'group': 'ui', 'sitewide': True, + }), + ('language', + {'type' : 'string', + 'default': 'en', + 'vocabulary': Method('available_languages'), + 'help': _('language of the user interface'), + 'group': 'ui', + }), + ('date-format', + {'type' : 'string', + 'default': '%Y/%m/%d', + 'help': _('how to format date in the ui (see this page for format description)'), + 'group': 'ui', + }), + ('datetime-format', + {'type' : 'string', + 'default': '%Y/%m/%d %H:%M', + 'help': _('how to format date and time in the ui (see this page for format description)'), + 'group': 'ui', + }), + ('time-format', + {'type' : 'string', + 'default': '%H:%M', + 'help': _('how to format time in the ui (see this page for format description)'), + 'group': 'ui', + }), + ('float-format', + {'type' : 'string', + 'default': '%.3f', + 'help': _('how to format float numbers in the ui'), + 'group': 'ui', + }), + ('default-text-format', + {'type' : 'choice', + 'choices': ('text/plain', 'text/rest', 'text/html', 'text/markdown'), + 'default': 'text/plain', + 'help': _('default text format for rich text fields.'), + 'group': 'ui', + }), + ('short-line-size', + {'type' : 'int', + 'default': 80, + 'help': _('maximum number of characters in short description'), + 'group': 'navigation', + }), + ) + +def register_persistent_options(options): + global PERSISTENT_OPTIONS + PERSISTENT_OPTIONS = merge_options(PERSISTENT_OPTIONS + options) + +CFGTYPE2ETYPE_MAP = { + 'string': 'String', + 'choice': 'String', + 'yn': 'Boolean', + 'int': 'Int', + 'float' : 'Float', + } + +_forced_mode = os.environ.get('CW_MODE') +assert _forced_mode in (None, 'system', 'user') + +# CWDEV tells whether directories such as i18n/, web/data/, etc. (ie containing +# some other resources than python libraries) are located with the python code +# or as a 'shared' cube +CWDEV = exists(join(CW_SOFTWARE_ROOT, 'i18n')) + +try: + _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX'] +except KeyError: + _INSTALL_PREFIX = _find_prefix() +_USR_INSTALL = _INSTALL_PREFIX == '/usr' + +class CubicWebNoAppConfiguration(ConfigurationMixIn): + """base class for cubicweb configuration without a specific instance directory + """ + # to set in concrete configuration + name = None + # log messages format (see logging module documentation for available keys) + log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s' + # the format below can be useful to debug multi thread issues: + # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s' + # nor remove appobjects based on unused interface [???] 
+ cleanup_unused_appobjects = True + + quick_start = False + + if (CWDEV and _forced_mode != 'system'): + mode = 'user' + _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../../cubes') + else: + mode = _forced_mode or 'system' + _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes') + + CUBES_DIR = abspath(os.environ.get('CW_CUBES_DIR', _CUBES_DIR)) + CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep) + + options = ( + ('log-threshold', + {'type' : 'string', # XXX use a dedicated type? + 'default': 'WARNING', + 'help': 'server\'s log level', + 'group': 'main', 'level': 1, + }), + ('umask', + {'type' : 'int', + 'default': 0o077, + 'help': 'permission umask for files created by the server', + 'group': 'main', 'level': 2, + }), + # common configuration options which are potentially required as soon as + # you're using "base" application objects (ie to really server/web + # specific) + ('base-url', + {'type' : 'string', + 'default': None, + 'help': 'web server root url', + 'group': 'main', 'level': 1, + }), + ('allow-email-login', + {'type' : 'yn', + 'default': False, + 'help': 'allow users to login with their primary email if set', + 'group': 'main', 'level': 2, + }), + ('mangle-emails', + {'type' : 'yn', + 'default': False, + 'help': "don't display actual email addresses but mangle them if \ +this option is set to yes", + 'group': 'email', 'level': 3, + }), + ) + # static and class methods used to get instance independant resources ## + @staticmethod + def cubicweb_version(): + """return installed cubicweb version""" + from logilab.common.changelog import Version + from cubicweb import __pkginfo__ + version = __pkginfo__.numversion + assert len(version) == 3, version + return Version(version) + + @staticmethod + def persistent_options_configuration(): + return Configuration(options=PERSISTENT_OPTIONS) + + @classmethod + def shared_dir(cls): + """return the shared data directory (i.e. 
directory where standard + library views and data may be found) + """ + if CWDEV: + return join(CW_SOFTWARE_ROOT, 'web') + return cls.cube_dir('shared') + + @classmethod + def i18n_lib_dir(cls): + """return instance's i18n directory""" + if CWDEV: + return join(CW_SOFTWARE_ROOT, 'i18n') + return join(cls.shared_dir(), 'i18n') + + @classmethod + def cw_languages(cls): + for fname in os.listdir(join(cls.i18n_lib_dir())): + if fname.endswith('.po'): + yield splitext(fname)[0] + + + @classmethod + def available_cubes(cls): + import re + cubes = set() + for directory in cls.cubes_search_path(): + if not exists(directory): + cls.error('unexistant directory in cubes search path: %s' + % directory) + continue + for cube in os.listdir(directory): + if cube == 'shared': + continue + if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cube): + continue # skip invalid python package name + cubedir = join(directory, cube) + if isdir(cubedir) and exists(join(cubedir, '__init__.py')): + cubes.add(cube) + return sorted(cubes) + + @classmethod + def cubes_search_path(cls): + """return the path of directories where cubes should be searched""" + path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH + if directory.strip() and exists(directory.strip())] + if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR): + path.append(cls.CUBES_DIR) + return path + + @classproperty + def extrapath(cls): + extrapath = {} + for cubesdir in cls.cubes_search_path(): + if cubesdir != cls.CUBES_DIR: + extrapath[cubesdir] = 'cubes' + return extrapath + + @classmethod + def cube_dir(cls, cube): + """return the cube directory for the given cube id, raise + `ConfigurationError` if it doesn't exist + """ + for directory in cls.cubes_search_path(): + cubedir = join(directory, cube) + if exists(cubedir): + return cubedir + raise ConfigurationError('no cube %r in %s' % ( + cube, cls.cubes_search_path())) + + @classmethod + def cube_migration_scripts_dir(cls, cube): + """cube migration scripts directory""" + return join(cls.cube_dir(cube), 'migration') + + @classmethod + def cube_pkginfo(cls, cube): + """return the information module for the given cube""" + cube = CW_MIGRATION_MAP.get(cube, cube) + try: + parent = __import__('cubes.%s.__pkginfo__' % cube) + return getattr(parent, cube).__pkginfo__ + except Exception as ex: + raise ConfigurationError( + 'unable to find packaging information for cube %s (%s: %s)' + % (cube, ex.__class__.__name__, ex)) + + @classmethod + def cube_version(cls, cube): + """return the version of the cube located in the given directory + """ + from logilab.common.changelog import Version + version = cls.cube_pkginfo(cube).numversion + assert len(version) == 3, version + return Version(version) + + @classmethod + def _cube_deps(cls, cube, key, oldkey): + """return cubicweb cubes used by the given cube""" + pkginfo = cls.cube_pkginfo(cube) + try: + # explicit __xxx_cubes__ attribute + deps = getattr(pkginfo, key) + except AttributeError: + # deduce cubes from generic __xxx__ attribute + try: + gendeps = getattr(pkginfo, key.replace('_cubes', '')) + except AttributeError: + deps = {} + else: + deps = dict( (x[len('cubicweb-'):], v) + for x, v in gendeps.items() + if x.startswith('cubicweb-')) + for depcube in deps: + try: + newname = CW_MIGRATION_MAP[depcube] + except KeyError: + pass + else: + deps[newname] = deps.pop(depcube) + return deps + + @classmethod + def cube_depends_cubicweb_version(cls, cube): + # XXX no backward compat (see _cube_deps above) + try: + pkginfo = cls.cube_pkginfo(cube) + deps = 
getattr(pkginfo, '__depends__') + return deps.get('cubicweb') + except AttributeError: + return None + + @classmethod + def cube_dependencies(cls, cube): + """return cubicweb cubes used by the given cube""" + return cls._cube_deps(cube, '__depends_cubes__', '__use__') + + @classmethod + def cube_recommends(cls, cube): + """return cubicweb cubes recommended by the given cube""" + return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__') + + @classmethod + def expand_cubes(cls, cubes, with_recommends=False): + """expand the given list of top level cubes used by adding recursivly + each cube dependencies + """ + cubes = list(cubes) + todo = cubes[:] + if with_recommends: + available = set(cls.available_cubes()) + while todo: + cube = todo.pop(0) + for depcube in cls.cube_dependencies(cube): + if depcube not in cubes: + cubes.append(depcube) + todo.append(depcube) + if with_recommends: + for depcube in cls.cube_recommends(cube): + if depcube not in cubes and depcube in available: + cubes.append(depcube) + todo.append(depcube) + return cubes + + @classmethod + def reorder_cubes(cls, cubes): + """reorder cubes from the top level cubes to inner dependencies + cubes + """ + from logilab.common.graph import ordered_nodes, UnorderableGraph + graph = {} + for cube in cubes: + cube = CW_MIGRATION_MAP.get(cube, cube) + graph[cube] = set(dep for dep in cls.cube_dependencies(cube) + if dep in cubes) + graph[cube] |= set(dep for dep in cls.cube_recommends(cube) + if dep in cubes) + try: + return ordered_nodes(graph) + except UnorderableGraph as ex: + raise ConfigurationError(ex) + + @classmethod + def cls_adjust_sys_path(cls): + """update python path if necessary""" + cubes_parent_dir = normpath(join(cls.CUBES_DIR, '..')) + if not cubes_parent_dir in sys.path: + sys.path.insert(0, cubes_parent_dir) + try: + import cubes + cubes.__path__ = cls.cubes_search_path() + except ImportError: + return # cubes dir doesn't exists + + @classmethod + def load_available_configs(cls): + from logilab.common.modutils import load_module_from_file + for conffile in ('web/webconfig.py', 'etwist/twconfig.py', + 'server/serverconfig.py',): + if exists(join(CW_SOFTWARE_ROOT, conffile)): + load_module_from_file(join(CW_SOFTWARE_ROOT, conffile)) + + @classmethod + def load_cwctl_plugins(cls): + from logilab.common.modutils import load_module_from_file + cls.cls_adjust_sys_path() + for ctlfile in ('web/webctl.py', 'etwist/twctl.py', + 'server/serverctl.py', + 'devtools/devctl.py', 'goa/goactl.py'): + if exists(join(CW_SOFTWARE_ROOT, ctlfile)): + try: + load_module_from_file(join(CW_SOFTWARE_ROOT, ctlfile)) + except ImportError as err: + cls.error('could not import the command provider %s: %s', + ctlfile, err) + cls.info('loaded cubicweb-ctl plugin %s', ctlfile) + for cube in cls.available_cubes(): + pluginfile = join(cls.cube_dir(cube), 'ccplugin.py') + initfile = join(cls.cube_dir(cube), '__init__.py') + if exists(pluginfile): + try: + __import__('cubes.%s.ccplugin' % cube) + cls.info('loaded cubicweb-ctl plugin from %s', cube) + except Exception: + cls.exception('while loading plugin %s', pluginfile) + elif exists(initfile): + try: + __import__('cubes.%s' % cube) + except Exception: + cls.exception('while loading cube %s', cube) + else: + cls.warning('no __init__ file in cube %s', cube) + + @classmethod + def init_available_cubes(cls): + """cubes may register some sources (svnfile for instance) in their + __init__ file, so they should be loaded early in the startup process + """ + for cube in cls.available_cubes(): + 
try: + __import__('cubes.%s' % cube) + except Exception as ex: + cls.warning("can't init cube %s: %s", cube, ex) + + cubicweb_appobject_path = set(['entities']) + cube_appobject_path = set(['entities']) + + def __init__(self, debugmode=False): + if debugmode: + # in python 2.7, DeprecationWarning are not shown anymore by default + filterwarnings('default', category=DeprecationWarning) + register_stored_procedures() + self._cubes = None + super(CubicWebNoAppConfiguration, self).__init__() + self.debugmode = debugmode + self.adjust_sys_path() + self.load_defaults() + # will be properly initialized later by _gettext_init + self.translations = {'en': (text_type, lambda ctx, msgid: text_type(msgid) )} + self._site_loaded = set() + # don't register ReStructured Text directives by simple import, avoid pb + # with eg sphinx. + # XXX should be done properly with a function from cw.uicfg + try: + from cubicweb.ext.rest import cw_rest_init + except ImportError: + pass + else: + cw_rest_init() + + def adjust_sys_path(self): + # overriden in CubicWebConfiguration + self.cls_adjust_sys_path() + + def init_log(self, logthreshold=None, logfile=None, syslog=False): + """init the log service""" + if logthreshold is None: + if self.debugmode: + logthreshold = 'DEBUG' + else: + logthreshold = self['log-threshold'] + if sys.platform == 'win32': + # no logrotate on win32, so use logging rotation facilities + # for now, hard code weekly rotation every sunday, and 52 weeks kept + # idea: make this configurable? + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format, + rotation_parameters={'when': 'W6', # every sunday + 'interval': 1, + 'backupCount': 52}) + else: + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format) + # configure simpleTal logger + logging.getLogger('simpleTAL').setLevel(logging.ERROR) + + def appobjects_path(self): + """return a list of files or directories where the registry will look + for application objects. By default return nothing in NoApp config. + """ + return [] + + def build_appobjects_path(self, templpath, evobjpath=None, tvobjpath=None): + """given a list of directories, return a list of sub files and + directories that should be loaded by the instance objects registry. + + :param evobjpath: + optional list of sub-directories (or files without the .py ext) of + the cubicweb library that should be tested and added to the output list + if they exists. If not give, default to `cubicweb_appobject_path` class + attribute. + :param tvobjpath: + optional list of sub-directories (or files without the .py ext) of + directories given in `templpath` that should be tested and added to + the output list if they exists. If not give, default to + `cube_appobject_path` class attribute. 
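
The cube-dependency helpers defined earlier in this configuration class (`expand_cubes()`, `reorder_cubes()`) turn the short cube list given at instance creation time into the full, ordered list that actually gets loaded. A hedged sketch with hypothetical cube names::

    from cubicweb.cwconfig import CubicWebConfiguration as cwcfg

    # 'blog' depending on 'comment', itself depending on 'card' (hypothetical cubes)
    cubes = cwcfg.expand_cubes(['blog'])      # -> ['blog', 'comment', 'card']
    ordered = cwcfg.reorder_cubes(cubes)      # top-level cubes first, inner dependencies last
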
+ """ + vregpath = self.build_appobjects_cubicweb_path(evobjpath) + vregpath += self.build_appobjects_cube_path(templpath, tvobjpath) + return vregpath + + def build_appobjects_cubicweb_path(self, evobjpath=None): + vregpath = [] + if evobjpath is None: + evobjpath = self.cubicweb_appobject_path + # NOTE: for the order, see http://www.cubicweb.org/ticket/2330799 + # it is clearly a workaround + for subdir in sorted(evobjpath, key=lambda x:x != 'entities'): + path = join(CW_SOFTWARE_ROOT, subdir) + if exists(path): + vregpath.append(path) + return vregpath + + def build_appobjects_cube_path(self, templpath, tvobjpath=None): + vregpath = [] + if tvobjpath is None: + tvobjpath = self.cube_appobject_path + for directory in templpath: + # NOTE: for the order, see http://www.cubicweb.org/ticket/2330799 + for subdir in sorted(tvobjpath, key=lambda x:x != 'entities'): + path = join(directory, subdir) + if exists(path): + vregpath.append(path) + elif exists(path + '.py'): + vregpath.append(path + '.py') + return vregpath + + apphome = None + + def load_site_cubicweb(self, paths=None): + """load instance's specific site_cubicweb file""" + if paths is None: + paths = self.cubes_path() + if self.apphome is not None: + paths = [self.apphome] + paths + for path in reversed(paths): + sitefile = join(path, 'site_cubicweb.py') + if exists(sitefile) and not sitefile in self._site_loaded: + self._load_site_cubicweb(sitefile) + self._site_loaded.add(sitefile) + + def _load_site_cubicweb(self, sitefile): + # XXX extrapath argument to load_module_from_file only in lgc > 0.50.2 + from logilab.common.modutils import load_module_from_modpath, modpath_from_file + module = load_module_from_modpath(modpath_from_file(sitefile, self.extrapath)) + self.debug('%s loaded', sitefile) + return module + + def cwproperty_definitions(self): + cfg = self.persistent_options_configuration() + for section, options in cfg.options_by_section(): + section = section.lower() + for optname, optdict, value in options: + key = '%s.%s' % (section, optname) + type, vocab = self.map_option(optdict) + default = cfg.option_default(optname, optdict) + pdef = {'type': type, 'vocabulary': vocab, 'default': default, + 'help': optdict['help'], + 'sitewide': optdict.get('sitewide', False)} + yield key, pdef + + def map_option(self, optdict): + try: + vocab = optdict['choices'] + except KeyError: + vocab = optdict.get('vocabulary') + if isinstance(vocab, Method): + vocab = getattr(self, vocab.method, ()) + return CFGTYPE2ETYPE_MAP[optdict['type']], vocab + + def default_instance_id(self): + """return the instance identifier, useful for option which need this + as default value + """ + return None + + _cubes = None + + def init_cubes(self, cubes): + self._cubes = self.reorder_cubes(cubes) + # load cubes'__init__.py file first + for cube in cubes: + __import__('cubes.%s' % cube) + self.load_site_cubicweb() + + def cubes(self): + """return the list of cubes used by this instance + + result is ordered from the top level cubes to inner dependencies + cubes + """ + assert self._cubes is not None, 'cubes not initialized' + return self._cubes + + def cubes_path(self): + """return the list of path to cubes used by this instance, from outer + most to inner most cubes + """ + return [self.cube_dir(p) for p in self.cubes()] + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + @classmethod + def debug(cls, msg, *a, **kw): + pass + info = warning = error = critical = exception = debug + + +class 
CubicWebConfiguration(CubicWebNoAppConfiguration): + """base class for cubicweb server and web configurations""" + + if CubicWebNoAppConfiguration.mode == 'user': + _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/') + #mode == system' + elif _USR_INSTALL: + _INSTANCES_DIR = '/etc/cubicweb.d/' + else: + _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d') + + # set to true during repair (shell, migration) to allow some things which + # wouldn't be possible otherwise + repairing = False + + # set by upgrade command + verbosity = 0 + cmdline_options = None + options = CubicWebNoAppConfiguration.options + ( + ('log-file', + {'type' : 'string', + 'default': Method('default_log_file'), + 'help': 'file where output logs should be written', + 'group': 'main', 'level': 2, + }), + ('statsd-endpoint', + {'type' : 'string', + 'default': '', + 'help': 'UDP address of the statsd endpoint; it must be formatted' + 'like :; disabled is unset.', + 'group': 'main', 'level': 2, + }), + # email configuration + ('smtp-host', + {'type' : 'string', + 'default': 'mail', + 'help': 'hostname of the SMTP mail server', + 'group': 'email', 'level': 1, + }), + ('smtp-port', + {'type' : 'int', + 'default': 25, + 'help': 'listening port of the SMTP mail server', + 'group': 'email', 'level': 1, + }), + ('sender-name', + {'type' : 'string', + 'default': Method('default_instance_id'), + 'help': 'name used as HELO name for outgoing emails from the \ +repository.', + 'group': 'email', 'level': 2, + }), + ('sender-addr', + {'type' : 'string', + 'default': 'cubicweb@mydomain.com', + 'help': 'email address used as HELO address for outgoing emails from \ +the repository', + 'group': 'email', 'level': 1, + }), + ('logstat-interval', + {'type' : 'int', + 'default': 0, + 'help': 'interval (in seconds) at which stats are dumped in the logstat file; set 0 to disable', + 'group': 'main', 'level': 2, + }), + ('logstat-file', + {'type' : 'string', + 'default': Method('default_stats_file'), + 'help': 'file where stats for the instance should be written', + 'group': 'main', 'level': 2, + }), + ) + + @classmethod + def instances_dir(cls): + """return the control directory""" + return abspath(os.environ.get('CW_INSTANCES_DIR', cls._INSTANCES_DIR)) + + @classmethod + def migration_scripts_dir(cls): + """cubicweb migration scripts directory""" + if CWDEV: + return join(CW_SOFTWARE_ROOT, 'misc', 'migration') + mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration') + if not exists(mdir): + raise ConfigurationError('migration path %s doesn\'t exist' % mdir) + return mdir + + @classmethod + def config_for(cls, appid, config=None, debugmode=False, creating=False): + """return a configuration instance for the given instance identifier + """ + cls.load_available_configs() + config = config or guess_configuration(cls.instance_home(appid)) + configcls = configuration_cls(config) + return configcls(appid, debugmode, creating) + + @classmethod + def possible_configurations(cls, appid): + """return the name of possible configurations for the given + instance id + """ + home = cls.instance_home(appid) + return possible_configurations(home) + + @classmethod + def instance_home(cls, appid): + """return the home directory of the instance with the given + instance id + """ + home = join(cls.instances_dir(), appid) + if not exists(home): + raise ConfigurationError('no such instance %s (check it exists with' + ' "cubicweb-ctl list")' % appid) + return home + + MODES = ('common', 'repository', 'Any') + MCOMPAT = {'all-in-one': MODES, + 'repository': 
('common', 'repository', 'Any')} + @classmethod + def accept_mode(cls, mode): + #assert mode in cls.MODES, mode + return mode in cls.MCOMPAT[cls.name] + + # default configuration methods ########################################### + + def default_instance_id(self): + """return the instance identifier, useful for option which need this + as default value + """ + return self.appid + + def default_log_file(self): + """return default path to the log file of the instance'server""" + if self.mode == 'user': + import tempfile + basepath = join(tempfile.gettempdir(), '%s-%s' % ( + basename(self.appid), self.name)) + path = basepath + '.log' + i = 1 + while exists(path) and i < 100: # arbitrary limit to avoid infinite loop + try: + open(path, 'a') + break + except IOError: + path = '%s-%s.log' % (basepath, i) + i += 1 + return path + if _USR_INSTALL: + return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name) + else: + log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log') + return log_path % (self.appid, self.name) + + def default_stats_file(self): + """return default path to the stats file of the instance'server""" + logfile = self.default_log_file() + if logfile.endswith('.log'): + logfile = logfile[:-4] + return logfile + '.stats' + + def default_pid_file(self): + """return default path to the pid file of the instance'server""" + if self.mode == 'system': + if _USR_INSTALL: + default = '/var/run/cubicweb/' + else: + default = os.path.join(_INSTALL_PREFIX, 'var', 'run', 'cubicweb') + else: + import tempfile + default = tempfile.gettempdir() + # runtime directory created on startup if necessary, don't check it + # exists + rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default)) + return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) + + # config -> repository + + def repository(self, vreg=None): + from cubicweb.server.repository import Repository + from cubicweb.server.utils import TasksManager + return Repository(self, TasksManager(), vreg=vreg) + + # instance methods used to get instance specific resources ############# + + def __init__(self, appid, debugmode=False, creating=False): + self.appid = appid + # set to true while creating an instance + self.creating = creating + super(CubicWebConfiguration, self).__init__(debugmode) + fake_gettext = (text_type, lambda ctx, msgid: text_type(msgid)) + for lang in self.available_languages(): + self.translations[lang] = fake_gettext + self._cubes = None + self.load_file_configuration(self.main_config_file()) + + def adjust_sys_path(self): + super(CubicWebConfiguration, self).adjust_sys_path() + # adding apphome to python path is not usually necessary in production + # environments, but necessary for tests + if self.apphome and self.apphome not in sys.path: + sys.path.insert(0, self.apphome) + + @property + def apphome(self): + return join(self.instances_dir(), self.appid) + + @property + def appdatahome(self): + if self.mode == 'system': + if _USR_INSTALL: + iddir = os.path.join('/var','lib', 'cubicweb', 'instances') + else: + iddir = os.path.join(_INSTALL_PREFIX, 'var', 'lib', 'cubicweb', 'instances') + else: + iddir = self.instances_dir() + iddir = abspath(os.environ.get('CW_INSTANCES_DATA_DIR', iddir)) + return join(iddir, self.appid) + + def init_cubes(self, cubes): + super(CubicWebConfiguration, self).init_cubes(cubes) + # reload config file in cases options are defined in cubes __init__ + # or site_cubicweb files + self.load_file_configuration(self.main_config_file()) + # configuration initialization hook + 
self.load_configuration(**(self.cmdline_options or {})) + + def add_cubes(self, cubes): + """add given cubes to the list of used cubes""" + if not isinstance(cubes, list): + cubes = list(cubes) + self._cubes = self.reorder_cubes(list(self._cubes) + cubes) + self.load_site_cubicweb([self.cube_dir(cube) for cube in cubes]) + + def main_config_file(self): + """return instance's control configuration file""" + return join(self.apphome, '%s.conf' % self.name) + + def save(self): + """write down current configuration""" + with open(self.main_config_file(), 'w') as fobj: + self.generate_config(fobj) + + def check_writeable_uid_directory(self, path): + """check given directory path exists, belongs to the user running the + server process and is writeable. + + If not, try to fix this, letting exception propagate when not possible. + """ + if not exists(path): + self.info('creating %s directory', path) + try: + os.makedirs(path) + except OSError as ex: + self.warning('error while creating %s directory: %s', path, ex) + return + if self['uid']: + try: + uid = int(self['uid']) + except ValueError: + from pwd import getpwnam + uid = getpwnam(self['uid']).pw_uid + else: + try: + uid = os.getuid() + except AttributeError: # we are on windows + return + fstat = os.stat(path) + if fstat.st_uid != uid: + self.info('giving ownership of %s directory to %s', path, self['uid']) + try: + os.chown(path, uid, os.getgid()) + except OSError as ex: + self.warning('error while giving ownership of %s directory to %s: %s', + path, self['uid'], ex) + if not (fstat.st_mode & stat.S_IWUSR): + self.info('forcing write permission on directory %s', path) + try: + os.chmod(path, fstat.st_mode | stat.S_IWUSR) + except OSError as ex: + self.warning('error while forcing write permission on directory %s: %s', + path, ex) + return + + @cached + def instance_md5_version(self): + from hashlib import md5 # pylint: disable=E0611 + infos = [] + for pkg in sorted(self.cubes()): + version = self.cube_version(pkg) + infos.append('%s-%s' % (pkg, version)) + infos.append('cubicweb-%s' % str(self.cubicweb_version())) + return md5((';'.join(infos)).encode('ascii')).hexdigest() + + def load_configuration(self, **kw): + """load instance's configuration files""" + super(CubicWebConfiguration, self).load_configuration(**kw) + if self.apphome and not self.creating: + # init gettext + self._gettext_init() + + def _load_site_cubicweb(self, sitefile): + # overridden to register cube specific options + mod = super(CubicWebConfiguration, self)._load_site_cubicweb(sitefile) + if getattr(mod, 'options', None): + self.register_options(mod.options) + self.load_defaults() + + def init_log(self, logthreshold=None, force=False): + """init the log service""" + if not force and hasattr(self, '_logging_initialized'): + return + self._logging_initialized = True + super_self = super(CubicWebConfiguration, self) + super_self.init_log(logthreshold, logfile=self.get('log-file')) + # read a config file if it exists + logconfig = join(self.apphome, 'logging.conf') + if exists(logconfig): + logging.config.fileConfig(logconfig) + # set the statsd address, if any + if self.get('statsd-endpoint'): + try: + address, port = self.get('statsd-endpoint').split(':') + port = int(port) + except: + self.error('statsd-endpoint: invalid address format ({}); ' + 'it should be "ip:port"'.format(self.get('statsd-endpoint'))) + else: + import statsd_logger + statsd_logger.setup('cubicweb.%s' % self.appid, (address, port)) + + def available_languages(self, *args): + """return available 
translation for an instance, by looking for + compiled catalog + + take \*args to be usable as a vocabulary method + """ + from glob import glob + yield 'en' # ensure 'en' is yielded even if no .mo found + for path in glob(join(self.apphome, 'i18n', + '*', 'LC_MESSAGES')): + lang = path.split(os.sep)[-2] + if lang != 'en': + yield lang + + def _gettext_init(self): + """set language for gettext""" + from cubicweb.cwgettext import translation + path = join(self.apphome, 'i18n') + for language in self.available_languages(): + self.info("loading language %s", language) + try: + tr = translation('cubicweb', path, languages=[language]) + self.translations[language] = (tr.ugettext, tr.upgettext) + except (ImportError, AttributeError, IOError): + if self.mode != 'test': + # in test contexts, data/i18n does not exist, hence + # logging will only pollute the logs + self.exception('localisation support error for language %s', + language) + + def appobjects_path(self): + """return a list of files or directories where the registry will look + for application objects + """ + templpath = list(reversed(self.cubes_path())) + if self.apphome: # may be unset in tests + templpath.append(self.apphome) + return self.build_appobjects_path(templpath) + + def set_sources_mode(self, sources): + if not 'all' in sources: + print('warning: ignoring specified sources, requires a repository ' + 'configuration') + + def i18ncompile(self, langs=None): + from cubicweb import i18n + if langs is None: + langs = self.available_languages() + i18ndir = join(self.apphome, 'i18n') + if not exists(i18ndir): + create_dir(i18ndir) + sourcedirs = [join(path, 'i18n') for path in self.cubes_path()] + sourcedirs.append(self.i18n_lib_dir()) + return i18n.compile_i18n_catalogs(sourcedirs, i18ndir, langs) + + def sendmails(self, msgs, fromaddr=None): + """msgs: list of 2-uple (message object, recipients). Return False + if connection to the smtp server failed, else True. + """ + server, port = self['smtp-host'], self['smtp-port'] + if fromaddr is None: + fromaddr = '%s <%s>' % (self['sender-name'], self['sender-addr']) + SMTP_LOCK.acquire() + try: + try: + smtp = SMTP(server, port) + except Exception as ex: + self.exception("can't connect to smtp server %s:%s (%s)", + server, port, ex) + return False + for msg, recipients in msgs: + try: + smtp.sendmail(fromaddr, recipients, msg.as_string()) + except Exception as ex: + self.exception("error sending mail to %s (%s)", + recipients, ex) + smtp.close() + finally: + SMTP_LOCK.release() + return True + +set_log_methods(CubicWebNoAppConfiguration, + logging.getLogger('cubicweb.configuration')) + +# alias to get a configuration instance from an instance id +instance_configuration = CubicWebConfiguration.config_for +application_configuration = deprecated('use instance_configuration')(instance_configuration) + + +_EXT_REGISTERED = False +def register_stored_procedures(): + from logilab.database import FunctionDescr + from rql.utils import register_function, iter_funcnode_variables + from rql.nodes import SortTerm, Constant, VariableRef + + global _EXT_REGISTERED + if _EXT_REGISTERED: + return + _EXT_REGISTERED = True + + class COMMA_JOIN(FunctionDescr): + supported_backends = ('postgres', 'sqlite',) + rtype = 'String' + + def st_description(self, funcnode, mainindex, tr): + return ', '.join(sorted(term.get_description(mainindex, tr) + for term in iter_funcnode_variables(funcnode))) + + register_function(COMMA_JOIN) # XXX do not expose? 
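
`register_stored_procedures()` above shows the pattern for exposing SQL-level functions to RQL: subclass `logilab.database.FunctionDescr` and hand it to `rql.utils.register_function`. A hypothetical extra function registered the same way; note that registration only teaches the RQL machinery about the name, the database backend must actually provide the function::

    from logilab.database import FunctionDescr
    from rql.utils import register_function

    class DOMAIN_NAME(FunctionDescr):          # hypothetical helper extracting an email domain
        supported_backends = ('postgres', 'sqlite',)
        minargs = maxargs = 1
        rtype = 'String'

    register_function(DOMAIN_NAME)
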
+ + + class CONCAT_STRINGS(COMMA_JOIN): + aggregat = True + + register_function(CONCAT_STRINGS) # XXX bw compat + + + class GROUP_CONCAT(CONCAT_STRINGS): + supported_backends = ('mysql', 'postgres', 'sqlite',) + + register_function(GROUP_CONCAT) + + + class LIMIT_SIZE(FunctionDescr): + supported_backends = ('postgres', 'sqlite',) + minargs = maxargs = 3 + rtype = 'String' + + def st_description(self, funcnode, mainindex, tr): + return funcnode.children[0].get_description(mainindex, tr) + + register_function(LIMIT_SIZE) + + + class TEXT_LIMIT_SIZE(LIMIT_SIZE): + supported_backends = ('mysql', 'postgres', 'sqlite',) + minargs = maxargs = 2 + + register_function(TEXT_LIMIT_SIZE) + + + class FTIRANK(FunctionDescr): + """return ranking of a variable that must be used as some has_text + relation subject in the query's restriction. Usually used to sort result + of full-text search by ranking. + """ + supported_backends = ('postgres',) + rtype = 'Float' + + def st_check_backend(self, backend, funcnode): + """overriden so that on backend not supporting fti ranking, the + function is removed when in an orderby clause, or replaced by a 1.0 + constant. + """ + if not self.supports(backend): + parent = funcnode.parent + while parent is not None and not isinstance(parent, SortTerm): + parent = parent.parent + if isinstance(parent, SortTerm): + parent.parent.remove(parent) + else: + funcnode.parent.replace(funcnode, Constant(1.0, 'Float')) + parent = funcnode + for vref in parent.iget_nodes(VariableRef): + vref.unregister_reference() + + register_function(FTIRANK) + + + class FSPATH(FunctionDescr): + """return path of some bytes attribute stored using the Bytes + File-System Storage (bfss) + """ + rtype = 'Bytes' # XXX return a String? potential pb with fs encoding + + def update_cb_stack(self, stack): + assert len(stack) == 1 + stack[0] = self.source_execute + + def as_sql(self, backend, args): + raise NotImplementedError( + 'This callback is only available for BytesFileSystemStorage ' + 'managed attribute. Is FSPATH() argument BFSS managed?') + + def source_execute(self, source, session, value): + fpath = source.binary_to_str(value) + try: + return Binary(fpath) + except OSError as ex: + source.critical("can't open %s: %s", fpath, ex) + return None + + register_function(FSPATH) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/cwctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/cwctl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1154 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""the cubicweb-ctl tool, based on logilab.common.clcommands to +provide a pluggable commands system. 
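
Two of the functions registered above are meant to be called directly from RQL queries. A hedged sketch against some connection object `cnx`; the entity type and attribute names are illustrative::

    # rank full-text search results (postgres only; on other backends the sort term is
    # dropped or the call replaced by the constant 1.0, per st_check_backend above)
    rset = cnx.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(q)s',
                       {'q': u'cubicweb'})

    # file system path of a Bytes attribute handled by the BytesFileSystemStorage
    rset = cnx.execute('Any FSPATH(D) WHERE X data D, X eid %(x)s', {'x': 1234})
    path = rset[0][0].getvalue()       # FSPATH returns a Binary wrapping the path
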
+""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +# *ctl module should limit the number of import to be imported as quickly as +# possible (for cubicweb-ctl reactivity, necessary for instance for usable bash +# completion). So import locally in command helpers. +import sys +from warnings import warn, filterwarnings +from os import remove, listdir, system, pathsep +from os.path import exists, join, isfile, isdir, dirname, abspath + +try: + from os import kill, getpgid +except ImportError: + def kill(*args): + """win32 kill implementation""" + def getpgid(): + """win32 getpgid implementation""" + +from six.moves.urllib.parse import urlparse + +from logilab.common.clcommands import CommandLine +from logilab.common.shellutils import ASK +from logilab.common.configuration import merge_options + +from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage +from cubicweb.utils import support_args +from cubicweb.cwconfig import CubicWebConfiguration as cwcfg, CWDEV, CONFIGURATIONS +from cubicweb.toolsutils import Command, rm, create_dir, underline_title +from cubicweb.__pkginfo__ import version + +# don't check duplicated commands, it occurs when reloading site_cubicweb +CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.', + version=version, check_duplicated_command=False) + +def wait_process_end(pid, maxtry=10, waittime=1): + """wait for a process to actually die""" + import signal + from time import sleep + nbtry = 0 + while nbtry < maxtry: + try: + kill(pid, signal.SIGUSR1) + except (OSError, AttributeError): # XXX win32 + break + nbtry += 1 + sleep(waittime) + else: + raise ExecutionError('can\'t kill process %s' % pid) + +def list_instances(regdir): + if isdir(regdir): + return sorted(idir for idir in listdir(regdir) if isdir(join(regdir, idir))) + else: + return [] + +def detect_available_modes(templdir): + modes = [] + for fname in ('schema', 'schema.py'): + if exists(join(templdir, fname)): + modes.append('repository') + break + for fname in ('data', 'views', 'views.py'): + if exists(join(templdir, fname)): + modes.append('web ui') + break + return modes + + +class InstanceCommand(Command): + """base class for command taking 0 to n instance id as arguments + (0 meaning all registered instances) + """ + arguments = '[...]' + options = ( + ("force", + {'short': 'f', 'action' : 'store_true', + 'default': False, + 'help': 'force command without asking confirmation', + } + ), + ) + actionverb = None + + def ordered_instances(self): + """return instances in the order in which they should be started, + considering $REGISTRY_DIR/startorder file if it exists (useful when + some instances depends on another as external source). 
+ + Instance used by another one should appears first in the file (one + instance per line) + """ + regdir = cwcfg.instances_dir() + _allinstances = list_instances(regdir) + if isfile(join(regdir, 'startorder')): + allinstances = [] + for line in open(join(regdir, 'startorder')): + line = line.strip() + if line and not line.startswith('#'): + try: + _allinstances.remove(line) + allinstances.append(line) + except ValueError: + print('ERROR: startorder file contains unexistant ' + 'instance %s' % line) + allinstances += _allinstances + else: + allinstances = _allinstances + return allinstances + + def run(self, args): + """run the _method on each argument (a list of instance + identifiers) + """ + if not args: + args = self.ordered_instances() + try: + askconfirm = not self.config.force + except AttributeError: + # no force option + askconfirm = False + else: + askconfirm = False + self.run_args(args, askconfirm) + + def run_args(self, args, askconfirm): + status = 0 + for appid in args: + if askconfirm: + print('*'*72) + if not ASK.confirm('%s instance %r ?' % (self.name, appid)): + continue + try: + status = max(status, self.run_arg(appid)) + except (KeyboardInterrupt, SystemExit): + sys.stderr.write('%s aborted\n' % self.name) + return 2 # specific error code + sys.exit(status) + + def run_arg(self, appid): + cmdmeth = getattr(self, '%s_instance' % self.name) + try: + status = cmdmeth(appid) + except (ExecutionError, ConfigurationError) as ex: + sys.stderr.write('instance %s not %s: %s\n' % ( + appid, self.actionverb, ex)) + status = 4 + except Exception as ex: + import traceback + traceback.print_exc() + sys.stderr.write('instance %s not %s: %s\n' % ( + appid, self.actionverb, ex)) + status = 8 + return status + +class InstanceCommandFork(InstanceCommand): + """Same as `InstanceCommand`, but command is forked in a new environment + for each argument + """ + + def run_args(self, args, askconfirm): + if len(args) > 1: + forkcmd = ' '.join(w for w in sys.argv if not w in args) + else: + forkcmd = None + for appid in args: + if askconfirm: + print('*'*72) + if not ASK.confirm('%s instance %r ?' % (self.name, appid)): + continue + if forkcmd: + status = system('%s %s' % (forkcmd, appid)) + if status: + print('%s exited with status %s' % (forkcmd, status)) + else: + self.run_arg(appid) + + +# base commands ############################################################### + +class ListCommand(Command): + """List configurations, cubes and instances. + + List available configurations, installed cubes, and registered instances. + + If given, the optional argument allows to restrict listing only a category of items. 
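
`ordered_instances()` above honours an optional `startorder` file in the instances directory: one instance identifier per line, `#` starting a comment, with instances that others depend on (e.g. used as external sources) listed first. A hypothetical example::

    # start the directory instance before the instances feeding from it
    directory-instance
    intranet-instance
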
+ """ + name = 'list' + arguments = '[all|cubes|configurations|instances]' + options = ( + ('verbose', + {'short': 'v', 'action' : 'store_true', + 'help': "display more information."}), + ) + + def run(self, args): + """run the command with its specific arguments""" + if not args: + mode = 'all' + elif len(args) == 1: + mode = args[0] + else: + raise BadCommandUsage('Too many arguments') + + from cubicweb.migration import ConfigurationProblem + + if mode == 'all': + print('CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)) + print() + + if mode in ('all', 'config', 'configurations'): + print('Available configurations:') + for config in CONFIGURATIONS: + print('*', config.name) + for line in config.__doc__.splitlines(): + line = line.strip() + if not line: + continue + print(' ', line) + print() + + if mode in ('all', 'cubes'): + cfgpb = ConfigurationProblem(cwcfg) + try: + cubesdir = pathsep.join(cwcfg.cubes_search_path()) + namesize = max(len(x) for x in cwcfg.available_cubes()) + except ConfigurationError as ex: + print('No cubes available:', ex) + except ValueError: + print('No cubes available in %s' % cubesdir) + else: + print('Available cubes (%s):' % cubesdir) + for cube in cwcfg.available_cubes(): + try: + tinfo = cwcfg.cube_pkginfo(cube) + tversion = tinfo.version + cfgpb.add_cube(cube, tversion) + except (ConfigurationError, AttributeError) as ex: + tinfo = None + tversion = '[missing cube information: %s]' % ex + print('* %s %s' % (cube.ljust(namesize), tversion)) + if self.config.verbose: + if tinfo: + descr = getattr(tinfo, 'description', '') + if not descr: + descr = tinfo.__doc__ + if descr: + print(' '+ ' \n'.join(descr.splitlines())) + modes = detect_available_modes(cwcfg.cube_dir(cube)) + print(' available modes: %s' % ', '.join(modes)) + print() + + if mode in ('all', 'instances'): + try: + regdir = cwcfg.instances_dir() + except ConfigurationError as ex: + print('No instance available:', ex) + print() + return + instances = list_instances(regdir) + if instances: + print('Available instances (%s):' % regdir) + for appid in instances: + modes = cwcfg.possible_configurations(appid) + if not modes: + print('* %s (BROKEN instance, no configuration found)' % appid) + continue + print('* %s (%s)' % (appid, ', '.join(modes))) + try: + config = cwcfg.config_for(appid, modes[0]) + except Exception as exc: + print(' (BROKEN instance, %s)' % exc) + continue + else: + print('No instance available in %s' % regdir) + print() + + if mode == 'all': + # configuration management problem solving + cfgpb.solve() + if cfgpb.warnings: + print('Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)) + if cfgpb.errors: + print('Errors:') + for op, cube, version, src in cfgpb.errors: + if op == 'add': + print('* cube', cube, end=' ') + if version: + print(' version', version, end=' ') + print('is not installed, but required by %s' % src) + else: + print('* cube %s version %s is installed, but version %s is required by %s' % ( + cube, cfgpb.cubes[cube], version, src)) + +def check_options_consistency(config): + if config.automatic and config.config_level > 0: + raise BadCommandUsage('--automatic and --config-level should not be ' + 'used together') + +class CreateInstanceCommand(Command): + """Create an instance from a cube. This is a unified + command which can handle web / server / all-in-one installation + according to available parts of the software library and of the + desired cube. + + + the name of cube to use (list available cube names using + the "list" command). 
You can use several cubes by separating + them using comma (e.g. 'jpl,email') + + an identifier for the instance to create + """ + name = 'create' + arguments = ' ' + min_args = max_args = 2 + options = ( + ('automatic', + {'short': 'a', 'action' : 'store_true', + 'default': False, + 'help': 'automatic mode: never ask and use default answer to every ' + 'question. this may require that your login match a database super ' + 'user (allowed to create database & all).', + }), + ('config-level', + {'short': 'l', 'type' : 'int', 'metavar': '', + 'default': 0, + 'help': 'configuration level (0..2): 0 will ask for essential ' + 'configuration parameters only while 2 will ask for all parameters', + }), + ('config', + {'short': 'c', 'type' : 'choice', 'metavar': '', + 'choices': ('all-in-one', 'repository'), + 'default': 'all-in-one', + 'help': 'installation type, telling which part of an instance ' + 'should be installed. You can list available configurations using the' + ' "list" command. Default to "all-in-one", e.g. an installation ' + 'embedding both the RQL repository and the web server.', + }), + ('no-db-create', + {'short': 'S', + 'action': 'store_true', + 'default': False, + 'help': 'stop after creation and do not continue with db-create', + }), + ) + + def run(self, args): + """run the command with its specific arguments""" + from logilab.common.textutils import splitstrip + check_options_consistency(self.config) + configname = self.config.config + cubes, appid = args + cubes = splitstrip(cubes) + # get the configuration and helper + config = cwcfg.config_for(appid, configname, creating=True) + cubes = config.expand_cubes(cubes) + config.init_cubes(cubes) + helper = self.config_helper(config) + # check the cube exists + try: + templdirs = [cwcfg.cube_dir(cube) + for cube in cubes] + except ConfigurationError as ex: + print(ex) + print('\navailable cubes:', end=' ') + print(', '.join(cwcfg.available_cubes())) + return + # create the registry directory for this instance + print('\n'+underline_title('Creating the instance %s' % appid)) + create_dir(config.apphome) + # cubicweb-ctl configuration + if not self.config.automatic: + print('\n'+underline_title('Configuring the instance (%s.conf)' + % configname)) + config.input_config('main', self.config.config_level) + # configuration'specific stuff + print() + helper.bootstrap(cubes, self.config.automatic, self.config.config_level) + # input for cubes specific options + if not self.config.automatic: + sections = set(sect.lower() for sect, opt, odict in config.all_options() + if 'type' in odict + and odict.get('level') <= self.config.config_level) + for section in sections: + if section not in ('main', 'email', 'web'): + print('\n' + underline_title('%s options' % section)) + config.input_config(section, self.config.config_level) + # write down configuration + config.save() + self._handle_win32(config, appid) + print('-> generated config %s' % config.main_config_file()) + # handle i18n files structure + # in the first cube given + from cubicweb import i18n + langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))] + errors = config.i18ncompile(langs) + if errors: + print('\n'.join(errors)) + if self.config.automatic \ + or not ASK.confirm('error while compiling message catalogs, ' + 'continue anyway ?'): + print('creation not completed') + return + # create the additional data directory for this instance + if config.appdatahome != config.apphome: # true in dev mode + create_dir(config.appdatahome) + 
create_dir(join(config.appdatahome, 'backup')) + if config['uid']: + from logilab.common.shellutils import chown + # this directory should be owned by the uid of the server process + print('set %s as owner of the data directory' % config['uid']) + chown(config.appdatahome, config['uid']) + print('\n-> creation done for %s\n' % repr(config.apphome)[1:-1]) + if not self.config.no_db_create: + helper.postcreate(self.config.automatic, self.config.config_level) + + def _handle_win32(self, config, appid): + if sys.platform != 'win32': + return + service_template = """ +import sys +import win32serviceutil +sys.path.insert(0, r"%(CWPATH)s") + +from cubicweb.etwist.service import CWService + +classdict = {'_svc_name_': 'cubicweb-%(APPID)s', + '_svc_display_name_': 'CubicWeb ' + '%(CNAME)s', + 'instance': '%(APPID)s'} +%(CNAME)sService = type('%(CNAME)sService', (CWService,), classdict) + +if __name__ == '__main__': + win32serviceutil.HandleCommandLine(%(CNAME)sService) +""" + open(join(config.apphome, 'win32svc.py'), 'wb').write( + service_template % {'APPID': appid, + 'CNAME': appid.capitalize(), + 'CWPATH': abspath(join(dirname(__file__), '..'))}) + + +class DeleteInstanceCommand(Command): + """Delete an instance. Will remove instance's files and + unregister it. + """ + name = 'delete' + arguments = '' + min_args = max_args = 1 + options = () + + def run(self, args): + """run the command with its specific arguments""" + appid = args[0] + configs = [cwcfg.config_for(appid, configname) + for configname in cwcfg.possible_configurations(appid)] + if not configs: + raise ExecutionError('unable to guess configuration for %s' % appid) + for config in configs: + helper = self.config_helper(config, required=False) + if helper: + helper.cleanup() + # remove home + rm(config.apphome) + # remove instance data directory + try: + rm(config.appdatahome) + except OSError as ex: + import errno + if ex.errno != errno.ENOENT: + raise + confignames = ', '.join([config.name for config in configs]) + print('-> instance %s (%s) deleted.' % (appid, confignames)) + + +# instance commands ######################################################## + +class StartInstanceCommand(InstanceCommandFork): + """Start the given instances. If no instance is given, start them all. + + ... + identifiers of the instances to start. If no instance is + given, start them all. 
+ """ + name = 'start' + actionverb = 'started' + options = ( + ("debug", + {'short': 'D', 'action' : 'store_true', + 'help': 'start server in debug mode.'}), + ("force", + {'short': 'f', 'action' : 'store_true', + 'default': False, + 'help': 'start the instance even if it seems to be already \ +running.'}), + ('profile', + {'short': 'P', 'type' : 'string', 'metavar': '', + 'default': None, + 'help': 'profile code and use the specified file to store stats', + }), + ('loglevel', + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), + 'help': 'debug if -D is set, error otherwise', + }), + ('param', + {'short': 'p', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2', + 'default': {}, + 'help': 'override configuration file option with .', + }), + ) + + def start_instance(self, appid): + """start the instance's server""" + try: + import twisted # noqa + except ImportError: + msg = ( + "Twisted is required by the 'start' command\n" + "Either install it, or use one of the alternative commands:\n" + "- '{ctl} wsgi {appid}'\n" + "- '{ctl} pyramid {appid}' (requires the pyramid cube)\n") + raise ExecutionError(msg.format(ctl='cubicweb-ctl', appid=appid)) + config = cwcfg.config_for(appid, debugmode=self['debug']) + # override config file values with cmdline options + config.cmdline_options = self.config.param + init_cmdline_log_threshold(config, self['loglevel']) + if self['profile']: + config.global_set_option('profile', self.config.profile) + helper = self.config_helper(config, cmdname='start') + pidf = config['pid-file'] + if exists(pidf) and not self['force']: + msg = "%s seems to be running. Remove %s by hand if necessary or use \ +the --force option." + raise ExecutionError(msg % (appid, pidf)) + if helper.start_server(config) == 1: + print('instance %s started' % appid) + + +def init_cmdline_log_threshold(config, loglevel): + if loglevel is not None: + config.global_set_option('log-threshold', loglevel.upper()) + config.init_log(config['log-threshold'], force=True) + + +class StopInstanceCommand(InstanceCommand): + """Stop the given instances. + + ... + identifiers of the instances to stop. If no instance is + given, stop them all. + """ + name = 'stop' + actionverb = 'stopped' + + def ordered_instances(self): + instances = super(StopInstanceCommand, self).ordered_instances() + instances.reverse() + return instances + + def stop_instance(self, appid): + """stop the instance's server""" + config = cwcfg.config_for(appid) + helper = self.config_helper(config, cmdname='stop') + helper.poststop() # do this anyway + pidf = config['pid-file'] + if not exists(pidf): + sys.stderr.write("%s doesn't exist.\n" % pidf) + return + import signal + pid = int(open(pidf).read().strip()) + try: + kill(pid, signal.SIGTERM) + except Exception: + sys.stderr.write("process %s seems already dead.\n" % pid) + else: + try: + wait_process_end(pid) + except ExecutionError as ex: + sys.stderr.write('%s\ntrying SIGKILL\n' % ex) + try: + kill(pid, signal.SIGKILL) + except Exception: + # probably dead now + pass + wait_process_end(pid) + try: + remove(pidf) + except OSError: + # already removed by twistd + pass + print('instance %s stopped' % appid) + + +class RestartInstanceCommand(StartInstanceCommand): + """Restart the given instances. + + ... + identifiers of the instances to restart. If no instance is + given, restart them all. 
+ """ + name = 'restart' + actionverb = 'restarted' + + def run_args(self, args, askconfirm): + regdir = cwcfg.instances_dir() + if not isfile(join(regdir, 'startorder')) or len(args) <= 1: + # no specific startorder + super(RestartInstanceCommand, self).run_args(args, askconfirm) + return + print ('some specific start order is specified, will first stop all ' + 'instances then restart them.') + # get instances in startorder + for appid in args: + if askconfirm: + print('*'*72) + if not ASK.confirm('%s instance %r ?' % (self.name, appid)): + continue + StopInstanceCommand(self.logger).stop_instance(appid) + forkcmd = [w for w in sys.argv if not w in args] + forkcmd[1] = 'start' + forkcmd = ' '.join(forkcmd) + for appid in reversed(args): + status = system('%s %s' % (forkcmd, appid)) + if status: + sys.exit(status) + + def restart_instance(self, appid): + StopInstanceCommand(self.logger).stop_instance(appid) + self.start_instance(appid) + + +class ReloadConfigurationCommand(RestartInstanceCommand): + """Reload the given instances. This command is equivalent to a + restart for now. + + ... + identifiers of the instances to reload. If no instance is + given, reload them all. + """ + name = 'reload' + + def reload_instance(self, appid): + self.restart_instance(appid) + + +class StatusCommand(InstanceCommand): + """Display status information about the given instances. + + ... + identifiers of the instances to status. If no instance is + given, get status information about all registered instances. + """ + name = 'status' + options = () + + @staticmethod + def status_instance(appid): + """print running status information for an instance""" + status = 0 + for mode in cwcfg.possible_configurations(appid): + config = cwcfg.config_for(appid, mode) + print('[%s-%s]' % (appid, mode), end=' ') + try: + pidf = config['pid-file'] + except KeyError: + print('buggy instance, pid file not specified') + continue + if not exists(pidf): + print("doesn't seem to be running") + status = 1 + continue + pid = int(open(pidf).read().strip()) + # trick to guess whether or not the process is running + try: + getpgid(pid) + except OSError: + print("should be running with pid %s but the process can not be found" % pid) + status = 1 + continue + print("running with pid %s" % (pid)) + return status + +class UpgradeInstanceCommand(InstanceCommandFork): + """Upgrade an instance after cubicweb and/or component(s) upgrade. + + For repository update, you will be prompted for a login / password to use + to connect to the system database. For some upgrades, the given user + should have create or alter table permissions. + + ... + identifiers of the instances to upgrade. If no instance is + given, upgrade them all. 
+ """ + name = 'upgrade' + actionverb = 'upgraded' + options = InstanceCommand.options + ( + ('force-cube-version', + {'short': 't', 'type' : 'named', 'metavar': 'cube1:X.Y.Z,cube2:X.Y.Z', + 'default': None, + 'help': 'force migration from the indicated version for the specified cube(s).'}), + + ('force-cubicweb-version', + {'short': 'e', 'type' : 'string', 'metavar': 'X.Y.Z', + 'default': None, + 'help': 'force migration from the indicated cubicweb version.'}), + + ('fs-only', + {'short': 's', 'action' : 'store_true', + 'default': False, + 'help': 'only upgrade files on the file system, not the database.'}), + + ('nostartstop', + {'short': 'n', 'action' : 'store_true', + 'default': False, + 'help': 'don\'t try to stop instance before migration and to restart it after.'}), + + ('verbosity', + {'short': 'v', 'type' : 'int', 'metavar': '<0..2>', + 'default': 1, + 'help': "0: no confirmation, 1: only main commands confirmed, 2 ask \ +for everything."}), + + ('backup-db', + {'short': 'b', 'type' : 'yn', 'metavar': '', + 'default': None, + 'help': "Backup the instance database before upgrade.\n"\ + "If the option is ommitted, confirmation will be ask.", + }), + + ('ext-sources', + {'short': 'E', 'type' : 'csv', 'metavar': '', + 'default': None, + 'help': "For multisources instances, specify to which sources the \ +repository should connect to for upgrading. When unspecified or 'migration' is \ +given, appropriate sources for migration will be automatically selected \ +(recommended). If 'all' is given, will connect to all defined sources.", + }), + ) + + def upgrade_instance(self, appid): + print('\n' + underline_title('Upgrading the instance %s' % appid)) + from logilab.common.changelog import Version + config = cwcfg.config_for(appid) + instance_running = exists(config['pid-file']) + config.repairing = True # notice we're not starting the server + config.verbosity = self.config.verbosity + set_sources_mode = getattr(config, 'set_sources_mode', None) + if set_sources_mode is not None: + set_sources_mode(self.config.ext_sources or ('migration',)) + # get instance and installed versions for the server and the componants + mih = config.migration_handler() + repo = mih.repo + vcconf = repo.get_versions() + helper = self.config_helper(config, required=False) + if self.config.force_cube_version: + for cube, version in self.config.force_cube_version.items(): + vcconf[cube] = Version(version) + toupgrade = [] + for cube in config.cubes(): + installedversion = config.cube_version(cube) + try: + applversion = vcconf[cube] + except KeyError: + config.error('no version information for %s' % cube) + continue + if installedversion > applversion: + toupgrade.append( (cube, applversion, installedversion) ) + cubicwebversion = config.cubicweb_version() + if self.config.force_cubicweb_version: + applcubicwebversion = Version(self.config.force_cubicweb_version) + vcconf['cubicweb'] = applcubicwebversion + else: + applcubicwebversion = vcconf.get('cubicweb') + if cubicwebversion > applcubicwebversion: + toupgrade.append(('cubicweb', applcubicwebversion, cubicwebversion)) + # only stop once we're sure we have something to do + if instance_running and not (CWDEV or self.config.nostartstop): + StopInstanceCommand(self.logger).stop_instance(appid) + # run cubicweb/componants migration scripts + if self.config.fs_only or toupgrade: + for cube, fromversion, toversion in toupgrade: + print('-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)) + with mih.cnx: + with mih.cnx.security_enabled(False, 
False): + mih.migrate(vcconf, reversed(toupgrade), self.config) + else: + print('-> no data migration needed for instance %s.' % appid) + # rewrite main configuration file + mih.rewrite_configuration() + mih.shutdown() + # handle i18n upgrade + if not self.i18nupgrade(config): + return + print() + if helper: + helper.postupgrade(repo) + print('-> instance migrated.') + if instance_running and not (CWDEV or self.config.nostartstop): + # restart instance through fork to get a proper environment, avoid + # uicfg pb (and probably gettext catalogs, to check...) + forkcmd = '%s start %s' % (sys.argv[0], appid) + status = system(forkcmd) + if status: + print('%s exited with status %s' % (forkcmd, status)) + print() + + def i18nupgrade(self, config): + # handle i18n upgrade: + # * install new languages + # * recompile catalogs + # XXX search available language in the first cube given + from cubicweb import i18n + templdir = cwcfg.cube_dir(config.cubes()[0]) + langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))] + errors = config.i18ncompile(langs) + if errors: + print('\n'.join(errors)) + if not ASK.confirm('Error while compiling message catalogs, ' + 'continue anyway?'): + print('-> migration not completed.') + return False + return True + + +class ListVersionsInstanceCommand(InstanceCommand): + """List versions used by an instance. + + ... + identifiers of the instances to list versions for. + """ + name = 'versions' + + def versions_instance(self, appid): + config = cwcfg.config_for(appid) + # should not raise error if db versions don't match fs versions + config.repairing = True + # no need to load all appobjects and schema + config.quick_start = True + if hasattr(config, 'set_sources_mode'): + config.set_sources_mode(('migration',)) + vcconf = config.repository().get_versions() + for key in sorted(vcconf): + print(key+': %s.%s.%s' % vcconf[key]) + +class ShellCommand(Command): + """Run an interactive migration shell on an instance. This is a python shell + with enhanced migration commands predefined in the namespace. An additional + argument may be given corresponding to a file containing commands to execute + in batch mode. + + By default it will connect to a local instance using an in memory + connection, unless a URL to a running instance is specified. + + Arguments after bare "--" string will not be processed by the shell command + You can use it to pass extra arguments to your script and expect for + them in '__args__' afterwards. + + + the identifier of the instance to connect. + """ + name = 'shell' + arguments = ' [batch command file(s)] [-- ", re.M|re.I|re.S) +def _remove_script_tags(data): + """Remove the script (usually javascript) tags to help the lxml + XMLParser / HTMLParser do their job. Without that, they choke on + tags embedded in JS strings. + """ + # Notice we may want to use lxml cleaner, but it's far too intrusive: + # + # cleaner = Cleaner(scripts=True, + # javascript=False, + # comments=False, + # style=False, + # links=False, + # meta=False, + # page_structure=False, + # processing_instructions=False, + # embedded=False, + # frames=False, + # forms=False, + # annoying_tags=False, + # remove_tags=(), + # remove_unknown_tags=False, + # safe_attrs_only=False, + # add_nofollow=False) + # >>> cleaner.clean_html('') + # '' + # >>> cleaner.clean_html('') + # '' + # >>> cleaner.clean_html('
' + # + # using that, we'll miss most actual validation error we want to + # catch. For now, use dumb regexp + return _REM_SCRIPT_RGX.sub(b'', data) + + +class Validator(object): + """ base validator API """ + parser = None + + def parse_string(self, source): + etree = self._parse(self.preprocess_data(source)) + return PageInfo(source, etree) + + def preprocess_data(self, data): + return data + + def _parse(self, pdata): + try: + return etree.fromstring(pdata, self.parser) + except etree.XMLSyntaxError as exc: + new_exc = AssertionError(u'invalid document: %s' % exc) + new_exc.position = exc.position + raise new_exc + + +class DTDValidator(Validator): + def __init__(self): + Validator.__init__(self) + # XXX understand what's happening under windows + self.parser = etree.XMLParser(dtd_validation=sys.platform != 'win32') + + def preprocess_data(self, data): + """used to fix potential blockquote mess generated by docutils""" + if STRICT_DOCTYPE not in data: + return data + # parse using transitional DTD + data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE) + tree = self._parse(data) + namespace = tree.nsmap.get(None) + # this is the list of authorized child tags for
nodes + expected = 'p h1 h2 h3 h4 h5 h6 div ul ol dl pre hr blockquote address ' \ + 'fieldset table form noscript ins del script'.split() + if namespace: + blockquotes = tree.findall('.//{%s}blockquote' % namespace) + expected = ['{%s}%s' % (namespace, tag) for tag in expected] + else: + blockquotes = tree.findall('.//blockquote') + # quick and dirty approach: remove all blockquotes + for blockquote in blockquotes: + parent = blockquote.getparent() + parent.remove(blockquote) + data = etree.tostring(tree) + return '%s\n%s' % ( + STRICT_DOCTYPE, data) + + +class XMLValidator(Validator): + """XML validator, checks that XML is well-formed and used XMLNS are defined""" + + def __init__(self): + Validator.__init__(self) + self.parser = etree.XMLParser() + +SaxOnlyValidator = class_renamed('SaxOnlyValidator', + XMLValidator, + '[3.17] you should use the ' + 'XMLValidator class instead') + + +class XMLSyntaxValidator(Validator): + """XML syntax validator, check XML is well-formed""" + + class MySaxErrorHandler(sax.ErrorHandler): + """override default handler to avoid choking because of unknown entity""" + def fatalError(self, exception): + # XXX check entity in htmlentitydefs + if not str(exception).endswith('undefined entity'): + raise exception + _parser = sax.make_parser() + _parser.setContentHandler(sax.handler.ContentHandler()) + _parser.setErrorHandler(MySaxErrorHandler()) + + def __init__(self): + super(XMLSyntaxValidator, self).__init__() + # XMLParser() wants xml namespaces defined + # XMLParser(recover=True) will accept almost anything + # + # -> use the later but preprocess will check xml well-formness using a + # dumb SAX parser + self.parser = etree.XMLParser(recover=True) + + def preprocess_data(self, data): + return _remove_script_tags(data) + + def _parse(self, data): + inpsrc = sax.InputSource() + inpsrc.setByteStream(BytesIO(data)) + try: + self._parser.parse(inpsrc) + except sax.SAXParseException as exc: + new_exc = AssertionError(u'invalid document: %s' % exc) + new_exc.position = (exc._linenum, exc._colnum) + raise new_exc + return super(XMLSyntaxValidator, self)._parse(data) + + +class HTMLValidator(Validator): + + def __init__(self): + Validator.__init__(self) + self.parser = etree.HTMLParser(recover=False) + + def preprocess_data(self, data): + return _remove_script_tags(data) + + +class PageInfo(object): + """holds various informations on the view's output""" + def __init__(self, source, root): + self.source = source + self.etree = root + self.raw_text = u''.join(root.xpath('//text()')) + self.namespace = self.etree.nsmap + self.default_ns = self.namespace.get(None) + self.a_tags = self.find_tag('a') + self.h1_tags = self.find_tag('h1') + self.h2_tags = self.find_tag('h2') + self.h3_tags = self.find_tag('h3') + self.h4_tags = self.find_tag('h4') + self.input_tags = self.find_tag('input') + self.title_tags = [self.h1_tags, self.h2_tags, self.h3_tags, self.h4_tags] + + def _iterstr(self, tag): + if self.default_ns is None: + return ".//%s" % tag + else: + return ".//{%s}%s" % (self.default_ns, tag) + + def matching_nodes(self, tag, **attrs): + for elt in self.etree.iterfind(self._iterstr(tag)): + eltattrs = elt.attrib + for attr, value in attrs.items(): + try: + if eltattrs[attr] != value: + break + except KeyError: + break + else: # all attributes match + yield elt + + def has_tag(self, tag, nboccurs=1, **attrs): + """returns True if tag with given attributes appears in the page + `nbtimes` (any if None) + """ + for elt in self.matching_nodes(tag, **attrs): + if nboccurs 
is None: # no need to check number of occurences + return True + if not nboccurs: # too much occurences + return False + nboccurs -= 1 + if nboccurs == 0: # correct number of occurences + return True + return False # no matching tag/attrs + + def find_tag(self, tag, gettext=True): + """return a list which contains text of all "tag" elements """ + iterstr = self._iterstr(tag) + if not gettext or tag in ('a', 'input'): + return [(elt.text, elt.attrib) + for elt in self.etree.iterfind(iterstr)] + return [u''.join(elt.xpath('.//text()')) + for elt in self.etree.iterfind(iterstr)] + + def appears(self, text): + """returns True if appears in the page""" + return text in self.raw_text + + def __contains__(self, text): + return text in self.source + + def has_title(self, text, level=None): + """returns True if text + + :param level: the title's level (1 for h1, 2 for h2, etc.) + """ + if level is None: + for hlist in self.title_tags: + if text in hlist: + return True + return False + else: + hlist = self.title_tags[level - 1] + return text in hlist + + def has_title_regexp(self, pattern, level=None): + """returns True if pattern""" + sre = re.compile(pattern) + if level is None: + for hlist in self.title_tags: + for title in hlist: + if sre.match(title): + return True + return False + else: + hlist = self.title_tags[level - 1] + for title in hlist: + if sre.match(title): + return True + return False + + def has_link(self, text, url=None): + """returns True if text was found in the page""" + for link_text, attrs in self.a_tags: + if text == link_text: + if url is None: + return True + try: + href = attrs['href'] + if href == url: + return True + except KeyError: + continue + return False + + def has_link_regexp(self, pattern, url=None): + """returns True if pattern was found in the page""" + sre = re.compile(pattern) + for link_text, attrs in self.a_tags: + if sre.match(link_text): + if url is None: + return True + try: + href = attrs['href'] + if href == url: + return True + except KeyError: + continue + return False + +VALMAP = {None: None, + 'dtd': DTDValidator, + 'xml': XMLValidator, + 'html': HTMLValidator, + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/httptest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/httptest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,170 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
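For readers unfamiliar with the helpers this new module provides, here is a minimal usage sketch of the CubicWebServerTC class defined below. The test names, requested paths and expected status codes are illustrative assumptions (they depend on the test instance's configuration, notably whether anonymous access is allowed); only CubicWebServerTC, web_login() and web_get() come from this module.

    from logilab.common.testlib import unittest_main
    from cubicweb.devtools.httptest import CubicWebServerTC

    class MyCubeHTTPTC(CubicWebServerTC):
        """integration tests issuing real HTTP requests against a throw-away
        test instance (the Twisted-based server is started by setUp())"""

        def test_anonymous_index(self):
            response = self.web_get('')
            # 200 assumes anonymous access is enabled in the test configuration
            self.assertEqual(response.status, 200)

        def test_authenticated_index(self):
            self.web_login()   # defaults to the admin credentials of the instance
            response = self.web_get('')
            self.assertEqual(response.status, 200)

    if __name__ == '__main__':
        unittest_main()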
+"""this module contains base classes and utilities for integration with running +http server +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import random +import threading +import socket + +from six.moves import range, http_client +from six.moves.urllib.parse import urlparse + + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools import ApptestConfiguration + + +def get_available_port(ports_scan): + """return the first available port from the given ports range + + Try to connect port by looking for refused connection (111) or transport + endpoint already connected (106) errors + + Raise a RuntimeError if no port can be found + + :type ports_range: list + :param ports_range: range of ports to test + :rtype: int + + .. see:: :func:`test.test_support.bind_port` + """ + ports_scan = list(ports_scan) + random.shuffle(ports_scan) # lower the chance of race condition + for port in ports_scan: + try: + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock = s.connect(("localhost", port)) + except socket.error as err: + if err.args[0] in (111, 106): + return port + finally: + s.close() + raise RuntimeError('get_available_port([ports_range]) cannot find an available port') + + +class CubicWebServerTC(CubicWebTC): + """Class for running a Twisted-based test web server. + """ + ports_range = range(7000, 8000) + + def start_server(self): + from twisted.internet import reactor + from cubicweb.etwist.server import run + # use a semaphore to avoid starting test while the http server isn't + # fully initilialized + semaphore = threading.Semaphore(0) + def safe_run(*args, **kwargs): + try: + run(*args, **kwargs) + finally: + semaphore.release() + + reactor.addSystemEventTrigger('after', 'startup', semaphore.release) + t = threading.Thread(target=safe_run, name='cubicweb_test_web_server', + args=(self.config, True), kwargs={'repo': self.repo}) + self.web_thread = t + t.start() + semaphore.acquire() + if not self.web_thread.isAlive(): + # XXX race condition with actual thread death + raise RuntimeError('Could not start the web server') + #pre init utils connection + parseurl = urlparse(self.config['base-url']) + assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port']) + self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname, + parseurl.port) + self._ident_cookie = None + + def stop_server(self, timeout=15): + """Stop the webserver, waiting for the thread to return""" + from twisted.internet import reactor + if self._web_test_cnx is None: + self.web_logout() + self._web_test_cnx.close() + try: + reactor.stop() + self.web_thread.join(timeout) + assert not self.web_thread.isAlive() + + finally: + reactor.__init__() + + def web_login(self, user=None, passwd=None): + """Log the current http session for the provided credential + + If no user is provided, admin connection are used. 
+ """ + if user is None: + user = self.admlogin + passwd = self.admpassword + if passwd is None: + passwd = user + response = self.web_get("login?__login=%s&__password=%s" % + (user, passwd)) + assert response.status == http_client.SEE_OTHER, response.status + self._ident_cookie = response.getheader('Set-Cookie') + assert self._ident_cookie + return True + + def web_logout(self, user='admin', pwd=None): + """Log out current http user""" + if self._ident_cookie is not None: + response = self.web_get('logout') + self._ident_cookie = None + + def web_request(self, path='', method='GET', body=None, headers=None): + """Return an http_client.HTTPResponse object for the specified path + + Use available credential if available. + """ + if headers is None: + headers = {} + if self._ident_cookie is not None: + assert 'Cookie' not in headers + headers['Cookie'] = self._ident_cookie + self._web_test_cnx.request(method, '/' + path, headers=headers, body=body) + response = self._web_test_cnx.getresponse() + response.body = response.read() # to chain request + response.read = lambda : response.body + return response + + def web_get(self, path='', body=None, headers=None): + return self.web_request(path=path, body=body, headers=headers) + + def setUp(self): + super(CubicWebServerTC, self).setUp() + port = self.config['port'] or get_available_port(self.ports_range) + self.config.global_set_option('port', port) # force rewrite here + self.config.global_set_option('base-url', 'http://127.0.0.1:%d/' % port) + # call load_configuration again to let the config reset its datadir_url + self.config.load_configuration() + self.start_server() + + def tearDown(self): + from twisted.internet import error + try: + self.stop_server() + except error.ReactorNotRunning as err: + # Server could be launched manually + print(err) + super(CubicWebServerTC, self).tearDown() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/instrument.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/instrument.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,225 @@ +# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr -- mailto:contact@logilab.fr +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
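As a hint of how the instrumentation utilities below are meant to be used, here is a minimal sketch of a concrete propagation analyzer. The relation name, the entity types used as roots and the cube name are illustrative assumptions; real examples live in the localperms and nosylist cubes, as the docstrings below point out.

    from cubicweb.devtools.instrument import PropagationAnalyzer

    class PermissionPropagationAnalyzer(PropagationAnalyzer):
        # relation carrying the propagated information (assumed for this sketch)
        prop_rel = 'require_permission'

        def is_root(self, eschema):
            # assumption for the sketch: propagation starts from these entity types
            return eschema.type in ('Project', 'Ticket')

    analyzer = PermissionPropagationAnalyzer()
    # load the cube's registry and the entity schemas involved in propagation
    vreg, eschemas = analyzer.init('mycube')   # 'mycube' is a hypothetical cube name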
+"""Instrumentation utilities""" +from __future__ import print_function + +import os + +try: + import pygraphviz +except ImportError: + pygraphviz = None + +from cubicweb.cwvreg import CWRegistryStore +from cubicweb.devtools.devctl import DevConfiguration + + +ALL_COLORS = [ + "00FF00", "0000FF", "FFFF00", "FF00FF", "00FFFF", "000000", + "800000", "008000", "000080", "808000", "800080", "008080", "808080", + "C00000", "00C000", "0000C0", "C0C000", "C000C0", "00C0C0", "C0C0C0", + "400000", "004000", "000040", "404000", "400040", "004040", "404040", + "200000", "002000", "000020", "202000", "200020", "002020", "202020", + "600000", "006000", "000060", "606000", "600060", "006060", "606060", + "A00000", "00A000", "0000A0", "A0A000", "A000A0", "00A0A0", "A0A0A0", + "E00000", "00E000", "0000E0", "E0E000", "E000E0", "00E0E0", "E0E0E0", + ] +_COLORS = {} +def get_color(key): + try: + return _COLORS[key] + except KeyError: + _COLORS[key] = '#'+ALL_COLORS[len(_COLORS) % len(ALL_COLORS)] + return _COLORS[key] + +def warn(msg, *args): + print('WARNING: %s' % (msg % args)) + +def info(msg): + print('INFO: ' + msg) + + +class PropagationAnalyzer(object): + """Abstract propagation analyzer, providing utility function to extract + entities involved in propagation from a schema, as well as propagation + rules from hooks (provided they use intrumentalized sets, see + :class:`CubeTracerSet`). + + Concrete classes should at least define `prop_rel` class attribute and + implements the `is_root` method. + + See `localperms` or `nosylist` cubes for example usage (`ccplugin` module). + """ + prop_rel = None # name of the propagation relation + + def init(self, cube): + """Initialize analyze for the given cube, returning the (already loaded) + vregistry and a set of entities which we're interested in. + """ + config = DevConfiguration(cube) + schema = config.load_schema() + vreg = CWRegistryStore(config) + vreg.set_schema(schema) # set_schema triggers objects registrations + eschemas = set(eschema for eschema in schema.entities() + if self.should_include(eschema)) + return vreg, eschemas + + def is_root(self, eschema): + """Return `True` if given entity schema is a root of the graph""" + raise NotImplementedError() + + def should_include(self, eschema): + """Return `True` if given entity schema should be included by the graph. + """ + + if self.prop_rel in eschema.subjrels or self.is_root(eschema): + return True + return False + + def prop_edges(self, s_rels, o_rels, eschemas): + """Return a set of edges where propagation has been detected. + + Each edge is defined by a 4-uple (from node, to node, rtype, package) + where `rtype` is the relation type bringing from to and `package` is the cube adding the rule to the propagation + control set (see see :class:`CubeTracerSet`). 
+ """ + schema = iter(eschemas).next().schema + prop_edges = set() + for rtype in s_rels: + found = False + for subj, obj in schema.rschema(rtype).rdefs: + if subj in eschemas and obj in eschemas: + found = True + prop_edges.add( (subj, obj, rtype, s_rels.value_cube[rtype]) ) + if not found: + warn('no rdef match for %s', rtype) + for rtype in o_rels: + found = False + for subj, obj in schema.rschema(rtype).rdefs: + if subj in eschemas and obj in eschemas: + found = True + prop_edges.add( (obj, subj, rtype, o_rels.value_cube[rtype]) ) + if not found: + warn('no rdef match for %s', rtype) + return prop_edges + + def detect_problems(self, eschemas, edges): + """Given the set of analyzed entity schemas and edges between them, + return a set of entity schemas where a problem has been detected. + """ + problematic = set() + for eschema in eschemas: + if self.has_problem(eschema, edges): + problematic.add(eschema) + not_problematic = set(eschemas).difference(problematic) + if not_problematic: + info('nothing problematic in: %s' % + ', '.join(e.type for e in not_problematic)) + return problematic + + def has_problem(self, eschema, edges): + """Return `True` if the given schema is considered problematic, + considering base propagation rules. + """ + root = self.is_root(eschema) + has_prop_rel = self.prop_rel in eschema.subjrels + # root but no propagation relation + if root and not has_prop_rel: + warn('%s is root but miss %s', eschema, self.prop_rel) + return True + # propagated but without propagation relation / not propagated but + # with propagation relation + if not has_prop_rel and \ + any(edge for edge in edges if edge[1] == eschema): + warn("%s miss %s but is reached by propagation", + eschema, self.prop_rel) + return True + elif has_prop_rel and not root: + rdef = eschema.rdef(self.prop_rel, takefirst=True) + edges = [edge for edge in edges if edge[1] == eschema] + if not edges: + warn("%s has %s but isn't reached by " + "propagation", eschema, self.prop_rel) + return True + # require_permission relation / propagation rule not added by + # the same cube + elif not any(edge for edge in edges if edge[-1] == rdef.package): + warn('%s has %s relation / propagation rule' + ' not added by the same cube (%s / %s)', eschema, + self.prop_rel, rdef.package, edges[0][-1]) + return True + return False + + def init_graph(self, eschemas, edges, problematic): + """Initialize and return graph, adding given nodes (entity schemas) and + edges between them. + + Require pygraphviz installed. + """ + if pygraphviz is None: + raise RuntimeError('pygraphviz is not installed') + graph = pygraphviz.AGraph(strict=False, directed=True) + for eschema in eschemas: + if eschema in problematic: + params = {'color': '#ff0000', 'fontcolor': '#ff0000'} + else: + params = {}#'color': get_color(eschema.package)} + graph.add_node(eschema.type, **params) + for subj, obj, rtype, package in edges: + graph.add_edge(str(subj), str(obj), label=rtype, + color=get_color(package)) + return graph + + def add_colors_legend(self, graph): + """Add a legend of used colors to the graph.""" + for package, color in sorted(_COLORS.items()): + graph.add_node(package, color=color, fontcolor=color, shape='record') + + +class CubeTracerSet(object): + """Dumb set implementation whose purpose is to keep track of which cube is + being loaded when something is added to the set. + + Results will be found in the `value_cube` attribute dictionary. + + See `localperms` or `nosylist` cubes for example usage (`hooks` module). 
+ """ + def __init__(self, vreg, wrapped): + self.vreg = vreg + self.wrapped = wrapped + self.value_cube = {} + + def add(self, value): + self.wrapped.add(value) + cube = self.vreg.currently_loading_cube + if value in self.value_cube: + warn('%s is propagated by cube %s and cube %s', + value, self.value_cube[value], cube) + else: + self.value_cube[value] = cube + + def __iter__(self): + return iter(self.wrapped) + + def __ior__(self, other): + for value in other: + self.add(value) + return self + + def __ror__(self, other): + other |= self.wrapped + return other diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/qunit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/qunit.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,293 @@ +# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from __future__ import absolute_import + +import os, os.path as osp +import errno +from tempfile import mkdtemp +from subprocess import Popen, PIPE, STDOUT + +from six.moves.queue import Queue, Empty + +# imported by default to simplify further import statements +from logilab.common.testlib import unittest_main, with_tempdir, InnerTest, Tags +import webtest.http + +import cubicweb +from cubicweb.view import View +from cubicweb.web.controller import Controller +from cubicweb.web.views.staticcontrollers import StaticFileController, STATIC_CONTROLLERS +from cubicweb.devtools import webtest as cwwebtest + + +class FirefoxHelper(object): + + def __init__(self, url=None): + self._process = None + self._profile_dir = mkdtemp(prefix='cwtest-ffxprof-') + self.firefox_cmd = ['firefox', '-no-remote'] + if os.name == 'posix': + self.firefox_cmd = [osp.join(osp.dirname(__file__), 'data', 'xvfb-run.sh'), + '-a', '-s', '-noreset -screen 0 800x600x24'] + self.firefox_cmd + + def test(self): + try: + proc = Popen(['firefox', '--help'], stdout=PIPE, stderr=STDOUT) + stdout, _ = proc.communicate() + return proc.returncode == 0, stdout + except OSError as exc: + if exc.errno == errno.ENOENT: + msg = '[%s] %s' % (errno.errorcode[exc.errno], exc.strerror) + return False, msg + raise + + def start(self, url): + self.stop() + cmd = self.firefox_cmd + ['-silent', '--profile', self._profile_dir, + '-url', url] + with open(os.devnull, 'w') as fnull: + self._process = Popen(cmd, stdout=fnull, stderr=fnull) + + def stop(self): + if self._process is not None: + assert self._process.returncode is None, self._process.returncode + self._process.terminate() + self._process.wait() + self._process = None + + def __del__(self): + self.stop() + + +class QUnitTestCase(cwwebtest.CubicWebTestTC): + + tags = cwwebtest.CubicWebTestTC.tags | Tags(('qunit',)) + + # testfile, (dep_a, dep_b) + all_js_tests = () + + def setUp(self): + super(QUnitTestCase, self).setUp() + self.test_queue = Queue() + class 
MyQUnitResultController(QUnitResultController): + tc = self + test_queue = self.test_queue + self._qunit_controller = MyQUnitResultController + self.webapp.app.appli.vreg.register(MyQUnitResultController) + self.webapp.app.appli.vreg.register(QUnitView) + self.webapp.app.appli.vreg.register(CWDevtoolsStaticController) + self.server = webtest.http.StopableWSGIServer.create(self.webapp.app) + self.config.global_set_option('base-url', self.server.application_url) + + def tearDown(self): + self.server.shutdown() + self.webapp.app.appli.vreg.unregister(self._qunit_controller) + self.webapp.app.appli.vreg.unregister(QUnitView) + self.webapp.app.appli.vreg.unregister(CWDevtoolsStaticController) + super(QUnitTestCase, self).tearDown() + + def test_javascripts(self): + for args in self.all_js_tests: + self.assertIn(len(args), (1, 2)) + test_file = args[0] + if len(args) > 1: + depends = args[1] + else: + depends = () + for js_test in self._test_qunit(test_file, depends): + yield js_test + + @with_tempdir + def _test_qunit(self, test_file, depends=(), timeout=10): + QUnitView.test_file = test_file + QUnitView.depends = depends + + while not self.test_queue.empty(): + self.test_queue.get(False) + + browser = FirefoxHelper() + isavailable, reason = browser.test() + if not isavailable: + self.fail('firefox not available or not working properly (%s)' % reason) + browser.start(self.config['base-url'] + "?vid=qunit") + test_count = 0 + error = False + def raise_exception(cls, *data): + raise cls(*data) + while not error: + try: + result, test_name, msg = self.test_queue.get(timeout=timeout) + test_name = '%s (%s)' % (test_name, test_file) + self.set_description(test_name) + if result is None: + break + test_count += 1 + if result: + yield InnerTest(test_name, lambda : 1) + else: + yield InnerTest(test_name, self.fail, msg) + except Empty: + error = True + msg = '%s inactivity timeout (%is). %i test results received' + yield InnerTest(test_file, raise_exception, RuntimeError, + msg % (test_file, timeout, test_count)) + browser.stop() + if test_count <= 0 and not error: + yield InnerTest(test_name, raise_exception, RuntimeError, + 'No test yielded by qunit for %s' % test_file) + +class QUnitResultController(Controller): + + __regid__ = 'qunit_result' + + + # Class variables to circumvent the instantiation of a new Controller for each request. 
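+    # (each QUnit event is reported by the browser as a separate HTTP request, and a
+    # new controller instance is built for every request, so state shared between the
+    # events of a single test run has to live on the class itself)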
+ _log_stack = [] # store QUnit log messages + _current_module_name = '' # store the current QUnit module name + + def publish(self, rset=None): + event = self._cw.form['event'] + getattr(self, 'handle_%s' % event)() + return b'' + + def handle_module_start(self): + self.__class__._current_module_name = self._cw.form.get('name', '') + + def handle_test_done(self): + name = '%s // %s' % (self._current_module_name, self._cw.form.get('name', '')) + failures = int(self._cw.form.get('failures', 0)) + total = int(self._cw.form.get('total', 0)) + + self._log_stack.append('%i/%i assertions failed' % (failures, total)) + msg = '\n'.join(self._log_stack) + + if failures: + self.tc.test_queue.put((False, name, msg)) + else: + self.tc.test_queue.put((True, name, msg)) + self._log_stack[:] = [] + + def handle_done(self): + self.tc.test_queue.put((None, None, None)) + + def handle_log(self): + result = self._cw.form['result'] + message = self._cw.form.get('message', '') + actual = self._cw.form.get('actual') + expected = self._cw.form.get('expected') + source = self._cw.form.get('source') + log = '%s: %s' % (result, message) + if result == 'false' and actual is not None and expected is not None: + log += ' (got: %s, expected: %s)' % (actual, expected) + if source is not None: + log += '\n' + source + self._log_stack.append(log) + + +class QUnitView(View): + __regid__ = 'qunit' + + templatable = False + + depends = None + test_file = None + + def call(self, **kwargs): + w = self.w + req = self._cw + w(u''' + + + + + + + + ''') + w(u'') + w(u'') + w(u'') + + for dep in self.depends: + w(u' \n' % dep) + + w(u' ') + w(u' ' % self.test_file) + w(u''' + +
+ + ''') + + +class CWDevtoolsStaticController(StaticFileController): + __regid__ = 'devtools' + + def publish(self, rset=None): + staticdir = osp.join(osp.dirname(__file__), 'data') + relpath = self.relpath[len(self.__regid__) + 1:] + return self.static_file(osp.join(staticdir, relpath)) + + +STATIC_CONTROLLERS.append(CWDevtoolsStaticController) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/realdbtest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/realdbtest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,59 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from cubicweb import toolsutils +from cubicweb.devtools import DEFAULT_SOURCES, BaseApptestConfiguration + +class RealDatabaseConfiguration(BaseApptestConfiguration): + init_repository = False + sourcesdef = DEFAULT_SOURCES.copy() + + def sources(self): + """ + By default, we run tests with the sqlite DB backend. + One may use its own configuration by just creating a + 'sources' file in the test directory from wich tests are + launched. + """ + self._sources = self.sourcesdef + return self._sources + + +def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None): + """convenience function that builds a real-db configuration class""" + sourcesdef = {'system': {'adapter' : 'native', + 'db-encoding' : 'UTF-8', #'ISO-8859-1', + 'db-user' : dbuser, + 'db-password' : dbpassword, + 'db-name' : dbname, + 'db-driver' : 'postgres', + 'db-host' : dbhost, + }, + 'admin' : {'login': adminuser, + 'password': adminpassword, + }, + } + return type('MyRealDBConfig', (RealDatabaseConfiguration,), + {'sourcesdef': sourcesdef}) + + +def loadconfig(filename): + """convenience function that builds a real-db configuration class + from a file + """ + return type('MyRealDBConfig', (RealDatabaseConfiguration,), + {'sourcesdef': toolsutils.read_config(filename)}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/repotest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/repotest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,353 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""some utilities to ease repository testing + +This module contains functions to initialize a new repository. +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from pprint import pprint + +from logilab.common.testlib import SkipTest + +def tuplify(mylist): + return [tuple(item) for item in mylist] + +def snippet_key(a): + # a[0] may be a dict or a key/value tuple + return (sorted(dict(a[0]).items()), [e.expression for e in a[1]]) + +def test_plan(self, rql, expected, kwargs=None): + with self.session.new_cnx() as cnx: + plan = self._prepare_plan(cnx, rql, kwargs) + self.planner.build_plan(plan) + try: + self.assertEqual(len(plan.steps), len(expected), + 'expected %s steps, got %s' % (len(expected), len(plan.steps))) + # step order is important + for i, step in enumerate(plan.steps): + compare_steps(self, step.test_repr(), expected[i]) + except AssertionError: + pprint([step.test_repr() for step in plan.steps]) + raise + +def compare_steps(self, step, expected): + try: + self.assertEqual(step[0], expected[0], 'expected step type %s, got %s' % (expected[0], step[0])) + if len(step) > 2 and isinstance(step[1], list) and isinstance(expected[1], list): + queries, equeries = step[1], expected[1] + self.assertEqual(len(queries), len(equeries), + 'expected %s queries, got %s' % (len(equeries), len(queries))) + for i, (rql, sol) in enumerate(queries): + self.assertEqual(rql, equeries[i][0]) + self.assertEqual(sorted(sorted(x.items()) for x in sol), sorted(sorted(x.items()) for x in equeries[i][1])) + idx = 2 + else: + idx = 1 + self.assertEqual(step[idx:-1], expected[idx:-1], + 'expected step characteristic \n%s\n, got\n%s' % (expected[1:-1], step[1:-1])) + self.assertEqual(len(step[-1]), len(expected[-1]), + 'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1]))) + except AssertionError: + print('error on step ', end=' ') + pprint(step[:-1]) + raise + children = step[-1] + if step[0] in ('UnionFetchStep', 'UnionStep'): + # sort children + children = sorted(children) + expectedchildren = sorted(expected[-1]) + else: + expectedchildren = expected[-1] + for i, substep in enumerate(children): + compare_steps(self, substep, expectedchildren[i]) + + +class DumbOrderedDict(list): + def __iter__(self): + return self.iterkeys() + def __contains__(self, key): + return key in self.iterkeys() + def __getitem__(self, key): + for key_, value in list.__iter__(self): + if key == key_: + return value + raise KeyError(key) + def iterkeys(self): + return (x for x, y in list.__iter__(self)) + def iteritems(self): + return (x for x in list.__iter__(self)) + def items(self): + return [x for x in list.__iter__(self)] + +class DumbOrderedDict2(object): + def __init__(self, origdict, sortkey): + self.origdict = origdict + self.sortkey = sortkey + def __getattr__(self, attr): + return getattr(self.origdict, attr) + def __iter__(self): + return iter(sorted(self.origdict, key=self.sortkey)) + +def schema_eids_idx(schema): + """return a dictionary mapping schema types to their eids so we can reread + it from the fs instead of the db (too costly) between tests + """ + schema_eids = {} + for x in schema.entities(): + schema_eids[x] = x.eid + for x in schema.relations(): + schema_eids[x] = x.eid + for rdef in x.rdefs.values(): + schema_eids[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid + 
return schema_eids + +def restore_schema_eids_idx(schema, schema_eids): + """rebuild schema eid index""" + for x in schema.entities(): + x.eid = schema_eids[x] + schema._eid_index[x.eid] = x + for x in schema.relations(): + x.eid = schema_eids[x] + schema._eid_index[x.eid] = x + for rdef in x.rdefs.values(): + rdef.eid = schema_eids[(rdef.subject, rdef.rtype, rdef.object)] + schema._eid_index[rdef.eid] = rdef + + +from logilab.common.testlib import TestCase, mock_object +from logilab.database import get_db_helper + +from rql import RQLHelper + +from cubicweb.devtools.fake import FakeRepo, FakeConfig, FakeSession +from cubicweb.server import set_debug, debugged +from cubicweb.server.querier import QuerierHelper +from cubicweb.server.session import Session +from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions + +class RQLGeneratorTC(TestCase): + schema = backend = None # set this in concrete class + + @classmethod + def setUpClass(cls): + if cls.backend is not None: + try: + cls.dbhelper = get_db_helper(cls.backend) + except ImportError as ex: + raise SkipTest(str(ex)) + + def setUp(self): + self.repo = FakeRepo(self.schema, config=FakeConfig(apphome=self.datadir)) + self.repo.system_source = mock_object(dbdriver=self.backend) + self.rqlhelper = RQLHelper(self.schema, + special_relations={'eid': 'uid', + 'has_text': 'fti'}, + backend=self.backend) + self.qhelper = QuerierHelper(self.repo, self.schema) + ExecutionPlan._check_permissions = _dummy_check_permissions + rqlannotation._select_principal = _select_principal + if self.backend is not None: + self.o = SQLGenerator(self.schema, self.dbhelper) + + def tearDown(self): + ExecutionPlan._check_permissions = _orig_check_permissions + rqlannotation._select_principal = _orig_select_principal + + def set_debug(self, debug): + set_debug(debug) + def debugged(self, debug): + return debugged(debug) + + def _prepare(self, rql): + #print '******************** prepare', rql + union = self.rqlhelper.parse(rql) + #print '********* parsed', union.as_string() + self.rqlhelper.compute_solutions(union) + #print '********* solutions', solutions + self.rqlhelper.simplify(union) + #print '********* simplified', union.as_string() + plan = self.qhelper.plan_factory(union, {}, FakeSession(self.repo)) + plan.preprocess(union) + for select in union.children: + select.solutions.sort(key=lambda x: list(x.items())) + #print '********* ppsolutions', solutions + return union + + +class BaseQuerierTC(TestCase): + repo = None # set this in concrete class + + def setUp(self): + self.o = self.repo.querier + self.session = next(iter(self.repo._sessions.values())) + self.ueid = self.session.user.eid + assert self.ueid != -1 + self.repo._type_source_cache = {} # clear cache + self.maxeid = self.get_max_eid() + do_monkey_patch() + self._dumb_sessions = [] + + def get_max_eid(self): + with self.session.new_cnx() as cnx: + return cnx.execute('Any MAX(X)')[0][0] + + def cleanup(self): + with self.session.new_cnx() as cnx: + cnx.execute('DELETE Any X WHERE X eid > %s' % self.maxeid) + cnx.commit() + + def tearDown(self): + undo_monkey_patch() + self.cleanup() + assert self.session.user.eid != -1 + + def set_debug(self, debug): + set_debug(debug) + def debugged(self, debug): + return debugged(debug) + + def _rqlhelper(self): + rqlhelper = self.repo.vreg.rqlhelper + # reset uid_func so it don't try to get type from eids + rqlhelper._analyser.uid_func = None + rqlhelper._analyser.uid_func_mapping = {} + return rqlhelper + + def _prepare_plan(self, cnx, rql, 
kwargs=None, simplify=True): + rqlhelper = self._rqlhelper() + rqlst = rqlhelper.parse(rql) + rqlhelper.compute_solutions(rqlst, kwargs=kwargs) + if simplify: + rqlhelper.simplify(rqlst) + for select in rqlst.children: + select.solutions.sort(key=lambda x: list(x.items())) + return self.o.plan_factory(rqlst, kwargs, cnx) + + def _prepare(self, cnx, rql, kwargs=None): + plan = self._prepare_plan(cnx, rql, kwargs, simplify=False) + plan.preprocess(plan.rqlst) + rqlst = plan.rqlst.children[0] + rqlst.solutions = remove_unused_solutions(rqlst, rqlst.solutions, {}, self.repo.schema)[0] + return rqlst + + def user_groups_session(self, *groups): + """lightweight session using the current user with hi-jacked groups""" + # use self.session.user.eid to get correct owned_by relation, unless explicit eid + with self.session.new_cnx() as cnx: + u = self.repo._build_user(cnx, self.session.user.eid) + u._groups = set(groups) + s = Session(u, self.repo) + return s + + def qexecute(self, rql, args=None, build_descr=True): + with self.session.new_cnx() as cnx: + try: + return self.o.execute(cnx, rql, args, build_descr) + finally: + if rql.startswith(('INSERT', 'DELETE', 'SET')): + cnx.commit() + + +class BasePlannerTC(BaseQuerierTC): + + def setup(self): + # XXX source_defs + self.o = self.repo.querier + self.session = self.repo._sessions.values()[0] + self.schema = self.o.schema + self.system = self.repo.system_source + do_monkey_patch() + self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered + + def tearDown(self): + undo_monkey_patch() + + def _prepare_plan(self, cnx, rql, kwargs=None): + rqlst = self.o.parse(rql, annotate=True) + self.o.solutions(cnx, rqlst, kwargs) + if rqlst.TYPE == 'select': + self.repo.vreg.rqlhelper.annotate(rqlst) + for select in rqlst.children: + select.solutions.sort(key=lambda x: list(x.items())) + else: + rqlst.solutions.sort(key=lambda x: list(x.items())) + return self.o.plan_factory(rqlst, kwargs, cnx) + + +# monkey patch some methods to get predictable results ####################### + +from cubicweb import rqlrewrite +_orig_iter_relations = rqlrewrite.iter_relations +_orig_insert_snippets = rqlrewrite.RQLRewriter.insert_snippets +_orig_build_variantes = rqlrewrite.RQLRewriter.build_variantes + +def _insert_snippets(self, snippets, varexistsmap=None): + _orig_insert_snippets(self, sorted(snippets, key=snippet_key), varexistsmap) + +def _build_variantes(self, newsolutions): + variantes = _orig_build_variantes(self, newsolutions) + sortedvariantes = [] + for variante in variantes: + orderedkeys = sorted((k[1], k[2], v) for k, v in variante.items()) + variante = DumbOrderedDict(sorted(variante.items(), + key=lambda a: (a[0][1], a[0][2], a[1]))) + sortedvariantes.append( (orderedkeys, variante) ) + return [v for ok, v in sorted(sortedvariantes)] + +from cubicweb.server.querier import ExecutionPlan +_orig_check_permissions = ExecutionPlan._check_permissions + +def _check_permissions(*args, **kwargs): + res, restricted = _orig_check_permissions(*args, **kwargs) + res = DumbOrderedDict(sorted(res.items(), key=lambda x: [y.items() for y in x[1]])) + return res, restricted + +def _dummy_check_permissions(self, rqlst): + return {(): rqlst.solutions}, set() + +from cubicweb.server import rqlannotation +_orig_select_principal = rqlannotation._select_principal + +def _select_principal(scope, relations): + def sort_key(something): + try: + return something.r_type + except AttributeError: + return (something[0].r_type, something[1]) + return 
_orig_select_principal(scope, relations, + _sort=lambda rels: sorted(rels, key=sort_key)) + + +def _ordered_iter_relations(stinfo): + return sorted(_orig_iter_relations(stinfo), key=lambda x:x.r_type) + +def do_monkey_patch(): + rqlrewrite.iter_relations = _ordered_iter_relations + rqlrewrite.RQLRewriter.insert_snippets = _insert_snippets + rqlrewrite.RQLRewriter.build_variantes = _build_variantes + ExecutionPlan._check_permissions = _check_permissions + ExecutionPlan.tablesinorder = None + +def undo_monkey_patch(): + rqlrewrite.iter_relations = _orig_iter_relations + rqlrewrite.RQLRewriter.insert_snippets = _orig_insert_snippets + rqlrewrite.RQLRewriter.build_variantes = _orig_build_variantes + ExecutionPlan._check_permissions = _orig_check_permissions diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/stresstester.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/stresstester.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,196 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" Usage: %s [OPTIONS] + +Stress test a CubicWeb repository + +OPTIONS: + -h / --help + Display this help message and exit. + + -u / --user + Connect as instead of being prompted to give it. + -p / --password + Automatically give for authentication instead of being prompted + to give it. + + -n / --nb-times + Repeat queries times. + -t / --nb-threads + Execute queries in parallel threads. + -P / --profile + dumps profile results (hotshot) in + -o / --report-output + Write profiler report into rather than on stdout + +Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+http://www.logilab.fr/ -- mailto:contact@logilab.fr +""" +from __future__ import print_function + +import os +import sys +import threading +import getopt +import traceback +from getpass import getpass +from os.path import basename +from time import clock + +from logilab.common.fileutils import lines +from logilab.common.ureports import Table, TextWriter +from cubicweb.server.repository import Repository +from cubicweb.dbapi import Connection + +TB_LOCK = threading.Lock() + +class QueryExecutor: + def __init__(self, cursor, times, queries, reporter = None): + self._cursor = cursor + self._times = times + self._queries = queries + self._reporter = reporter + + def run(self): + cursor = self._cursor + times = self._times + while times: + for index, query in enumerate(self._queries): + start = clock() + try: + cursor.execute(query) + except Exception: + TB_LOCK.acquire() + traceback.print_exc() + TB_LOCK.release() + return + if self._reporter is not None: + self._reporter.add_proftime(clock() - start, index) + times -= 1 + +def usage(status=0): + """print usage string and exit""" + print(__doc__ % basename(sys.argv[0])) + sys.exit(status) + + +class ProfileReporter: + """a profile reporter gathers all profile informations from several + threads and can write a report that summarizes all profile informations + """ + profiler_lock = threading.Lock() + + def __init__(self, queries): + self._queries = tuple(queries) + self._profile_results = [(0., 0)] * len(self._queries) + # self._table_report = Table(3, rheaders = True) + len_max = max([len(query) for query in self._queries]) + 5 + self._query_fmt = '%%%ds' % len_max + + def add_proftime(self, elapsed_time, query_index): + """add a new time measure for query""" + ProfileReporter.profiler_lock.acquire() + cumul_time, times = self._profile_results[query_index] + cumul_time += elapsed_time + times += 1. + self._profile_results[query_index] = (cumul_time, times) + ProfileReporter.profiler_lock.release() + + def dump_report(self, output = sys.stdout): + """dump report in 'output'""" + table_elems = ['RQL Query', 'Times', 'Avg Time'] + total_time = 0. 
+ for query, (cumul_time, times) in zip(self._queries, self._profile_results): + avg_time = cumul_time / float(times) + table_elems += [str(query), '%f' % times, '%f' % avg_time ] + total_time += cumul_time + table_elems.append('Total time :') + table_elems.append(str(total_time)) + table_elems.append(' ') + table_layout = Table(3, rheaders = True, children = table_elems) + TextWriter().format(table_layout, output) + # output.write('\n'.join(tmp_output)) + + +def run(args): + """run the command line tool""" + try: + opts, args = getopt.getopt(args, 'hn:t:u:p:P:o:', ['help', 'user=', 'password=', + 'nb-times=', 'nb-threads=', + 'profile', 'report-output=',]) + except Exception as ex: + print(ex) + usage(1) + repeat = 100 + threads = 1 + user = os.environ.get('USER', os.environ.get('LOGNAME')) + password = None + report_output = sys.stdout + prof_file = None + for opt, val in opts: + if opt in ('-h', '--help'): + usage() + if opt in ('-u', '--user'): + user = val + elif opt in ('-p', '--password'): + password = val + elif opt in ('-n', '--nb-times'): + repeat = int(val) + elif opt in ('-t', '--nb-threads'): + threads = int(val) + elif opt in ('-P', '--profile'): + prof_file = val + elif opt in ('-o', '--report-output'): + report_output = open(val, 'w') + if len(args) != 2: + usage(1) + queries = [query for query in lines(args[1]) if not query.startswith('#')] + if user is None: + user = raw_input('login: ') + if password is None: + password = getpass('password: ') + from cubicweb.cwconfig import instance_configuration + config = instance_configuration(args[0]) + # get local access to the repository + print("Creating repo", prof_file) + repo = Repository(config, prof_file) + cnxid = repo.connect(user, password=password) + # connection to the CubicWeb repository + repo_cnx = Connection(repo, cnxid) + repo_cursor = repo_cnx.cursor() + reporter = ProfileReporter(queries) + if threads > 1: + executors = [] + while threads: + qe = QueryExecutor(repo_cursor, repeat, queries, reporter = reporter) + executors.append(qe) + thread = threading.Thread(target=qe.run) + qe.thread = thread + thread.start() + threads -= 1 + for qe in executors: + qe.thread.join() +## for qe in executors: +## print qe.thread, repeat - qe._times, 'times' + else: + QueryExecutor(repo_cursor, repeat, queries, reporter = reporter).run() + reporter.dump_report(report_output) + + +if __name__ == '__main__': + run(sys.argv[1:]) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +person diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/cubes/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/cubes/i18ntestcube/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/cubes/i18ntestcube/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/cubes/i18ntestcube/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,18 @@ +# pylint: disable=W0622 +"""cubicweb i18n test cube application packaging information""" + +modname = 'i18ntestcube' +distname = 'cubicweb-i18ntestcube' + +numversion = (0, 1, 0) +version = '.'.join(str(num) for num in numversion) + +license = 'LGPL' +author = 'LOGILAB S.A. 
(Paris, FRANCE)' +author_email = 'contact@logilab.fr' +description = 'forum' +web = 'http://www.cubicweb.org/project/%s' % distname + +__depends__ = {'cubicweb': '>= 3.16.4', + } +__recommends__ = {} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/cubes/i18ntestcube/i18n/en.po.ref --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/cubes/i18ntestcube/i18n/en.po.ref Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,182 @@ +msgid "" +msgstr "" +"Project-Id-Version: cubicweb 3.16.5\n" +"PO-Revision-Date: 2008-03-28 18:14+0100\n" +"Last-Translator: Logilab Team \n" +"Language-Team: fr \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: cubicweb-devtools\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +# schema pot file, generated on 2013-07-12 16:18:12 +# +# singular and plural forms for each entity type +# subject and object forms for each relation type +# (no object form for final or symmetric relation types) +msgid "Forum" +msgstr "" + +msgid "Forum_plural" +msgstr "" + +msgid "This Forum" +msgstr "" + +msgid "This Forum:" +msgstr "" + +msgid "New Forum" +msgstr "" + +msgctxt "inlined:Forum.in_forum.object" +msgid "add a ForumThread" +msgstr "" + +msgctxt "inlined:Forum.in_forum.object" +msgid "ForumThread" +msgstr "" + +msgid "add ForumThread in_forum Forum object" +msgstr "" + +msgid "add a Forum" +msgstr "" + +msgid "add a ForumThread" +msgstr "" + +msgid "creating ForumThread (ForumThread in_forum Forum %(linkto)s)" +msgstr "" + +msgid "ForumThread" +msgstr "" + +msgid "ForumThread_plural" +msgstr "" + +msgid "This ForumThread" +msgstr "" + +msgid "This ForumThread:" +msgstr "" + +msgid "New ForumThread" +msgstr "" + +msgid "content" +msgstr "" + +msgctxt "ForumThread" +msgid "content" +msgstr "" + +msgid "content_format" +msgstr "" + +msgctxt "ForumThread" +msgid "content_format" +msgstr "" + +msgctxt "Forum" +msgid "description" +msgstr "" + +msgctxt "Forum" +msgid "description_format" +msgstr "" + +msgid "in_forum" +msgstr "" + +msgctxt "ForumThread" +msgid "in_forum" +msgstr "" + +msgctxt "Forum" +msgid "in_forum_object" +msgstr "" + +msgid "in_forum_object" +msgstr "" + +msgid "interested_in" +msgstr "" + +msgctxt "CWUser" +msgid "interested_in" +msgstr "" + +msgctxt "ForumThread" +msgid "interested_in_object" +msgstr "" + +msgctxt "Forum" +msgid "interested_in_object" +msgstr "" + +msgid "interested_in_object" +msgstr "" + +msgid "nosy_list" +msgstr "" + +msgctxt "ForumThread" +msgid "nosy_list" +msgstr "" + +msgctxt "Forum" +msgid "nosy_list" +msgstr "" + +msgctxt "CWUser" +msgid "nosy_list_object" +msgstr "" + +msgid "nosy_list_object" +msgstr "" + +msgctxt "ForumThread" +msgid "title" +msgstr "" + +msgid "topic" +msgstr "" + +msgctxt "Forum" +msgid "topic" +msgstr "" + +msgid "Topic" +msgstr "" + +msgid "Description" +msgstr "" + +msgid "Number of threads" +msgstr "" + +msgid "Last activity" +msgstr "" + +msgid "" +"a long\n" +"tranlated line\n" +"hop." +msgstr "" + +msgid "Subject" +msgstr "" + +msgid "Created" +msgstr "" + +msgid "Answers" +msgstr "" + +msgid "Last answered" +msgstr "" + +msgid "This forum does not have any thread yet." +msgstr "" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/cubes/i18ntestcube/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/cubes/i18ntestcube/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# copyright 2013 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr -- mailto:contact@logilab.fr +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""cubicweb-forum schema""" + +from yams.buildobjs import (String, RichString, EntityType, + RelationDefinition, SubjectRelation) +from yams.reader import context + +class Forum(EntityType): + topic = String(maxsize=50, required=True, unique=True) + description = RichString() + +class ForumThread(EntityType): + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', 'owners'), + 'delete': ('managers', 'owners') + } + title = String(required=True, fulltextindexed=True, maxsize=256) + content = RichString(required=True, fulltextindexed=True) + in_forum = SubjectRelation('Forum', cardinality='1*', inlined=True, + composite='object') +class interested_in(RelationDefinition): + subject = 'CWUser' + object = ('ForumThread', 'Forum') + +class nosy_list(RelationDefinition): + subject = ('Forum', 'ForumThread') + object = 'CWUser' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/cubes/i18ntestcube/views.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/cubes/i18ntestcube/views.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,61 @@ +# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr -- mailto:contact@logilab.fr +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
+ +"""cubicweb-forum views/forms/actions/components for web ui""" + +from cubicweb import view +from cubicweb.predicates import is_instance +from cubicweb.web.views import primary, baseviews, uicfg +from cubicweb.web.views.uicfg import autoform_section as afs + +class MyAFS(uicfg.AutoformSectionRelationTags): + __select__ = is_instance('ForumThread') + +_myafs = MyAFS() + +_myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') + +afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') + + +class ForumSameETypeListView(baseviews.SameETypeListView): + __select__ = baseviews.SameETypeListView.__select__ & is_instance('Forum') + + def call(self, **kwargs): + _ = self._cw._ + _('Topic'), _('Description') + _('Number of threads'), _('Last activity') + _('''a long +tranlated line +hop.''') + + +class ForumLastActivity(view.EntityView): + __regid__ = 'forum_last_activity' + __select__ = view.EntityView.__select__ & is_instance('Forum') + + +class ForumPrimaryView(primary.PrimaryView): + __select__ = primary.PrimaryView.__select__ & is_instance('Forum') + + def render_entity_attributes(self, entity): + _ = self._cw._ + _('Subject'), _('Created'), _('Answers'), + _('Last answered') + _('This forum does not have any thread yet.') + +class ForumThreadPrimaryView(primary.PrimaryView): + __select__ = primary.PrimaryView.__select__ & is_instance('ForumThread') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/firstnames.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/firstnames.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1599 @@ +ash +pasqualino +asl +benjy +wolodymyr +dionysos +launce +khaleel +sondra +maaike +lavinia +giosu +daisy +xiang +belgin +edda +olympia +treasa +katya +misi +ville +mahon +yngve +moritz +elder +gawel +horsa +blossom +deanne +imelda +deanna +cairbre +eddy +horst +gaenor +breanne +hewie +breanna +jarvis +jamin +loise +jamil +fingall +giselle +jamie +shinju +gisella +akilina +jordan +gertie +cardea +eiran +valdemar +sebestyen +galia +bride +greg +fausta +eniola +rudo +pratibha +kisha +mickey +charlotte +karp +charlotta +nunzia +nunzio +patrice +kara +hallam +collyn +kari +karl +dusan +lia +cherokee +lim +lin +yvain +madlyn +liv +lir +lis +tullio +norma +liz +lettice +kae +kaj +kai +tatyanna +kam +freddie +elton +meinir +blaise +kat +japeth +alpha +kay +mack +jayna +jayne +hormazed +lupita +humbert +vitya +neoptolemus +richardine +hallvard +diogo +larkin +ravi +louiza +hermogenes +alanis +yadira +leandra +milburga +leandro +sorin +randi +kaleb +rogerio +sanna +kalea +justice +kaleo +dijana +shprintza +randy +colby +otthild +mariamne +patrycja +darwin +christal +khalida +kaley +allegria +vidya +renaud +sisel +suibhne +lonny +julienne +calliope +rocco +alexander +aristide +edwige +xzavier +rajesh +egil +gell +mahavir +charline +sigi +theophania +maurice +afon +konnor +kiran +angie +jalila +tolly +havva +metody +engel +philander +lancelot +nathalie +leilah +dane +elm +chatzkel +keaton +ashlie +kudret +rava +danette +eachann +wilburn +jeff +kazimiera +rukmini +lauryn +femie +mahvash +berkant +alesha +daedalus +aphra +karla +tetty +agostinho +bolivar +savitri +karly +forbes +vencesl +bahija +walter +imam +iman +krzys +imad +elsa +neville +tracie +else +anthony +shevon +katherine +marylou +wojtek +oddmund +tristand +areli +valkyrie +garfield +wyatt +luanne +ossia +luanna +luciana +guido +luciano +shachar +astraea +paco +leland +avra +amenhotep +kekoa +gorden +sameera +boutros +ruaidhr +friedemann +darrell +hideaki 
+petar +donatien +fannie +eliana +iason +fedora +grant +shay +estee +marcelle +marcella +lothair +shae +ester +marcello +estev +cassian +allyson +dima +goodwin +cezar +blair +monique +elwin +ihsan +olufunmilayo +arturo +nanaia +greetje +clovia +beowulf +vassily +madail +emmeline +guendolen +nandag +eilish +sakari +elisheva +crispin +aksel +alvin +cernunnos +feardorcha +heshel +afra +iqbal +pryce +siddhartha +mikkel +alvis +myrtie +khajag +yesenia +nikki +grigory +grigore +maeve +rebeca +diederick +maeva +grigori +cheryl +rahim +marco +marci +stein +trista +olufemi +emmanuelle +nadezhda +wahid +marcy +vanda +lavra +alida +amara +hipolito +valent +renatus +moira +donny +lucretia +donna +vesta +cadoc +reetta +erma +markku +rosamond +gracia +tuyet +sieffre +gracie +kodey +debra +photine +jacek +yanick +isiah +khordad +rui +stef +rub +foma +sten +kassy +rue +nelly +merrick +ayn +macy +vincente +anargyros +rut +lenox +jenessa +faith +barnaby +manny +jyotsana +hasan +iakopa +edvard +narcisa +loredana +ida +torborg +rollo +stamatios +pero +natalya +maudie +carlton +paulina +aliyah +lanty +tadg +deiniol +dwayne +alison +fabius +rbj +latasha +maarit +roxanna +katinka +publius +augustijn +ferdy +khadiga +akosua +rees +quetzalcoatl +kristian +larry +reed +krystal +micheil +paolo +chelsey +ute +paola +hamilcar +malin +deangelo +munir +velma +malik +utz +malie +govad +chelsea +malia +willem +seetha +andrina +rupert +myrrine +theodoros +tito +ivonne +nan +beryl +nat +tawnie +korn +marzena +tinek +hermine +kora +frances +william +tianna +evan +kory +merletta +kort +nevan +naheed +heath +tyreek +shona +amyas +urjasz +katy +gu +gr +hilde +mehmud +gy +hilda +psyche +olive +nuno +vinnie +ga +kato +kata +jeunesse +kate +chandrakant +caoilainn +arik +rhonda +leocadio +euan +aric +leocadia +aria +bronwen +marcellin +vladislav +ferapont +nichole +kizzy +duilio +jafet +maas +tue +felicity +mansoor +rfhlaith +brigitta +fishke +akua +izabela +olaf +vittore +michael +skar +ryan +gretta +alvena +olav +brigitte +euterpe +barbara +aiolos +carter +khalifa +tziporah +honora +feich +marilena +onesime +theo +gunvor +sa'id +katlyn +nicholas +preeti +etzel +ekewaka +vinal +jubal +ramsey +rowley +jocelin +alfsigr +kalliope +micah +frantisek +holger +alysha +chant +derry +corin +janus +morcant +chang +corie +gena +randa +joost +vasile +clark +clare +wim +wil +clara +danika +jory +eleonoora +ayelet +caligula +zakiah +kilie +meliora +ottavio +idoya +ninette +hudson +deon +gawdat +frida +jonathan +reynold +laocadia +cerise +cosmo +hezekiah +winston +isak +allyn +noelene +trajan +vijaya +cosma +tresha +astrithr +priya +astrophel +pocahontas +eliphalet +stafford +salah +salal +pauliina +lazer +feidhlim +jackalyn +kenny +alayna +wilfried +wasim +blaine +femke +jehu +kenna +lenore +nkechi +letizia +kian +kayleigh +spartacus +manuela +leyton +lesley +georg +ferdinand +cuauhtemoc +aeron +lavrenti +nyx +ronald +yoshiko +gundula +eluf +toma +riccardo +ruadh +matylda +winter +mayson +llew +clytia +jamila +fariha +aegle +octavio +steafan +jacqui +mikelo +dovid +modestus +blake +jeanna +alessa +conway +brook +sunday +kizzie +hande +catherine +eckhard +rr +gwyneth +aukusti +placid +rufino +kyleigh +helah +benoite +eluned +sanaz +cnaeus +ettie +benaiah +brendan +wenonah +nye +candela +dragan +sanda +naveen +margar +naveed +austen +sandu +britta +brodie +morton +kamilla +sandy +guilherme +dorothea +calix +braxton +wigburg +tryphena +ricky +may +sylwia +libor +marek +ece +trinity +katsuro +tercero +'ismat +mared +jill +amato +achim +princess 
+jaquelyn +eustathios +tapio +aglea +kees +evstathios +edwyna +austin +cristian +jouko +nikandros +leonora +kaitlynn +christoph +mai +parthalan +tancredo +rosaleen +lynnette +yasamin +encarnacion +gerolt +ionut +harmon +ailbhe +islwyn +muirenn +nyah +mariana +viktor +greta +kreszentia +grete +hormazd +foka +poseidon +kazimir +ultan +ben +sudhir +bea +bee +saburo +elnora +ber +michelyne +clytemnestra +yardena +gavrel +michelangelo +wystan +odhiambo +miquel +bertha +su +berthe +alisia +kelley +leonhard +rodger +ewald +oluwaseyi +celandine +kunegunda +luisa +khayyam +iisakki +luise +ligia +zaina +tatiana +siarl +jorge +bronislaw +bronislav +montana +edric +miloslava +achilles +donaldina +wilfredo +laurens +haifa +stelian +glenice +calvino +rodica +hulda +indy +uri +laurena +tzeitel +laurene +urs +danita +platon +parker +chadwick +lorne +narinder +theodoric +florentina +ambrosine +nikephoros +kapel +aeolus +cenek +hadi +perle +alyona +cyril +perla +cicely +darby +madhav +hector +ethan +aretha +ilker +avdotya +boris +sassa +misty +bonaventure +kiefer +emmet +arkadios +farrah +tivoli +pietari +mohammed +shoshana +felipe +felipa +maurene +tancred +raymonde +sho +faron +arundhati +esteri +silvanus +nuha +aloisia +baris +tammie +fabricio +lux +luz +driskoll +tyra +luc +marsha +luk +aron +joye +ken +gethsemane +kelan +yuko +merry +proserpine +precious +suibne +mindy +vitus +olga +jia +kalysta +angharad +ciera +careen +inglebert +apphia +muadhnait +christen +rebekah +dominique +gita +tori +harmonie +anatolius +harmonia +denise +johann +johano +denisa +viktoria +padmini +johana +christer +barakat +willy +sari +fitzroy +yaw +sara +yan +quim +quin +yaa +katelin +pontus +raelene +alexus +gwandoya +venceslav +ott +artemidoros +zaynab +folant +salman +ealdgy +randal +macey +heriberto +kimball +ekin +dema +evelyn +demi +pip +simona +daniil +emmerson +kausalya +kortney +gavriil +yered +parth +fido +solange +oona +anka +renie +anke +habakkuk +linwood +teofilo +grazyna +enitan +bhaskar +finnian +perseus +mordechai +fyodor +ashley +philo +i +hecate +phile +theodor +kiaran +ashlee +dollie +savannah +upton +sofia +noak +sofie +laurel +lauren +dubaku +zacharjasz +patricio +trudi +sophus +vida +patricia +trudy +tapani +mavreena +jesper +sandrine +sonia +livio +mikolaj +laurine +livia +finnegan +oprah +waheed +lavonne +perdita +liviu +imen +attila +lincoln +fernanda +evrard +fernande +jaana +artair +fernando +candy +cande +kazimierz +kaija +shamgar +laxmi +martie +page +candi +brody +piaras +shea +herbie +shem +kristaps +sher +cleveland +carreen +margaid +phinehas +justina +wendi +linus +wenda +matrona +christiane +wendy +kerensa +roch +fergal +fanny +kamila +oswin +camilo +everette +katashi +myron +ridley +shavonne +blythe +nader +marlowe +miha +carolyn +glenn +gadar +rainard +sybella +raquel +rozabela +serhat +bashemath +jing +gobnet +yentl +sylvana +dolores +sanjit +tamsin +sanjiv +innes +daniela +daniele +margr +keysha +rogelio +ean +hj +philipp +valerian +marge +gail +margh +gaia +engelbert +kathie +artemisia +margo +stefan +pansy +swanhilda +swanhilde +alessio +beata +beate +babur +beatrice +eris +erin +maura +camryn +conan +erik +krysia +nigelia +mauri +averill +draco +eric +sophronius +mauro +diego +simcha +malachy +barth +maoilios +germaine +malachi +katariina +lianne +ferdinando +donagh +kelemen +taletta +star +gilah +faustus +lfwine +rayna +gotthard +sa'd +stan +klemen +pranay +howie +dewey +tiarnan +katherina +uzma +jabril +hakan +martin +elsie +cleve +imani +moshe +padma +inmaculada +augustine +trenton 
+ghislain +aiden +alfhild +ireneus +gottschalk +andra +jahzeel +andro +fredrik +wynter +kohar +tobin +giustino +buddy +marcos +mieszko +giustina +khalil +aur +helladius +riccarda +elettra +glykeria +yeva +trahaearn +ulisse +wilfred +sorrel +saara +ekwueme +sarita +finella +waldo +herbert +elissa +bevan +lavern +till +ruxandra +lavender +ghalib +eldon +masterman +tameka +mihajlo +mahin +neo +asim +jordon +pace +ned +giampiero +asia +nea +haze +bearach +cheng +pieter +yonah +chikako +maverick +fonsie +ozzy +meg +mitxel +filbert +mel +neves +henrik +mei +hilaire +drew +deemer +liborio +dubhghlas +bogdan +dipak +rapha +golda +maighread +masha +pranciskis +mitchell +titilayo +aydin +ippolit +toiba +omar +cindy +alexandrina +lyubov +hiltraud +joshua +moray +baptiste +bahiyya +marquita +benedicta +reagan +latifah +scevola +ardashir +pakpao +topaz +janine +omolara +janina +morag +euripides +lennart +orb +helmuth +armo +diederik +lennard +raeburn +oscar +odell +ualan +noemi +melba +berlin +lazarus +merla +meera +anastas +rhamantus +yussel +meshullam +esdras +kumar +flora +norwood +rio +apollinaris +oleg +rim +nadzeija +akio +akim +efisio +jayda +olek +rowanne +honey +karola +chetana +candelas +friede +phaedrus +frieda +joann +braidy +hitomi +kieron +dakarai +teofil +dervila +ria +pietrina +becky +alechjo +santos +egon +olwin +ove +balthazar +reeta +becka +tillo +royce +peninnah +earnestine +janis +jakab +janie +rosalba +hosanna +aharon +fife +zacharias +fifi +aleesha +murray +helena +helene +rashmi +afia +oswald +zachariah +shawnee +pius +zdenek +kichiro +melchiorre +erland +yaroslava +anushka +cree +iser +rachel +anik +fabiola +ania +aneurin +hernando +ernesto +ernesta +astor +manasseh +naphtali +shai +lorena +lazar +luce +lorenz +luca +briana +rosemary +dawid +nava +payton +linos +aida +gunne +milan +tuomas +sahar +doug +mikala +dawn +vincenza +saturninus +channah +mandy +reuven +cormag +cormac +mandi +sachie +ladonna +phuong +tasha +ramon +hashim +fachtna +euphemia +tisha +jozafat +horatius +imke +venus +rodolf +binyamin +cosmin +oluwafunmilayo +nekane +loup +kohinoor +teuvo +xue +innokenti +vincenzo +kiley +isa +hannibal +vijay +kornelia +afanasy +vittorio +tuor +adalia +damayanti +afanasi +grady +evangelos +ermete +brock +bonita +arisha +pelagia +solvej +parthenope +peggie +kierra +jozefa +garry +giuditta +ladislas +jozefo +swietoslaw +yildiz +nasira +eshe +helen +gretchen +shekhar +daren +lenuta +dymphna +daina +matteo +berjouhi +jerusha +solomon +gernot +murtagh +meaveen +godwin +ladislao +minh +hachiro +farquhar +ichabod +mina +caleb +veera +ginger +ming +jaynie +sharyn +seonag +ferdie +ilana +gabriela +gabriele +lloren +hooda +mabelle +timeus +teagan +gorka +ulrich +philadelphia +razvan +lamprecht +marit +kean +marin +mario +rhonwen +vilko +konstantin +tyr +maria +fastred +kazuki +krister +don +dom +iekika +ruben +m +calanthe +luchjo +vicki +sheryl +afanen +kirabo +dov +kristel +dot +kristen +pavao +donelle +antti +donella +katerina +liza +wladyslaw +gerlach +hrodohaidis +samnang +ashok +raelyn +tipene +kallias +kun +gebhard +folke +katica +lennie +rupinder +maryann +adolphus +lachtna +petri +monica +kyriakos +brannon +deforest +shankara +hourig +haniyya +christopher +griogair +saturn +tola +earl +decebal +bas +petra +adelia +cleto +bao +bal +bai +julien +clarette +dimitar +fioralba +tommie +domhnall +ragnhei +gunnar +ailill +juliet +pete +vasya +peta +duff +imaculada +peti +manola +kolab +petr +neriah +manolo +edoardo +onora +elisud +graciano +fayza +as'ad +romola +vernon +pluto +genevra 
+yahweh +mukesh +fiacre +sudarshana +shahriar +athanasius +una +casimir +derval +ernst +sherilyn +taranis +enzo +bedelia +winnie +kalyan +jinan +plamen +quinn +monat +alcaeus +mathieu +aindri +raffaella +armin +lovell +cyrus +chelo +sidonius +basia +tina +basil +basim +fuad +riley +tracee +chun +talia diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,32 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from yams.buildobjs import EntityType, SubjectRelation, String, Int, Date + +from cubes.person.schema import Person + +Person.add_relation(Date(), 'birthday') + +class Bug(EntityType): + title = String(maxsize=64, required=True, fulltextindexed=True) + severity = String(vocabulary=('important', 'normal', 'minor'), default='normal') + cost = Int() + description = String(maxsize=4096, fulltextindexed=True) + identical_to = SubjectRelation('Bug', symmetric=True) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/dep_1.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/dep_1.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +a = 4; diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/deps_2.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/deps_2.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +b = a +2; diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/test_simple_failure.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/test_simple_failure.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,18 @@ +$(document).ready(function() { + + QUnit.module("air"); + + QUnit.test("test 1", function (assert) { + assert.equal(2, 4); + }); + + QUnit.test("test 2", function (assert) { + assert.equal('', '45'); + assert.equal('1024', '32'); + }); + + QUnit.module("able"); + QUnit.test("test 3", function (assert) { + assert.deepEqual(1, 1); + }); +}); diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/test_simple_success.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/test_simple_success.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +$(document).ready(function() { + + QUnit.module("air"); + + QUnit.test("test 1", function (assert) { + assert.equal(2, 2); + }); + + QUnit.test("test 2", function (assert) { + assert.equal('45', '45'); + }); + + QUnit.module("able"); + QUnit.test("test 3", function (assert) { + assert.deepEqual(1, 1); + }); +}); diff -r 058bb3dc685f -r 
0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/test_with_dep.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/test_with_dep.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + QUnit.module("air"); + + QUnit.test("test 1", function (assert) { + assert.equal(a, 4); + }); + +}); diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/test_with_ordered_deps.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/test_with_ordered_deps.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + QUnit.module("air"); + + QUnit.test("test 1", function (assert) { + assert.equal(b, 6); + }); + +}); diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/data/static/js_examples/utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/data/static/js_examples/utils.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + +function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i. +"""only for unit tests !""" + +from cubicweb.view import EntityView +from cubicweb.predicates import is_instance + +HTML_PAGE = u""" + +

+  <h1>Hello World !</h1>
+ + +""" + +class SimpleView(EntityView): + __regid__ = 'simple' + __select__ = is_instance('Bug',) + + def call(self, **kwargs): + self.cell_call(0, 0) + + def cell_call(self, row, col): + self.w(HTML_PAGE) + +class RaisingView(EntityView): + __regid__ = 'raising' + __select__ = is_instance('Bug',) + + def cell_call(self, row, col): + raise ValueError() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,3 @@ +Twisted +webtest +cubicweb-person diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_dbfill.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_dbfill.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,121 @@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for database value generator""" + +import os.path as osp +import re +import datetime +import io + +from six.moves import range + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.devtools.fill import ValueGenerator, make_tel +from cubicweb.devtools import ApptestConfiguration + +DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data') +ISODATE_SRE = re.compile('(?P\d{4})-(?P\d{2})-(?P\d{2})$') + + +class MyValueGenerator(ValueGenerator): + + def generate_Bug_severity(self, entity, index): + return u'dangerous' + + def generate_Any_description(self, entity, index, format=None): + return u'yo' + + +class ValueGeneratorTC(TestCase): + """test case for ValueGenerator""" + + def _choice_func(self, etype, attrname): + try: + return getattr(self, '_available_%s_%s' % (etype, attrname))(etype, attrname) + except AttributeError: + return None + + def _available_Person_firstname(self, etype, attrname): + return [f.strip() for f in io.open(osp.join(DATADIR, 'firstnames.txt'), encoding='latin1')] + + def setUp(self): + config = ApptestConfiguration('data', apphome=DATADIR) + config.bootstrap_cubes() + schema = config.load_schema() + e_schema = schema.eschema('Person') + self.person_valgen = ValueGenerator(e_schema, self._choice_func) + e_schema = schema.eschema('Bug') + self.bug_valgen = MyValueGenerator(e_schema) + self.config = config + + def test_string(self): + """test string generation""" + surname = self.person_valgen.generate_attribute_value({}, 'surname', 12) + self.assertEqual(surname, u'&surname12') + + def test_domain_value(self): + """test value generation from a given domain value""" + firstname = self.person_valgen.generate_attribute_value({}, 'firstname', 12) + possible_choices = self._choice_func('Person', 'firstname') + self.assertTrue(firstname in possible_choices, + '%s not in %s' % (firstname, 
possible_choices)) + + def test_choice(self): + """test choice generation""" + # Test for random index + for index in range(5): + sx_value = self.person_valgen.generate_attribute_value({}, 'civility', index) + self.assertTrue(sx_value in ('Mr', 'Mrs', 'Ms')) + + def test_integer(self): + """test integer generation""" + # Test for random index + for index in range(5): + cost_value = self.bug_valgen.generate_attribute_value({}, 'cost', index) + self.assertIn(cost_value, list(range(index+1))) + + def test_date(self): + """test date generation""" + # Test for random index + for index in range(10): + date_value = self.person_valgen.generate_attribute_value({}, 'birthday', index) + self.assertTrue(isinstance(date_value, datetime.date)) + + def test_phone(self): + """tests make_tel utility""" + self.assertEqual(make_tel(22030405), '22 03 04 05') + + def test_customized_generation(self): + self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'severity', 12), + u'dangerous') + self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'description', 12), + u'yo') + self.assertEqual(self.person_valgen.generate_attribute_value({}, 'description', 12), + u'yo') + + +class ConstraintInsertionTC(TestCase): + + def test_writeme(self): + self.skipTest('Test automatic insertion / Schema Constraints') + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_devctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_devctl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,48 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for cubicweb-ctl commands from devtools""" + +import os.path as osp +import sys +import tempfile +import shutil +from subprocess import Popen, PIPE, STDOUT +from unittest import TestCase + + +class CubicWebCtlTC(TestCase): + """test case for devtools commands""" + + def test_newcube(self): + cwctl = osp.abspath(osp.join(osp.dirname(__file__), + '../../../bin/cubicweb-ctl')) + + tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube") + try: + cmd = [sys.executable, cwctl, 'newcube', + '--directory', tmpdir, 'foo'] + proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT) + stdout, _ = proc.communicate(b'short_desc\n') + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + self.assertEqual(proc.returncode, 0, msg=stdout) + + +if __name__ == '__main__': + from unittest import main + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_fill.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_fill.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,70 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for cubicweb.devtools.fill module + +""" +import re + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.devtools.fill import ValueGenerator, _ValueGenerator + +ISODATE_SRE = re.compile('(?P\d{4})-(?P\d{2})-(?P\d{2})$') + + +class AutoExtendableTC(TestCase): + + def setUp(self): + self.attrvalues = dir(_ValueGenerator) + + def tearDown(self): + attrvalues = set(dir(_ValueGenerator)) + for attrname in attrvalues - set(self.attrvalues): + delattr(_ValueGenerator, attrname) + + + def test_autoextend(self): + self.assertNotIn('generate_server', dir(ValueGenerator)) + class MyValueGenerator(ValueGenerator): + def generate_server(self, index): + return attrname + self.assertIn('generate_server', dir(ValueGenerator)) + + + def test_bad_signature_detection(self): + self.assertNotIn('generate_server', dir(ValueGenerator)) + try: + class MyValueGenerator(ValueGenerator): + def generate_server(self): + pass + except TypeError: + self.assertNotIn('generate_server', dir(ValueGenerator)) + else: + self.fail('TypeError not raised') + + + def test_signature_extension(self): + self.assertNotIn('generate_server', dir(ValueGenerator)) + class MyValueGenerator(ValueGenerator): + def generate_server(self, index, foo): + pass + self.assertIn('generate_server', dir(ValueGenerator)) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_httptest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_httptest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,70 @@ +# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unittest for cubicweb.devtools.httptest module""" + +from six.moves import http_client + +from logilab.common.testlib import Tags +from cubicweb.devtools.httptest import CubicWebServerTC + + +class TwistedCWAnonTC(CubicWebServerTC): + + def test_response(self): + try: + response = self.web_get() + except http_client.NotConnected as ex: + self.fail("Can't connection to test server: %s" % ex) + + def test_response_anon(self): + response = self.web_get() + self.assertEqual(response.status, http_client.OK) + + def test_base_url(self): + if self.config['base-url'] not in self.web_get().read(): + self.fail('no mention of base url in retrieved page') + + +class TwistedCWIdentTC(CubicWebServerTC): + test_db_id = 'httptest-cwident' + anonymous_allowed = False + tags = CubicWebServerTC.tags | Tags(('auth',)) + + def test_response_denied(self): + response = self.web_get() + self.assertEqual(response.status, http_client.FORBIDDEN) + + def test_login(self): + response = self.web_get() + if response.status != http_client.FORBIDDEN: + self.skipTest('Already authenticated, "test_response_denied" must have failed') + # login + self.web_login(self.admlogin, self.admpassword) + response = self.web_get() + self.assertEqual(response.status, http_client.OK, response.body) + # logout + self.web_logout() + response = self.web_get() + self.assertEqual(response.status, http_client.FORBIDDEN, response.body) + + + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_i18n.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_i18n.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,77 @@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for i18n messages generator""" + +import os, os.path as osp +import sys +import subprocess + +from unittest import TestCase, main + +from cubicweb.cwconfig import CubicWebNoAppConfiguration + +DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data') + +def load_po(fname): + """load a po file and return a set of encountered (msgid, msgctx)""" + msgs = set() + msgid = msgctxt = None + with open(fname) as fobj: + for line in fobj: + if line.strip() in ('', '#'): + continue + if line.startswith('msgstr'): + assert not (msgid, msgctxt) in msgs + msgs.add( (msgid, msgctxt) ) + msgid = msgctxt = None + elif line.startswith('msgid'): + msgid = line.split(' ', 1)[1][1:-1] + elif line.startswith('msgctx'): + msgctxt = line.split(' ', 1)[1][1: -1] + elif msgid is not None: + msgid += line[1:-1] + elif msgctxt is not None: + msgctxt += line[1:-1] + return msgs + + +class cubePotGeneratorTC(TestCase): + """test case for i18n pot file generator""" + + def test_i18ncube(self): + env = os.environ.copy() + env['CW_CUBES_PATH'] = osp.join(DATADIR, 'cubes') + if 'PYTHONPATH' in env: + env['PYTHONPATH'] += os.pathsep + else: + env['PYTHONPATH'] = '' + env['PYTHONPATH'] += DATADIR + cwctl = osp.abspath(osp.join(osp.dirname(__file__), + '../../../bin/cubicweb-ctl')) + with open(os.devnull, 'w') as devnull: + subprocess.check_call([sys.executable, cwctl, 'i18ncube', 'i18ntestcube'], + env=env, stdout=devnull) + cube = osp.join(DATADIR, 'cubes', 'i18ntestcube') + msgs = load_po(osp.join(cube, 'i18n', 'en.po.ref')) + newmsgs = load_po(osp.join(cube, 'i18n', 'en.po')) + self.assertEqual(msgs, newmsgs) + + +if __name__ == '__main__': + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_qunit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_qunit.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,27 @@ +from cubicweb.devtools import qunit + + +def js(name): + return '/static/js_examples/' + name + +class QUnitTestCaseTC(qunit.QUnitTestCase): + + all_js_tests = ( + (js('test_simple_success.js'),), + (js('test_with_dep.js'), (js('dep_1.js'),)), + (js('test_with_ordered_deps.js'), (js('dep_1.js'), js('deps_2.js'),)), + ) + + + def test_simple_failure(self): + js_tests = list(self._test_qunit(js('test_simple_failure.js'))) + self.assertEqual(len(js_tests), 3) + test_1, test_2, test_3 = js_tests + self.assertRaises(self.failureException, test_1[0], *test_1[1:]) + self.assertRaises(self.failureException, test_2[0], *test_2[1:]) + test_3[0](*test_3[1:]) + + +if __name__ == '__main__': + from unittest import main + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_testlib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_testlib.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,298 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittests for cw.devtools.testlib module""" + +from io import BytesIO, StringIO +from unittest import TextTestRunner + +from six import PY2 + +from logilab.common.testlib import TestSuite, TestCase, unittest_main +from logilab.common.registry import yes + +from cubicweb.devtools import htmlparser +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.pytestconf import clean_repo_test_cls + +class FakeFormTC(TestCase): + def test_fake_form(self): + class entity: + cw_etype = 'Entity' + eid = 0 + sio = BytesIO(b'hop\n') + form = CubicWebTC.fake_form('import', + {'file': ('filename.txt', sio), + 'encoding': u'utf-8', + }, [(entity, {'field': 'value'})]) + self.assertEqual(form, {'__form_id': 'import', + '__maineid': 0, + '__type:0': 'Entity', + '_cw_entity_fields:0': '__type,field', + '_cw_fields': 'encoding,file', + 'eid': [0], + 'encoding': u'utf-8', + 'field:0': 'value', + 'file': ('filename.txt', sio)}) + +class WebTestTC(TestCase): + + def setUp(self): + output = BytesIO() if PY2 else StringIO() + self.runner = TextTestRunner(stream=output) + + def test_error_raised(self): + class MyWebTest(CubicWebTC): + + def test_error_view(self): + with self.admin_access.web_request() as req: + req.create_entity('Bug', title=u"bt") + self.view('raising', req.execute('Bug B'), template=None, req=req) + + def test_correct_view(self): + with self.admin_access.web_request() as req: + self.view('primary', req.execute('CWUser U'), template=None, req=req) + + tests = [MyWebTest('test_error_view'), MyWebTest('test_correct_view')] + result = self.runner.run(TestSuite(tests)) + self.assertEqual(result.testsRun, 2) + self.assertEqual(len(result.errors), 0) + self.assertEqual(len(result.failures), 1) + clean_repo_test_cls(MyWebTest) + + +class RepoInstancesConsistencyTC(CubicWebTC): + test_db_id = 'RepoInstancesConsistencyTC' + + def pre_setup_database(self, cnx, config): + self.assertIs(cnx.repo, config.repository()) + + def test_pre_setup(self): + pass + + +HTML_PAGE = u""" + + need a title + +

+  <h1>Hello World !</h1>
+ + +""" + +HTML_PAGE2 = u""" + + need a title + +

+  <h1>Test</h1> +  <h1>Hello world !</h1> +  <h2>h2 title</h2> +  <h3>h3 title</h3> +  <h2>antoher h2 title</h2> +  <h4>h4 title</h4> +  <p><a href="http://www.logilab.org">Logilab</a> introduces CW !</p>
+ + +""" + +HTML_PAGE_ERROR = u""" + + need a title + + Logilab introduces CW ! + + +""" + +HTML_NON_STRICT = u""" + + need a title + +

+  title
+ + +""" + + +class HTMLPageInfoTC(TestCase): + """test cases for PageInfo""" + + def setUp(self): + parser = htmlparser.HTMLValidator() + # disable cleanup that would remove doctype + parser.preprocess_data = lambda data: data + self.page_info = parser.parse_string(HTML_PAGE2) + + def test_source1(self): + """make sure source is stored correctly""" + self.assertEqual(self.page_info.source, HTML_PAGE2) + + def test_source2(self): + """make sure source is stored correctly - raise exception""" + parser = htmlparser.DTDValidator() + self.assertRaises(AssertionError, parser.parse_string, HTML_PAGE_ERROR) + + def test_has_title_no_level(self): + """tests h? tags information""" + self.assertEqual(self.page_info.has_title('Test'), True) + self.assertEqual(self.page_info.has_title('Test '), False) + self.assertEqual(self.page_info.has_title('Tes'), False) + self.assertEqual(self.page_info.has_title('Hello world !'), True) + + def test_has_title_level(self): + """tests h? tags information""" + self.assertEqual(self.page_info.has_title('Test', level = 1), True) + self.assertEqual(self.page_info.has_title('Test', level = 2), False) + self.assertEqual(self.page_info.has_title('Test', level = 3), False) + self.assertEqual(self.page_info.has_title('Test', level = 4), False) + self.assertRaises(IndexError, self.page_info.has_title, 'Test', level = 5) + + def test_has_title_regexp_no_level(self): + """tests has_title_regexp() with no particular level specified""" + self.assertEqual(self.page_info.has_title_regexp('h[23] title'), True) + + def test_has_title_regexp_level(self): + """tests has_title_regexp() with a particular level specified""" + self.assertEqual(self.page_info.has_title_regexp('h[23] title', 2), True) + self.assertEqual(self.page_info.has_title_regexp('h[23] title', 3), True) + self.assertEqual(self.page_info.has_title_regexp('h[23] title', 4), False) + + def test_appears(self): + """tests PageInfo.appears()""" + self.assertEqual(self.page_info.appears('CW'), True) + self.assertEqual(self.page_info.appears('Logilab'), True) + self.assertEqual(self.page_info.appears('Logilab introduces'), True) + self.assertEqual(self.page_info.appears('H2 title'), False) + + def test_has_link(self): + """tests has_link()""" + self.assertEqual(self.page_info.has_link('Logilab'), True) + self.assertEqual(self.page_info.has_link('logilab'), False) + self.assertEqual(self.page_info.has_link('Logilab', 'http://www.logilab.org'), True) + self.assertEqual(self.page_info.has_link('Logilab', 'http://www.google.com'), False) + + def test_has_link_regexp(self): + """test has_link_regexp()""" + self.assertEqual(self.page_info.has_link_regexp('L[oi]gilab'), True) + self.assertEqual(self.page_info.has_link_regexp('L[ai]gilab'), False) + + +class CWUtilitiesTC(CubicWebTC): + + def test_temporary_permissions_eschema(self): + eschema = self.schema['CWUser'] + with self.temporary_permissions(CWUser={'read': ()}): + self.assertEqual(eschema.permissions['read'], ()) + self.assertTrue(eschema.permissions['add']) + self.assertTrue(eschema.permissions['read'], ()) + + def test_temporary_permissions_rdef(self): + rdef = self.schema['CWUser'].rdef('in_group') + with self.temporary_permissions((rdef, {'read': ()})): + self.assertEqual(rdef.permissions['read'], ()) + self.assertTrue(rdef.permissions['add']) + self.assertTrue(rdef.permissions['read'], ()) + + def test_temporary_permissions_rdef_with_exception(self): + rdef = self.schema['CWUser'].rdef('in_group') + try: + with self.temporary_permissions((rdef, {'read': ()})): + 
self.assertEqual(rdef.permissions['read'], ()) + self.assertTrue(rdef.permissions['add']) + raise ValueError('goto') + except ValueError: + self.assertTrue(rdef.permissions['read'], ()) + else: + self.fail('exception was caught unexpectedly') + + def test_temporary_appobjects_registered(self): + + class AnAppobject(object): + __registries__ = ('hip',) + __regid__ = 'hop' + __select__ = yes() + registered = None + + @classmethod + def __registered__(cls, reg): + cls.registered = reg + + with self.temporary_appobjects(AnAppobject): + self.assertEqual(self.vreg['hip'], AnAppobject.registered) + self.assertIn(AnAppobject, self.vreg['hip']['hop']) + self.assertNotIn(AnAppobject, self.vreg['hip']['hop']) + + def test_login(self): + """Calling login should not break hook control""" + with self.admin_access.repo_cnx() as cnx: + self.hook_executed = False + self.create_user(cnx, 'babar') + cnx.commit() + + from cubicweb.server import hook + from cubicweb.predicates import is_instance + + class MyHook(hook.Hook): + __regid__ = 'whatever' + __select__ = hook.Hook.__select__ & is_instance('CWProperty') + category = 'test-hook' + events = ('after_add_entity',) + test = self + + def __call__(self): + self.test.hook_executed = True + + with self.new_access('babar').repo_cnx() as cnx: + with self.temporary_appobjects(MyHook): + with cnx.allow_all_hooks_but('test-hook'): + prop = cnx.create_entity('CWProperty', pkey=u'ui.language', value=u'en') + cnx.commit() + self.assertFalse(self.hook_executed) + + +class RepoAccessTC(CubicWebTC): + + def test_repo_connection(self): + acc = self.new_access('admin') + with acc.repo_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_client_connection(self): + acc = self.new_access('admin') + with acc.client_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_web_request(self): + acc = self.new_access('admin') + with acc.web_request(elephant='babar') as req: + rset = req.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + self.assertEqual('babar', req.form['elephant']) + + def test_close(self): + acc = self.new_access('admin') + acc.close() + + def test_admin_access(self): + with self.admin_access.client_cnx() as cnx: + self.assertEqual('admin', cnx.user.login) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/test/unittest_webtest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/test/unittest_webtest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,41 @@ +from six.moves import http_client + +from logilab.common.testlib import Tags +from cubicweb.devtools.webtest import CubicWebTestTC + + +class CWTTC(CubicWebTestTC): + def test_response(self): + response = self.webapp.get('/') + self.assertEqual(200, response.status_int) + + def test_base_url(self): + if self.config['base-url'] not in self.webapp.get('/').text: + self.fail('no mention of base url in retrieved page') + + +class CWTIdentTC(CubicWebTestTC): + test_db_id = 'webtest-ident' + anonymous_allowed = False + tags = CubicWebTestTC.tags | Tags(('auth',)) + + def test_reponse_denied(self): + res = self.webapp.get('/', expect_errors=True) + self.assertEqual(http_client.FORBIDDEN, res.status_int) + + def test_login(self): + res = self.webapp.get('/', expect_errors=True) + self.assertEqual(http_client.FORBIDDEN, res.status_int) + + self.login(self.admlogin, self.admpassword) + res = self.webapp.get('/') + self.assertEqual(http_client.OK, 
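As a sketch of how a cube would build on the webtest-based class used in these tests, assuming a hypothetical test case name and database cache id; only methods shown in this patch (webapp.get, login, logout) are relied upon.

.. sourcecode:: python

    from six.moves import http_client

    from cubicweb.devtools.webtest import CubicWebTestTC


    class MyAppTC(CubicWebTestTC):  # hypothetical cube-level test case
        test_db_id = 'myapp-noanon'   # separate cached db since anonymous is disabled
        anonymous_allowed = False

        def test_index_needs_login(self):
            # anonymous access is disabled, so the index is forbidden at first
            res = self.webapp.get('/', expect_errors=True)
            self.assertEqual(http_client.FORBIDDEN, res.status_int)
            self.login()   # defaults to the admin credentials
            self.assertEqual(http_client.OK, self.webapp.get('/').status_int)
            self.logout()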
res.status_int) + + self.logout() + res = self.webapp.get('/', expect_errors=True) + self.assertEqual(http_client.FORBIDDEN, res.status_int) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/testlib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/testlib.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1335 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""this module contains base classes and utilities for cubicweb tests""" +from __future__ import print_function + +import sys +import re +from os.path import dirname, join, abspath +from math import log +from contextlib import contextmanager +from itertools import chain + +from six import text_type, string_types +from six.moves import range +from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote + +import yams.schema + +from logilab.common.testlib import TestCase, InnerTest, Tags +from logilab.common.pytest import nocoverage, pause_trace +from logilab.common.debugger import Debugger +from logilab.common.umessage import message_from_string +from logilab.common.decorators import cached, classproperty, clear_cache, iclassmethod +from logilab.common.deprecation import deprecated, class_deprecated +from logilab.common.shellutils import getlogin + +from cubicweb import (ValidationError, NoSelectableObject, AuthenticationError, + BadConnectionId) +from cubicweb import cwconfig, devtools, web, server, repoapi +from cubicweb.utils import json +from cubicweb.sobjects import notification +from cubicweb.web import Redirect, application, eid_param +from cubicweb.server.hook import SendMailOp +from cubicweb.server.session import Session +from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS +from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID + + +# low-level utilities ########################################################## + +class CubicWebDebugger(Debugger): + """special debugger class providing a 'view' function which saves some + html into a temporary file and open a web browser to examinate it. + """ + def do_view(self, arg): + import webbrowser + data = self._getval(arg) + with open('/tmp/toto.html', 'w') as toto: + toto.write(data) + webbrowser.open('file:///tmp/toto.html') + + +def line_context_filter(line_no, center, before=3, after=None): + """return true if line are in context + + if after is None: after = before + """ + if after is None: + after = before + return center - before <= line_no <= center + after + + +def unprotected_entities(schema, strict=False): + """returned a set of each non final entity type, excluding "system" entities + (eg CWGroup, CWUser...) 
+ """ + if strict: + protected_entities = yams.schema.BASE_TYPES + else: + protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES) + return set(schema.entities()) - protected_entities + + +class JsonValidator(object): + def parse_string(self, data): + return json.loads(data.decode('ascii')) + + +@contextmanager +def real_error_handling(app): + """By default, CubicWebTC `app` attribute (ie the publisher) is monkey + patched so that unexpected error are raised rather than going through the + `error_handler` method. + + By using this context manager you disable this monkey-patching temporarily. + Hence when publishihng a request no error will be raised, you'll get + req.status_out set to an HTTP error status code and the generated page will + usually hold a traceback as HTML. + + >>> with real_error_handling(app): + >>> page = app.handle_request(req) + """ + # remove the monkey patched error handler + fake_error_handler = app.error_handler + del app.error_handler + # return the app + yield app + # restore + app.error_handler = fake_error_handler + + +# email handling, to test emails sent by an application ######################## + +MAILBOX = [] + + +class Email(object): + """you'll get instances of Email into MAILBOX during tests that trigger + some notification. + + * `msg` is the original message object + + * `recipients` is a list of email address which are the recipients of this + message + """ + def __init__(self, fromaddr, recipients, msg): + self.fromaddr = fromaddr + self.recipients = recipients + self.msg = msg + + @property + def message(self): + return message_from_string(self.msg) + + @property + def subject(self): + return self.message.get('Subject') + + @property + def content(self): + return self.message.get_payload(decode=True) + + def __repr__(self): + return '' % (','.join(self.recipients), + self.message.get('Subject')) + + +# the trick to get email into MAILBOX instead of actually sent: monkey patch +# cwconfig.SMTP object +class MockSMTP: + + def __init__(self, server, port): + pass + + def close(self): + pass + + def sendmail(self, fromaddr, recipients, msg): + MAILBOX.append(Email(fromaddr, recipients, msg)) + +cwconfig.SMTP = MockSMTP + + +# Repoaccess utility ###############################################3########### + +class RepoAccess(object): + """An helper to easily create object to access the repo as a specific user + + Each RepoAccess have it own session. + + A repo access can create three type of object: + + .. automethod:: cubicweb.testlib.RepoAccess.cnx + .. automethod:: cubicweb.testlib.RepoAccess.web_request + + The RepoAccess need to be closed to destroy the associated Session. + TestCase usually take care of this aspect for the user. + + .. 
automethod:: cubicweb.testlib.RepoAccess.close + """ + + def __init__(self, repo, login, requestcls): + self._repo = repo + self._login = login + self.requestcls = requestcls + self._session = self._unsafe_connect(login) + + def _unsafe_connect(self, login, **kwargs): + """ a completely unsafe connect method for the tests """ + # use an internal connection + with self._repo.internal_cnx() as cnx: + # try to get a user object + user = cnx.find('CWUser', login=login).one() + user.groups + user.properties + user.login + session = Session(user, self._repo) + self._repo._sessions[session.sessionid] = session + user._cw = user.cw_rset.req = session + with session.new_cnx() as cnx: + self._repo.hm.call_hooks('session_open', cnx) + # commit connection at this point in case write operation has been + # done during `session_open` hooks + cnx.commit() + return session + + @contextmanager + def cnx(self): + """Context manager returning a server side connection for the user""" + with self._session.new_cnx() as cnx: + yield cnx + + # aliases for bw compat + client_cnx = repo_cnx = cnx + + @contextmanager + def web_request(self, url=None, headers={}, method='GET', **kwargs): + """Context manager returning a web request pre-linked to a client cnx + + To commit and rollback use:: + + req.cnx.commit() + req.cnx.rolback() + """ + req = self.requestcls(self._repo.vreg, url=url, headers=headers, + method=method, form=kwargs) + with self._session.new_cnx() as cnx: + req.set_cnx(cnx) + yield req + + def close(self): + """Close the session associated to the RepoAccess""" + if self._session is not None: + self._repo.close(self._session.sessionid) + self._session = None + + @contextmanager + def shell(self): + from cubicweb.server.migractions import ServerMigrationHelper + with self._session.new_cnx() as cnx: + mih = ServerMigrationHelper(None, repo=self._repo, cnx=cnx, + interactive=False, + # hack so it don't try to load fs schema + schema=1) + yield mih + cnx.commit() + + +# base class for cubicweb tests requiring a full cw environments ############### + +class CubicWebTC(TestCase): + """abstract class for test using an apptest environment + + attributes: + + * `vreg`, the vregistry + * `schema`, self.vreg.schema + * `config`, cubicweb configuration + * `cnx`, repoapi connection to the repository using an admin user + * `session`, server side session associated to `cnx` + * `app`, the cubicweb publisher (for web testing) + * `repo`, the repository object + * `admlogin`, login of the admin user + * `admpassword`, password of the admin user + * `shell`, create and use shell environment + * `anonymous_allowed`: flag telling if anonymous browsing should be allowed + """ + appid = 'data' + configcls = devtools.ApptestConfiguration + requestcls = fake.FakeRequest + tags = TestCase.tags | Tags('cubicweb', 'cw_repo') + test_db_id = DEFAULT_EMPTY_DB_ID + + # anonymous is logged by default in cubicweb test cases + anonymous_allowed = True + + def __init__(self, *args, **kwargs): + self._admin_session = None + self.repo = None + self._open_access = set() + super(CubicWebTC, self).__init__(*args, **kwargs) + + # repository connection handling ########################################### + + def new_access(self, login): + """provide a new RepoAccess object for a given user + + The access is automatically closed at the end of the test.""" + login = text_type(login) + access = RepoAccess(self.repo, login, self.requestcls) + self._open_access.add(access) + return access + + def _close_access(self): + while self._open_access: + 
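A sketch of the intended usage of new_access() / RepoAccess from a test method; the 'bob' login and the test case name are arbitrary examples.

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC


    class AccessTC(CubicWebTC):  # hypothetical test case
        def test_as_plain_user(self):
            with self.admin_access.repo_cnx() as cnx:
                self.create_user(cnx, 'bob')
                cnx.commit()
            bob = self.new_access('bob')  # closed automatically at teardown
            with bob.repo_cnx() as cnx:
                self.assertEqual('bob', cnx.user.login)
            with bob.web_request(vid='index') as req:
                self.assertEqual('index', req.form['vid'])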
try: + self._open_access.pop().close() + except BadConnectionId: + continue # already closed + + @property + def session(self): + """return admin session""" + return self._admin_session + + # XXX this doesn't need to a be classmethod anymore + def _init_repo(self): + """init the repository and connection to it. + """ + # get or restore and working db. + db_handler = devtools.get_test_db_handler(self.config, self.init_config) + db_handler.build_db_cache(self.test_db_id, self.pre_setup_database) + db_handler.restore_database(self.test_db_id) + self.repo = db_handler.get_repo(startup=True) + # get an admin session (without actual login) + login = text_type(db_handler.config.default_admin_config['login']) + self.admin_access = self.new_access(login) + self._admin_session = self.admin_access._session + + # config management ######################################################## + + @classproperty + def config(cls): + """return the configuration object + + Configuration is cached on the test class. + """ + if cls is CubicWebTC: + # Prevent direct use of CubicWebTC directly to avoid database + # caching issues + return None + try: + return cls.__dict__['_config'] + except KeyError: + home = abspath(join(dirname(sys.modules[cls.__module__].__file__), cls.appid)) + config = cls._config = cls.configcls(cls.appid, apphome=home) + config.mode = 'test' + return config + + @classmethod # XXX could be turned into a regular method + def init_config(cls, config): + """configuration initialization hooks. + + You may only want to override here the configuraton logic. + + Otherwise, consider to use a different :class:`ApptestConfiguration` + defined in the `configcls` class attribute. + + This method will be called by the database handler once the config has + been properly bootstrapped. + """ + admincfg = config.default_admin_config + cls.admlogin = text_type(admincfg['login']) + cls.admpassword = admincfg['password'] + # uncomment the line below if you want rql queries to be logged + # config.global_set_option('query-log-file', + # '/tmp/test_rql_log.' 
+ `os.getpid()`) + config.global_set_option('log-file', None) + # set default-dest-addrs to a dumb email address to avoid mailbox or + # mail queue pollution + config.global_set_option('default-dest-addrs', ['whatever']) + send_to = '%s@logilab.fr' % getlogin() + config.global_set_option('sender-addr', send_to) + config.global_set_option('default-dest-addrs', send_to) + config.global_set_option('sender-name', 'cubicweb-test') + config.global_set_option('sender-addr', 'cubicweb-test@logilab.fr') + # default_base_url on config class isn't enough for TestServerConfiguration + config.global_set_option('base-url', config.default_base_url()) + # web resources + try: + config.global_set_option('embed-allowed', re.compile('.*')) + except Exception: # not in server only configuration + pass + + @property + def vreg(self): + return self.repo.vreg + + # global resources accessors ############################################### + + @property + def schema(self): + """return the application schema""" + return self.vreg.schema + + def set_option(self, optname, value): + self.config.global_set_option(optname, value) + + def set_debug(self, debugmode): + server.set_debug(debugmode) + + def debugged(self, debugmode): + return server.debugged(debugmode) + + # default test setup and teardown ######################################### + + def setUp(self): + # monkey patch send mail operation so emails are sent synchronously + self._patch_SendMailOp() + with pause_trace(): + previous_failure = self.__class__.__dict__.get('_repo_init_failed') + if previous_failure is not None: + self.skipTest('repository is not initialised: %r' % previous_failure) + try: + self._init_repo() + except Exception as ex: + self.__class__._repo_init_failed = ex + raise + self.addCleanup(self._close_access) + self.config.set_anonymous_allowed(self.anonymous_allowed) + self.setup_database() + MAILBOX[:] = [] # reset mailbox + + def tearDown(self): + # XXX hack until logilab.common.testlib is fixed + if self._admin_session is not None: + self.repo.close(self._admin_session.sessionid) + self._admin_session = None + while self._cleanups: + cleanup, args, kwargs = self._cleanups.pop(-1) + cleanup(*args, **kwargs) + self.repo.turn_repo_off() + + def _patch_SendMailOp(self): + # monkey patch send mail operation so emails are sent synchronously + _old_mail_postcommit_event = SendMailOp.postcommit_event + SendMailOp.postcommit_event = SendMailOp.sendmails + + def reverse_SendMailOp_monkey_patch(): + SendMailOp.postcommit_event = _old_mail_postcommit_event + + self.addCleanup(reverse_SendMailOp_monkey_patch) + + def setup_database(self): + """add your database setup code by overriding this method""" + + @classmethod + def pre_setup_database(cls, cnx, config): + """add your pre database setup code by overriding this method + + Do not forget to set the cls.test_db_id value to enable caching of the + result. 
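A sketch of the two database-setup hooks described above; the 'Blog' entity type, its title attribute and the 'blog-base' cache id are hypothetical.

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC


    class BlogBaseTC(CubicWebTC):  # hypothetical test case
        test_db_id = 'blog-base'  # enables caching of the pre-populated database

        @classmethod
        def pre_setup_database(cls, cnx, config):
            # run once, then cached under test_db_id
            cnx.create_entity('Blog', title=u'shared fixture')
            cnx.commit()

        def setup_database(self):
            # run before each test, on top of the cached database
            with self.admin_access.repo_cnx() as cnx:
                self.create_user(cnx, 'reader')
                cnx.commit()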
+ """ + + # user / session management ############################################### + + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def user(self, req=None): + """return the application schema""" + if req is None: + return self.request().user + else: + return req.user + + @iclassmethod # XXX turn into a class method + def create_user(self, req, login=None, groups=('users',), password=None, + email=None, commit=True, **kwargs): + """create and return a new user entity""" + if password is None: + password = login + if login is not None: + login = text_type(login) + user = req.create_entity('CWUser', login=login, + upassword=password, **kwargs) + req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' + % ','.join(repr(str(g)) for g in groups), + {'x': user.eid}) + if email is not None: + req.create_entity('EmailAddress', address=text_type(email), + reverse_primary_email=user) + user.cw_clear_relation_cache('in_group', 'subject') + if commit: + try: + req.commit() # req is a session + except AttributeError: + req.cnx.commit() + return user + + # other utilities ######################################################### + + @contextmanager + def temporary_appobjects(self, *appobjects): + self.vreg._loadedmods.setdefault(self.__module__, {}) + for obj in appobjects: + self.vreg.register(obj) + registered = getattr(obj, '__registered__', None) + if registered: + for registry in obj.__registries__: + registered(self.vreg[registry]) + try: + yield + finally: + for obj in appobjects: + self.vreg.unregister(obj) + + @contextmanager + def temporary_permissions(self, *perm_overrides, **perm_kwoverrides): + """Set custom schema permissions within context. + + There are two ways to call this method, which may be used together : + + * using positional argument(s): + + .. sourcecode:: python + + rdef = self.schema['CWUser'].rdef('login') + with self.temporary_permissions((rdef, {'read': ()})): + ... + + + * using named argument(s): + + .. sourcecode:: python + + with self.temporary_permissions(CWUser={'read': ()}): + ... + + Usually the former will be preferred to override permissions on a + relation definition, while the latter is well suited for entity types. + + The allowed keys in the permission dictionary depend on the schema type + (entity type / relation definition). Resulting permissions will be + similar to `orig_permissions.update(partial_perms)`. 
+ """ + torestore = [] + for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()): + if isinstance(erschema, string_types): + erschema = self.schema[erschema] + for action, actionperms in etypeperms.items(): + origperms = erschema.permissions[action] + erschema.set_action_permissions(action, actionperms) + torestore.append([erschema, action, origperms]) + try: + yield + finally: + for erschema, action, permissions in torestore: + if action is None: + erschema.permissions = permissions + else: + erschema.set_action_permissions(action, permissions) + + def assertModificationDateGreater(self, entity, olddate): + entity.cw_attr_cache.pop('modification_date', None) + self.assertGreater(entity.modification_date, olddate) + + def assertMessageEqual(self, req, params, expected_msg): + msg = req.session.data[params['_cwmsgid']] + self.assertEqual(expected_msg, msg) + + # workflow utilities ####################################################### + + def assertPossibleTransitions(self, entity, expected): + transitions = entity.cw_adapt_to('IWorkflowable').possible_transitions() + self.assertListEqual(sorted(tr.name for tr in transitions), + sorted(expected)) + + # views and actions registries inspection ################################## + + def pviews(self, req, rset): + return sorted((a.__regid__, a.__class__) + for a in self.vreg['views'].possible_views(req, rset=rset)) + + def pactions(self, req, rset, + skipcategories=('addrelated', 'siteactions', 'useractions', + 'footer', 'manage')): + return [(a.__regid__, a.__class__) + for a in self.vreg['actions'].poss_visible_objects(req, rset=rset) + if a.category not in skipcategories] + + def pactions_by_cats(self, req, rset, categories=('addrelated',)): + return [(a.__regid__, a.__class__) + for a in self.vreg['actions'].poss_visible_objects(req, rset=rset) + if a.category in categories] + + def pactionsdict(self, req, rset, + skipcategories=('addrelated', 'siteactions', 'useractions', + 'footer', 'manage')): + res = {} + for a in self.vreg['actions'].poss_visible_objects(req, rset=rset): + if a.category not in skipcategories: + res.setdefault(a.category, []).append(a.__class__) + return res + + def action_submenu(self, req, rset, id): + return self._test_action(self.vreg['actions'].select(id, req, rset=rset)) + + def _test_action(self, action): + class fake_menu(list): + @property + def items(self): + return self + + class fake_box(object): + def action_link(self, action, **kwargs): + return (action.title, action.url()) + submenu = fake_menu() + action.fill_menu(fake_box(), submenu) + return submenu + + def list_views_for(self, rset): + """returns the list of views that can be applied on `rset`""" + req = rset.req + only_once_vids = ('primary', 'secondary', 'text') + req.data['ex'] = ValueError("whatever") + viewsvreg = self.vreg['views'] + for vid, views in viewsvreg.items(): + if vid[0] == '_': + continue + if rset.rowcount > 1 and vid in only_once_vids: + continue + views = [view for view in views + if view.category != 'startupview' + and not issubclass(view, notification.NotificationView) + and not isinstance(view, class_deprecated)] + if views: + try: + view = viewsvreg._select_best(views, req, rset=rset) + if view is None: + raise NoSelectableObject((req,), {'rset': rset}, views) + if view.linkable(): + yield view + else: + not_selected(self.vreg, view) + # else the view is expected to be used as subview and should + # not be tested directly + except NoSelectableObject: + continue + + def list_actions_for(self, rset): + 
"""returns the list of actions that can be applied on `rset`""" + req = rset.req + for action in self.vreg['actions'].possible_objects(req, rset=rset): + yield action + + def list_boxes_for(self, rset): + """returns the list of boxes that can be applied on `rset`""" + req = rset.req + for box in self.vreg['ctxcomponents'].possible_objects(req, rset=rset): + yield box + + def list_startup_views(self): + """returns the list of startup views""" + with self.admin_access.web_request() as req: + for view in self.vreg['views'].possible_views(req, None): + if view.category == 'startupview': + yield view.__regid__ + else: + not_selected(self.vreg, view) + + # web ui testing utilities ################################################# + + @property + @cached + def app(self): + """return a cubicweb publisher""" + publisher = application.CubicWebPublisher(self.repo, self.config) + + def raise_error_handler(*args, **kwargs): + raise + + publisher.error_handler = raise_error_handler + return publisher + + @deprecated('[3.19] use the .remote_calling method') + def remote_call(self, fname, *args): + """remote json call simulation""" + dump = json.dumps + args = [dump(arg) for arg in args] + req = self.request(fname=fname, pageid='123', arg=args) + ctrl = self.vreg['controllers'].select('ajax', req) + return ctrl.publish(), req + + @contextmanager + def remote_calling(self, fname, *args): + """remote json call simulation""" + args = [json.dumps(arg) for arg in args] + with self.admin_access.web_request(fname=fname, pageid='123', arg=args) as req: + ctrl = self.vreg['controllers'].select('ajax', req) + yield ctrl.publish(), req + + def app_handle_request(self, req, path='view'): + return self.app.core_handle(req, path) + + @deprecated("[3.15] app_handle_request is the new and better way" + " (beware of small semantic changes)") + def app_publish(self, *args, **kwargs): + return self.app_handle_request(*args, **kwargs) + + def ctrl_publish(self, req, ctrl='edit', rset=None): + """call the publish method of the edit controller""" + ctrl = self.vreg['controllers'].select(ctrl, req, appli=self.app) + try: + result = ctrl.publish(rset) + req.cnx.commit() + except web.Redirect: + req.cnx.commit() + raise + return result + + @staticmethod + def fake_form(formid, field_dict=None, entity_field_dicts=()): + """Build _cw.form dictionnary to fake posting of some standard cubicweb form + + * `formid`, the form id, usually form's __regid__ + + * `field_dict`, dictionary of name:value for fields that are not tied to an entity + + * `entity_field_dicts`, list of (entity, dictionary) where dictionary contains name:value + for fields that are not tied to the given entity + """ + assert field_dict or entity_field_dicts, \ + 'field_dict and entity_field_dicts arguments must not be both unspecified' + if field_dict is None: + field_dict = {} + form = {'__form_id': formid} + fields = [] + for field, value in field_dict.items(): + fields.append(field) + form[field] = value + + def _add_entity_field(entity, field, value): + entity_fields.append(field) + form[eid_param(field, entity.eid)] = value + + for entity, field_dict in entity_field_dicts: + if '__maineid' not in form: + form['__maineid'] = entity.eid + entity_fields = [] + form.setdefault('eid', []).append(entity.eid) + _add_entity_field(entity, '__type', entity.cw_etype) + for field, value in field_dict.items(): + _add_entity_field(entity, field, value) + if entity_fields: + form[eid_param('_cw_entity_fields', entity.eid)] = ','.join(entity_fields) + if fields: + 
form['_cw_fields'] = ','.join(sorted(fields)) + return form + + @deprecated('[3.19] use .admin_request_from_url instead') + def req_from_url(self, url): + """parses `url` and builds the corresponding CW-web request + + req.form will be setup using the url's query string + """ + req = self.request(url=url) + if isinstance(url, unicode): + url = url.encode(req.encoding) # req.setup_params() expects encoded strings + querystring = urlparse(url)[-2] + params = parse_qs(querystring) + req.setup_params(params) + return req + + @contextmanager + def admin_request_from_url(self, url): + """parses `url` and builds the corresponding CW-web request + + req.form will be setup using the url's query string + """ + with self.admin_access.web_request(url=url) as req: + if isinstance(url, unicode): + url = url.encode(req.encoding) # req.setup_params() expects encoded strings + querystring = urlparse(url)[-2] + params = parse_qs(querystring) + req.setup_params(params) + yield req + + def url_publish(self, url, data=None): + """takes `url`, uses application's app_resolver to find the appropriate + controller and result set, then publishes the result. + + To simulate post of www-form-encoded data, give a `data` dictionary + containing desired key/value associations. + + This should pretty much correspond to what occurs in a real CW server + except the apache-rewriter component is not called. + """ + with self.admin_request_from_url(url) as req: + if data is not None: + req.form.update(data) + ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False)) + return self.ctrl_publish(req, ctrlid, rset) + + def http_publish(self, url, data=None): + """like `url_publish`, except this returns a http response, even in case + of errors. You may give form parameters using the `data` argument. 
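A sketch of fake_form() defined above, asserting only the keys that the helper itself guarantees; the 'firstname-subject' field name and the test case name are illustrative.

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC


    class FakeFormTC(CubicWebTC):  # hypothetical test case
        def test_fake_edition_form(self):
            with self.admin_access.web_request() as req:
                user = req.user
                form = self.fake_form(
                    'edition',
                    entity_field_dicts=[(user, {'firstname-subject': u'Babar'})])
                self.assertEqual('edition', form['__form_id'])
                self.assertEqual(user.eid, form['__maineid'])
                # typically assigned to req.form before calling the 'edit' controller
                req.form.update(form)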
+ """ + with self.admin_request_from_url(url) as req: + if data is not None: + req.form.update(data) + with real_error_handling(self.app): + result = self.app_handle_request(req, req.relative_path(False)) + return result, req + + @staticmethod + def _parse_location(req, location): + try: + path, params = location.split('?', 1) + except ValueError: + path = location + params = {} + else: + cleanup = lambda p: (p[0], urlunquote(p[1])) + params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) + if path.startswith(req.base_url()): # may be relative + path = path[len(req.base_url()):] + return path, params + + def expect_redirect(self, callback, req): + """call the given callback with req as argument, expecting to get a + Redirect exception + """ + try: + callback(req) + except Redirect as ex: + return self._parse_location(req, ex.location) + else: + self.fail('expected a Redirect exception') + + def expect_redirect_handle_request(self, req, path='edit'): + """call the publish method of the application publisher, expecting to + get a Redirect exception + """ + self.app_handle_request(req, path) + self.assertTrue(300 <= req.status_out < 400, req.status_out) + location = req.get_response_header('location') + return self._parse_location(req, location) + + @deprecated("[3.15] expect_redirect_handle_request is the new and better way" + " (beware of small semantic changes)") + def expect_redirect_publish(self, *args, **kwargs): + return self.expect_redirect_handle_request(*args, **kwargs) + + def set_auth_mode(self, authmode, anonuser=None): + self.set_option('auth-mode', authmode) + self.set_option('anonymous-user', anonuser) + if anonuser is None: + self.config.anonymous_credential = None + else: + self.config.anonymous_credential = (anonuser, anonuser) + + def init_authentication(self, authmode, anonuser=None): + self.set_auth_mode(authmode, anonuser) + req = self.requestcls(self.vreg, url='login') + sh = self.app.session_handler + authm = sh.session_manager.authmanager + authm.anoninfo = self.vreg.config.anonymous_user() + authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]} + # not properly cleaned between tests + self.open_sessions = sh.session_manager._sessions = {} + return req, self.session + + def assertAuthSuccess(self, req, origsession, nbsessions=1): + sh = self.app.session_handler + session = self.app.get_session(req) + cnx = repoapi.Connection(session) + req.set_cnx(cnx) + self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions) + self.assertEqual(session.login, origsession.login) + self.assertEqual(session.anonymous_session, False) + + def assertAuthFailure(self, req, nbsessions=0): + with self.assertRaises(AuthenticationError): + self.app.get_session(req) + # +0 since we do not track the opened session + self.assertEqual(len(self.open_sessions), nbsessions) + clear_cache(req, 'get_authorization') + + # content validation ####################################################### + + # validators are used to validate (XML, DTD, whatever) view's content + # validators availables are : + # DTDValidator : validates XML + declared DTD + # SaxOnlyValidator : guarantees XML is well formed + # None : do not try to validate anything + # validators used must be imported from from.devtools.htmlparser + content_type_validators = { + # maps MIME type : validator name + # + # do not set html validators here, we need HTMLValidator for html + # snippets + # 'text/html': DTDValidator, + # 'application/xhtml+xml': DTDValidator, + 'application/xml': 
htmlparser.XMLValidator, + 'text/xml': htmlparser.XMLValidator, + 'application/json': JsonValidator, + 'text/plain': None, + 'text/comma-separated-values': None, + 'text/x-vcard': None, + 'text/calendar': None, + 'image/png': None, + } + # maps vid : validator name (override content_type_validators) + vid_validators = dict((vid, htmlparser.VALMAP[valkey]) + for vid, valkey in VIEW_VALIDATORS.items()) + + def view(self, vid, rset=None, req=None, template='main-template', + **kwargs): + """This method tests the view `vid` on `rset` using `template` + + If no error occurred while rendering the view, the HTML is analyzed + and parsed. + + :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo` + encapsulation the generated HTML + """ + if req is None: + if rset is None: + req = self.request() + else: + req = rset.req + req.form['vid'] = vid + viewsreg = self.vreg['views'] + view = viewsreg.select(vid, req, rset=rset, **kwargs) + # set explicit test description + if rset is not None: + # coerce to "bytes" on py2 because the description will be sent to + # sys.stdout/stderr which takes "bytes" on py2 and "unicode" on py3 + rql = str(rset.printable_rql()) + self.set_description("testing vid=%s defined in %s with (%s)" % ( + vid, view.__module__, rql)) + else: + self.set_description("testing vid=%s defined in %s without rset" % ( + vid, view.__module__)) + if template is None: # raw view testing, no template + viewfunc = view.render + else: + kwargs['view'] = view + viewfunc = lambda **k: viewsreg.main_template(req, template, + rset=rset, **kwargs) + return self._test_view(viewfunc, view, template, kwargs) + + def _test_view(self, viewfunc, view, template='main-template', kwargs={}): + """this method does the actual call to the view + + If no error occurred while rendering the view, the HTML is analyzed + and parsed. 
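A sketch of the view() helper described above; 'primary' is a standard view id and the returned PageInfo object can be queried as in the HTMLPageInfoTC tests.

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC


    class PrimaryViewTC(CubicWebTC):  # hypothetical test case
        def test_user_primary_view(self):
            with self.admin_access.web_request() as req:
                rset = req.execute('Any X WHERE X is CWUser, X login "admin"')
                # renders through the main template, then validates and parses the output
                page = self.view('primary', rset, req=req)
                self.assertTrue(page.source)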
+ + :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo` + encapsulation the generated HTML + """ + try: + output = viewfunc(**kwargs) + except Exception: + # hijack exception: generative tests stop when the exception + # is not an AssertionError + klass, exc, tcbk = sys.exc_info() + try: + msg = '[%s in %s] %s' % (klass, view.__regid__, exc) + except Exception: + msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__) + exc = AssertionError(msg) + exc.__traceback__ = tcbk + raise exc + return self._check_html(output, view, template) + + def get_validator(self, view=None, content_type=None, output=None): + if view is not None: + try: + return self.vid_validators[view.__regid__]() + except KeyError: + if content_type is None: + content_type = view.content_type + if content_type is None: + content_type = 'text/html' + if content_type in ('text/html', 'application/xhtml+xml') and output: + if output.startswith(b''): + # only check XML well-formness since HTMLValidator isn't html5 + # compatible and won't like various other extensions + default_validator = htmlparser.XMLSyntaxValidator + elif output.startswith(b' used in progress widget, unknown in html dtd + output = re.sub('', '', output) + return self.assertWellFormed(validator, output.strip(), context=view.__regid__) + + def assertWellFormed(self, validator, content, context=None): + try: + return validator.parse_string(content) + except Exception: + # hijack exception: generative tests stop when the exception + # is not an AssertionError + klass, exc, tcbk = sys.exc_info() + if context is None: + msg = u'[%s]' % (klass,) + else: + msg = u'[%s in %s]' % (klass, context) + msg = msg.encode(sys.getdefaultencoding(), 'replace') + + try: + str_exc = str(exc) + except Exception: + str_exc = 'undisplayable exception' + msg += str_exc.encode(sys.getdefaultencoding(), 'replace') + if content is not None: + position = getattr(exc, "position", (0,))[0] + if position: + # define filter + if isinstance(content, str): + content = unicode(content, sys.getdefaultencoding(), 'replace') + content = validator.preprocess_data(content) + content = content.splitlines() + width = int(log(len(content), 10)) + 1 + line_template = " %" + ("%i" % width) + "i: %s" + # XXX no need to iterate the whole file except to get + # the line number + content = u'\n'.join(line_template % (idx + 1, line) + for idx, line in enumerate(content) + if line_context_filter(idx+1, position)) + msg += u'\nfor content:\n%s' % content + exc = AssertionError(msg) + exc.__traceback__ = tcbk + raise exc + + def assertDocTestFile(self, testfile): + # doctest returns tuple (failure_count, test_count) + with self.admin_access.shell() as mih: + result = mih.process_script(testfile) + if result[0] and result[1]: + raise self.failureException("doctest file '%s' failed" + % testfile) + + # notifications ############################################################ + + def assertSentEmail(self, subject, recipients=None, nb_msgs=None): + """test recipients in system mailbox for given email subject + + :param subject: email subject to find in mailbox + :param recipients: list of email recipients + :param nb_msgs: expected number of entries + :returns: list of matched emails + """ + messages = [email for email in MAILBOX + if email.message.get('Subject') == subject] + if recipients is not None: + sent_to = set() + for msg in messages: + sent_to.update(msg.recipients) + self.assertSetEqual(set(recipients), sent_to) + if nb_msgs is not None: + self.assertEqual(len(MAILBOX), 
nb_msgs) + return messages + + +# auto-populating test classes and utilities ################################### + +from cubicweb.devtools.fill import insert_entity_queries, make_relations_queries + +# XXX cleanup unprotected_entities & all mess + + +def how_many_dict(schema, cnx, how_many, skip): + """given a schema, compute how many entities by type we need to be able to + satisfy relations cardinality. + + The `how_many` argument tells how many entities of which type we want at + least. + + Return a dictionary with entity types as key, and the number of entities for + this type as value. + """ + relmap = {} + for rschema in schema.relations(): + if rschema.final: + continue + for subj, obj in rschema.rdefs: + card = rschema.rdef(subj, obj).cardinality + # if the relation is mandatory, we'll need at least as many subj and + # obj to satisfy it + if card[0] in '1+' and card[1] in '1?': + # subj has to be linked to at least one obj, + # but obj can be linked to only one subj + # -> we need at least as many subj as obj to satisfy + # cardinalities for this relation + relmap.setdefault((rschema, subj), []).append(str(obj)) + if card[1] in '1+' and card[0] in '1?': + # reverse subj and obj in the above explanation + relmap.setdefault((rschema, obj), []).append(str(subj)) + unprotected = unprotected_entities(schema) + for etype in skip: # XXX (syt) duh? explain or kill + unprotected.add(etype) + howmanydict = {} + # step 1, compute a base number of each entity types: number of already + # existing entities of this type + `how_many` + for etype in unprotected_entities(schema, strict=True): + howmanydict[str(etype)] = cnx.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] + if etype in unprotected: + howmanydict[str(etype)] += how_many + # step 2, augment nb entity per types to satisfy cardinality constraints, + # by recomputing for each relation that constrained an entity type: + # + # new num for etype = max(current num, sum(num for possible target etypes)) + # + # XXX we should first check there is no cycle then propagate changes + for (rschema, etype), targets in relmap.items(): + relfactor = sum(howmanydict[e] for e in targets) + howmanydict[str(etype)] = max(relfactor, howmanydict[etype]) + return howmanydict + + +class AutoPopulateTest(CubicWebTC): + """base class for test with auto-populating of the database""" + __abstract__ = True + + test_db_id = 'autopopulate' + + tags = CubicWebTC.tags | Tags('autopopulated') + + pdbclass = CubicWebDebugger + # this is a hook to be able to define a list of rql queries + # that are application dependent and cannot be guessed automatically + application_rql = [] + + no_auto_populate = () + ignored_relations = set() + + def to_test_etypes(self): + return unprotected_entities(self.schema, strict=True) + + def custom_populate(self, how_many, cnx): + pass + + def post_populate(self, cnx): + pass + + @nocoverage + def auto_populate(self, how_many): + """this method populates the database with `how_many` entities + of each possible type. 
It also inserts random relations between them + """ + with self.admin_access.cnx() as cnx: + with cnx.security_enabled(read=False, write=False): + self._auto_populate(cnx, how_many) + cnx.commit() + + def _auto_populate(self, cnx, how_many): + self.custom_populate(how_many, cnx) + vreg = self.vreg + howmanydict = how_many_dict(self.schema, cnx, how_many, self.no_auto_populate) + for etype in unprotected_entities(self.schema): + if etype in self.no_auto_populate: + continue + nb = howmanydict.get(etype, how_many) + for rql, args in insert_entity_queries(etype, self.schema, vreg, nb): + cnx.execute(rql, args) + edict = {} + for etype in unprotected_entities(self.schema, strict=True): + rset = cnx.execute('%s X' % etype) + edict[str(etype)] = set(row[0] for row in rset.rows) + existingrels = {} + ignored_relations = SYSTEM_RELATIONS | self.ignored_relations + for rschema in self.schema.relations(): + if rschema.final or rschema in ignored_relations: + continue + rset = cnx.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema) + existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset) + q = make_relations_queries(self.schema, edict, cnx, ignored_relations, + existingrels=existingrels) + for rql, args in q: + try: + cnx.execute(rql, args) + except ValidationError as ex: + # failed to satisfy some constraint + print('error in automatic db population', ex) + cnx.commit_state = None # reset uncommitable flag + self.post_populate(cnx) + + def iter_individual_rsets(self, etypes=None, limit=None): + etypes = etypes or self.to_test_etypes() + with self.admin_access.web_request() as req: + for etype in etypes: + if limit: + rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype) + else: + rql = 'Any X WHERE X is %s' % etype + rset = req.execute(rql) + for row in range(len(rset)): + if limit and row > limit: + break + # XXX iirk + rset2 = rset.limit(limit=1, offset=row) + yield rset2 + + def iter_automatic_rsets(self, limit=10): + """generates basic resultsets for each entity type""" + etypes = self.to_test_etypes() + if not etypes: + return + with self.admin_access.web_request() as req: + for etype in etypes: + yield req.execute('Any X LIMIT %s WHERE X is %s' % (limit, etype)) + etype1 = etypes.pop() + try: + etype2 = etypes.pop() + except KeyError: + etype2 = etype1 + # test a mixed query (DISTINCT/GROUP to avoid getting duplicate + # X which make muledit view failing for instance (html validation fails + # because of some duplicate "id" attributes) + yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % + (etype1, etype2)) + # test some application-specific queries if defined + for rql in self.application_rql: + yield req.execute(rql) + + def _test_everything_for(self, rset): + """this method tries to find everything that can be tested + for `rset` and yields a callable test (as needed in generative tests) + """ + propdefs = self.vreg['propertydefs'] + # make all components visible + for k, v in propdefs.items(): + if k.endswith('visible') and not v['default']: + propdefs[k]['default'] = True + for view in self.list_views_for(rset): + backup_rset = rset.copy(rset.rows, rset.description) + yield InnerTest(self._testname(rset, view.__regid__, 'view'), + self.view, view.__regid__, rset, + rset.req.reset_headers(), 'main-template') + # We have to do this because some views modify the + # resultset's syntax tree + rset = backup_rset + for action in self.list_actions_for(rset): + yield InnerTest(self._testname(rset, action.__regid__, 'action'), + self._test_action, 
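A sketch of how a cube would plug into the auto-populate machinery above; the entity type, relation name and test case name in the class body are hypothetical placeholders for application-specific values.

.. sourcecode:: python

    from cubicweb.devtools.testlib import AutoPopulateTest


    class MyCubePopulateTC(AutoPopulateTest):  # hypothetical cube-level test
        # entity types that should not be populated randomly (hypothetical name)
        no_auto_populate = ('ExternalSource',)
        # relations auto-population should not try to create (hypothetical name)
        ignored_relations = set(('my_relation',))
        # application specific resultsets to exercise on top of the generated ones
        application_rql = ['Any G, COUNT(X) GROUPBY G WHERE X in_group G']

        def test_populate(self):
            self.auto_populate(5)
            for rset in self.iter_automatic_rsets(limit=5):
                self.assertTrue(rset)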
action) + for box in self.list_boxes_for(rset): + w = [].append + yield InnerTest(self._testname(rset, box.__regid__, 'box'), box.render, w) + + @staticmethod + def _testname(rset, objid, objtype): + return '%s_%s_%s' % ('_'.join(rset.column_types(0)), objid, objtype) + + +# concrete class for automated application testing ############################ + +class AutomaticWebTest(AutoPopulateTest): + """import this if you wan automatic tests to be ran""" + + tags = AutoPopulateTest.tags | Tags('web', 'generated') + + def setUp(self): + if self.__class__ is AutomaticWebTest: + # Prevent direct use of AutomaticWebTest to avoid database caching + # issues. + return + super(AutomaticWebTest, self).setUp() + + # access to self.app for proper initialization of the authentication + # machinery (else some views may fail) + self.app + + def test_one_each_config(self): + self.auto_populate(1) + for rset in self.iter_automatic_rsets(limit=1): + for testargs in self._test_everything_for(rset): + yield testargs + + def test_ten_each_config(self): + self.auto_populate(10) + for rset in self.iter_automatic_rsets(limit=10): + for testargs in self._test_everything_for(rset): + yield testargs + + def test_startup_views(self): + for vid in self.list_startup_views(): + with self.admin_access.web_request() as req: + yield self.view, vid, None, req + + +# registry instrumentization ################################################### + +def not_selected(vreg, appobject): + try: + vreg._selected[appobject.__class__] -= 1 + except (KeyError, AttributeError): + pass + + +# def vreg_instrumentize(testclass): +# # XXX broken +# from cubicweb.devtools.apptest import TestEnvironment +# env = testclass._env = TestEnvironment('data', configcls=testclass.configcls) +# for reg in env.vreg.values(): +# reg._selected = {} +# try: +# orig_select_best = reg.__class__.__orig_select_best +# except Exception: +# orig_select_best = reg.__class__._select_best +# def instr_select_best(self, *args, **kwargs): +# selected = orig_select_best(self, *args, **kwargs) +# try: +# self._selected[selected.__class__] += 1 +# except KeyError: +# self._selected[selected.__class__] = 1 +# except AttributeError: +# pass # occurs on reg used to restore database +# return selected +# reg.__class__._select_best = instr_select_best +# reg.__class__.__orig_select_best = orig_select_best + + +# def print_untested_objects(testclass, skipregs=('hooks', 'etypes')): +# for regname, reg in testclass._env.vreg.items(): +# if regname in skipregs: +# continue +# for appobjects in reg.values(): +# for appobject in appobjects: +# if not reg._selected.get(appobject): +# print 'not tested', regname, appobject diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/devtools/webtest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/devtools/webtest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,34 @@ +from __future__ import absolute_import + +import webtest + +from cubicweb.wsgi import handler +from cubicweb.devtools.testlib import CubicWebTC + + +class CubicWebTestTC(CubicWebTC): + def setUp(self): + super(CubicWebTestTC, self).setUp() + self.config.global_set_option('base-url', 'http://localhost.local/') + # call load_configuration again to let the config reset its datadir_url + self.config.load_configuration() + webapp = handler.CubicWebWSGIApplication(self.config) + self.webapp = webtest.TestApp(webapp) + + def tearDown(self): + del self.webapp + super(CubicWebTestTC, self).tearDown() + + def login(self, user=None, password=None, **args): + if user is None: + user 
= self.admlogin + if password is None: + password = self.admpassword if user == self.admlogin else user + args.update({ + '__login': user, + '__password': password + }) + return self.webapp.get('/login', args) + + def logout(self): + return self.webapp.get('/logout') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,208 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""base application's entities class implementation: `AnyEntity`""" + +__docformat__ = "restructuredtext en" + +from warnings import warn + +from six import text_type, string_types + +from logilab.common.decorators import classproperty +from logilab.common.deprecation import deprecated + +from cubicweb import Unauthorized +from cubicweb.entity import Entity + + +class AnyEntity(Entity): + """an entity instance has e_schema automagically set on the class and + instances have access to their issuing cursor + """ + __regid__ = 'Any' + + @classproperty + def cw_etype(cls): + """entity type as a unicode string""" + return text_type(cls.__regid__) + + @classmethod + def cw_create_url(cls, req, **kwargs): + """ return the url of the entity creation form for this entity type""" + return req.build_url('add/%s' % cls.__regid__, **kwargs) + + @classmethod + @deprecated('[3.22] use cw_fti_index_rql_limit instead') + def cw_fti_index_rql_queries(cls, req): + """return the list of rql queries to fetch entities to FT-index + + The default is to fetch all entities at once and to prefetch + indexable attributes but one could imagine iterating over + "smaller" resultsets if the table is very big or returning + a subset of entities that match some business-logic condition. 
+ """ + restrictions = ['X is %s' % cls.__regid__] + selected = ['X'] + for attrschema in sorted(cls.e_schema.indexable_attributes()): + varname = attrschema.type.upper() + restrictions.append('X %s %s' % (attrschema, varname)) + selected.append(varname) + return ['Any %s WHERE %s' % (', '.join(selected), + ', '.join(restrictions))] + + @classmethod + def cw_fti_index_rql_limit(cls, req, limit=1000): + """generate rsets of entities to FT-index + + By default, each successive result set is limited to 1000 entities + """ + if cls.cw_fti_index_rql_queries.__func__ != AnyEntity.cw_fti_index_rql_queries.__func__: + warn("[3.22] cw_fti_index_rql_queries is replaced by cw_fti_index_rql_limit", + DeprecationWarning) + for rql in cls.cw_fti_index_rql_queries(req): + yield req.execute(rql) + return + restrictions = ['X is %s' % cls.__regid__] + selected = ['X'] + start = 0 + for attrschema in sorted(cls.e_schema.indexable_attributes()): + varname = attrschema.type.upper() + restrictions.append('X %s %s' % (attrschema, varname)) + selected.append(varname) + while True: + q_restrictions = restrictions + ['X eid > %s' % start] + rset = req.execute('Any %s ORDERBY X LIMIT %s WHERE %s' % + (', '.join(selected), + limit, + ', '.join(q_restrictions))) + if rset: + start = rset[-1][0] + yield rset + else: + break + + # meta data api ########################################################### + + def dc_title(self): + """return a suitable *unicode* title for this entity""" + for rschema, attrschema in self.e_schema.attribute_definitions(): + if rschema.meta: + continue + value = self.cw_attr_value(rschema.type) + if value is not None: + # make the value printable (dates, floats, bytes, etc.) + return self.printable_value(rschema.type, value, attrschema.type, + format='text/plain') + return u'%s #%s' % (self.dc_type(), self.eid) + + def dc_long_title(self): + """return a more detailled title for this entity""" + return self.dc_title() + + def dc_description(self, format='text/plain'): + """return a suitable description for this entity""" + if 'description' in self.e_schema.subjrels: + return self.printable_value('description', format=format) + return u'' + + def dc_authors(self): + """return a suitable description for the author(s) of the entity""" + try: + return ', '.join(u.name() for u in self.owned_by) + except Unauthorized: + return u'' + + def dc_creator(self): + """return a suitable description for the creator of the entity""" + if self.creator: + return self.creator.name() + return u'' + + def dc_date(self, date_format=None):# XXX default to ISO 8601 ? + """return latest modification date of this entity""" + return self._cw.format_date(self.modification_date, date_format=date_format) + + def dc_type(self, form=''): + """return the display name for the type of this entity (translated)""" + return self.e_schema.display_name(self._cw, form) + + def dc_language(self): + """return language used by this entity (translated)""" + # check if entities has internationalizable attributes + # XXX one is enough or check if all String attributes are internationalizable? 
+ for rschema, attrschema in self.e_schema.attribute_definitions(): + if rschema.rdef(self.e_schema, attrschema).internationalizable: + return self._cw._(self._cw.user.property_value('ui.language')) + return self._cw._(self._cw.vreg.property_value('ui.language')) + + @property + def creator(self): + """return the CWUser entity which has created this entity, or None if + unknown or if the curent user doesn't has access to this euser + """ + try: + return self.created_by[0] + except (Unauthorized, IndexError): + return None + + # abstractions making the whole things (well, some at least) working ###### + + def sortvalue(self, rtype=None): + """return a value which can be used to sort this entity or given + entity's attribute + """ + if rtype is None: + return self.dc_title().lower() + value = self.cw_attr_value(rtype) + # do not restrict to `unicode` because Bytes will return a `str` value + if isinstance(value, string_types): + return self.printable_value(rtype, format='text/plain').lower() + return value + + +def fetch_config(fetchattrs, mainattr=None, pclass=AnyEntity, order='ASC'): + """function to ease basic configuration of an entity class ORM. Basic usage + is: + + .. sourcecode:: python + + class MyEntity(AnyEntity): + + fetch_attrs, cw_fetch_order = fetch_config(['attr1', 'attr2']) + # uncomment line below if you want the same sorting for 'unrelated' entities + # cw_fetch_unrelated_order = cw_fetch_order + + Using this, when using ORM methods retrieving this type of entity, 'attr1' + and 'attr2' will be automatically prefetched and results will be sorted on + 'attr1' ascending (ie the first attribute in the list). + + This function will automatically add to fetched attributes those defined in + parent class given using the `pclass` argument. + + Also, You can use `mainattr` and `order` argument to have a different + sorting. + """ + if pclass is not None: + fetchattrs += pclass.fetch_attrs + if mainattr is None: + mainattr = fetchattrs[0] + @classmethod + def fetch_order(cls, select, attr, var): + if attr == mainattr: + select.add_sort_var(var, order=='ASC') + return fetchattrs, fetch_order diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/adapters.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/adapters.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,427 @@ +# copyright 2010-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""some basic entity adapter implementations, for interfaces used in the +framework itself. 
+""" +from cubicweb import _ + +from itertools import chain +from hashlib import md5 + +from logilab.mtconverter import TransformError +from logilab.common.decorators import cached + +from cubicweb import ValidationError, view, ViolatedConstraint, UniqueTogetherError +from cubicweb.predicates import is_instance, relation_possible, match_exception + + +class IEmailableAdapter(view.EntityAdapter): + __regid__ = 'IEmailable' + __select__ = relation_possible('primary_email') | relation_possible('use_email') + + def get_email(self): + if getattr(self.entity, 'primary_email', None): + return self.entity.primary_email[0].address + if getattr(self.entity, 'use_email', None): + return self.entity.use_email[0].address + return None + + def allowed_massmail_keys(self): + """returns a set of allowed email substitution keys + + The default is to return the entity's attribute list but you might + override this method to allow extra keys. For instance, a Person + class might want to return a `companyname` key. + """ + return set(rschema.type + for rschema, attrtype in self.entity.e_schema.attribute_definitions() + if attrtype.type not in ('Password', 'Bytes')) + + def as_email_context(self): + """returns the dictionary as used by the sendmail controller to + build email bodies. + + NOTE: the dictionary keys should match the list returned by the + `allowed_massmail_keys` method. + """ + return dict((attr, getattr(self.entity, attr)) + for attr in self.allowed_massmail_keys()) + + +class INotifiableAdapter(view.EntityAdapter): + __regid__ = 'INotifiable' + __select__ = is_instance('Any') + + def notification_references(self, view): + """used to control References field of email send on notification + for this entity. `view` is the notification view. + + Should return a list of eids which can be used to generate message + identifiers of previously sent email(s) + """ + itree = self.entity.cw_adapt_to('ITree') + if itree is not None: + return itree.path()[:-1] + if view.msgid_timestamp: + return (self.entity.eid,) + return () + + +class IFTIndexableAdapter(view.EntityAdapter): + """standard adapter to handle fulltext indexing + + .. automethod:: cubicweb.entities.adapters.IFTIndexableAdapter.fti_containers + .. 
automethod:: cubicweb.entities.adapters.IFTIndexableAdapter.get_words + """ + __regid__ = 'IFTIndexable' + __select__ = is_instance('Any') + + def fti_containers(self, _done=None): + """return the list of entities to index when handling ``self.entity`` + + The actual list of entities depends on ``fulltext_container`` usage + in the datamodel definition + """ + if _done is None: + _done = set() + entity = self.entity + _done.add(entity.eid) + containers = tuple(entity.e_schema.fulltext_containers()) + if containers: + for rschema, role in containers: + if role == 'object': + targets = getattr(entity, rschema.type) + else: + targets = getattr(entity, 'reverse_%s' % rschema) + for target in targets: + if target.eid in _done: + continue + for container in target.cw_adapt_to('IFTIndexable').fti_containers(_done): + yield container + else: + yield entity + + # weight in ABCD + entity_weight = 1.0 + attr_weight = {} + + def get_words(self): + """used by the full text indexer to get words to index + + this method should only be used on the repository side since it depends + on the logilab.database package + + :rtype: list + :return: the list of indexable word of this entity + """ + from logilab.database.fti import tokenize + # take care to cases where we're modyfying the schema + entity = self.entity + pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) + words = {} + for rschema in entity.e_schema.indexable_attributes(): + if (entity.e_schema, rschema) in pending: + continue + weight = self.attr_weight.get(rschema, 'C') + try: + value = entity.printable_value(rschema, format=u'text/plain') + except TransformError: + continue + except Exception: + self.exception("can't add value of %s to text index for entity %s", + rschema, entity.eid) + continue + if value: + words.setdefault(weight, []).extend(tokenize(value)) + for rschema, role in entity.e_schema.fulltext_relations(): + if role == 'subject': + for entity_ in getattr(entity, rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + else: # if role == 'object': + for entity_ in getattr(entity, 'reverse_%s' % rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + return words + + +def merge_weight_dict(maindict, newdict): + for weight, words in newdict.items(): + maindict.setdefault(weight, []).extend(words) + + +class IDownloadableAdapter(view.EntityAdapter): + """interface for downloadable entities""" + __regid__ = 'IDownloadable' + __abstract__ = True + + def download_url(self, **kwargs): # XXX not really part of this interface + """return a URL to download entity's content + + It should be a unicode object containing url-encoded ASCII. + """ + raise NotImplementedError + + def download_content_type(self): + """return MIME type (unicode) of the downloadable content""" + raise NotImplementedError + + def download_encoding(self): + """return encoding (unicode) of the downloadable content""" + raise NotImplementedError + + def download_file_name(self): + """return file name (unicode) of the downloadable content""" + raise NotImplementedError + + def download_data(self): + """return actual data (bytes) of the downloadable content""" + raise NotImplementedError + + +# XXX should propose to use two different relations for children/parent +class ITreeAdapter(view.EntityAdapter): + """This adapter provides a tree interface. 
+ + It has to be overriden to be configured using the tree_relation, + child_role and parent_role class attributes to benefit from this default + implementation. + + This class provides the following methods: + + .. automethod: iterparents + .. automethod: iterchildren + .. automethod: prefixiter + + .. automethod: is_leaf + .. automethod: is_root + + .. automethod: root + .. automethod: parent + .. automethod: children + .. automethod: different_type_children + .. automethod: same_type_children + .. automethod: children_rql + .. automethod: path + """ + __regid__ = 'ITree' + __abstract__ = True + + child_role = 'subject' + parent_role = 'object' + + def children_rql(self): + """Returns RQL to get the children of the entity.""" + return self.entity.cw_related_rql(self.tree_relation, self.parent_role) + + def different_type_children(self, entities=True): + """Return children entities of different type as this entity. + + According to the `entities` parameter, return entity objects or the + equivalent result set. + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema != eschema] + return res.filtered_rset(lambda x: x.e_schema != eschema, self.entity.cw_col) + + def same_type_children(self, entities=True): + """Return children entities of the same type as this entity. + + According to the `entities` parameter, return entity objects or the + equivalent result set. + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema == eschema] + return res.filtered_rset(lambda x: x.e_schema is eschema, self.entity.cw_col) + + def is_leaf(self): + """Returns True if the entity does not have any children.""" + return len(self.children()) == 0 + + def is_root(self): + """Returns true if the entity is root of the tree (e.g. has no parent). + """ + return self.parent() is None + + def root(self): + """Return the root entity of the tree.""" + return self._cw.entity_from_eid(self.path()[0]) + + def parent(self): + """Returns the parent entity if any, else None (e.g. if we are on the + root). + """ + try: + return self.entity.related(self.tree_relation, self.child_role, + entities=True)[0] + except (KeyError, IndexError): + return None + + def children(self, entities=True, sametype=False): + """Return children entities. + + According to the `entities` parameter, return entity objects or the + equivalent result set. 
+ """ + if sametype: + return self.same_type_children(entities) + else: + return self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + + def iterparents(self, strict=True): + """Return an iterator on the parents of the entity.""" + def _uptoroot(self): + curr = self + while True: + curr = curr.parent() + if curr is None: + break + yield curr + curr = curr.cw_adapt_to('ITree') + if not strict: + return chain([self.entity], _uptoroot(self)) + return _uptoroot(self) + + def iterchildren(self, _done=None): + """Return an iterator over the item's children.""" + if _done is None: + _done = set() + for child in self.children(): + if child.eid in _done: + self.error('loop in %s tree: %s', child.cw_etype.lower(), child) + continue + yield child + _done.add(child.eid) + + def prefixiter(self, _done=None): + """Return an iterator over the item's descendants in a prefixed order.""" + if _done is None: + _done = set() + if self.entity.eid in _done: + return + _done.add(self.entity.eid) + yield self.entity + for child in self.same_type_children(): + for entity in child.cw_adapt_to('ITree').prefixiter(_done): + yield entity + + @cached + def path(self): + """Returns the list of eids from the root object to this object.""" + path = [] + adapter = self + entity = adapter.entity + while entity is not None: + if entity.eid in path: + self.error('loop in %s tree: %s', entity.cw_etype.lower(), entity) + break + path.append(entity.eid) + try: + # check we are not jumping to another tree + if (adapter.tree_relation != self.tree_relation or + adapter.child_role != self.child_role): + break + entity = adapter.parent() + adapter = entity.cw_adapt_to('ITree') + except AttributeError: + break + path.reverse() + return path + + +class ISerializableAdapter(view.EntityAdapter): + """Adapter to serialize an entity to a bare python structure that may be + directly serialized to e.g. JSON. 
+ """ + + __regid__ = 'ISerializable' + __select__ = is_instance('Any') + + def serialize(self): + entity = self.entity + entity.complete() + data = { + 'cw_etype': entity.cw_etype, + 'cw_source': entity.cw_metainformation()['source']['uri'], + 'eid': entity.eid, + } + for rschema, __ in entity.e_schema.attribute_definitions(): + attr = rschema.type + try: + value = entity.cw_attr_cache[attr] + except KeyError: + # Bytes + continue + data[attr] = value + return data + + +# error handling adapters ###################################################### + + +class IUserFriendlyError(view.EntityAdapter): + __regid__ = 'IUserFriendlyError' + __abstract__ = True + + def __init__(self, *args, **kwargs): + self.exc = kwargs.pop('exc') + super(IUserFriendlyError, self).__init__(*args, **kwargs) + + +class IUserFriendlyUniqueTogether(IUserFriendlyError): + __select__ = match_exception(UniqueTogetherError) + + def raise_user_exception(self): + rtypes = self.exc.rtypes + errors = {} + msgargs = {} + i18nvalues = [] + for rtype in rtypes: + errors[rtype] = _('%(KEY-rtype)s is part of violated unicity constraint') + msgargs[rtype + '-rtype'] = rtype + i18nvalues.append(rtype + '-rtype') + errors[''] = _('some relations violate a unicity constraint') + raise ValidationError(self.entity.eid, errors, msgargs=msgargs, i18nvalues=i18nvalues) + + +class IUserFriendlyCheckConstraint(IUserFriendlyError): + __select__ = match_exception(ViolatedConstraint) + + def raise_user_exception(self): + cstrname = self.exc.cstrname + eschema = self.entity.e_schema + for rschema, attrschema in eschema.attribute_definitions(): + rdef = rschema.rdef(eschema, attrschema) + for constraint in rdef.constraints: + if cstrname == 'cstr' + md5( + (eschema.type + rschema.type + constraint.type() + + (constraint.serialize() or '')).encode('ascii')).hexdigest(): + break + else: + continue + break + else: + assert 0 + key = rschema.type + '-subject' + msg, args = constraint.failed_message(key, self.entity.cw_edited[rschema.type]) + raise ValidationError(self.entity.eid, {key: msg}, args) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/authobjs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/authobjs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,188 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""entity classes user and group entities""" + +__docformat__ = "restructuredtext en" + +from six import string_types + +from logilab.common.decorators import cached + +from cubicweb import Unauthorized +from cubicweb.entities import AnyEntity, fetch_config + +class CWGroup(AnyEntity): + __regid__ = 'CWGroup' + fetch_attrs, cw_fetch_order = fetch_config(['name']) + cw_fetch_unrelated_order = cw_fetch_order + + def dc_long_title(self): + name = self.name + trname = self._cw._(name) + if trname != name: + return '%s (%s)' % (name, trname) + return name + + @cached + def num_users(self): + """return the number of users in this group""" + return self._cw.execute('Any COUNT(U) WHERE U in_group G, G eid %(g)s', + {'g': self.eid})[0][0] + + +class CWUser(AnyEntity): + __regid__ = 'CWUser' + fetch_attrs, cw_fetch_order = fetch_config(['login', 'firstname', 'surname']) + cw_fetch_unrelated_order = cw_fetch_order + + # used by repository to check if the user can log in or not + AUTHENTICABLE_STATES = ('activated',) + + # low level utilities ##################################################### + def __init__(self, *args, **kwargs): + groups = kwargs.pop('groups', None) + properties = kwargs.pop('properties', None) + super(CWUser, self).__init__(*args, **kwargs) + if groups is not None: + self._groups = groups + if properties is not None: + self._properties = properties + + @property + def groups(self): + try: + return self._groups + except AttributeError: + self._groups = set(g.name for g in self.in_group) + return self._groups + + @property + def properties(self): + try: + return self._properties + except AttributeError: + self._properties = dict( + self._cw.execute( + 'Any K, V WHERE P for_user U, U eid %(userid)s, ' + 'P pkey K, P value V', + {'userid': self.eid})) + return self._properties + + def prefered_language(self, language=None): + """return language used by this user, if explicitly defined (eg not + using http negociation) + """ + language = language or self.property_value('ui.language') + vreg = self._cw.vreg + try: + vreg.config.translations[language] + except KeyError: + language = vreg.property_value('ui.language') + assert language in vreg.config.translations[language], language + return language + + def property_value(self, key): + try: + # properties stored on the user aren't correctly typed + # (e.g. 
all values are unicode string) + return self._cw.vreg.typed_value(key, self.properties[key]) + except KeyError: + pass + except ValueError: + self.warning('incorrect value for eproperty %s of user %s', + key, self.login) + return self._cw.vreg.property_value(key) + + def set_property(self, pkey, value): + value = unicode(value) + try: + prop = self._cw.execute( + 'CWProperty X WHERE X pkey %(k)s, X for_user U, U eid %(u)s', + {'k': pkey, 'u': self.eid}).get_entity(0, 0) + except Exception: + kwargs = dict(pkey=unicode(pkey), value=value) + if self.is_in_group('managers'): + kwargs['for_user'] = self + self._cw.create_entity('CWProperty', **kwargs) + else: + prop.cw_set(value=value) + + def matching_groups(self, groups): + """return the number of the given group(s) in which the user is + + :type groups: str or iterable(str) + :param groups: a group name or an iterable on group names + """ + if isinstance(groups, string_types): + groups = frozenset((groups,)) + elif isinstance(groups, (tuple, list)): + groups = frozenset(groups) + return len(groups & self.groups) # XXX return the resulting set instead of its size + + def is_in_group(self, group): + """convience / shortcut method to test if the user belongs to `group` + """ + return group in self.groups + + def is_anonymous(self): + """ checks if user is an anonymous user""" + #FIXME on the web-side anonymous user is detected according + # to config['anonymous-user'], we don't have this info on + # the server side. + return self.groups == frozenset(('guests', )) + + def owns(self, eid): + try: + return self._cw.execute( + 'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', + {'x': eid, 'u': self.eid}) + except Unauthorized: + return False + owns = cached(owns, keyarg=1) + + # presentation utilities ################################################## + + def name(self): + """construct a name using firstname / surname or login if not defined""" + + if self.firstname and self.surname: + return self._cw._('%(firstname)s %(surname)s') % { + 'firstname': self.firstname, 'surname' : self.surname} + if self.firstname: + return self.firstname + return self.login + + def dc_title(self): + return self.login + + dc_long_title = name + + def __call__(self, *args, **kwargs): + """ugly hack for compatibility betweeb dbapi and repo api + + In the dbapi, Connection and Session have a ``user`` method to + generated a user for a request In the repo api, Connection and Session + have a user attribute inherited from SessionRequestBase prototype. This + ugly hack allows to not break user of the user method. + + XXX Deprecate me ASAP""" + return self + +from logilab.common.deprecation import class_renamed +EUser = class_renamed('EUser', CWUser) +EGroup = class_renamed('EGroup', CWGroup) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/lib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/lib.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,149 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""entity classes for optional library entities""" + +__docformat__ = "restructuredtext en" +from warnings import warn +from datetime import datetime + +from six.moves import range +from six.moves.urllib.parse import urlsplit, urlunsplit + +from logilab.mtconverter import xml_escape + +from cubicweb import UnknownProperty +from cubicweb.entity import _marker +from cubicweb.entities import AnyEntity, fetch_config + +def mangle_email(address): + try: + name, host = address.split('@', 1) + except ValueError: + return address + return '%s at %s' % (name, host.replace('.', ' dot ')) + + +class EmailAddress(AnyEntity): + __regid__ = 'EmailAddress' + fetch_attrs, cw_fetch_order = fetch_config(['address', 'alias']) + rest_attr = 'eid' + + def dc_title(self): + if self.alias: + return '%s <%s>' % (self.alias, self.display_address()) + return self.display_address() + + @property + def email_of(self): + return self.reverse_use_email and self.reverse_use_email[0] or None + + @property + def prefered(self): + return self.prefered_form and self.prefered_form[0] or self + + def related_emails(self, skipeids=None): + # XXX move to eemail + # check email relations are in the schema first + subjrels = self.e_schema.object_relations() + if not ('sender' in subjrels and 'recipients' in subjrels): + return + rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC ' + 'WHERE X sender Y or X recipients Y, ' + 'X subject S, X date D, Y eid %(y)s', + {'y': self.eid}) + if skipeids is None: + skipeids = set() + for i in range(len(rset)): + eid = rset[i][0] + if eid in skipeids: + continue + skipeids.add(eid) + yield rset.get_entity(i, 0) + + def display_address(self): + if self._cw.vreg.config['mangle-emails']: + return mangle_email(self.address) + return self.address + + def printable_value(self, attr, value=_marker, attrtype=None, + format='text/html'): + """overriden to return displayable address when necessary""" + if attr == 'address': + address = self.display_address() + if format == 'text/html': + address = xml_escape(address) + return address + return super(EmailAddress, self).printable_value(attr, value, attrtype, format) + + +class Bookmark(AnyEntity): + """customized class for Bookmark entities""" + __regid__ = 'Bookmark' + fetch_attrs, cw_fetch_order = fetch_config(['title', 'path']) + + def actual_url(self): + url = self._cw.build_url(self.path) + if self.title: + urlparts = list(urlsplit(url)) + if urlparts[3]: + urlparts[3] += '&vtitle=%s' % self._cw.url_quote(self.title) + else: + urlparts[3] = 'vtitle=%s' % self._cw.url_quote(self.title) + url = urlunsplit(urlparts) + return url + + def action_url(self): + return self.absolute_url() + '/follow' + + +class CWProperty(AnyEntity): + __regid__ = 'CWProperty' + + fetch_attrs, cw_fetch_order = fetch_config(['pkey', 'value']) + rest_attr = 'pkey' + + def typed_value(self): + return self._cw.vreg.typed_value(self.pkey, self.value) + + def dc_description(self, format='text/plain'): + try: + return self._cw._(self._cw.vreg.property_info(self.pkey)['help']) + except UnknownProperty: + return u'' + + +class CWCache(AnyEntity): + """Cache""" + __regid__ = 'CWCache' + fetch_attrs, cw_fetch_order 
= fetch_config(['name']) + + def __init__(self, *args, **kwargs): + warn('[3.19] CWCache entity type is going away soon. ' + 'Other caching mechanisms can be used more reliably ' + 'to the same effect.', + DeprecationWarning) + super(CWCache, self).__init__(*args, **kwargs) + + def touch(self): + self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s', + {'t': datetime.now(), 'x': self.eid}) + + def valid(self, date): + if date: + return date > self.timestamp + return False diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/schemaobjs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/schemaobjs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,178 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""schema definition related entities""" + +__docformat__ = "restructuredtext en" + +from logilab.common.decorators import cached + +from yams.schema import role_name + +from cubicweb import ValidationError +from cubicweb.schema import ERQLExpression, RRQLExpression + +from cubicweb.entities import AnyEntity, fetch_config + + +class CWEType(AnyEntity): + __regid__ = 'CWEType' + fetch_attrs, cw_fetch_order = fetch_config(['name']) + + def dc_title(self): + return u'%s (%s)' % (self.name, self._cw._(self.name)) + + def dc_long_title(self): + stereotypes = [] + _ = self._cw._ + if self.final: + stereotypes.append(_('final')) + if stereotypes: + return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes)) + return self.dc_title() + + +class CWRType(AnyEntity): + __regid__ = 'CWRType' + fetch_attrs, cw_fetch_order = fetch_config(['name']) + + def dc_title(self): + return u'%s (%s)' % (self.name, self._cw._(self.name)) + + def dc_long_title(self): + stereotypes = [] + _ = self._cw._ + if self.symmetric: + stereotypes.append(_('symmetric')) + if self.inlined: + stereotypes.append(_('inlined')) + if self.final: + stereotypes.append(_('final')) + if stereotypes: + return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes)) + return self.dc_title() + + def check_inlined_allowed(self): + """check inlining is possible, raise ValidationError if not possible + """ + # don't use the persistent schema, we may miss cardinality changes + # in the same transaction + for rdef in self.reverse_relation_type: + card = rdef.cardinality[0] + if not card in '?1': + qname = role_name('inlined', 'subject') + rtype = self.name + stype = rdef.stype + otype = rdef.otype + msg = self._cw._("can't set inlined=True, " + "%(stype)s %(rtype)s %(otype)s " + "has cardinality=%(card)s") + raise ValidationError(self.eid, {qname: msg % locals()}) + + +class CWRelation(AnyEntity): + __regid__ = 'CWRelation' + fetch_attrs = fetch_config(['cardinality'])[0] + + def dc_title(self): + return u'%s %s %s' % ( + self.from_entity[0].name, + self.relation_type[0].name, + 
self.to_entity[0].name) + + def dc_long_title(self): + card = self.cardinality + scard, ocard = u'', u'' + if card[0] != '1': + scard = '[%s]' % card[0] + if card[1] != '1': + ocard = '[%s]' % card[1] + return u'%s %s%s%s %s' % ( + self.from_entity[0].name, + scard, self.relation_type[0].name, ocard, + self.to_entity[0].name) + + @property + def rtype(self): + return self.relation_type[0] + + @property + def stype(self): + return self.from_entity[0] + + @property + def otype(self): + return self.to_entity[0] + + def yams_schema(self): + rschema = self._cw.vreg.schema.rschema(self.rtype.name) + return rschema.rdefs[(self.stype.name, self.otype.name)] + + +class CWAttribute(CWRelation): + __regid__ = 'CWAttribute' + + def dc_long_title(self): + card = self.cardinality + scard = u'' + if card[0] == '1': + scard = '+' + return u'%s %s%s %s' % ( + self.from_entity[0].name, + scard, self.relation_type[0].name, + self.to_entity[0].name) + + +class CWConstraint(AnyEntity): + __regid__ = 'CWConstraint' + fetch_attrs, cw_fetch_order = fetch_config(['value']) + + def dc_title(self): + return '%s(%s)' % (self.cstrtype[0].name, self.value or u'') + + @property + def type(self): + return self.cstrtype[0].name + + +class RQLExpression(AnyEntity): + __regid__ = 'RQLExpression' + fetch_attrs, cw_fetch_order = fetch_config(['exprtype', 'mainvars', 'expression']) + + def dc_title(self): + return self.expression or u'' + + def dc_long_title(self): + return '%s(%s)' % (self.exprtype, self.expression or u'') + + @property + def expression_of(self): + for rel in ('read_permission', 'add_permission', 'delete_permission', + 'update_permission', 'condition'): + values = getattr(self, 'reverse_%s' % rel) + if values: + return values[0] + + @cached + def _rqlexpr(self): + if self.exprtype == 'ERQLExpression': + return ERQLExpression(self.expression, self.mainvars, self.eid) + #if self.exprtype == 'RRQLExpression': + return RRQLExpression(self.expression, self.mainvars, self.eid) + + def check_expression(self, *args, **kwargs): + return self._rqlexpr().check(*args, **kwargs) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/sources.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/sources.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,184 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
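# Illustrative sketch (assumed example, not part of this changeset) of the
# `mainattr` and `order` arguments of fetch_config(), defined in
# cubicweb/entities/__init__.py above and used throughout the entity classes in
# this patch. `Ticket`, `title` and `priority` are hypothetical names.
from cubicweb.entities import AnyEntity, fetch_config

class Ticket(AnyEntity):
    __regid__ = 'Ticket'
    # prefetch `title` and `priority`, and sort fetched entities on `priority`
    # descending instead of the first listed attribute ascending
    fetch_attrs, cw_fetch_order = fetch_config(['title', 'priority'],
                                               mainattr='priority',
                                               order='DESC')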
+"""data source related entities""" + +__docformat__ = "restructuredtext en" + +import re +from socket import gethostname +import logging + +from logilab.common.textutils import text_to_dict +from logilab.common.configuration import OptionError +from logilab.mtconverter import xml_escape + +from cubicweb.entities import AnyEntity, fetch_config + +class _CWSourceCfgMixIn(object): + @property + def dictconfig(self): + return self.config and text_to_dict(self.config) or {} + + def update_config(self, skip_unknown=False, **config): + from cubicweb.server import SOURCE_TYPES + from cubicweb.server.serverconfig import (SourceConfiguration, + generate_source_config) + cfg = self.dictconfig + cfg.update(config) + options = SOURCE_TYPES[self.type].options + sconfig = SourceConfiguration(self._cw.vreg.config, options=options) + for opt, val in cfg.items(): + try: + sconfig.set_option(opt, val) + except OptionError: + if skip_unknown: + continue + raise + cfgstr = unicode(generate_source_config(sconfig), self._cw.encoding) + self.cw_set(config=cfgstr) + + +class CWSource(_CWSourceCfgMixIn, AnyEntity): + __regid__ = 'CWSource' + fetch_attrs, cw_fetch_order = fetch_config(['name', 'type']) + + @property + def host_config(self): + dictconfig = self.dictconfig + host = gethostname() + for hostcfg in self.host_configs: + if hostcfg.match(host): + self.info('matching host config %s for source %s', + hostcfg.match_host, self.name) + dictconfig.update(hostcfg.dictconfig) + return dictconfig + + @property + def host_configs(self): + return self.reverse_cw_host_config_of + + def init_mapping(self, mapping): + for key, options in mapping: + if isinstance(key, tuple): # relation definition + assert len(key) == 3 + restrictions = ['X relation_type RT, RT name %(rt)s'] + kwargs = {'rt': key[1]} + if key[0] != '*': + restrictions.append('X from_entity FT, FT name %(ft)s') + kwargs['ft'] = key[0] + if key[2] != '*': + restrictions.append('X to_entity TT, TT name %(tt)s') + kwargs['tt'] = key[2] + rql = 'Any X WHERE %s' % ','.join(restrictions) + schemarset = self._cw.execute(rql, kwargs) + elif key[0].isupper(): # entity type + schemarset = self._cw.execute('CWEType X WHERE X name %(et)s', + {'et': key}) + else: # relation type + schemarset = self._cw.execute('CWRType X WHERE X name %(rt)s', + {'rt': key}) + for schemaentity in schemarset.entities(): + self._cw.create_entity('CWSourceSchemaConfig', + cw_for_source=self, + cw_schema=schemaentity, + options=options) + + @property + def repo_source(self): + """repository only property, not available from the web side (eg + self._cw is expected to be a server session) + """ + return self._cw.repo.sources_by_eid[self.eid] + + +class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity): + __regid__ = 'CWSourceHostConfig' + fetch_attrs, cw_fetch_order = fetch_config(['match_host', 'config']) + + @property + def cwsource(self): + return self.cw_host_config_of[0] + + def match(self, hostname): + return re.match(self.match_host, hostname) + + +class CWSourceSchemaConfig(AnyEntity): + __regid__ = 'CWSourceSchemaConfig' + fetch_attrs, cw_fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options']) + + def dc_title(self): + return self._cw._(self.cw_etype) + ' #%s' % self.eid + + @property + def schema(self): + return self.cw_schema[0] + + @property + def cwsource(self): + return self.cw_for_source[0] + + +class CWDataImport(AnyEntity): + __regid__ = 'CWDataImport' + repo_source = _logs = None # please pylint + + def init(self): + self._logs = [] + self.repo_source = 
self.cwsource.repo_source + + def dc_title(self): + return '%s [%s]' % (self.printable_value('start_timestamp'), + self.printable_value('status')) + + @property + def cwsource(self): + return self.cw_import_of[0] + + def record_debug(self, msg, path=None, line=None): + self._log(logging.DEBUG, msg, path, line) + self.repo_source.debug(msg) + + def record_info(self, msg, path=None, line=None): + self._log(logging.INFO, msg, path, line) + self.repo_source.info(msg) + + def record_warning(self, msg, path=None, line=None): + self._log(logging.WARNING, msg, path, line) + self.repo_source.warning(msg) + + def record_error(self, msg, path=None, line=None): + self._status = u'failed' + self._log(logging.ERROR, msg, path, line) + self.repo_source.error(msg) + + def record_fatal(self, msg, path=None, line=None): + self._status = u'failed' + self._log(logging.FATAL, msg, path, line) + self.repo_source.fatal(msg) + + def _log(self, severity, msg, path=None, line=None): + encodedmsg = u'%s\t%s\t%s\t%s
' % (severity, path or u'', + line or u'', xml_escape(msg)) + self._logs.append(encodedmsg) + + def write_log(self, session, **kwargs): + if 'status' not in kwargs: + kwargs['status'] = getattr(self, '_status', u'success') + self.cw_set(log=u'
'.join(self._logs), **kwargs) + self._logs = [] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/test/data/migration/postcreate.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/test/data/migration/postcreate.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,19 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +wf = add_workflow(u'bmk wf', 'Bookmark') +wf.add_state(u'hop', initial=True) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/test/data/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,37 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""entities tests schema""" + +from yams.buildobjs import EntityType, String, RichString, Int +from cubicweb.schema import make_workflowable + +class Company(EntityType): + order = Int() + name = String() + description = RichString() + +class Division(Company): + __specializes_schema__ = True + +class SubDivision(Division): + __specializes_schema__ = True + + +from cubicweb.schemas import bootstrap, Bookmark +make_workflowable(bootstrap.CWGroup) +make_workflowable(Bookmark.Bookmark) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +docutils diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/test/unittest_base.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/test/unittest_base.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for cubicweb.entities.base module +""" + +from logilab.common.testlib import unittest_main +from logilab.common.decorators import clear_cache +from logilab.common.registry import yes + +from cubicweb.devtools.testlib import CubicWebTC + +from cubicweb.entities import AnyEntity + + +class BaseEntityTC(CubicWebTC): + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.membereid = self.create_user(cnx, 'member').eid + cnx.commit() + + +class MetadataTC(BaseEntityTC): + + def test_creator(self): + with self.new_access('member').repo_cnx() as cnx: + entity = cnx.create_entity('Bookmark', title=u"hello", path=u'project/cubicweb') + cnx.commit() + self.assertEqual(entity.creator.eid, self.membereid) + self.assertEqual(entity.dc_creator(), u'member') + + def test_type(self): + # dc_type may be translated + with self.admin_access.client_cnx() as cnx: + member = cnx.entity_from_eid(self.membereid) + self.assertEqual(member.dc_type(), 'CWUser') + + def test_cw_etype(self): + # cw_etype is never translated + with self.admin_access.client_cnx() as cnx: + member = cnx.entity_from_eid(self.membereid) + self.assertEqual(member.cw_etype, 'CWUser') + + def test_entity_meta_attributes(self): + # XXX move to yams + self.assertEqual(self.schema['CWUser'].meta_attributes(), {}) + self.assertEqual(dict((str(k), v) + for k, v in self.schema['State'].meta_attributes().items()), + {'description_format': ('format', 'description')}) + + def test_fti_rql_method(self): + class EmailAddress(AnyEntity): + __regid__ = 'EmailAddress' + __select__ = AnyEntity.__select__ & yes(2) + + @classmethod + def cw_fti_index_rql_queries(cls, req): + return ['EmailAddress Y'] + + with self.admin_access.web_request() as req: + req.create_entity('EmailAddress', address=u'foo@bar.com') + eclass = self.vreg['etypes'].etype_class('EmailAddress') + # deprecated + self.assertEqual(['Any X, ADDRESS, ALIAS WHERE X is EmailAddress, ' + 'X address ADDRESS, X alias ALIAS'], + eclass.cw_fti_index_rql_queries(req)) + + self.assertEqual(['Any X, ADDRESS, ALIAS ORDERBY X LIMIT 1000 WHERE X is EmailAddress, ' + 'X address ADDRESS, X alias ALIAS, X eid > 0'], + [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)]) + + # test backwards compatibility with custom method + with self.temporary_appobjects(EmailAddress): + self.vreg['etypes'].clear_caches() + eclass = self.vreg['etypes'].etype_class('EmailAddress') + self.assertEqual(['EmailAddress Y'], + [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)]) + + +class EmailAddressTC(BaseEntityTC): + + def test_canonical_form(self): + with self.admin_access.repo_cnx() as cnx: + email1 = cnx.execute('INSERT EmailAddress X: ' + 'X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) + email2 = cnx.execute('INSERT EmailAddress X: ' + 'X address "maarten@philips.com"').get_entity(0, 0) + email3 = cnx.execute('INSERT EmailAddress X: ' + 'X 
address "toto@logilab.fr"').get_entity(0, 0) + email1.cw_set(prefered_form=email2) + self.assertEqual(email1.prefered.eid, email2.eid) + self.assertEqual(email2.prefered.eid, email2.eid) + self.assertEqual(email3.prefered.eid, email3.eid) + + def test_mangling(self): + query = 'INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"' + with self.admin_access.repo_cnx() as cnx: + email = cnx.execute(query).get_entity(0, 0) + self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com') + self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com') + self.vreg.config.global_set_option('mangle-emails', True) + try: + self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com') + self.assertEqual(email.printable_value('address'), + 'maarten.ter.huurne at philips dot com') + email = cnx.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0) + self.assertEqual(email.display_address(), 'syt') + self.assertEqual(email.printable_value('address'), 'syt') + finally: + self.vreg.config.global_set_option('mangle-emails', False) + + def test_printable_value_escape(self): + with self.admin_access.repo_cnx() as cnx: + email = cnx.execute('INSERT EmailAddress X: ' + 'X address "maarten&ter@philips.com"').get_entity(0, 0) + self.assertEqual(email.printable_value('address'), + 'maarten&ter@philips.com') + self.assertEqual(email.printable_value('address', format='text/plain'), + 'maarten&ter@philips.com') + + +class CWUserTC(BaseEntityTC): + + def test_complete(self): + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + e.complete() + + def test_matching_groups(self): + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + self.assertTrue(e.matching_groups('managers')) + self.assertFalse(e.matching_groups('xyz')) + self.assertTrue(e.matching_groups(('xyz', 'managers'))) + self.assertFalse(e.matching_groups(('xyz', 'abcd'))) + + def test_dc_title_and_name(self): + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), 'member') + e.cw_set(firstname=u'bouah') + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), u'bouah') + e.cw_set(surname=u'lôt') + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), u'bouah lôt') + + def test_falsey_dc_title(self): + with self.admin_access.repo_cnx() as cnx: + e = cnx.create_entity('Company', order=0, name=u'pythonian') + cnx.commit() + self.assertEqual(u'0', e.dc_title()) + + def test_allowed_massmail_keys(self): + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) + # Bytes/Password attributes should be omitted + self.assertEqual( + e.cw_adapt_to('IEmailable').allowed_massmail_keys(), + set(('surname', 'firstname', 'login', 'last_login_time', + 'creation_date', 'modification_date', 'cwuri', 'eid')) + ) + + def test_cw_instantiate_object_relation(self): + """ a weird non regression test """ + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) + cnx.create_entity('CWGroup', name=u'logilab', reverse_in_group=e) + + +class HTMLtransformTC(BaseEntityTC): + + def test_sanitized_html(self): + with self.admin_access.repo_cnx() as cnx: + c = cnx.create_entity('Company', name=u'Babar', + 
description=u""" +Title +===== + +Elephant management best practices. + +.. raw:: html + + +""", description_format=u'text/rest') + cnx.commit() + c.cw_clear_all_caches() + self.assertIn('alert', + c.printable_value('description', format='text/plain')) + self.assertNotIn('alert', + c.printable_value('description', format='text/html')) + + +class SpecializedEntityClassesTC(CubicWebTC): + + def select_eclass(self, etype): + # clear selector cache + clear_cache(self.vreg['etypes'], 'etype_class') + return self.vreg['etypes'].etype_class(etype) + + def test_etype_class_selection_and_specialization(self): + # no specific class for Subdivisions, the default one should be selected + eclass = self.select_eclass('SubDivision') + self.assertTrue(eclass.__autogenerated__) + # self.assertEqual(eclass.__bases__, (AnyEntity,)) + # build class from most generic to most specific and make + # sure the most specific is always selected + self.vreg._loadedmods[__name__] = {} + for etype in ('Company', 'Division', 'SubDivision'): + class Foo(AnyEntity): + __regid__ = etype + self.vreg.register(Foo) + eclass = self.select_eclass('SubDivision') + self.assertTrue(eclass.__autogenerated__) + self.assertFalse(eclass is Foo) + if etype == 'SubDivision': + self.assertEqual(eclass.__bases__, (Foo,)) + else: + self.assertEqual(eclass.__bases__[0].__bases__, (Foo,)) + # check Division eclass is still selected for plain Division entities + eclass = self.select_eclass('Division') + self.assertEqual(eclass.cw_etype, 'Division') + + +class ISerializableTC(CubicWebTC): + + def test_serialization(self): + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('CWGroup', name=u'tmp') + cnx.commit() + serializer = entity.cw_adapt_to('ISerializable') + expected = { + 'cw_etype': u'CWGroup', + 'cw_source': 'system', + 'eid': entity.eid, + 'cwuri': u'http://testing.fr/cubicweb/%s' % entity.eid, + 'creation_date': entity.creation_date, + 'modification_date': entity.modification_date, + 'name': u'tmp', + } + self.assertEqual(serializer.serialize(), expected) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/test/unittest_wfobjs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/test/unittest_wfobjs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,705 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from cubicweb import ValidationError +from cubicweb.devtools.testlib import CubicWebTC + +def add_wf(shell, etype, name=None, default=False): + if name is None: + name = etype + return shell.add_workflow(name, etype, default=default, + ensure_workflowable=False) + +def parse_hist(wfhist): + return [(ti.previous_state.name, ti.new_state.name, + ti.transition and ti.transition.name, ti.comment) + for ti in wfhist] + + +class WorkflowBuildingTC(CubicWebTC): + + def test_wf_construction(self): + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + foo = wf.add_state(u'foo', initial=True) + bar = wf.add_state(u'bar') + self.assertEqual(wf.state_by_name('bar').eid, bar.eid) + self.assertEqual(wf.state_by_name('barrr'), None) + baz = wf.add_transition(u'baz', (foo,), bar, ('managers',)) + self.assertEqual(wf.transition_by_name('baz').eid, baz.eid) + self.assertEqual(len(baz.require_group), 1) + self.assertEqual(baz.require_group[0].name, 'managers') + + def test_duplicated_state(self): + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + wf.add_state(u'foo', initial=True) + shell.commit() + with self.assertRaises(ValidationError) as cm: + wf.add_state(u'foo') + self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', + 'state_of': u'%(KEY-rtype)s is part of violated unicity constraint', + '': u'some relations violate a unicity constraint'}, + cm.exception.errors) + shell.rollback() + # no pb if not in the same workflow + wf2 = add_wf(shell, 'Company') + foo = wf2.add_state(u'foo', initial=True) + shell.commit() + # gnark gnark + bar = wf.add_state(u'bar') + shell.commit() + with self.assertRaises(ValidationError) as cm: + bar.cw_set(name=u'foo') + shell.rollback() + self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', + 'state_of': u'%(KEY-rtype)s is part of violated unicity constraint', + '': u'some relations violate a unicity constraint'}, + cm.exception.errors) + + def test_duplicated_transition(self): + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + foo = wf.add_state(u'foo', initial=True) + bar = wf.add_state(u'bar') + wf.add_transition(u'baz', (foo,), bar, ('managers',)) + with self.assertRaises(ValidationError) as cm: + wf.add_transition(u'baz', (bar,), foo) + self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', + 'transition_of': u'%(KEY-rtype)s is part of violated unicity constraint', + '': u'some relations violate a unicity constraint'}, + cm.exception.errors) + shell.rollback() + # no pb if not in the same workflow + wf2 = add_wf(shell, 'Company') + foo = wf2.add_state(u'foo', initial=True) + bar = wf2.add_state(u'bar') + wf2.add_transition(u'baz', (foo,), bar, ('managers',)) + shell.commit() + # gnark gnark + biz = wf2.add_transition(u'biz', (bar,), foo) + shell.commit() + with self.assertRaises(ValidationError) as cm: + biz.cw_set(name=u'baz') + shell.rollback() + self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', + 'transition_of': u'%(KEY-rtype)s is part of violated unicity constraint', + '': u'some relations violate a unicity constraint'}, + cm.exception.errors) + + +class WorkflowTC(CubicWebTC): + + def setup_database(self): + rschema = self.schema['in_state'] + for rdef in rschema.rdefs.values(): + self.assertEqual(rdef.cardinality, '1*') + with self.admin_access.client_cnx() as cnx: + self.member_eid = self.create_user(cnx, 'member').eid + cnx.commit() + + def test_workflow_base(self): 
+ with self.admin_access.web_request() as req: + e = self.create_user(req, 'toto') + iworkflowable = e.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'activated') + iworkflowable.change_state('deactivated', u'deactivate 1') + req.cnx.commit() + iworkflowable.change_state('activated', u'activate 1') + req.cnx.commit() + iworkflowable.change_state('deactivated', u'deactivate 2') + req.cnx.commit() + e.cw_clear_relation_cache('wf_info_for', 'object') + self.assertEqual([tr.comment for tr in e.reverse_wf_info_for], + ['deactivate 1', 'activate 1', 'deactivate 2']) + self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2') + + def test_possible_transitions(self): + with self.admin_access.web_request() as req: + user = req.execute('CWUser X').get_entity(0, 0) + iworkflowable = user.cw_adapt_to('IWorkflowable') + trs = list(iworkflowable.possible_transitions()) + self.assertEqual(len(trs), 1) + self.assertEqual(trs[0].name, u'deactivate') + self.assertEqual(trs[0].destination(None).name, u'deactivated') + # test a std user get no possible transition + with self.new_access('member').web_request() as req: + # fetch the entity using the new session + trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions()) + self.assertEqual(len(trs), 0) + + def _test_manager_deactivate(self, user): + iworkflowable = user.cw_adapt_to('IWorkflowable') + user.cw_clear_relation_cache('in_state', 'subject') + self.assertEqual(len(user.in_state), 1) + self.assertEqual(iworkflowable.state, 'deactivated') + trinfo = iworkflowable.latest_trinfo() + self.assertEqual(trinfo.previous_state.name, 'activated') + self.assertEqual(trinfo.new_state.name, 'deactivated') + self.assertEqual(trinfo.comment, 'deactivate user') + self.assertEqual(trinfo.comment_format, 'text/plain') + return trinfo + + def test_change_state(self): + with self.admin_access.client_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.change_state('deactivated', comment=u'deactivate user') + trinfo = self._test_manager_deactivate(user) + self.assertEqual(trinfo.transition, None) + + def test_set_in_state_bad_wf(self): + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + s = wf.add_state(u'foo', initial=True) + shell.commit() + with self.admin_access.repo_cnx() as cnx: + with cnx.security_enabled(write=False): + with self.assertRaises(ValidationError) as cm: + cnx.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', + {'x': cnx.user.eid, 's': s.eid}) + self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. 
" + "You may want to set a custom workflow for this entity first."}) + + def test_fire_transition(self): + with self.admin_access.client_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate', comment=u'deactivate user') + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'deactivated') + self._test_manager_deactivate(user) + trinfo = self._test_manager_deactivate(user) + self.assertEqual(trinfo.transition.name, 'deactivate') + + def test_goback_transition(self): + with self.admin_access.web_request() as req: + wf = req.user.cw_adapt_to('IWorkflowable').current_workflow + asleep = wf.add_state('asleep') + wf.add_transition('rest', (wf.state_by_name('activated'), + wf.state_by_name('deactivated')), + asleep) + wf.add_transition('wake up', asleep) + user = self.create_user(req, 'stduser') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('rest') + req.cnx.commit() + iworkflowable.fire_transition('wake up') + req.cnx.commit() + self.assertEqual(iworkflowable.state, 'activated') + iworkflowable.fire_transition('deactivate') + req.cnx.commit() + iworkflowable.fire_transition('rest') + req.cnx.commit() + iworkflowable.fire_transition('wake up') + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'deactivated') + + # XXX test managers can change state without matching transition + + def _test_stduser_deactivate(self): + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'tutu') + with self.new_access('tutu').web_request() as req: + iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable') + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('deactivate') + self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) + with self.new_access('member').web_request() as req: + iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + req.cnx.commit() + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('activate') + self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) + + def test_fire_transition_owned_by(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' + 'X expression "X owned_by U", T condition X ' + 'WHERE T name "deactivate"') + cnx.commit() + self._test_stduser_deactivate() + + def test_fire_transition_has_update_perm(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' + 'X expression "U has_update_permission X", T condition X ' + 'WHERE T name "deactivate"') + cnx.commit() + self._test_stduser_deactivate() + + def test_swf_base(self): + """subworkflow + + +-----------+ tr1 +-----------+ + | swfstate1 | ------>| swfstate2 | + +-----------+ +-----------+ + | tr2 +-----------+ + `------>| swfstate3 | + +-----------+ + + main workflow + + +--------+ swftr1 +--------+ + | state1 | -------[swfstate2]->| state2 | + +--------+ | +--------+ + | +--------+ + `-[swfstate3]-->| state3 | + +--------+ + """ + # sub-workflow + with self.admin_access.shell() as shell: + swf = add_wf(shell, 'CWGroup', name='subworkflow') + swfstate1 = swf.add_state(u'swfstate1', initial=True) + swfstate2 = swf.add_state(u'swfstate2') + swfstate3 = swf.add_state(u'swfstate3') + tr1 = 
swf.add_transition(u'tr1', (swfstate1,), swfstate2) + tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3) + # main workflow + mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True) + state1 = mwf.add_state(u'state1', initial=True) + state2 = mwf.add_state(u'state2') + state3 = mwf.add_state(u'state3') + swftr1 = mwf.add_wftransition(u'swftr1', swf, state1, + [(swfstate2, state2), (swfstate3, state3)]) + swf.cw_clear_all_caches() + self.assertEqual(swftr1.destination(None).eid, swfstate1.eid) + # workflows built, begin test + with self.admin_access.web_request() as req: + group = req.create_entity('CWGroup', name=u'grp1') + req.cnx.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.current_state.eid, state1.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition(), None) + iworkflowable.fire_transition('swftr1', u'go') + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid) + self.assertEqual(iworkflowable.current_workflow.eid, swf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) + iworkflowable.fire_transition('tr1', u'go') + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, state2.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition(), None) + # force back to swfstate1 is impossible since we can't any more find + # subworkflow input transition + with self.assertRaises(ValidationError) as cm: + iworkflowable.change_state(swfstate1, u'gadget') + self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) + req.cnx.rollback() + # force back to state1 + iworkflowable.change_state('state1', u'gadget') + iworkflowable.fire_transition('swftr1', u'au') + group.cw_clear_all_caches() + iworkflowable.fire_transition('tr2', u'chapeau') + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, state3.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertListEqual(parse_hist(iworkflowable.workflow_history), + [('state1', 'swfstate1', 'swftr1', 'go'), + ('swfstate1', 'swfstate2', 'tr1', 'go'), + ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), + ('state2', 'state1', None, 'gadget'), + ('state1', 'swfstate1', 'swftr1', 'au'), + ('swfstate1', 'swfstate3', 'tr2', 'chapeau'), + ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'), + ]) + + def test_swf_exit_consistency(self): + with self.admin_access.shell() as shell: + # sub-workflow + swf = add_wf(shell, 'CWGroup', name='subworkflow') + swfstate1 = swf.add_state(u'swfstate1', initial=True) + swfstate2 = swf.add_state(u'swfstate2') + tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2) + # main workflow + mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True) + state1 = mwf.add_state(u'state1', initial=True) + state2 = mwf.add_state(u'state2') + state3 = mwf.add_state(u'state3') + mwf.add_wftransition(u'swftr1', swf, state1, + [(swfstate2, state2), (swfstate2, state3)]) + with 
self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"}) + + def test_swf_fire_in_a_row(self): + with self.admin_access.shell() as shell: + # sub-workflow + subwf = add_wf(shell, 'CWGroup', name='subworkflow') + xsigning = subwf.add_state('xsigning', initial=True) + xaborted = subwf.add_state('xaborted') + xsigned = subwf.add_state('xsigned') + xabort = subwf.add_transition('xabort', (xsigning,), xaborted) + xsign = subwf.add_transition('xsign', (xsigning,), xsigning) + xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned, + type=u'auto') + # main workflow + twf = add_wf(shell, 'CWGroup', name='mainwf', default=True) + created = twf.add_state(_('created'), initial=True) + identified = twf.add_state(_('identified')) + released = twf.add_state(_('released')) + closed = twf.add_state(_('closed')) + twf.add_wftransition(_('identify'), subwf, (created,), + [(xsigned, identified), (xaborted, created)]) + twf.add_wftransition(_('release'), subwf, (identified,), + [(xsigned, released), (xaborted, identified)]) + twf.add_wftransition(_('close'), subwf, (released,), + [(xsigned, closed), (xaborted, released)]) + shell.commit() + with self.admin_access.repo_cnx() as cnx: + group = cnx.create_entity('CWGroup', name=u'grp1') + cnx.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') + for trans in ('identify', 'release', 'close'): + iworkflowable.fire_transition(trans) + cnx.commit() + + + def test_swf_magic_tr(self): + with self.admin_access.shell() as shell: + # sub-workflow + subwf = add_wf(shell, 'CWGroup', name='subworkflow') + xsigning = subwf.add_state('xsigning', initial=True) + xaborted = subwf.add_state('xaborted') + xsigned = subwf.add_state('xsigned') + xabort = subwf.add_transition('xabort', (xsigning,), xaborted) + xsign = subwf.add_transition('xsign', (xsigning,), xsigned) + # main workflow + twf = add_wf(shell, 'CWGroup', name='mainwf', default=True) + created = twf.add_state(_('created'), initial=True) + identified = twf.add_state(_('identified')) + released = twf.add_state(_('released')) + twf.add_wftransition(_('identify'), subwf, created, + [(xaborted, None), (xsigned, identified)]) + twf.add_wftransition(_('release'), subwf, identified, + [(xaborted, None)]) + shell.commit() + with self.admin_access.web_request() as req: + group = req.create_entity('CWGroup', name=u'grp1') + req.cnx.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') + for trans, nextstate in (('identify', 'xsigning'), + ('xabort', 'created'), + ('identify', 'xsigning'), + ('xsign', 'identified'), + ('release', 'xsigning'), + ('xabort', 'identified') + ): + iworkflowable.fire_transition(trans) + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, nextstate) + + def test_replace_state(self): + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWGroup', name='groupwf', default=True) + s_new = wf.add_state('new', initial=True) + s_state1 = wf.add_state('state1') + wf.add_transition('tr', (s_new,), s_state1) + shell.commit() + + with self.admin_access.repo_cnx() as cnx: + group = cnx.create_entity('CWGroup', name=u'grp1') + cnx.commit() + + iwf = group.cw_adapt_to('IWorkflowable') + iwf.fire_transition('tr') + cnx.commit() + group.cw_clear_all_caches() + + wf = cnx.entity_from_eid(wf.eid) + wf.add_state('state2') + with cnx.security_enabled(write=False): + wf.replace_state('state1', 'state2') + cnx.commit() + + 
self.assertEqual(iwf.state, 'state2') + self.assertEqual(iwf.latest_trinfo().to_state[0].name, 'state2') + + +class CustomWorkflowTC(CubicWebTC): + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.member_eid = self.create_user(cnx, 'member').eid + + def test_custom_wf_replace_state_no_history(self): + """member in inital state with no previous history, state is simply + redirected when changing workflow + """ + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep', initial=True) + with self.admin_access.web_request() as req: + req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'activated') # no change before commit + req.cnx.commit() + member.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual(iworkflowable.workflow_history, ()) + + def test_custom_wf_replace_state_keep_history(self): + """member in inital state with some history, state is redirected and + state change is recorded to history + """ + with self.admin_access.web_request() as req: + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + iworkflowable.fire_transition('activate') + req.cnx.commit() + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep', initial=True) + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + with self.admin_access.web_request() as req: + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('activated', 'deactivated', 'deactivate', None), + ('deactivated', 'activated', 'activate', None), + ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) + + def test_custom_wf_no_initial_state(self): + """try to set a custom workflow which has no initial state""" + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep') + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'}) + + def test_custom_wf_bad_etype(self): + """try to set a custom workflow which doesn't apply to entity type""" + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + wf.add_state('asleep', initial=True) + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"}) + + def test_del_custom_wf(self): + """member in some state shared by the new workflow, nothing has to be + done + """ + with self.admin_access.web_request() as req: + member = req.entity_from_eid(self.member_eid) + iworkflowable = 
member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + req.cnx.commit() + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep', initial=True) + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + shell.commit() + with self.admin_access.web_request() as req: + req.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'asleep')# no change before commit + req.cnx.commit() + member.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_workflow.name, "default user workflow") + self.assertEqual(iworkflowable.state, 'activated') + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('activated', 'deactivated', 'deactivate', None), + ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), + ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) + + +class AutoTransitionTC(CubicWebTC): + + def setup_custom_wf(self): + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + asleep = wf.add_state('asleep', initial=True) + dead = wf.add_state('dead') + wf.add_transition('rest', asleep, asleep) + wf.add_transition('sick', asleep, dead, type=u'auto', + conditions=({'expr': u'X surname "toto"', + 'mainvars': u'X'},)) + return wf + + def test_auto_transition_fired(self): + wf = self.setup_custom_wf() + with self.admin_access.web_request() as req: + user = self.create_user(req, 'member') + iworkflowable = user.cw_adapt_to('IWorkflowable') + req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': user.eid}) + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual([t.name for t in iworkflowable.possible_transitions()], + ['rest']) + iworkflowable.fire_transition('rest') + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual([t.name for t in iworkflowable.possible_transitions()], + ['rest']) + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('asleep', 'asleep', 'rest', None)]) + user.cw_set(surname=u'toto') # fulfill condition + req.cnx.commit() + iworkflowable.fire_transition('rest') + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'dead') + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('asleep', 'asleep', 'rest', None), + ('asleep', 'asleep', 'rest', None), + ('asleep', 'dead', 'sick', None),]) + + def test_auto_transition_custom_initial_state_fired(self): + wf = self.setup_custom_wf() + with self.admin_access.web_request() as req: + user = self.create_user(req, 'member', surname=u'toto') + req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': user.eid}) + req.cnx.commit() + user.cw_clear_all_caches() + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'dead') + + def test_auto_transition_initial_state_fired(self): + with self.admin_access.web_request() as req: + wf = req.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0) + dead = wf.add_state('dead') + wf.add_transition('sick', wf.state_by_name('activated'), dead, + type=u'auto', 
conditions=({'expr': u'X surname "toto"', + 'mainvars': u'X'},)) + req.cnx.commit() + with self.admin_access.web_request() as req: + user = self.create_user(req, 'member', surname=u'toto') + req.cnx.commit() + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'dead') + + +class WorkflowHooksTC(CubicWebTC): + + def setUp(self): + CubicWebTC.setUp(self) + with self.admin_access.web_request() as req: + self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow + self.s_activated = self.wf.state_by_name('activated').eid + self.s_deactivated = self.wf.state_by_name('deactivated').eid + self.s_dummy = self.wf.add_state(u'dummy').eid + self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy) + ueid = self.create_user(req, 'stduser', commit=False).eid + # test initial state is set + rset = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', + {'x' : ueid}) + self.assertFalse(rset, rset.rows) + req.cnx.commit() + initialstate = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', + {'x' : ueid})[0][0] + self.assertEqual(initialstate, u'activated') + # give access to users group on the user's wf transitions + # so we can test wf enforcing on euser (managers don't have anymore this + # enforcement + req.execute('SET X require_group G ' + 'WHERE G name "users", X transition_of WF, WF eid %(wf)s', + {'wf': self.wf.eid}) + req.cnx.commit() + + # XXX currently, we've to rely on hooks to set initial state, or to use execute + # def test_initial_state(self): + # cnx = self.login('stduser') + # cu = cnx.cursor() + # self.assertRaises(ValidationError, cu.execute, + # 'INSERT CWUser X: X login "badaboum", X upassword %(pwd)s, ' + # 'X in_state S WHERE S name "deactivated"', {'pwd': 'oops'}) + # cnx.close() + # # though managers can do whatever he want + # self.execute('INSERT CWUser X: X login "badaboum", X upassword %(pwd)s, ' + # 'X in_state S, X in_group G WHERE S name "deactivated", G name "users"', {'pwd': 'oops'}) + # self.commit() + + # test that the workflow is correctly enforced + + def _cleanup_msg(self, msg): + """remove the variable part of one specific error message""" + lmsg = msg.split() + lmsg.pop(1) + lmsg.pop() + return ' '.join(lmsg) + + def test_transition_checking1(self): + with self.new_access('stduser').repo_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('activate') + self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), + u"transition isn't allowed from") + + def test_transition_checking2(self): + with self.new_access('stduser').repo_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('dummy') + self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), + u"transition isn't allowed from") + + def test_transition_checking3(self): + with self.new_access('stduser').repo_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + cnx.commit() + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('deactivate') + self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), + u"transition isn't allowed from") + cnx.rollback() + # get back now + iworkflowable.fire_transition('activate') + cnx.commit() + + +if __name__ == 
'__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entities/wfobjs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entities/wfobjs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,589 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""workflow handling: + +* entity types defining workflow (Workflow, State, Transition...) +* workflow history (TrInfo) +* adapter for workflowable entities (IWorkflowableAdapter) +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from six import text_type, string_types + +from logilab.common.decorators import cached, clear_cache +from logilab.common.deprecation import deprecated + +from cubicweb.entities import AnyEntity, fetch_config +from cubicweb.view import EntityAdapter +from cubicweb.predicates import relation_possible + + +try: + from cubicweb import server +except ImportError: + # We need to lookup DEBUG from there, + # however a pure dbapi client may not have it. 
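For illustration, a minimal sketch (not part of this changeset) of how the workflow construction API defined below is typically driven from a migration script; the `add_workflow` command, the 'Ticket' entity type and all other names are assumptions:

    # hypothetical migration snippet: build a two-state workflow for a 'Ticket' type
    wf = add_workflow(u'ticket workflow', 'Ticket')
    opened = wf.add_state(u'opened', initial=True)
    closed = wf.add_state(u'closed')
    # only members of the 'managers' group may fire the transition
    wf.add_transition(u'close', (opened,), closed, requiredgroups=('managers',))
    commit()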
+ class server(object): pass + server.DEBUG = False + + +class WorkflowException(Exception): pass + +class Workflow(AnyEntity): + __regid__ = 'Workflow' + + @property + def initial(self): + """return the initial state for this workflow""" + return self.initial_state and self.initial_state[0] or None + + def is_default_workflow_of(self, etype): + """return True if this workflow is the default workflow for the given + entity type + """ + return any(et for et in self.reverse_default_workflow + if et.name == etype) + + def iter_workflows(self, _done=None): + """return an iterator on actual workflows, eg this workflow and its + subworkflows + """ + # infinite loop safety belt + if _done is None: + _done = set() + yield self + _done.add(self.eid) + for tr in self._cw.execute('Any T WHERE T is WorkflowTransition, ' + 'T transition_of WF, WF eid %(wf)s', + {'wf': self.eid}).entities(): + if tr.subwf.eid in _done: + continue + for subwf in tr.subwf.iter_workflows(_done): + yield subwf + + # state / transitions accessors ############################################ + + def state_by_name(self, statename): + rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, ' + 'S state_of WF, WF eid %(wf)s', + {'n': statename, 'wf': self.eid}) + if rset: + return rset.get_entity(0, 0) + return None + + def state_by_eid(self, eid): + rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, ' + 'S state_of WF, WF eid %(wf)s', + {'s': eid, 'wf': self.eid}) + if rset: + return rset.get_entity(0, 0) + return None + + def transition_by_name(self, trname): + rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, ' + 'T transition_of WF, WF eid %(wf)s', + {'n': text_type(trname), 'wf': self.eid}) + if rset: + return rset.get_entity(0, 0) + return None + + def transition_by_eid(self, eid): + rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, ' + 'T transition_of WF, WF eid %(wf)s', + {'t': eid, 'wf': self.eid}) + if rset: + return rset.get_entity(0, 0) + return None + + # wf construction methods ################################################## + + def add_state(self, name, initial=False, **kwargs): + """add a state to this workflow""" + state = self._cw.create_entity('State', name=text_type(name), **kwargs) + self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', + {'s': state.eid, 'wf': self.eid}) + if initial: + assert not self.initial, "Initial state already defined as %s" % self.initial + self._cw.execute('SET WF initial_state S ' + 'WHERE S eid %(s)s, WF eid %(wf)s', + {'s': state.eid, 'wf': self.eid}) + return state + + def _add_transition(self, trtype, name, fromstates, + requiredgroups=(), conditions=(), **kwargs): + tr = self._cw.create_entity(trtype, name=text_type(name), **kwargs) + self._cw.execute('SET T transition_of WF ' + 'WHERE T eid %(t)s, WF eid %(wf)s', + {'t': tr.eid, 'wf': self.eid}) + assert fromstates, fromstates + if not isinstance(fromstates, (tuple, list)): + fromstates = (fromstates,) + for state in fromstates: + if hasattr(state, 'eid'): + state = state.eid + self._cw.execute('SET S allowed_transition T ' + 'WHERE S eid %(s)s, T eid %(t)s', + {'s': state, 't': tr.eid}) + tr.set_permissions(requiredgroups, conditions, reset=False) + return tr + + def add_transition(self, name, fromstates, tostate=None, + requiredgroups=(), conditions=(), **kwargs): + """add a transition to this workflow from some state(s) to another""" + tr = self._add_transition('Transition', name, fromstates, + requiredgroups, conditions, **kwargs) + if tostate is 
not None: + if hasattr(tostate, 'eid'): + tostate = tostate.eid + self._cw.execute('SET T destination_state S ' + 'WHERE S eid %(s)s, T eid %(t)s', + {'t': tr.eid, 's': tostate}) + return tr + + def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(), + requiredgroups=(), conditions=(), **kwargs): + """add a workflow transition to this workflow""" + tr = self._add_transition('WorkflowTransition', name, fromstates, + requiredgroups, conditions, **kwargs) + if hasattr(subworkflow, 'eid'): + subworkflow = subworkflow.eid + assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s', + {'t': tr.eid, 'wf': subworkflow}) + for fromstate, tostate in exitpoints: + tr.add_exit_point(fromstate, tostate) + return tr + + def replace_state(self, todelstate, replacement): + """migration convenience method""" + if not hasattr(todelstate, 'eid'): + todelstate = self.state_by_name(todelstate) + if not hasattr(replacement, 'eid'): + replacement = self.state_by_name(replacement) + args = {'os': todelstate.eid, 'ns': replacement.eid} + execute = self._cw.execute + execute('SET X in_state NS WHERE X in_state OS, ' + 'NS eid %(ns)s, OS eid %(os)s', args) + execute('SET X from_state NS WHERE X from_state OS, ' + 'OS eid %(os)s, NS eid %(ns)s', args) + execute('SET X to_state NS WHERE X to_state OS, ' + 'OS eid %(os)s, NS eid %(ns)s', args) + todelstate.cw_delete() + + +class BaseTransition(AnyEntity): + """customized class for abstract transition + + provides a specific may_be_fired method to check if the relation may be + fired by the logged user + """ + __regid__ = 'BaseTransition' + fetch_attrs, cw_fetch_order = fetch_config(['name', 'type']) + + def __init__(self, *args, **kwargs): + if self.cw_etype == 'BaseTransition': + raise WorkflowException('should not be instantiated') + super(BaseTransition, self).__init__(*args, **kwargs) + + @property + def workflow(self): + return self.transition_of[0] + + def has_input_state(self, state): + if hasattr(state, 'eid'): + state = state.eid + return any(s for s in self.reverse_allowed_transition if s.eid == state) + + def may_be_fired(self, eid): + """return true if the logged user may fire this transition + + `eid` is the eid of the object on which we may fire the transition + """ + DBG = False + if server.DEBUG & server.DBG_SEC: + if 'transition' in server._SECURITY_CAPS: + DBG = True + user = self._cw.user + # check user is at least in one of the required groups if any + groups = frozenset(g.name for g in self.require_group) + if groups: + matches = user.matching_groups(groups) + if matches: + if DBG: + print('may_be_fired: %r may fire: user matches %s' % (self.name, groups)) + return matches + if 'owners' in groups and user.owns(eid): + if DBG: + print('may_be_fired: %r may fire: user is owner' % self.name) + return True + # check one of the rql expression conditions matches if any + if self.condition: + if DBG: + print('my_be_fired: %r: %s' % + (self.name, [(rqlexpr.expression, + rqlexpr.check_expression(self._cw, eid)) + for rqlexpr in self.condition])) + for rqlexpr in self.condition: + if rqlexpr.check_expression(self._cw, eid): + return True + if self.condition or groups: + return False + return True + + def set_permissions(self, requiredgroups=(), conditions=(), reset=True): + """set or add (if `reset` is False) groups and conditions for this + transition + """ + if reset: + self._cw.execute('DELETE T require_group G WHERE T eid %(x)s', + {'x': self.eid}) + self._cw.execute('DELETE T condition R WHERE T eid %(x)s', + {'x': 
self.eid}) + for gname in requiredgroups: + rset = self._cw.execute('SET T require_group G ' + 'WHERE T eid %(x)s, G name %(gn)s', + {'x': self.eid, 'gn': text_type(gname)}) + assert rset, '%s is not a known group' % gname + if isinstance(conditions, string_types): + conditions = (conditions,) + for expr in conditions: + if isinstance(expr, string_types): + kwargs = {'expr': text_type(expr)} + else: + assert isinstance(expr, dict) + kwargs = expr + kwargs['x'] = self.eid + kwargs.setdefault('mainvars', u'X') + self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' + 'X expression %(expr)s, X mainvars %(mainvars)s, ' + 'T condition X WHERE T eid %(x)s', kwargs) + # XXX clear caches? + + +class Transition(BaseTransition): + """customized class for Transition entities""" + __regid__ = 'Transition' + + def dc_long_title(self): + return '%s (%s)' % (self.name, self._cw._(self.name)) + + def destination(self, entity): + try: + return self.destination_state[0] + except IndexError: + return entity.cw_adapt_to('IWorkflowable').latest_trinfo().previous_state + + def potential_destinations(self): + try: + yield self.destination_state[0] + except IndexError: + for incomingstate in self.reverse_allowed_transition: + for tr in incomingstate.reverse_destination_state: + for previousstate in tr.reverse_allowed_transition: + yield previousstate + + +class WorkflowTransition(BaseTransition): + """customized class for WorkflowTransition entities""" + __regid__ = 'WorkflowTransition' + + @property + def subwf(self): + return self.subworkflow[0] + + def destination(self, entity): + return self.subwf.initial + + def potential_destinations(self): + yield self.subwf.initial + + def add_exit_point(self, fromstate, tostate): + if hasattr(fromstate, 'eid'): + fromstate = fromstate.eid + if tostate is None: + self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' + 'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s', + {'t': self.eid, 'fs': fromstate}) + else: + if hasattr(tostate, 'eid'): + tostate = tostate.eid + self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' + 'X subworkflow_state FS, X destination_state TS ' + 'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s', + {'t': self.eid, 'fs': fromstate, 'ts': tostate}) + + def get_exit_point(self, entity, stateeid): + """if state is an exit point, return its associated destination state""" + if hasattr(stateeid, 'eid'): + stateeid = stateeid.eid + try: + tostateeid = self.exit_points()[stateeid] + except KeyError: + return None + if tostateeid is None: + # go back to state from which we've entered the subworkflow + return entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo().previous_state + return self._cw.entity_from_eid(tostateeid) + + @cached + def exit_points(self): + result = {} + for ep in self.subworkflow_exit: + result[ep.subwf_state.eid] = ep.destination and ep.destination.eid + return result + + def cw_clear_all_caches(self): + super(WorkflowTransition, self).cw_clear_all_caches() + clear_cache(self, 'exit_points') + + +class SubWorkflowExitPoint(AnyEntity): + """customized class for SubWorkflowExitPoint entities""" + __regid__ = 'SubWorkflowExitPoint' + + @property + def subwf_state(self): + return self.subworkflow_state[0] + + @property + def destination(self): + return self.destination_state and self.destination_state[0] or None + + +class State(AnyEntity): + """customized class for State entities""" + __regid__ = 'State' + fetch_attrs, cw_fetch_order = fetch_config(['name']) + 
rest_attr = 'eid' + + def dc_long_title(self): + return '%s (%s)' % (self.name, self._cw._(self.name)) + + @property + def workflow(self): + # take care, may be missing in multi-sources configuration + return self.state_of and self.state_of[0] or None + + +class TrInfo(AnyEntity): + """customized class for Transition information entities + """ + __regid__ = 'TrInfo' + fetch_attrs, cw_fetch_order = fetch_config(['creation_date', 'comment'], + pclass=None) # don't want modification_date + @property + def for_entity(self): + return self.wf_info_for[0] + + @property + def previous_state(self): + return self.from_state[0] + + @property + def new_state(self): + return self.to_state[0] + + @property + def transition(self): + return self.by_transition and self.by_transition[0] or None + + + +class IWorkflowableAdapter(EntityAdapter): + """base adapter providing workflow helper methods for workflowable entities. + """ + __regid__ = 'IWorkflowable' + __select__ = relation_possible('in_state') + + @cached + def cwetype_workflow(self): + """return the default workflow for entities of this type""" + # XXX CWEType method + wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': text_type(self.entity.cw_etype)}) + if wfrset: + return wfrset.get_entity(0, 0) + self.warning("can't find any workflow for %s", self.entity.cw_etype) + return None + + @property + def main_workflow(self): + """return current workflow applied to this entity""" + if self.entity.custom_workflow: + return self.entity.custom_workflow[0] + return self.cwetype_workflow() + + @property + def current_workflow(self): + """return current workflow applied to this entity""" + return self.current_state and self.current_state.workflow or self.main_workflow + + @property + def current_state(self): + """return current state entity""" + return self.entity.in_state and self.entity.in_state[0] or None + + @property + def state(self): + """return current state name""" + try: + return self.current_state.name + except AttributeError: + self.warning('entity %s has no state', self.entity) + return None + + @property + def printable_state(self): + """return current state name translated to context's language""" + state = self.current_state + if state: + return self._cw._(state.name) + return u'' + + @property + def workflow_history(self): + """return the workflow history for this entity (eg ordered list of + TrInfo entities) + """ + return self.entity.reverse_wf_info_for + + def latest_trinfo(self): + """return the latest transition information for this entity""" + try: + return self.workflow_history[-1] + except IndexError: + return None + + def possible_transitions(self, type='normal'): + """generates transition that MAY be fired for the given entity, + expected to be in this state + used only by the UI + """ + if self.current_state is None or self.current_workflow is None: + return + rset = self._cw.execute( + 'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, ' + 'T type TT, T type %(type)s, ' + 'T name TN, T transition_of WF, WF eid %(wfeid)s', + {'x': self.current_state.eid, 'type': text_type(type), + 'wfeid': self.current_workflow.eid}) + for tr in rset.entities(): + if tr.may_be_fired(self.entity.eid): + yield tr + + def subworkflow_input_trinfo(self): + """return the TrInfo which has be recorded when this entity went into + the current sub-workflow + """ + if self.main_workflow.eid == self.current_workflow.eid: + return # doesn't make sense + subwfentries = [] + for trinfo in self.workflow_history: + if 
(trinfo.transition and + trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): + # entering or leaving a subworkflow + if (subwfentries and + subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and + subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): + # leave + del subwfentries[-1] + else: + # enter + subwfentries.append(trinfo) + if not subwfentries: + return None + return subwfentries[-1] + + def subworkflow_input_transition(self): + """return the transition which has went through the current sub-workflow + """ + return getattr(self.subworkflow_input_trinfo(), 'transition', None) + + def _add_trinfo(self, comment, commentformat, treid=None, tseid=None): + kwargs = {} + if comment is not None: + kwargs['comment'] = comment + if commentformat is not None: + kwargs['comment_format'] = commentformat + kwargs['wf_info_for'] = self.entity + if treid is not None: + kwargs['by_transition'] = self._cw.entity_from_eid(treid) + if tseid is not None: + kwargs['to_state'] = self._cw.entity_from_eid(tseid) + return self._cw.create_entity('TrInfo', **kwargs) + + def _get_transition(self, tr): + assert self.current_workflow + if isinstance(tr, string_types): + _tr = self.current_workflow.transition_by_name(tr) + assert _tr is not None, 'not a %s transition: %s' % ( + self.__regid__, tr) + tr = _tr + return tr + + def fire_transition(self, tr, comment=None, commentformat=None): + """change the entity's state by firing given transition (name or entity) + in entity's workflow + """ + tr = self._get_transition(tr) + return self._add_trinfo(comment, commentformat, tr.eid) + + def fire_transition_if_possible(self, tr, comment=None, commentformat=None): + """change the entity's state by firing given transition (name or entity) + in entity's workflow if this transition is possible + """ + tr = self._get_transition(tr) + if any(tr_ for tr_ in self.possible_transitions() + if tr_.eid == tr.eid): + self.fire_transition(tr, comment, commentformat) + + def change_state(self, statename, comment=None, commentformat=None, tr=None): + """change the entity's state to the given state (name or entity) in + entity's workflow. This method should only by used by manager to fix an + entity's state when their is no matching transition, otherwise + fire_transition should be used. + """ + assert self.current_workflow + if hasattr(statename, 'eid'): + stateeid = statename.eid + else: + state = self.current_workflow.state_by_name(statename) + if state is None: + raise WorkflowException('not a %s state: %s' % (self.__regid__, + statename)) + stateeid = state.eid + # XXX try to find matching transition? + return self._add_trinfo(comment, commentformat, tr and tr.eid, stateeid) + + def set_initial_state(self, statename): + """set a newly created entity's state to the given state (name or entity) + in entity's workflow. This is useful if you don't want it to be the + workflow's initial state. 
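To make the division of labour between the adapter's state-changing entry points concrete, a minimal sketch assuming `entity` is any workflowable entity and `cnx` an open client connection (illustrative only, not part of this changeset):

    iwf = entity.cw_adapt_to('IWorkflowable')
    # normal path: follow a transition defined in the entity's current workflow
    iwf.fire_transition('deactivate', comment=u'requested by the user')
    # manager escape hatch: jump straight to a state when no transition matches
    # iwf.change_state('activated', comment=u'manual fix')
    # right after creation only: start from a non-initial state of the workflow
    # iwf.set_initial_state('deactivated')
    cnx.commit()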
+ """ + assert self.current_workflow + if hasattr(statename, 'eid'): + stateeid = statename.eid + else: + state = self.current_workflow.state_by_name(statename) + if state is None: + raise WorkflowException('not a %s state: %s' % (self.__regid__, + statename)) + stateeid = state.eid + self._cw.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', + {'x': self.entity.eid, 's': stateeid}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/entity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/entity.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1403 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Base class for entity objects manipulated in clients""" + +__docformat__ = "restructuredtext en" + +from warnings import warn +from functools import partial + +from six import text_type, string_types, integer_types +from six.moves import range + +from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated +from logilab.common.registry import yes +from logilab.mtconverter import TransformData, xml_escape + +from rql.utils import rqlvar_maker +from rql.stmts import Select +from rql.nodes import (Not, VariableRef, Constant, make_relation, + Relation as RqlRelation) + +from cubicweb import Unauthorized, neg_role +from cubicweb.utils import support_args +from cubicweb.rset import ResultSet +from cubicweb.appobject import AppObject +from cubicweb.schema import (RQLVocabularyConstraint, RQLConstraint, + GeneratedConstraint) +from cubicweb.rqlrewrite import RQLRewriter + +from cubicweb.uilib import soup2xhtml +from cubicweb.mttransforms import ENGINE + +_marker = object() + +def greater_card(rschema, subjtypes, objtypes, index): + for subjtype in subjtypes: + for objtype in objtypes: + card = rschema.rdef(subjtype, objtype).cardinality[index] + if card in '+*': + return card + return '1' + +def can_use_rest_path(value): + """return True if value can be used at the end of a Rest URL path""" + if value is None: + return False + value = text_type(value) + # the check for ?, /, & are to prevent problems when running + # behind Apache mod_proxy + if value == u'' or u'?' 
in value or u'/' in value or u'&' in value: + return False + return True + +def rel_vars(rel): + return ((isinstance(rel.children[0], VariableRef) + and rel.children[0].variable or None), + (isinstance(rel.children[1].children[0], VariableRef) + and rel.children[1].children[0].variable or None) + ) + +def rel_matches(rel, rtype, role, varname, operator='='): + if rel.r_type == rtype and rel.children[1].operator == operator: + same_role_var_idx = 0 if role == 'subject' else 1 + variables = rel_vars(rel) + if variables[same_role_var_idx].name == varname: + return variables[1 - same_role_var_idx] + +def build_cstr_with_linkto_infos(cstr, args, searchedvar, evar, + lt_infos, eidvars): + """restrict vocabulary as much as possible in entity creation, + based on infos provided by __linkto form param. + + Example based on following schema: + + class works_in(RelationDefinition): + subject = 'CWUser' + object = 'Lab' + cardinality = '1*' + constraints = [RQLConstraint('S in_group G, O welcomes G')] + + class welcomes(RelationDefinition): + subject = 'Lab' + object = 'CWGroup' + + If you create a CWUser in the "scientists" CWGroup you can show + only the labs that welcome them using : + + lt_infos = {('in_group', 'subject'): 321} + + You get following restriction : 'O welcomes G, G eid 321' + + """ + st = cstr.snippet_rqlst.copy() + # replace relations in ST by eid infos from linkto where possible + for (info_rtype, info_role), eids in lt_infos.items(): + eid = eids[0] # NOTE: we currently assume a pruned lt_info with only 1 eid + for rel in st.iget_nodes(RqlRelation): + targetvar = rel_matches(rel, info_rtype, info_role, evar.name) + if targetvar is not None: + if targetvar.name in eidvars: + rel.parent.remove(rel) + else: + eidrel = make_relation( + targetvar, 'eid', (targetvar.name, 'Substitute'), + Constant) + rel.parent.replace(rel, eidrel) + args[targetvar.name] = eid + eidvars.add(targetvar.name) + # if modified ST still contains evar references we must discard the + # constraint, otherwise evar is unknown in the final rql query which can + # lead to a SQL table cartesian product and multiple occurences of solutions + evarname = evar.name + for rel in st.iget_nodes(RqlRelation): + for variable in rel_vars(rel): + if variable and evarname == variable.name: + return + # else insert snippets into the global tree + return GeneratedConstraint(st, cstr.mainvars - set(evarname)) + +def pruned_lt_info(eschema, lt_infos): + pruned = {} + for (lt_rtype, lt_role), eids in lt_infos.items(): + # we can only use lt_infos describing relation with a cardinality + # of value 1 towards the linked entity + if not len(eids) == 1: + continue + lt_card = eschema.rdef(lt_rtype, lt_role).cardinality[ + 0 if lt_role == 'subject' else 1] + if lt_card not in '?1': + continue + pruned[(lt_rtype, lt_role)] = eids + return pruned + + +class Entity(AppObject): + """an entity instance has e_schema automagically set on + the class and instances has access to their issuing cursor. + + A property is set for each attribute and relation on each entity's type + class. 
Becare that among attributes, 'eid' is *NEITHER* stored in the + dict containment (which acts as a cache for other attributes dynamically + fetched) + + :type e_schema: `cubicweb.schema.EntitySchema` + :ivar e_schema: the entity's schema + + :type rest_attr: str + :cvar rest_attr: indicates which attribute should be used to build REST urls + If `None` is specified (the default), the first unique attribute will + be used ('eid' if none found) + + :type cw_skip_copy_for: list + :cvar cw_skip_copy_for: a list of couples (rtype, role) for each relation + that should be skipped when copying this kind of entity. Note that some + relations such as composite relations or relations that have '?1' as + object cardinality are always skipped. + """ + __registry__ = 'etypes' + __select__ = yes() + + # class attributes that must be set in class definition + rest_attr = None + fetch_attrs = None + skip_copy_for = () # bw compat (< 3.14), use cw_skip_copy_for instead + cw_skip_copy_for = [('in_state', 'subject')] + # class attributes set automatically at registration time + e_schema = None + + @classmethod + def __initialize__(cls, schema): + """initialize a specific entity class by adding descriptors to access + entity type's attributes and relations + """ + etype = cls.__regid__ + assert etype != 'Any', etype + cls.e_schema = eschema = schema.eschema(etype) + for rschema, _ in eschema.attribute_definitions(): + if rschema.type == 'eid': + continue + setattr(cls, rschema.type, Attribute(rschema.type)) + mixins = [] + for rschema, _, role in eschema.relation_definitions(): + if role == 'subject': + attr = rschema.type + else: + attr = 'reverse_%s' % rschema.type + setattr(cls, attr, Relation(rschema, role)) + + fetch_attrs = ('modification_date',) + + @classmethod + def cw_fetch_order(cls, select, attr, var): + """This class method may be used to control sort order when multiple + entities of this type are fetched through ORM methods. Its arguments + are: + + * `select`, the RQL syntax tree + + * `attr`, the attribute being watched + + * `var`, the variable through which this attribute's value may be + accessed in the query + + When you want to do some sorting on the given attribute, you should + modify the syntax tree accordingly. For instance: + + .. sourcecode:: python + + from rql import nodes + + class Version(AnyEntity): + __regid__ = 'Version' + + fetch_attrs = ('num', 'description', 'in_state') + + @classmethod + def cw_fetch_order(cls, select, attr, var): + if attr == 'num': + func = nodes.Function('version_sort_value') + func.append(nodes.variable_ref(var)) + sterm = nodes.SortTerm(func, asc=False) + select.add_sort_term(sterm) + + The default implementation call + :meth:`~cubicweb.entity.Entity.cw_fetch_unrelated_order` + """ + cls.cw_fetch_unrelated_order(select, attr, var) + + @classmethod + def cw_fetch_unrelated_order(cls, select, attr, var): + """This class method may be used to control sort order when multiple entities of + this type are fetched to use in edition (e.g. propose them to create a + new relation on an edited entity). + + See :meth:`~cubicweb.entity.Entity.cw_fetch_unrelated_order` for a + description of its arguments and usage. + + By default entities will be listed on their modification date descending, + i.e. you'll get entities recently modified first. 
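As a counterpart to the `cw_fetch_order` example above, a minimal sketch of overriding `cw_fetch_unrelated_order` so that candidate entities are proposed alphabetically rather than by modification date; the 'Company' type and its 'name' attribute are assumptions:

    from cubicweb.entities import AnyEntity

    class Company(AnyEntity):
        __regid__ = 'Company'
        fetch_attrs = ('name',)

        @classmethod
        def cw_fetch_unrelated_order(cls, select, attr, var):
            # sort candidates by name ascending when proposed in edition forms
            if attr == 'name':
                select.add_sort_var(var, asc=True)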
+ """ + if attr == 'modification_date': + select.add_sort_var(var, asc=False) + + @classmethod + def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X', + settype=True, ordermethod='fetch_order'): + st = cls.fetch_rqlst(user, mainvar=mainvar, fetchattrs=fetchattrs, + settype=settype, ordermethod=ordermethod) + rql = st.as_string() + if restriction: + # cannot use RQLRewriter API to insert 'X rtype %(x)s' restriction + warn('[3.14] fetch_rql: use of `restriction` parameter is ' + 'deprecated, please use fetch_rqlst and supply a syntax' + 'tree with your restriction instead', DeprecationWarning) + insert = ' WHERE ' + ','.join(restriction) + if ' WHERE ' in rql: + select, where = rql.split(' WHERE ', 1) + rql = select + insert + ',' + where + else: + rql += insert + return rql + + @classmethod + def fetch_rqlst(cls, user, select=None, mainvar='X', fetchattrs=None, + settype=True, ordermethod='fetch_order'): + if select is None: + select = Select() + mainvar = select.get_variable(mainvar) + select.add_selected(mainvar) + elif isinstance(mainvar, string_types): + assert mainvar in select.defined_vars + mainvar = select.get_variable(mainvar) + # eases string -> syntax tree test transition: please remove once stable + select._varmaker = rqlvar_maker(defined=select.defined_vars, + aliases=select.aliases, index=26) + if settype: + rel = select.add_type_restriction(mainvar, cls.__regid__) + # should use 'is_instance_of' instead of 'is' so we retrieve + # subclasses instances as well + rel.r_type = 'is_instance_of' + if fetchattrs is None: + fetchattrs = cls.fetch_attrs + cls._fetch_restrictions(mainvar, select, fetchattrs, user, ordermethod) + return select + + @classmethod + def _fetch_ambiguous_rtypes(cls, select, var, fetchattrs, subjtypes, schema): + """find rtypes in `fetchattrs` that relate different subject etypes + taken from (`subjtypes`) to different target etypes; these so called + "ambiguous" relations, are added directly to the `select` syntax tree + selection but removed from `fetchattrs` to avoid the fetch recursion + because we have to choose only one targettype for the recursion and + adding its own fetch attrs to the selection -when we recurse- would + filter out the other possible target types from the result set + """ + for attr in fetchattrs.copy(): + rschema = schema.rschema(attr) + if rschema.final: + continue + ttypes = None + for subjtype in subjtypes: + cur_ttypes = set(rschema.objects(subjtype)) + if ttypes is None: + ttypes = cur_ttypes + elif cur_ttypes != ttypes: + # we found an ambiguous relation: remove it from fetchattrs + fetchattrs.remove(attr) + # ... 
and add it to the selection + targetvar = select.make_variable() + select.add_selected(targetvar) + rel = make_relation(var, attr, (targetvar,), VariableRef) + select.add_restriction(rel) + break + + @classmethod + def _fetch_restrictions(cls, mainvar, select, fetchattrs, + user, ordermethod='fetch_order', visited=None): + eschema = cls.e_schema + if visited is None: + visited = set((eschema.type,)) + elif eschema.type in visited: + # avoid infinite recursion + return + else: + visited.add(eschema.type) + _fetchattrs = [] + for attr in sorted(fetchattrs): + try: + rschema = eschema.subjrels[attr] + except KeyError: + cls.warning('skipping fetch_attr %s defined in %s (not found in schema)', + attr, cls.__regid__) + continue + # XXX takefirst=True to remove warning triggered by ambiguous inlined relations + rdef = eschema.rdef(attr, takefirst=True) + if not user.matching_groups(rdef.get_groups('read')): + continue + if rschema.final or rdef.cardinality[0] in '?1': + var = select.make_variable() + select.add_selected(var) + rel = make_relation(mainvar, attr, (var,), VariableRef) + select.add_restriction(rel) + else: + cls.warning('bad relation %s specified in fetch attrs for %s', + attr, cls) + continue + if not rschema.final: + # XXX we need outer join in case the relation is not mandatory + # (card == '?') *or if the entity is being added*, since in + # that case the relation may still be missing. As we miss this + # later information here, systematically add it. + rel.change_optional('right') + targettypes = rschema.objects(eschema.type) + vreg = user._cw.vreg # XXX user._cw.vreg iiiirk + etypecls = vreg['etypes'].etype_class(targettypes[0]) + if len(targettypes) > 1: + # find fetch_attrs common to all destination types + fetchattrs = vreg['etypes'].fetch_attrs(targettypes) + # ... and handle ambiguous relations + cls._fetch_ambiguous_rtypes(select, var, fetchattrs, + targettypes, vreg.schema) + else: + fetchattrs = etypecls.fetch_attrs + etypecls._fetch_restrictions(var, select, fetchattrs, + user, None, visited=visited) + if ordermethod is not None: + try: + cmeth = getattr(cls, ordermethod) + warn('[3.14] %s %s class method should be renamed to cw_%s' + % (cls.__regid__, ordermethod, ordermethod), + DeprecationWarning) + except AttributeError: + cmeth = getattr(cls, 'cw_' + ordermethod) + if support_args(cmeth, 'select'): + cmeth(select, attr, var) + else: + warn('[3.14] %s should now take (select, attr, var) and ' + 'modify the syntax tree when desired instead of ' + 'returning something' % cmeth, DeprecationWarning) + orderterm = cmeth(attr, var.name) + if orderterm is not None: + try: + var, order = orderterm.split() + except ValueError: + if '(' in orderterm: + cls.error('ignore %s until %s is upgraded', + orderterm, cmeth) + orderterm = None + elif not ' ' in orderterm.strip(): + var = orderterm + order = 'ASC' + if orderterm is not None: + select.add_sort_var(select.get_variable(var), + order=='ASC') + + @classmethod + @cached + def cw_rest_attr_info(cls): + """this class method return an attribute name to be used in URL for + entities of this type and a boolean flag telling if its value should be + checked for uniqness. 
+ + The attribute returned is, in order of priority: + + * class's `rest_attr` class attribute + * an attribute defined as unique in the class'schema + * 'eid' + """ + mainattr, needcheck = 'eid', True + if cls.rest_attr: + mainattr = cls.rest_attr + needcheck = not cls.e_schema.has_unique_values(mainattr) + else: + for rschema in cls.e_schema.subject_relations(): + if (rschema.final + and rschema not in ('eid', 'cwuri') + and cls.e_schema.has_unique_values(rschema) + and cls.e_schema.rdef(rschema.type).cardinality[0] == '1'): + mainattr = str(rschema) + needcheck = False + break + if mainattr == 'eid': + needcheck = False + return mainattr, needcheck + + @classmethod + def _cw_build_entity_query(cls, kwargs): + relations = [] + restrictions = set() + pendingrels = [] + eschema = cls.e_schema + qargs = {} + attrcache = {} + for attr, value in kwargs.items(): + if attr.startswith('reverse_'): + attr = attr[len('reverse_'):] + role = 'object' + else: + role = 'subject' + assert eschema.has_relation(attr, role), '%s %s not found on %s' % (attr, role, eschema) + rschema = eschema.subjrels[attr] if role == 'subject' else eschema.objrels[attr] + if not rschema.final and isinstance(value, (tuple, list, set, frozenset)): + if len(value) == 0: + continue # avoid crash with empty IN clause + elif len(value) == 1: + value = next(iter(value)) + else: + # prepare IN clause + pendingrels.append( (attr, role, value) ) + continue + if rschema.final: # attribute + relations.append('X %s %%(%s)s' % (attr, attr)) + attrcache[attr] = value + elif value is None: + pendingrels.append( (attr, role, value) ) + else: + rvar = attr.upper() + if role == 'object': + relations.append('%s %s X' % (rvar, attr)) + else: + relations.append('X %s %s' % (attr, rvar)) + restriction = '%s eid %%(%s)s' % (rvar, attr) + if not restriction in restrictions: + restrictions.add(restriction) + if hasattr(value, 'eid'): + value = value.eid + qargs[attr] = value + rql = u'' + if relations: + rql += ', '.join(relations) + if restrictions: + rql += ' WHERE %s' % ', '.join(restrictions) + return rql, qargs, pendingrels, attrcache + + @classmethod + def _cw_handle_pending_relations(cls, eid, pendingrels, execute): + for attr, role, values in pendingrels: + if role == 'object': + restr = 'Y %s X' % attr + else: + restr = 'X %s Y' % attr + if values is None: + execute('DELETE %s WHERE X eid %%(x)s' % restr, {'x': eid}) + continue + execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( + restr, ','.join(str(getattr(r, 'eid', r)) for r in values)), + {'x': eid}, build_descr=False) + + @classmethod + def cw_instantiate(cls, execute, **kwargs): + """add a new entity of this given type + + Example (in a shell session): + + >>> companycls = vreg['etypes'].etype_class('Company') + >>> personcls = vreg['etypes'].etype_class('Person') + >>> c = companycls.cw_instantiate(session.execute, name=u'Logilab') + >>> p = personcls.cw_instantiate(session.execute, firstname=u'John', lastname=u'Doe', + ... works_for=c) + + You can also set relations where the entity has 'object' role by + prefixing the relation name by 'reverse_'. Also, relation values may be + an entity or eid, a list of entities or eids. 
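The 'reverse_' prefix mentioned above is easy to miss; continuing the shell example from this docstring, a minimal illustrative sketch (not part of the changeset):

    >>> # set the 'object' side of works_for by prefixing the relation with 'reverse_';
    >>> # the value may be an entity, an eid, or a list of either
    >>> c2 = companycls.cw_instantiate(session.execute, name=u'ACME',
    ...                                reverse_works_for=p)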
+ """ + rql, qargs, pendingrels, attrcache = cls._cw_build_entity_query(kwargs) + if rql: + rql = 'INSERT %s X: %s' % (cls.__regid__, rql) + else: + rql = 'INSERT %s X' % (cls.__regid__) + try: + created = execute(rql, qargs).get_entity(0, 0) + except IndexError: + raise Exception('could not create a %r with %r (%r)' % + (cls.__regid__, rql, qargs)) + created._cw_update_attr_cache(attrcache) + cls._cw_handle_pending_relations(created.eid, pendingrels, execute) + return created + + def __init__(self, req, rset=None, row=None, col=0): + AppObject.__init__(self, req, rset=rset, row=row, col=col) + self._cw_related_cache = {} + self._cw_adapters_cache = {} + if rset is not None: + self.eid = rset[row][col] + else: + self.eid = None + self._cw_is_saved = True + self.cw_attr_cache = {} + + def __repr__(self): + return '' % ( + self.e_schema, self.eid, list(self.cw_attr_cache), id(self)) + + def __lt__(self, other): + raise NotImplementedError('comparison not implemented for %s' % self.__class__) + + def __eq__(self, other): + if isinstance(self.eid, integer_types): + return self.eid == other.eid + return self is other + + def __hash__(self): + if isinstance(self.eid, integer_types): + return self.eid + return super(Entity, self).__hash__() + + def _cw_update_attr_cache(self, attrcache): + trdata = self._cw.transaction_data + uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set()) + uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set())) + for attr in uncached_attrs: + attrcache.pop(attr, None) + self.cw_attr_cache.pop(attr, None) + self.cw_attr_cache.update(attrcache) + + def _cw_dont_cache_attribute(self, attr, repo_side=False): + """Called when some attribute has been transformed by a *storage*, + hence the original value should not be cached **by anyone**. + + For example we have a special "fs_importing" mode in BFSS + where a file path is given as attribute value and stored as is + in the data base. Later access to the attribute will provide + the content of the file at the specified path. We do not want + the "filepath" value to be cached. + + """ + trdata = self._cw.transaction_data + trdata.setdefault('%s.dont-cache-attrs' % self.eid, set()).add(attr) + if repo_side: + trdata.setdefault('%s.storage-special-process-attrs' % self.eid, set()).add(attr) + + def __json_encode__(self): + """custom json dumps hook to dump the entity's eid + which is not part of dict structure itself + """ + dumpable = self.cw_attr_cache.copy() + dumpable['eid'] = self.eid + return dumpable + + def cw_adapt_to(self, interface): + """return an adapter the entity to the given interface name. + + return None if it can not be adapted. + """ + cache = self._cw_adapters_cache + try: + return cache[interface] + except KeyError: + adapter = self._cw.vreg['adapters'].select_or_none( + interface, self._cw, entity=self) + cache[interface] = adapter + return adapter + + def has_eid(self): # XXX cw_has_eid + """return True if the entity has an attributed eid (False + meaning that the entity has to be created + """ + try: + int(self.eid) + return True + except (ValueError, TypeError): + return False + + def cw_is_saved(self): + """during entity creation, there is some time during which the entity + has an eid attributed though it's not saved (eg during + 'before_add_entity' hooks). You can use this method to ensure the entity + has an eid *and* is saved in its source. 
+ """ + return self.has_eid() and self._cw_is_saved + + @cached + def cw_metainformation(self): + metas = self._cw.entity_metas(self.eid) + metas['source'] = self._cw.source_defs()[metas['source']] + return metas + + def cw_check_perm(self, action): + self.e_schema.check_perm(self._cw, action, eid=self.eid) + + def cw_has_perm(self, action): + return self.e_schema.has_perm(self._cw, action, eid=self.eid) + + def view(self, __vid, __registry='views', w=None, initargs=None, **kwargs): # XXX cw_view + """shortcut to apply a view on this entity""" + if initargs is None: + initargs = kwargs + else: + initargs.update(kwargs) + view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset, + row=self.cw_row, col=self.cw_col, + **initargs) + return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs) + + def absolute_url(self, *args, **kwargs): # XXX cw_url + """return an absolute url to view this entity""" + # use *args since we don't want first argument to be "anonymous" to + # avoid potential clash with kwargs + if args: + assert len(args) == 1, 'only 0 or 1 non-named-argument expected' + method = args[0] + else: + method = None + # in linksearch mode, we don't want external urls else selecting + # the object for use in the relation is tricky + # XXX search_state is web specific + use_ext_id = False + if 'base_url' not in kwargs and \ + getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': + sourcemeta = self.cw_metainformation()['source'] + if sourcemeta.get('use-cwuri-as-url'): + return self.cwuri # XXX consider kwargs? + if sourcemeta.get('base-url'): + kwargs['base_url'] = sourcemeta['base-url'] + use_ext_id = True + if method in (None, 'view'): + kwargs['_restpath'] = self.rest_path(use_ext_id) + else: + kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid + return self._cw.build_url(method, **kwargs) + + def rest_path(self, use_ext_eid=False): # XXX cw_rest_path + """returns a REST-like (relative) path for this entity""" + mainattr, needcheck = self.cw_rest_attr_info() + etype = str(self.e_schema) + path = etype.lower() + fallback = False + if mainattr != 'eid': + value = getattr(self, mainattr) + if not can_use_rest_path(value): + mainattr = 'eid' + path = None + elif needcheck: + # make sure url is not ambiguous + try: + nbresults = self.__unique + except AttributeError: + rql = 'Any COUNT(X) WHERE X is %s, X %s %%(value)s' % ( + etype, mainattr) + nbresults = self.__unique = self._cw.execute(rql, {'value' : value})[0][0] + if nbresults != 1: # ambiguity? + mainattr = 'eid' + path = None + if mainattr == 'eid': + if use_ext_eid: + value = self.cw_metainformation()['extid'] + else: + value = self.eid + if path is None: + # fallback url: / url is used as cw entities uri, + # prefer it to //eid/ + return text_type(value) + return u'%s/%s' % (path, self._cw.url_quote(value)) + + def cw_attr_metadata(self, attr, metadata): + """return a metadata for an attribute (None if unspecified)""" + value = getattr(self, '%s_%s' % (attr, metadata), None) + if value is None and metadata == 'encoding': + value = self._cw.vreg.property_value('ui.encoding') + return value + + def printable_value(self, attr, value=_marker, attrtype=None, + format='text/html', displaytime=True): # XXX cw_printable_value + """return a displayable value (i.e. 
unicode string) which may contains + html tags + """ + attr = str(attr) + if value is _marker: + value = getattr(self, attr) + if isinstance(value, string_types): + value = value.strip() + if value is None or value == '': # don't use "not", 0 is an acceptable value + return u'' + if attrtype is None: + attrtype = self.e_schema.destination(attr) + props = self.e_schema.rdef(attr) + if attrtype == 'String': + # internalinalized *and* formatted string such as schema + # description... + if props.internationalizable: + value = self._cw._(value) + attrformat = self.cw_attr_metadata(attr, 'format') + if attrformat: + return self._cw_mtc_transform(value, attrformat, format, + self._cw.encoding) + elif attrtype == 'Bytes': + attrformat = self.cw_attr_metadata(attr, 'format') + if attrformat: + encoding = self.cw_attr_metadata(attr, 'encoding') + return self._cw_mtc_transform(value.getvalue(), attrformat, format, + encoding) + return u'' + value = self._cw.printable_value(attrtype, value, props, + displaytime=displaytime) + if format == 'text/html': + value = xml_escape(value) + return value + + def _cw_mtc_transform(self, data, format, target_format, encoding, + _engine=ENGINE): + trdata = TransformData(data, format, encoding, appobject=self) + data = _engine.convert(trdata, target_format).decode() + if target_format == 'text/html': + data = soup2xhtml(data, self._cw.encoding) + return data + + # entity cloning ########################################################## + + def copy_relations(self, ceid): # XXX cw_copy_relations + """copy relations of the object with the given eid on this + object (this method is called on the newly created copy, and + ceid designates the original entity). + + By default meta and composite relations are skipped. + Overrides this if you want another behaviour + """ + assert self.has_eid() + execute = self._cw.execute + skip_copy_for = {'subject': set(), 'object': set()} + for rtype in self.skip_copy_for: + skip_copy_for['subject'].add(rtype) + warn('[3.14] skip_copy_for on entity classes (%s) is deprecated, ' + 'use cw_skip_for instead with list of couples (rtype, role)' % self.cw_etype, + DeprecationWarning) + for rtype, role in self.cw_skip_copy_for: + assert role in ('subject', 'object'), role + skip_copy_for[role].add(rtype) + for rschema in self.e_schema.subject_relations(): + if rschema.type in skip_copy_for['subject']: + continue + if rschema.final or rschema.meta: + continue + # skip already defined relations + if getattr(self, rschema.type): + continue + # XXX takefirst=True to remove warning triggered by ambiguous relations + rdef = self.e_schema.rdef(rschema, takefirst=True) + # skip composite relation + if rdef.composite: + continue + # skip relation with card in ?1 else we either change the copied + # object (inlined relation) or inserting some inconsistency + if rdef.cardinality[1] in '?1': + continue + rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( + rschema.type, rschema.type) + execute(rql, {'x': self.eid, 'y': ceid}) + self.cw_clear_relation_cache(rschema.type, 'subject') + for rschema in self.e_schema.object_relations(): + if rschema.meta: + continue + # skip already defined relations + if self.related(rschema.type, 'object'): + continue + if rschema.type in skip_copy_for['object']: + continue + # XXX takefirst=True to remove warning triggered by ambiguous relations + rdef = self.e_schema.rdef(rschema, 'object', takefirst=True) + # skip composite relation + if rdef.composite: + continue + # skip relation with card in ?1 else we 
either change the copied + # object (inlined relation) or inserting some inconsistency + if rdef.cardinality[0] in '?1': + continue + rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( + rschema.type, rschema.type) + execute(rql, {'x': self.eid, 'y': ceid}) + self.cw_clear_relation_cache(rschema.type, 'object') + + # data fetching methods ################################################### + + @cached + def as_rset(self): # XXX .cw_as_rset + """returns a resultset containing `self` information""" + rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', + {'x': self.eid}, [(self.cw_etype,)]) + rset.req = self._cw + return rset + + def _cw_to_complete_relations(self): + """by default complete final relations to when calling .complete()""" + for rschema in self.e_schema.subject_relations(): + if rschema.final: + continue + targets = rschema.objects(self.e_schema) + if rschema.inlined: + matching_groups = self._cw.user.matching_groups + if all(matching_groups(e.get_groups('read')) and + rschema.rdef(self.e_schema, e).get_groups('read') + for e in targets): + yield rschema, 'subject' + + def _cw_to_complete_attributes(self, skip_bytes=True, skip_pwd=True): + for rschema, attrschema in self.e_schema.attribute_definitions(): + # skip binary data by default + if skip_bytes and attrschema.type == 'Bytes': + continue + attr = rschema.type + if attr == 'eid': + continue + # password retrieval is blocked at the repository server level + rdef = rschema.rdef(self.e_schema, attrschema) + if not self._cw.user.matching_groups(rdef.get_groups('read')) \ + or (attrschema.type == 'Password' and skip_pwd): + self.cw_attr_cache[attr] = None + continue + yield attr + + _cw_completed = False + def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): # XXX cw_complete + """complete this entity by adding missing attributes (i.e. query the + repository to fill the entity) + + :type skip_bytes: bool + :param skip_bytes: + if true, attribute of type Bytes won't be considered + """ + assert self.has_eid() + if self._cw_completed: + return + if attributes is None: + self._cw_completed = True + varmaker = rqlvar_maker() + V = next(varmaker) + rql = ['WHERE %s eid %%(x)s' % V] + selected = [] + for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)): + # if attribute already in entity, nothing to do + if attr in self.cw_attr_cache: + continue + # case where attribute must be completed, but is not yet in entity + var = next(varmaker) + rql.append('%s %s %s' % (V, attr, var)) + selected.append((attr, var)) + # +1 since this doesn't include the main variable + lastattr = len(selected) + 1 + # don't fetch extra relation if attributes specified or of the entity is + # coming from an external source (may lead to error) + if attributes is None and self.cw_metainformation()['source']['uri'] == 'system': + # fetch additional relations (restricted to 0..1 relations) + for rschema, role in self._cw_to_complete_relations(): + rtype = rschema.type + if self.cw_relation_cached(rtype, role): + continue + # at this point we suppose that: + # * this is a inlined relation + # * entity (self) is the subject + # * user has read perm on the relation and on the target entity + assert rschema.inlined + assert role == 'subject' + var = next(varmaker) + # keep outer join anyway, we don't want .complete to crash on + # missing mandatory relation (see #1058267) + rql.append('%s %s %s?' 
% (V, rtype, var)) + selected.append(((rtype, role), var)) + if selected: + # select V, we need it as the left most selected variable + # if some outer join are included to fetch inlined relations + rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected), + ','.join(rql)) + try: + rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0] + except IndexError: + raise Exception('unable to fetch attributes for entity with eid %s' + % self.eid) + # handle attributes + for i in range(1, lastattr): + self.cw_attr_cache[str(selected[i-1][0])] = rset[i] + # handle relations + for i in range(lastattr, len(rset)): + rtype, role = selected[i-1][0] + value = rset[i] + if value is None: + rrset = ResultSet([], rql, {'x': self.eid}) + rrset.req = self._cw + else: + rrset = self._cw.eid_rset(value) + self.cw_set_relation_cache(rtype, role, rrset) + + def cw_attr_value(self, name): + """get value for the attribute relation , query the repository + to get the value if necessary. + + :type name: str + :param name: name of the attribute to get + """ + try: + return self.cw_attr_cache[name] + except KeyError: + if not self.cw_is_saved(): + return None + rql = "Any A WHERE X eid %%(x)s, X %s A" % name + try: + rset = self._cw.execute(rql, {'x': self.eid}) + except Unauthorized: + self.cw_attr_cache[name] = value = None + else: + assert rset.rowcount <= 1, (self, rql, rset.rowcount) + try: + self.cw_attr_cache[name] = value = rset.rows[0][0] + except IndexError: + # probably a multisource error + self.critical("can't get value for attribute %s of entity with eid %s", + name, self.eid) + if self.e_schema.destination(name) == 'String': + self.cw_attr_cache[name] = value = self._cw._('unaccessible') + else: + self.cw_attr_cache[name] = value = None + return value + + def related(self, rtype, role='subject', limit=None, entities=False, # XXX .cw_related + safe=False, targettypes=None): + """returns a resultset of related entities + + :param rtype: + the name of the relation, aka relation type + :param role: + the role played by 'self' in the relation ('subject' or 'object') + :param limit: + resultset's maximum size + :param entities: + if True, the entites are returned; if False, a result set is returned + :param safe: + if True, an empty rset/list of entities will be returned in case of + :exc:`Unauthorized`, else (the default), the exception is propagated + :param targettypes: + a tuple of target entity types to restrict the query + """ + rtype = str(rtype) + # Caching restricted/limited results is best avoided. 
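A short usage sketch of the related() signature documented above (the person entity, the works_for relation and the Company type are assumptions):

    # result set of related entities, self playing the subject role
    rset = person.related('works_for', 'subject')
    # entity list instead of a result set, swallowing Unauthorized
    companies = person.related('works_for', 'subject', entities=True, safe=True)
    # limited / type-restricted variants bypass the relation cache
    some = person.related('works_for', 'subject', limit=5, targettypes=('Company',))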
+ cacheable = limit is None and targettypes is None + if cacheable: + cache_key = '%s_%s' % (rtype, role) + if cache_key in self._cw_related_cache: + return self._cw_related_cache[cache_key][entities] + if not self.has_eid(): + if entities: + return [] + return self._cw.empty_rset() + rql = self.cw_related_rql(rtype, role, limit=limit, targettypes=targettypes) + try: + rset = self._cw.execute(rql, {'x': self.eid}) + except Unauthorized: + if not safe: + raise + rset = self._cw.empty_rset() + if entities: + if cacheable: + self.cw_set_relation_cache(rtype, role, rset) + return self.related(rtype, role, entities=entities) + return list(rset.entities()) + else: + return rset + + def cw_related_rql(self, rtype, role='subject', targettypes=None, limit=None): + vreg = self._cw.vreg + rschema = vreg.schema[rtype] + select = Select() + mainvar, evar = select.get_variable('X'), select.get_variable('E') + select.add_selected(mainvar) + if limit is not None: + select.set_limit(limit) + select.add_eid_restriction(evar, 'x', 'Substitute') + if role == 'subject': + rel = make_relation(evar, rtype, (mainvar,), VariableRef) + select.add_restriction(rel) + if targettypes is None: + targettypes = rschema.objects(self.e_schema) + else: + select.add_constant_restriction(mainvar, 'is', + targettypes, 'etype') + gcard = greater_card(rschema, (self.e_schema,), targettypes, 0) + else: + rel = make_relation(mainvar, rtype, (evar,), VariableRef) + select.add_restriction(rel) + if targettypes is None: + targettypes = rschema.subjects(self.e_schema) + else: + select.add_constant_restriction(mainvar, 'is', targettypes, + 'etype') + gcard = greater_card(rschema, targettypes, (self.e_schema,), 1) + etypecls = vreg['etypes'].etype_class(targettypes[0]) + if len(targettypes) > 1: + fetchattrs = vreg['etypes'].fetch_attrs(targettypes) + self._fetch_ambiguous_rtypes(select, mainvar, fetchattrs, + targettypes, vreg.schema) + else: + fetchattrs = etypecls.fetch_attrs + etypecls.fetch_rqlst(self._cw.user, select, mainvar, fetchattrs, + settype=False) + # optimisation: remove ORDERBY if cardinality is 1 or ? (though + # greater_card return 1 for those both cases) + if gcard == '1': + select.remove_sort_terms() + elif not select.orderby: + # if modification_date is already retrieved, we use it instead + # of adding another variable for sorting. This should not be + # problematic, but it is with sqlserver, see ticket #694445 + for rel in select.where.get_nodes(RqlRelation): + if (rel.r_type == 'modification_date' + and rel.children[0].variable == mainvar + and rel.children[1].operator == '='): + var = rel.children[1].children[0].variable + select.add_sort_var(var, asc=False) + break + else: + mdvar = select.make_variable() + rel = make_relation(mainvar, 'modification_date', + (mdvar,), VariableRef) + select.add_restriction(rel) + select.add_sort_var(mdvar, asc=False) + return select.as_string() + + # generic vocabulary methods ############################################## + + def cw_linkable_rql(self, rtype, targettype, role, ordermethod=None, + vocabconstraints=True, lt_infos={}, limit=None): + """build a rql to fetch targettype entities either related or unrelated + to this entity using (rtype, role) relation. + + Consider relation permissions so that returned entities may be actually + linked by `rtype`. + + `lt_infos` are supplementary informations, usually coming from __linkto + parameter, that can help further restricting the results in case current + entity is not yet created. 
It is a dict describing entities the current + entity will be linked to, which keys are (rtype, role) tuples and values + are a list of eids. + """ + return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None, + vocabconstraints=vocabconstraints, + lt_infos=lt_infos, limit=limit, + unrelated_only=False) + + def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None, + vocabconstraints=True, lt_infos={}, limit=None): + """build a rql to fetch `targettype` entities unrelated to this entity + using (rtype, role) relation. + + Consider relation permissions so that returned entities may be actually + linked by `rtype`. + + `lt_infos` are supplementary informations, usually coming from __linkto + parameter, that can help further restricting the results in case current + entity is not yet created. It is a dict describing entities the current + entity will be linked to, which keys are (rtype, role) tuples and values + are a list of eids. + """ + return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None, + vocabconstraints=vocabconstraints, + lt_infos=lt_infos, limit=limit, + unrelated_only=True) + + def _cw_compute_linkable_rql(self, rtype, targettype, role, ordermethod=None, + vocabconstraints=True, lt_infos={}, limit=None, + unrelated_only=False): + """build a rql to fetch `targettype` entities that may be related to + this entity using the (rtype, role) relation. + + By default (unrelated_only=False), this includes the already linked + entities as well as the unrelated ones. If `unrelated_only` is True, the + rql filters out the already related entities. + """ + ordermethod = ordermethod or 'fetch_unrelated_order' + rschema = self._cw.vreg.schema.rschema(rtype) + rdef = rschema.role_rdef(self.e_schema, targettype, role) + rewriter = RQLRewriter(self._cw) + select = Select() + # initialize some variables according to the `role` of `self` in the + # relation (variable names must respect constraints conventions): + # * variable for myself (`evar`) + # * variable for searched entities (`searchvedvar`) + if role == 'subject': + evar = subjvar = select.get_variable('S') + searchedvar = objvar = select.get_variable('O') + else: + searchedvar = subjvar = select.get_variable('S') + evar = objvar = select.get_variable('O') + select.add_selected(searchedvar) + if limit is not None: + select.set_limit(limit) + # initialize some variables according to `self` existence + if rdef.role_cardinality(neg_role(role)) in '?1': + # if cardinality in '1?', we want a target entity which isn't + # already linked using this relation + variable = select.make_variable() + if role == 'subject': + rel = make_relation(variable, rtype, (searchedvar,), VariableRef) + else: + rel = make_relation(searchedvar, rtype, (variable,), VariableRef) + select.add_restriction(Not(rel)) + elif self.has_eid() and unrelated_only: + # elif we have an eid, we don't want a target entity which is + # already linked to ourself through this relation + rel = make_relation(subjvar, rtype, (objvar,), VariableRef) + select.add_restriction(Not(rel)) + if self.has_eid(): + rel = make_relation(evar, 'eid', ('x', 'Substitute'), Constant) + select.add_restriction(rel) + args = {'x': self.eid} + if role == 'subject': + sec_check_args = {'fromeid': self.eid} + else: + sec_check_args = {'toeid': self.eid} + existant = None # instead of 'SO', improve perfs + else: + args = {} + sec_check_args = {} + existant = searchedvar.name + # undefine unused evar, or the type resolver will consider it + 
select.undefine_variable(evar) + # retrieve entity class for targettype to compute base rql + etypecls = self._cw.vreg['etypes'].etype_class(targettype) + etypecls.fetch_rqlst(self._cw.user, select, searchedvar, + ordermethod=ordermethod) + # from now on, we need variable type resolving + self._cw.vreg.solutions(self._cw, select, args) + # insert RQL expressions for schema constraints into the rql syntax tree + if vocabconstraints: + cstrcls = (RQLVocabularyConstraint, RQLConstraint) + else: + cstrcls = RQLConstraint + lt_infos = pruned_lt_info(self.e_schema, lt_infos or {}) + # if there are still lt_infos, use set to keep track of added eid + # relations (adding twice the same eid relation is incorrect RQL) + eidvars = set() + for cstr in rdef.constraints: + # consider constraint.mainvars to check if constraint apply + if isinstance(cstr, cstrcls) and searchedvar.name in cstr.mainvars: + if not self.has_eid(): + if lt_infos: + # we can perhaps further restrict with linkto infos using + # a custom constraint built from cstr and lt_infos + cstr = build_cstr_with_linkto_infos( + cstr, args, searchedvar, evar, lt_infos, eidvars) + if cstr is None: + continue # could not build constraint -> discard + elif evar.name in cstr.mainvars: + continue + # compute a varmap suitable to RQLRewriter.rewrite argument + varmap = dict((v, v) for v in (searchedvar.name, evar.name) + if v in select.defined_vars and v in cstr.mainvars) + # rewrite constraint by constraint since we want a AND between + # expressions. + rewriter.rewrite(select, [(varmap, (cstr,))], args, existant) + # insert security RQL expressions granting the permission to 'add' the + # relation into the rql syntax tree, if necessary + rqlexprs = rdef.get_rqlexprs('add') + if not self.has_eid(): + rqlexprs = [rqlexpr for rqlexpr in rqlexprs + if searchedvar.name in rqlexpr.mainvars] + if rqlexprs and not rdef.has_perm(self._cw, 'add', **sec_check_args): + # compute a varmap suitable to RQLRewriter.rewrite argument + varmap = dict((v, v) for v in (searchedvar.name, evar.name) + if v in select.defined_vars) + # rewrite all expressions at once since we want a OR between them. + rewriter.rewrite(select, [(varmap, rqlexprs)], args, existant) + # ensure we have an order defined + if not select.orderby: + select.add_sort_var(select.defined_vars[searchedvar.name]) + # we're done, turn the rql syntax tree as a string + rql = select.as_string() + return rql, args + + def unrelated(self, rtype, targettype, role='subject', limit=None, + ordermethod=None, lt_infos={}): # XXX .cw_unrelated + """return a result set of target type objects that may be related + by a given relation, with self as subject or object + """ + try: + rql, args = self.cw_unrelated_rql(rtype, targettype, role, limit=limit, + ordermethod=ordermethod, lt_infos=lt_infos) + except Unauthorized: + return self._cw.empty_rset() + return self._cw.execute(rql, args) + + # relations cache handling ################################################# + + def cw_relation_cached(self, rtype, role): + """return None if the given relation isn't already cached on the + instance, else the content of the cache (a 2-uple (rset, entities)). 
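For illustration, a hedged sketch of how the unrelated/linkable queries above are typically consumed, e.g. to build the vocabulary of a relation edition widget (names are assumptions):

    # companies that may still be linked to this person through works_for,
    # honouring vocabulary constraints and 'add' permissions
    rset = person.unrelated('works_for', 'Company', role='subject', limit=20)
    candidates = list(rset.entities())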
+ """ + return self._cw_related_cache.get('%s_%s' % (rtype, role)) + + def cw_set_relation_cache(self, rtype, role, rset): + """set cached values for the given relation""" + if rset: + related = list(rset.entities(0)) + rschema = self._cw.vreg.schema.rschema(rtype) + if role == 'subject': + rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1] + target = 'object' + else: + rcard = rschema.rdef(related[0].e_schema, self.e_schema).cardinality[0] + target = 'subject' + if rcard in '?1': + for rentity in related: + rentity._cw_related_cache['%s_%s' % (rtype, target)] = ( + self.as_rset(), (self,)) + else: + related = () + self._cw_related_cache['%s_%s' % (rtype, role)] = (rset, related) + + def cw_clear_relation_cache(self, rtype=None, role=None): + """clear cached values for the given relation or the entire cache if + no relation is given + """ + if rtype is None: + self._cw_related_cache.clear() + self._cw_adapters_cache.clear() + else: + assert role + self._cw_related_cache.pop('%s_%s' % (rtype, role), None) + + def cw_clear_all_caches(self): + """flush all caches on this entity. Further attributes/relations access + will triggers new database queries to get back values. + + If you use custom caches on your entity class (take care to @cached!), + you should override this method to clear them as well. + """ + # clear attributes cache + self._cw_completed = False + self.cw_attr_cache.clear() + # clear relations cache + self.cw_clear_relation_cache() + # rest path unique cache + try: + del self.__unique + except AttributeError: + pass + + # raw edition utilities ################################################### + + def cw_set(self, **kwargs): + """update this entity using given attributes / relation, working in the + same fashion as :meth:`cw_instantiate`. + + Example (in a shell session): + + >>> c = rql('Any X WHERE X is Company').get_entity(0, 0) + >>> p = rql('Any X WHERE X is Person').get_entity(0, 0) + >>> c.cw_set(name=u'Logilab') + >>> p.cw_set(firstname=u'John', lastname=u'Doe', works_for=c) + + You can also set relations where the entity has 'object' role by + prefixing the relation name by 'reverse_'. Also, relation values may be + an entity or eid, a list of entities or eids, or None (meaning that all + relations of the given type from or to this object should be deleted). 
+ """ + assert kwargs + assert self.cw_is_saved(), "should not call set_attributes while entity "\ + "hasn't been saved yet" + rql, qargs, pendingrels, attrcache = self._cw_build_entity_query(kwargs) + if rql: + rql = 'SET ' + rql + qargs['x'] = self.eid + if ' WHERE ' in rql: + rql += ', X eid %(x)s' + else: + rql += ' WHERE X eid %(x)s' + self._cw.execute(rql, qargs) + # update current local object _after_ the rql query to avoid + # interferences between the query execution itself and the cw_edited / + # skip_security machinery + self._cw_update_attr_cache(attrcache) + self._cw_handle_pending_relations(self.eid, pendingrels, self._cw.execute) + # XXX update relation cache + + def cw_delete(self, **kwargs): + assert self.has_eid(), self.eid + self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, + {'x': self.eid}, **kwargs) + + # server side utilities #################################################### + + def _cw_clear_local_perm_cache(self, action): + for rqlexpr in self.e_schema.get_rqlexprs(action): + self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) + + # deprecated stuff ######################################################### + + @deprecated('[3.16] use cw_set() instead of set_attributes()') + def set_attributes(self, **kwargs): # XXX cw_set_attributes + if kwargs: + self.cw_set(**kwargs) + + @deprecated('[3.16] use cw_set() instead of set_relations()') + def set_relations(self, **kwargs): # XXX cw_set_relations + """add relations to the given object. To set a relation where this entity + is the object of the relation, use 'reverse_' as argument name. + + Values may be an entity or eid, a list of entities or eids, or None + (meaning that all relations of the given type from or to this object + should be deleted). + """ + if kwargs: + self.cw_set(**kwargs) + + @deprecated('[3.13] use entity.cw_clear_all_caches()') + def clear_all_caches(self): + return self.cw_clear_all_caches() + + +# attribute and relation descriptors ########################################## + +class Attribute(object): + """descriptor that controls schema attribute access""" + + def __init__(self, attrname): + assert attrname != 'eid' + self._attrname = attrname + + def __get__(self, eobj, eclass): + if eobj is None: + return self + return eobj.cw_attr_value(self._attrname) + + @deprecated('[3.10] assign to entity.cw_attr_cache[attr] or entity.cw_edited[attr]') + def __set__(self, eobj, value): + if hasattr(eobj, 'cw_edited') and not eobj.cw_edited.saved: + eobj.cw_edited[self._attrname] = value + else: + eobj.cw_attr_cache[self._attrname] = value + + +class Relation(object): + """descriptor that controls schema relation access""" + + def __init__(self, rschema, role): + self._rtype = rschema.type + self._role = role + + def __get__(self, eobj, eclass): + if eobj is None: + raise AttributeError('%s can only be accessed from instances' + % self._rtype) + return eobj.related(self._rtype, self._role, entities=True) + + def __set__(self, eobj, value): + raise NotImplementedError + + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(Entity, getLogger('cubicweb.entity')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,20 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" CW - nevow/twisted client + +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/http.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/http.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,43 @@ +"""twisted server for CubicWeb web instances + +:organization: Logilab +:copyright: 2001-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +__docformat__ = "restructuredtext en" + +class HTTPResponse(object): + """An object representing an HTTP Response to be sent to the client. + """ + def __init__(self, twisted_request, code=None, headers=None, stream=None): + self._headers_out = headers + self._twreq = twisted_request + self._stream = stream + self._code = code + + self._init_headers() + self._finalize() + + def _init_headers(self): + if self._headers_out is None: + return + # initialize headers + for k, values in self._headers_out.getAllRawHeaders(): + self._twreq.responseHeaders.setRawHeaders(k, values) + # add content-length if not present + if (self._headers_out.getHeader('content-length') is None + and self._stream is not None): + self._twreq.setHeader('content-length', len(self._stream)) + + def _finalize(self): + # we must set code before writing anything, else it's too late + if self._code is not None: + self._twreq.setResponseCode(self._code) + if self._stream is not None: + self._twreq.write(str(self._stream)) + self._twreq.finish() + + def __repr__(self): + return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/request.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/request.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,59 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""Twisted request handler for CubicWeb""" + +__docformat__ = "restructuredtext en" + + +from cubicweb.web.request import CubicWebRequestBase + + +class CubicWebTwistedRequestAdapter(CubicWebRequestBase): + """ from twisted .req to cubicweb .form + req.files are put into .form[] + """ + def __init__(self, req, vreg, https): + self._twreq = req + super(CubicWebTwistedRequestAdapter, self).__init__( + vreg, https, req.args, headers=req.received_headers) + for key, name_stream_list in req.files.items(): + for name, stream in name_stream_list: + if name is not None: + name = unicode(name, self.encoding) + self.form.setdefault(key, []).append((name, stream)) + # 3.16.4 backward compat + if len(self.form[key]) == 1: + self.form[key] = self.form[key][0] + self.content = self._twreq.content # stream + + def http_method(self): + """returns 'POST', 'GET', 'HEAD', etc.""" + return self._twreq.method + + def relative_path(self, includeparams=True): + """return the normalized path of the request (ie at least relative to + the instance's root, but some other normalization may be needed so that + the returned path may be used to compare to generated urls + + :param includeparams: + boolean indicating if GET form parameters should be kept in the path + """ + path = self._twreq.uri[1:] # remove the root '/' + if not includeparams: + path = path.split('?', 1)[0] + return path diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/server.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/server.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,298 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""twisted server for CubicWeb web instances""" +__docformat__ = "restructuredtext en" + +import sys +import select +import traceback +import threading +from cgi import FieldStorage, parse_header + +from six.moves.urllib.parse import urlsplit, urlunsplit + +from cubicweb.statsd_logger import statsd_timeit + +from twisted.internet import reactor, task, threads +from twisted.web import http, server +from twisted.web import resource +from twisted.web.server import NOT_DONE_YET + + +from logilab.mtconverter import xml_escape +from logilab.common.decorators import monkeypatch + +from cubicweb import ConfigurationError, CW_EVENT_MANAGER +from cubicweb.utils import json_dumps +from cubicweb.web import DirectResponse +from cubicweb.web.application import CubicWebPublisher +from cubicweb.etwist.request import CubicWebTwistedRequestAdapter +from cubicweb.etwist.http import HTTPResponse + +def start_task(interval, func): + lc = task.LoopingCall(func) + # wait until interval has expired to actually start the task, else we have + # to wait all tasks to be finished for the server to be actually started + lc.start(interval, now=False) + + +class CubicWebRootResource(resource.Resource): + def __init__(self, config, repo): + resource.Resource.__init__(self) + self.config = config + # instantiate publisher here and not in init_publisher to get some + # checks done before daemonization (eg versions consistency) + self.appli = CubicWebPublisher(repo, config) + self.base_url = config['base-url'] + self.https_url = config['https-url'] + global MAX_POST_LENGTH + MAX_POST_LENGTH = config['max-post-length'] + + def init_publisher(self): + config = self.config + # when we have an in-memory repository, clean unused sessions every XX + # seconds and properly shutdown the server + if config['repository-uri'] == 'inmemory://': + if config.mode != 'test': + reactor.addSystemEventTrigger('before', 'shutdown', + self.shutdown_event) + self.appli.repo.start_looping_tasks() + self.set_url_rewriter() + CW_EVENT_MANAGER.bind('after-registry-reload', self.set_url_rewriter) + + def start_service(self): + start_task(self.appli.session_handler.clean_sessions_interval, + self.appli.session_handler.clean_sessions) + + def set_url_rewriter(self): + self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter') + + def shutdown_event(self): + """callback fired when the server is shutting down to properly + clean opened sessions + """ + self.appli.repo.shutdown() + + def getChild(self, path, request): + """Indicate which resource to use to process down the URL's path""" + return self + + def render(self, request): + """Render a page from the root resource""" + # reload modified files in debug mode + if self.config.debugmode: + self.config.uiprops.reload_if_needed() + if self.https_url: + self.config.https_uiprops.reload_if_needed() + self.appli.vreg.reload_if_needed() + if self.config['profile']: # default profiler don't trace threads + return self.render_request(request) + else: + deferred = threads.deferToThread(self.render_request, request) + return NOT_DONE_YET + + @statsd_timeit + def render_request(self, request): + try: + # processing HUGE files (hundred of megabytes) in http.processReceived + # blocks other HTTP requests processing + # due to the clumsy & slow parsing algorithm of cgi.FieldStorage + # so we deferred that part to the cubicweb thread + request.process_multipart() + return self._render_request(request) + except Exception: + trace = traceback.format_exc() + return HTTPResponse(stream='
<div class="pyerror">%s</div>
' % xml_escape(trace), + code=500, twisted_request=request) + + def _render_request(self, request): + origpath = request.path + host = request.host + # dual http/https access handling: expect a rewrite rule to prepend + # 'https' to the path to detect https access + https = False + if origpath.split('/', 2)[1] == 'https': + origpath = origpath[6:] + request.uri = request.uri[6:] + https = True + if self.url_rewriter is not None: + # XXX should occur before authentication? + path = self.url_rewriter.rewrite(host, origpath, request) + request.uri.replace(origpath, path, 1) + else: + path = origpath + req = CubicWebTwistedRequestAdapter(request, self.appli.vreg, https) + try: + ### Try to generate the actual request content + content = self.appli.handle_request(req, path) + except DirectResponse as ex: + return ex.response + # at last: create twisted object + return HTTPResponse(code = req.status_out, + headers = req.headers_out, + stream = content, + twisted_request=req._twreq) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + @classmethod + def debug(cls, msg, *a, **kw): + pass + info = warning = error = critical = exception = debug + + +JSON_PATHS = set(('json',)) +FRAME_POST_PATHS = set(('validateform',)) + +orig_gotLength = http.Request.gotLength +@monkeypatch(http.Request) +def gotLength(self, length): + orig_gotLength(self, length) + if length > MAX_POST_LENGTH: # length is 0 on GET + path = self.channel._path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1] + self.clientproto = 'HTTP/1.1' # not yet initialized + self.channel.persistent = 0 # force connection close on cleanup + self.setResponseCode(http.REQUEST_ENTITY_TOO_LARGE) + if path in JSON_PATHS: # XXX better json path detection + self.setHeader('content-type',"application/json") + body = json_dumps({'reason': 'request max size exceeded'}) + elif path in FRAME_POST_PATHS: # XXX better frame post path detection + self.setHeader('content-type',"text/html") + body = ('' % json_dumps( (False, 'request max size exceeded', None) )) + else: + self.setHeader('content-type',"text/html") + body = ("Processing Failed" + "request max size exceeded") + self.setHeader('content-length', str(len(body))) + self.write(body) + # see request.finish(). Done here since we get error due to not full + # initialized request + self.finished = 1 + if not self.queued: + self._cleanup() + for d in self.notifications: + d.callback(None) + self.notifications = [] + +@monkeypatch(http.Request) +def requestReceived(self, command, path, version): + """Called by channel when all data has been received. + + This method is not intended for users. 
+ """ + self.content.seek(0, 0) + self.args = {} + self.files = {} + self.stack = [] + self.method, self.uri = command, path + self.clientproto = version + x = self.uri.split('?', 1) + if len(x) == 1: + self.path = self.uri + else: + self.path, argstring = x + self.args = http.parse_qs(argstring, 1) + # cache the client and server information, we'll need this later to be + # serialized and sent with the request so CGIs will work remotely + self.client = self.channel.transport.getPeer() + self.host = self.channel.transport.getHost() + # Argument processing + ctype = self.getHeader('content-type') + self._do_process_multipart = False + if self.method == "POST" and ctype: + key, pdict = parse_header(ctype) + if key == 'application/x-www-form-urlencoded': + self.args.update(http.parse_qs(self.content.read(), 1)) + self.content.seek(0) + elif key == 'multipart/form-data': + # defer this as it can be extremely time consumming + # with big files + self._do_process_multipart = True + self.process() + +@monkeypatch(http.Request) +def process_multipart(self): + if not self._do_process_multipart: + return + form = FieldStorage(self.content, self.received_headers, + environ={'REQUEST_METHOD': 'POST'}, + keep_blank_values=1, + strict_parsing=1) + for key in form: + values = form[key] + if not isinstance(values, list): + values = [values] + for value in values: + if value.filename: + if value.done != -1: # -1 is transfer has been interrupted + self.files.setdefault(key, []).append((value.filename, value.file)) + else: + self.files.setdefault(key, []).append((None, None)) + else: + self.args.setdefault(key, []).append(value.value) + +from logging import getLogger +from cubicweb import set_log_methods +LOGGER = getLogger('cubicweb.twisted') +set_log_methods(CubicWebRootResource, LOGGER) + +def run(config, debug=None, repo=None): + # repo may by passed during test. + # + # Test has already created a repo object so we should not create a new one. + # Explicitly passing the repo object avoid relying on the fragile + # config.repository() cache. We could imagine making repo a mandatory + # argument and receives it from the starting command directly. 
+ if debug is not None: + config.debugmode = debug + config.check_writeable_uid_directory(config.appdatahome) + # create the site + if repo is None: + repo = config.repository() + root_resource = CubicWebRootResource(config, repo) + website = server.Site(root_resource) + # serve it via standard HTTP on port set in the configuration + port = config['port'] or 8080 + interface = config['interface'] + reactor.suggestThreadPoolSize(config['webserver-threadpool-size']) + reactor.listenTCP(port, website, interface=interface) + if not config.debugmode: + if sys.platform == 'win32': + raise ConfigurationError("Under windows, you must use the service management " + "commands (e.g : 'net start my_instance)'") + from logilab.common.daemon import daemonize + LOGGER.info('instance started in the background on %s', root_resource.base_url) + whichproc = daemonize(config['pid-file'], umask=config['umask']) + if whichproc: # 1 = orig process, 2 = first fork, None = second fork (eg daemon process) + return whichproc # parent process + root_resource.init_publisher() # before changing uid + if config['uid'] is not None: + from logilab.common.daemon import setugid + setugid(config['uid']) + root_resource.start_service() + LOGGER.info('instance started on %s', root_resource.base_url) + # avoid annoying warnign if not in Main Thread + signals = threading.currentThread().getName() == 'MainThread' + if config['profile']: + import cProfile + cProfile.runctx('reactor.run(installSignalHandlers=%s)' % signals, + globals(), locals(), config['profile']) + else: + reactor.run(installSignalHandlers=signals) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/service.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/service.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,99 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from __future__ import print_function + +import os +import sys + +try: + import win32serviceutil + import win32service +except ImportError: + print('Win32 extensions for Python are likely not installed.') + sys.exit(3) + +from os.path import join + +from cubicweb.etwist.server import (CubicWebRootResource, reactor, server) + +from logilab.common.shellutils import rm + +import logging +from logging import getLogger, handlers +from cubicweb import set_log_methods +from cubicweb.cwconfig import CubicWebConfiguration as cwcfg + +def _check_env(env): + env_vars = ('CW_INSTANCES_DIR', 'CW_INSTANCES_DATA_DIR', 'CW_RUNTIME_DIR') + for var in env_vars: + if var not in env: + raise Exception('The environment variables %s must be set.' 
% \ + ', '.join(env_vars)) + if not env.get('USERNAME'): + env['USERNAME'] = 'cubicweb' + +class CWService(object, win32serviceutil.ServiceFramework): + _svc_name_ = None + _svc_display_name_ = None + instance = None + + def __init__(self, *args, **kwargs): + win32serviceutil.ServiceFramework.__init__(self, *args, **kwargs) + cwcfg.load_cwctl_plugins() + logger = getLogger('cubicweb') + set_log_methods(CubicWebRootResource, logger) + + def SvcStop(self): + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) + logger = getLogger('cubicweb.twisted') + logger.info('stopping %s service' % self.instance) + reactor.stop() + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) + + def SvcDoRun(self): + self.ReportServiceStatus(win32service.SERVICE_START_PENDING) + logger = getLogger('cubicweb.twisted') + handler = handlers.NTEventLogHandler('cubicweb') + handler.setLevel(logging.INFO) + logger.addHandler(handler) + logger.info('starting %s service' % self.instance) + try: + _check_env(os.environ) + # create the site + config = cwcfg.config_for(self.instance) + config.init_log(force=True) + config.debugmode = False + logger.info('starting cubicweb instance %s ', self.instance) + config.info('clear ui caches') + for cachedir in ('uicache', 'uicachehttps'): + rm(join(config.appdatahome, cachedir, '*')) + root_resource = CubicWebRootResource(config, config.repository()) + website = server.Site(root_resource) + # serve it via standard HTTP on port set in the configuration + port = config['port'] or 8080 + logger.info('listening on port %s' % port) + reactor.listenTCP(port, website) + root_resource.init_publisher() + root_resource.start_service() + logger.info('instance started on %s', root_resource.base_url) + self.ReportServiceStatus(win32service.SERVICE_RUNNING) + reactor.run() + except Exception as e: + logger.error('service %s stopped (cause: %s)' % (self.instance, e)) + logger.exception('what happened ...') + self.ReportServiceStatus(win32service.SERVICE_STOPPED) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/test/data/views.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/test/data/views.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,29 @@ +# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
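For completeness, a hedged sketch of how a concrete Windows service is usually declared on top of the CWService base above (service and instance names are assumptions; HandleCommandLine is the standard pywin32 helper):

    import win32serviceutil

    class MyInstanceService(CWService):
        _svc_name_ = 'cubicweb-myinstance'
        _svc_display_name_ = 'CubicWeb myinstance'
        instance = 'myinstance'

    if __name__ == '__main__':
        win32serviceutil.HandleCommandLine(MyInstanceService)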
+"""only for unit tests !""" + +from cubicweb.view import View +from cubicweb.predicates import match_http_method + +class PutView(View): + __regid__ = 'put' + __select__ = match_http_method('PUT') | match_http_method('POST') + binary = True + + def call(self): + self.w(self._cw.content.read()) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +Twisted diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/test/unittest_server.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/test/unittest_server.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,38 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +import os, os.path as osp, glob +import urllib + +from cubicweb.devtools.httptest import CubicWebServerTC + + +class ETwistHTTPTC(CubicWebServerTC): + def test_put_content(self): + data = {'hip': 'hop'} + headers = {'Content-Type': 'application/x-www-form-urlencoded'} + body = urllib.urlencode(data) + response = self.web_request('?vid=put', method='PUT', body=body) + self.assertEqual(body, response.body) + response = self.web_request('?vid=put', method='POST', body=body, + headers=headers) + self.assertEqual(body, response.body) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/twconfig.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/twconfig.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,115 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""twisted server configurations: + +* the "all-in-one" configuration to get a web instance running in a twisted + web server integrating a repository server in the same process (only available + if the repository part of the software is installed +""" +__docformat__ = "restructuredtext en" + +from os.path import join + +from logilab.common.configuration import Method, merge_options + +from cubicweb.cwconfig import CONFIGURATIONS +from cubicweb.web.webconfig import WebConfiguration + + +class WebConfigurationBase(WebConfiguration): + """web instance (in a twisted web server) client of a RQL server""" + + options = merge_options(( + # ctl configuration + ('port', + {'type' : 'int', + 'default': None, + 'help': 'http server port number (default to 8080)', + 'group': 'web', 'level': 0, + }), + ('interface', + {'type' : 'string', + 'default': "", + 'help': 'http server address on which to listen (default to everywhere)', + 'group': 'web', 'level': 1, + }), + ('max-post-length', + {'type' : 'bytes', + 'default': '100MB', + 'help': 'maximum length of HTTP request. Default to 100 MB.', + 'group': 'web', 'level': 1, + }), + ('profile', + {'type' : 'string', + 'default': None, + 'help': 'profile code and use the specified file to store stats if this option is set', + 'group': 'web', 'level': 3, + }), + ('host', + {'type' : 'string', + 'default': None, + 'help': 'host name if not correctly detectable through gethostname', + 'group': 'main', 'level': 1, + }), + ('pid-file', + {'type' : 'string', + 'default': Method('default_pid_file'), + 'help': 'repository\'s pid file', + 'group': 'main', 'level': 2, + }), + ('uid', + {'type' : 'string', + 'default': None, + 'help': 'if this option is set, use the specified user to start \ +the repository rather than the user running the command', + 'group': 'main', 'level': WebConfiguration.mode == 'system' + }), + ('webserver-threadpool-size', + {'type': 'int', + 'default': 4, + 'help': "size of twisted's reactor threadpool. It should probably be not too \ +much greater than connection-poolsize", + 'group': 'web', 'level': 3, + }), + ) + WebConfiguration.options) + + def server_file(self): + return join(self.apphome, '%s-%s.py' % (self.appid, self.name)) + + def default_base_url(self): + from socket import getfqdn + return 'http://%s:%s/' % (self['host'] or getfqdn().lower(), self['port'] or 8080) + + +try: + from cubicweb.server.serverconfig import ServerConfiguration + + class AllInOneConfiguration(WebConfigurationBase, ServerConfiguration): + """repository and web instance in the same twisted process""" + name = 'all-in-one' + options = merge_options(WebConfigurationBase.options + + ServerConfiguration.options) + + cubicweb_appobject_path = WebConfigurationBase.cubicweb_appobject_path | ServerConfiguration.cubicweb_appobject_path + cube_appobject_path = WebConfigurationBase.cube_appobject_path | ServerConfiguration.cube_appobject_path + + + CONFIGURATIONS.append(AllInOneConfiguration) + +except ImportError: + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/etwist/twctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/etwist/twctl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,79 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-clt handlers for twisted""" + +from cubicweb.toolsutils import CommandHandler +from cubicweb.web.webctl import WebCreateHandler, WebUpgradeHandler + +# trigger configuration registration +import cubicweb.etwist.twconfig # pylint: disable=W0611 + +class TWCreateHandler(WebCreateHandler): + cfgname = 'twisted' + +class TWStartHandler(CommandHandler): + cmdname = 'start' + cfgname = 'twisted' + + def start_server(self, config): + from cubicweb.etwist import server + return server.run(config) + +class TWStopHandler(CommandHandler): + cmdname = 'stop' + cfgname = 'twisted' + + def poststop(self): + pass + +class TWUpgradeHandler(WebUpgradeHandler): + cfgname = 'twisted' + + +try: + from cubicweb.server import serverctl + class AllInOneCreateHandler(serverctl.RepositoryCreateHandler, + TWCreateHandler): + """configuration to get an instance running in a twisted web server + integrating a repository server in the same process + """ + cfgname = 'all-in-one' + + def bootstrap(self, cubes, automatic=False, inputlevel=0): + """bootstrap this configuration""" + serverctl.RepositoryCreateHandler.bootstrap(self, cubes, automatic, inputlevel) + TWCreateHandler.bootstrap(self, cubes, automatic, inputlevel) + + class AllInOneStartHandler(TWStartHandler): + cmdname = 'start' + cfgname = 'all-in-one' + subcommand = 'cubicweb-twisted' + + class AllInOneStopHandler(CommandHandler): + cmdname = 'stop' + cfgname = 'all-in-one' + subcommand = 'cubicweb-twisted' + + def poststop(self): + pass + + class AllInOneUpgradeHandler(TWUpgradeHandler): + cfgname = 'all-in-one' + +except ImportError: + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/html4zope.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/html4zope.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,174 @@ +# Author: David Goodger +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +# Contact: goodger@users.sourceforge.net +# Revision: $Revision: 1.2 $ +# Date: $Date: 2005-07-04 16:36:50 $ +# Copyright: This module has been placed in the public domain. + +""" +Simple HyperText Markup Language document tree Writer. + +The output conforms to the HTML 4.01 Transitional DTD and to the Extensible +HTML version 1.0 Transitional DTD (*almost* strict). The output contains a +minimum of formatting information. A cascading style sheet ("default.css" by +default) is required for proper viewing with a modern graphical browser. + +http://cvs.zope.org/Zope/lib/python/docutils/writers/Attic/html4zope.py?rev=1.1.2.2&only_with_tag=ajung-restructuredtext-integration-branch&content-type=text/vnd.viewcvs-markup +""" + +__docformat__ = 'reStructuredText' + +import os + +from logilab.mtconverter import xml_escape + +from docutils import nodes +from docutils.writers.html4css1 import Writer as CSS1Writer +from docutils.writers.html4css1 import HTMLTranslator as CSS1HTMLTranslator + +default_level = int(os.environ.get('STX_DEFAULT_LEVEL', 3)) + +class Writer(CSS1Writer): + """css writer using our html translator""" + def __init__(self, base_url): + CSS1Writer.__init__(self) + self.translator_class = URLBinder(base_url, HTMLTranslator) + + def apply_template(self): + """overriding this is necessary with docutils >= 0.5""" + return self.visitor.astext() + +class URLBinder: + def __init__(self, url, klass): + self.base_url = url + self.translator_class = HTMLTranslator + + def __call__(self, document): + translator = self.translator_class(document) + translator.base_url = self.base_url + return translator + +class HTMLTranslator(CSS1HTMLTranslator): + """ReST tree to html translator""" + + def astext(self): + """return the extracted html""" + return ''.join(self.body) + + def visit_title(self, node): + """Only 6 section levels are supported by HTML.""" + if isinstance(node.parent, nodes.topic): + self.body.append( + self.starttag(node, 'p', '', CLASS='topic-title')) + if node.parent.hasattr('id'): + self.body.append( + self.starttag({}, 'a', '', name=node.parent['id'])) + self.context.append('

</a></p>\n')
+            else:
+                self.context.append('</p>\n')
+        elif self.section_level == 0:
+            # document title
+            self.head.append('<title>%s</title>\n'
+                             % self.encode(node.astext()))
+            self.body.append(self.starttag(node, 'h%d' % default_level, '',
+                                           CLASS='title'))
+            self.context.append('</h%d>\n' % default_level)
+        else:
+            self.body.append(
+                  self.starttag(node, 'h%s' % (
+                default_level+self.section_level-1), ''))
+            atts = {}
+            if node.hasattr('refid'):
+                atts['class'] = 'toc-backref'
+                atts['href'] = '%s#%s' % (self.base_url, node['refid'])
+            self.body.append(self.starttag({}, 'a', '', **atts))
+            self.context.append('</a></h%s>\n' % (
+                default_level+self.section_level-1))
+
+    def visit_subtitle(self, node):
+        """format a subtitle"""
+        if isinstance(node.parent, nodes.sidebar):
+            self.body.append(self.starttag(node, 'p', '',
+                                           CLASS='sidebar-subtitle'))
+            self.context.append('</p>\n')
+        else:
+            self.body.append(
+                  self.starttag(node, 'h%s' % (default_level+1), '',
+                                CLASS='subtitle'))
+            self.context.append('</h%s>\n' % (default_level+1))
+
+    def visit_document(self, node):
+        """syt: i don't want the enclosing <div class="document">"""
+
+    def depart_document(self, node):
+        """syt: i don't want the enclosing </div>"""
+
+    def visit_reference(self, node):
+        """syt: i want absolute urls"""
+        if 'refuri' in node:
+            href = node['refuri']
+            if ( self.settings.cloak_email_addresses
+                 and href.startswith('mailto:')):
+                href = self.cloak_mailto(href)
+                self.in_mailto = 1
+        else:
+            assert 'refid' in node, \
+                   'References must have "refuri" or "refid" attribute.'
+            href = '%s#%s' % (self.base_url, node['refid'])
+        atts = {'href': href, 'class': 'reference'}
+        if not isinstance(node.parent, nodes.TextElement):
+            assert len(node) == 1 and isinstance(node[0], nodes.image)
+            atts['class'] += ' image-reference'
+        self.body.append(self.starttag(node, 'a', '', **atts))
+
+    ## override error messages to avoid XHTML problems ########################
+    def visit_problematic(self, node):
+        pass
+
+    def depart_problematic(self, node):
+        pass
+
+    def visit_system_message(self, node):
+        backref_text = ''
+        if len(node['backrefs']):
+            backrefs = node['backrefs']
+            if len(backrefs) == 1:
+                backref_text = '; backlink'
+            else:
+                i = 1
+                backlinks = []
+                for backref in backrefs:
+                    backlinks.append(str(i))
+                    i += 1
+                backref_text = ('; backlinks: %s'
+                                % ', '.join(backlinks))
+        if node.hasattr('line'):
+            line = ', line %s' % node['line']
+        else:
+            line = ''
+        a_start = a_end = ''
+        error = u'System Message: %s%s/%s%s (%s %s)%s\n' % (
+            a_start, node['type'], node['level'], a_end,
+            self.encode(node['source']), line, backref_text)
+        self.body.append(u'ReST / HTML errors:%s
' % xml_escape(error)) + + def depart_system_message(self, node): + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/markdown.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/markdown.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,27 @@ +from __future__ import absolute_import +import markdown + +import logging + +log = logging.getLogger(__name__) + + +def markdown_publish(context, data): + """publish a string formatted as MarkDown Text to HTML + + :type context: a cubicweb application object + + :type data: str + :param data: some MarkDown text + + :rtype: unicode + :return: + the data formatted as HTML or the original data if an error occurred + """ + md = markdown.Markdown() + try: + return md.convert(data) + except: + import traceback; traceback.print_exc() + log.exception("Error while converting Markdown to HTML") + return data diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/rest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/rest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,469 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""rest publishing functions + +contains some functions and setup of docutils for cubicweb. Provides the +following ReST directives: + +* `eid`, create link to entity in the repository by their eid + +* `card`, create link to card entity in the repository by their wikiid + (proposing to create it when the refered card doesn't exist yet) + +* `winclude`, reference to a web documentation file (in wdoc/ directories) + +* `sourcecode` (if pygments is installed), source code colorization + +* `rql-table`, create a table from a RQL query + +""" +__docformat__ = "restructuredtext en" + +import sys +from itertools import chain +from logging import getLogger +from os.path import join + +from six import text_type +from six.moves.urllib.parse import urlsplit + +from docutils import statemachine, nodes, utils, io +from docutils.core import Publisher +from docutils.parsers.rst import Parser, states, directives, Directive +from docutils.parsers.rst.roles import register_canonical_role, set_classes + +from logilab.mtconverter import ESC_UCAR_TABLE, ESC_CAR_TABLE, xml_escape + +from cubicweb import UnknownEid +from cubicweb.ext.html4zope import Writer + +from cubicweb.web.views import vid_from_rset # XXX better not to import c.w.views here... 
+ +# We provide our own parser as an attempt to get rid of +# state machine reinstanciation + +import re +# compile states.Body patterns +for k, v in states.Body.patterns.items(): + if isinstance(v, str): + states.Body.patterns[k] = re.compile(v) + +# register ReStructured Text mimetype / extensions +import mimetypes +mimetypes.add_type('text/rest', '.rest') +mimetypes.add_type('text/rest', '.rst') + + +LOGGER = getLogger('cubicweb.rest') + + +def eid_reference_role(role, rawtext, text, lineno, inliner, + options={}, content=[]): + try: + try: + eid_num, rest = text.split(u':', 1) + except ValueError: + eid_num, rest = text, '#'+text + eid_num = int(eid_num) + if eid_num < 0: + raise ValueError + except ValueError: + msg = inliner.reporter.error( + 'EID number must be a positive number; "%s" is invalid.' + % text, line=lineno) + prb = inliner.problematic(rawtext, rawtext, msg) + return [prb], [msg] + # Base URL mainly used by inliner.pep_reference; so this is correct: + context = inliner.document.settings.context + try: + refedentity = context._cw.entity_from_eid(eid_num) + except UnknownEid: + ref = '#' + rest += u' ' + context._cw._('(UNEXISTANT EID)') + else: + ref = refedentity.absolute_url() + set_classes(options) + return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref, + **options)], [] + + +def rql_role(role, rawtext, text, lineno, inliner, options={}, content=[]): + """``:rql:```` or ``:rql:`:``` + + Example: ``:rql:`Any X,Y WHERE X is CWUser, X login Y:table``` + + Replace the directive with the output of applying the view to the resultset + returned by the query. + + "X eid %(userid)s" can be used in the RQL query for this query will be + executed with the argument {'userid': _cw.user.eid}. + """ + _cw = inliner.document.settings.context._cw + text = text.strip() + if ':' in text: + rql, vid = text.rsplit(u':', 1) + rql = rql.strip() + else: + rql, vid = text, None + _cw.ensure_ro_rql(rql) + try: + rset = _cw.execute(rql, {'userid': _cw.user.eid}) + if rset: + if vid is None: + vid = vid_from_rset(_cw, rset, _cw.vreg.schema) + else: + vid = 'noresult' + view = _cw.vreg['views'].select(vid, _cw, rset=rset) + content = view.render() + except Exception as exc: + content = 'an error occurred while interpreting this rql directive: %r' % exc + set_classes(options) + return [nodes.raw('', content, format='html')], [] + + +def bookmark_role(role, rawtext, text, lineno, inliner, options={}, content=[]): + """``:bookmark:```` or ``:bookmark:`:``` + + Example: ``:bookmark:`1234:table``` + + Replace the directive with the output of applying the view to the resultset + returned by the query stored in the bookmark. By default, the view is the one + stored in the bookmark, but it can be overridden by the directive as in the + example above. + + "X eid %(userid)s" can be used in the RQL query stored in the Bookmark, for + this query will be executed with the argument {'userid': _cw.user.eid}. + """ + _cw = inliner.document.settings.context._cw + text = text.strip() + try: + if ':' in text: + eid, vid = text.rsplit(u':', 1) + eid = int(eid) + else: + eid, vid = int(text), None + except ValueError: + msg = inliner.reporter.error( + 'EID number must be a positive number; "%s" is invalid.' + % text, line=lineno) + prb = inliner.problematic(rawtext, rawtext, msg) + return [prb], [msg] + try: + bookmark = _cw.entity_from_eid(eid) + except UnknownEid: + msg = inliner.reporter.error('Unknown EID %s.' 
% text, line=lineno) + prb = inliner.problematic(rawtext, rawtext, msg) + return [prb], [msg] + try: + params = dict(_cw.url_parse_qsl(urlsplit(bookmark.path).query)) + rql = params['rql'] + if vid is None: + vid = params.get('vid') + except (ValueError, KeyError) as exc: + msg = inliner.reporter.error('Could not parse bookmark path %s [%s].' + % (bookmark.path, exc), line=lineno) + prb = inliner.problematic(rawtext, rawtext, msg) + return [prb], [msg] + try: + rset = _cw.execute(rql, {'userid': _cw.user.eid}) + if rset: + if vid is None: + vid = vid_from_rset(_cw, rset, _cw.vreg.schema) + else: + vid = 'noresult' + view = _cw.vreg['views'].select(vid, _cw, rset=rset) + content = view.render() + except Exception as exc: + content = 'An error occurred while interpreting directive bookmark: %r' % exc + set_classes(options) + return [nodes.raw('', content, format='html')], [] + + +def winclude_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + """Include a reST file as part of the content of this reST file. + + same as standard include directive but using config.locate_doc_resource to + get actual file to include. + + Most part of this implementation is copied from `include` directive defined + in `docutils.parsers.rst.directives.misc` + """ + context = state.document.settings.context + cw = context._cw + source = state_machine.input_lines.source( + lineno - state_machine.input_offset - 1) + #source_dir = os.path.dirname(os.path.abspath(source)) + fid = arguments[0] + for lang in chain((cw.lang, cw.vreg.property_value('ui.language')), + cw.vreg.config.available_languages()): + rid = '%s_%s.rst' % (fid, lang) + resourcedir = cw.vreg.config.locate_doc_file(rid) + if resourcedir: + break + else: + severe = state_machine.reporter.severe( + 'Problems with "%s" directive path:\nno resource matching %s.' + % (name, fid), + nodes.literal_block(block_text, block_text), line=lineno) + return [severe] + path = join(resourcedir, rid) + encoding = options.get('encoding', state.document.settings.input_encoding) + try: + state.document.settings.record_dependencies.add(path) + include_file = io.FileInput( + source_path=path, encoding=encoding, + error_handler=state.document.settings.input_encoding_error_handler, + handle_io_errors=None) + except IOError as error: + severe = state_machine.reporter.severe( + 'Problems with "%s" directive path:\n%s: %s.' + % (name, error.__class__.__name__, error), + nodes.literal_block(block_text, block_text), line=lineno) + return [severe] + try: + include_text = include_file.read() + except UnicodeError as error: + severe = state_machine.reporter.severe( + 'Problem with "%s" directive:\n%s: %s' + % (name, error.__class__.__name__, error), + nodes.literal_block(block_text, block_text), line=lineno) + return [severe] + if 'literal' in options: + literal_block = nodes.literal_block(include_text, include_text, + source=path) + literal_block.line = 1 + return literal_block + else: + include_lines = statemachine.string2lines(include_text, + convert_whitespace=1) + state_machine.insert_input(include_lines, path) + return [] + +winclude_directive.arguments = (1, 0, 1) +winclude_directive.options = {'literal': directives.flag, + 'encoding': directives.encoding} + + +class RQLTableDirective(Directive): + """rql-table directive + + Example: + + .. rql-table:: + :vid: mytable + :headers: , , progress + :colvids: 2=progress + + Any X,U,X WHERE X is Project, X url U + + All fields but the RQL string are optionnal. 
The ``:headers:`` option can + contain empty column names. + """ + + required_arguments = 0 + optional_arguments = 0 + has_content= True + final_argument_whitespace = True + option_spec = {'vid': directives.unchanged, + 'headers': directives.unchanged, + 'colvids': directives.unchanged} + + def run(self): + errid = "rql-table directive" + self.assert_has_content() + if self.arguments: + raise self.warning('%s does not accept arguments' % errid) + rql = ' '.join([l.strip() for l in self.content]) + _cw = self.state.document.settings.context._cw + _cw.ensure_ro_rql(rql) + try: + rset = _cw.execute(rql) + except Exception as exc: + raise self.error("fail to execute RQL query in %s: %r" % + (errid, exc)) + if not rset: + raise self.warning("empty result set") + vid = self.options.get('vid', 'table') + try: + view = _cw.vreg['views'].select(vid, _cw, rset=rset) + except Exception as exc: + raise self.error("fail to select '%s' view in %s: %r" % + (vid, errid, exc)) + headers = None + if 'headers' in self.options: + headers = [h.strip() for h in self.options['headers'].split(',')] + while headers.count(''): + headers[headers.index('')] = None + if len(headers) != len(rset[0]): + raise self.error("the number of 'headers' does not match the " + "number of columns in %s" % errid) + cellvids = None + if 'colvids' in self.options: + cellvids = {} + for f in self.options['colvids'].split(','): + try: + idx, vid = f.strip().split('=') + except ValueError: + raise self.error("malformatted 'colvids' option in %s" % + errid) + cellvids[int(idx.strip())] = vid.strip() + try: + content = view.render(headers=headers, cellvids=cellvids) + except Exception as exc: + raise self.error("Error rendering %s (%s)" % (errid, exc)) + return [nodes.raw('', content, format='html')] + + +try: + from pygments import highlight + from pygments.lexers import get_lexer_by_name + from pygments.formatters.html import HtmlFormatter +except ImportError: + pygments_directive = None +else: + _PYGMENTS_FORMATTER = HtmlFormatter() + + def pygments_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + try: + lexer = get_lexer_by_name(arguments[0]) + except ValueError: + # no lexer found + lexer = get_lexer_by_name('text') + parsed = highlight(u'\n'.join(content), lexer, _PYGMENTS_FORMATTER) + # don't fail if no context set on the sourcecode directive + try: + context = state.document.settings.context + context._cw.add_css('pygments.css') + except AttributeError: + # used outside cubicweb XXX use hasattr instead + pass + return [nodes.raw('', parsed, format='html')] + + pygments_directive.arguments = (1, 0, 1) + pygments_directive.content = 1 + + +class CubicWebReSTParser(Parser): + """The (customized) reStructuredText parser.""" + + def __init__(self): + self.initial_state = 'Body' + self.state_classes = states.state_classes + self.inliner = states.Inliner() + self.statemachine = states.RSTStateMachine( + state_classes=self.state_classes, + initial_state=self.initial_state, + debug=0) + + def parse(self, inputstring, document): + """Parse `inputstring` and populate `document`, a document tree.""" + self.setup_parse(inputstring, document) + inputlines = statemachine.string2lines(inputstring, + convert_whitespace=1) + self.statemachine.run(inputlines, document, inliner=self.inliner) + self.finish_parse() + + +# XXX docutils keep a ref on context, can't find a correct way to remove it +class CWReSTPublisher(Publisher): + def __init__(self, context, settings, **kwargs): + 
Publisher.__init__(self, **kwargs) + self.set_components('standalone', 'restructuredtext', 'pseudoxml') + self.process_programmatic_settings(None, settings, None) + self.settings.context = context + + +def rest_publish(context, data): + """publish a string formatted as ReStructured Text to HTML + + :type context: a cubicweb application object + + :type data: str + :param data: some ReST text + + :rtype: unicode + :return: + the data formatted as HTML or the original data if an error occurred + """ + req = context._cw + if isinstance(data, text_type): + encoding = 'unicode' + # remove unprintable characters unauthorized in xml + data = data.translate(ESC_UCAR_TABLE) + else: + encoding = req.encoding + # remove unprintable characters unauthorized in xml + data = data.translate(ESC_CAR_TABLE) + settings = {'input_encoding': encoding, 'output_encoding': 'unicode', + 'warning_stream': False, + 'traceback': True, # don't sys.exit + 'stylesheet': None, # don't try to embed stylesheet (may cause + # obscure bug due to docutils computing + # relative path according to the directory + # used *at import time* + # dunno what's the max, severe is 4, and we never want a crash + # (though try/except may be a better option...). May be the + # above traceback option will avoid this? + 'halt_level': 10, + # disable stupid switch to colspan=2 if field name is above a size limit + 'field_name_limit': sys.maxsize, + } + if context: + if hasattr(req, 'url'): + base_url = req.url() + elif hasattr(context, 'absolute_url'): + base_url = context.absolute_url() + else: + base_url = req.base_url() + else: + base_url = None + try: + pub = CWReSTPublisher(context, settings, + parser=CubicWebReSTParser(), + writer=Writer(base_url=base_url), + source_class=io.StringInput, + destination_class=io.StringOutput) + pub.set_source(data) + pub.set_destination() + res = pub.publish(enable_exit_status=None) + # necessary for proper garbage collection, else a ref is kept somewhere in docutils... + del pub.settings.context + return res + except BaseException: + LOGGER.exception('error while publishing ReST text') + if not isinstance(data, text_type): + data = text_type(data, encoding, 'replace') + return xml_escape(req._('error while publishing ReST text') + + '\n\n' + data) + + +_INITIALIZED = False +def cw_rest_init(): + global _INITIALIZED + if _INITIALIZED: + return + _INITIALIZED = True + register_canonical_role('eid', eid_reference_role) + register_canonical_role('rql', rql_role) + register_canonical_role('bookmark', bookmark_role) + directives.register_directive('winclude', winclude_directive) + if pygments_directive is not None: + directives.register_directive('sourcecode', pygments_directive) + directives.register_directive('rql-table', RQLTableDirective) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/tal.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/tal.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,273 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""provides simpleTAL extensions for CubicWeb + +""" + +__docformat__ = "restructuredtext en" + +import sys +import re +from os.path import exists, isdir, join +from logging import getLogger +from StringIO import StringIO + +from simpletal import simpleTAL, simpleTALES + +from logilab.common.decorators import cached + +LOGGER = getLogger('cubicweb.tal') + + +class LoggerAdapter(object): + def __init__(self, tal_logger): + self.tal_logger = tal_logger + + def debug(self, msg): + LOGGER.debug(msg) + + def warn(self, msg): + LOGGER.warning(msg) + + def __getattr__(self, attrname): + return getattr(self.tal_logger, attrname) + + +class CubicWebContext(simpleTALES.Context): + """add facilities to access entity / resultset""" + + def __init__(self, options=None, allowPythonPath=1): + simpleTALES.Context.__init__(self, options, allowPythonPath) + self.log = LoggerAdapter(self.log) + + def update(self, context): + for varname, value in context.items(): + self.addGlobal(varname, value) + + def addRepeat(self, name, var, initialValue): + simpleTALES.Context.addRepeat(self, name, var, initialValue) + +# XXX FIXME need to find a clean to define OPCODE values for extensions +I18N_CONTENT = 18 +I18N_REPLACE = 19 +RQL_EXECUTE = 20 +# simpleTAL uses the OPCODE values to define priority over commands. +# TAL_ITER should have the same priority than TAL_REPEAT (i.e. 3), but +# we can't use the same OPCODE for two different commands without changing +# the simpleTAL implementation. Another solution would be to totally override +# the REPEAT implementation with the ITER one, but some specific operations +# (involving len() for instance) are not implemented for ITER, so we prefer +# to keep both implementations for now, and to fool simpleTAL by using a float +# number between 3 and 4 +TAL_ITER = 3.1 + + +# FIX simpleTAL HTML 4.01 stupidity +# (simpleTAL never closes tags like INPUT, IMG, HR ...) +simpleTAL.HTML_FORBIDDEN_ENDTAG.clear() + +class CubicWebTemplateCompiler(simpleTAL.HTMLTemplateCompiler): + """extends default compiler by adding i18n:content commands""" + + def __init__(self): + simpleTAL.HTMLTemplateCompiler.__init__(self) + self.commandHandler[I18N_CONTENT] = self.compile_cmd_i18n_content + self.commandHandler[I18N_REPLACE] = self.compile_cmd_i18n_replace + self.commandHandler[RQL_EXECUTE] = self.compile_cmd_rql + self.commandHandler[TAL_ITER] = self.compile_cmd_tal_iter + + def setTALPrefix(self, prefix): + simpleTAL.TemplateCompiler.setTALPrefix(self, prefix) + self.tal_attribute_map['i18n:content'] = I18N_CONTENT + self.tal_attribute_map['i18n:replace'] = I18N_REPLACE + self.tal_attribute_map['rql:execute'] = RQL_EXECUTE + self.tal_attribute_map['tal:iter'] = TAL_ITER + + def compile_cmd_i18n_content(self, argument): + # XXX tal:content structure=, text= should we support this ? + structure_flag = 0 + return (I18N_CONTENT, (argument, False, structure_flag, self.endTagSymbol)) + + def compile_cmd_i18n_replace(self, argument): + # XXX tal:content structure=, text= should we support this ? 
+ structure_flag = 0 + return (I18N_CONTENT, (argument, True, structure_flag, self.endTagSymbol)) + + def compile_cmd_rql(self, argument): + return (RQL_EXECUTE, (argument, self.endTagSymbol)) + + def compile_cmd_tal_iter(self, argument): + original_id, (var_name, expression, end_tag_symbol) = \ + simpleTAL.HTMLTemplateCompiler.compileCmdRepeat(self, argument) + return (TAL_ITER, (var_name, expression, self.endTagSymbol)) + + def getTemplate(self): + return CubicWebTemplate(self.commandList, self.macroMap, self.symbolLocationTable) + + def compileCmdAttributes (self, argument): + """XXX modified to support single attribute + definition ending by a ';' + + backport this to simpleTAL + """ + # Compile tal:attributes into attribute command + # Argument: [(attributeName, expression)] + + # Break up the list of attribute settings first + commandArgs = [] + # We only want to match semi-colons that are not escaped + argumentSplitter = re.compile(r'(?. + + +from cubicweb.web.views import tableview + +class CustomRsetTableView(tableview.RsetTableView): + __regid__ = 'mytable' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +docutils diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/ext/test/unittest_rest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/ext/test/unittest_rest.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,244 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from six import PY3 + +from logilab.common.testlib import unittest_main +from cubicweb.devtools.testlib import CubicWebTC + +from cubicweb.ext.rest import rest_publish + +class RestTC(CubicWebTC): + + def context(self, req): + return req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + + def test_eid_role(self): + with self.admin_access.web_request() as req: + context = self.context(req) + self.assertEqual(rest_publish(context, ':eid:`%s`' % context.eid), + '

' + '#%s

\n' % context.eid) + self.assertEqual(rest_publish(context, ':eid:`%s:some text`' % context.eid), + '

' + 'some text

\n') + + def test_bad_rest_no_crash(self): + with self.admin_access.web_request() as req: + rest_publish(self.context(req), ''' +| card | implication | +-------------------------- +| 1-1 | N1 = N2 | +| 1-? | N1 <= N2 | +| 1-+ | N1 >= N2 | +| 1-* | N1>0 => N2>0 | +-------------------------- +| ?-? | N1 # N2 | +| ?-+ | N1 >= N2 | +| ?-* | N1 # N2 | +-------------------------- +| +-+ | N1>0 => N2>0 et | +| | N2>0 => N1>0 | +| +-* | N1>+ => N2>0 | +-------------------------- +| *-* | N1#N2 | +-------------------------- + +''') + + def test_disable_field_name_colspan(self): + with self.admin_access.web_request() as req: + context = self.context(req) + value = rest_publish(context, '''my field list: + +:a long dumb param name: value +''') + self.assertNotIn('colspan', value) + + def test_rql_role_with_vid(self): + with self.admin_access.web_request() as req: + context = self.context(req) + out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:table`') + self.assertTrue(out.endswith('anon\n' + '

\n')) + + def test_rql_role_with_vid_empty_rset(self): + with self.admin_access.web_request() as req: + context = self.context(req) + out = rest_publish(context, ':rql:`Any X WHERE X is CWUser, X login "nono":table`') + self.assertTrue(out.endswith('

' + 'No result matching query
\n

\n')) + + def test_rql_role_with_unknown_vid(self): + with self.admin_access.web_request() as req: + context = self.context(req) + out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`') + self.assertTrue(out.startswith("

an error occurred while interpreting this " + "rql directive: ObjectNotFound(%s'toto',)

" % + ('' if PY3 else 'u')), + out) + + def test_rql_role_without_vid(self): + with self.admin_access.web_request() as req: + context = self.context(req) + out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`') + self.assertEqual(out, u'

CWUser_plural

' + 'admin' + '
' + 'anon' + '

\n') + + def test_bookmark_role(self): + with self.admin_access.web_request() as req: + context = self.context(req) + rset = req.execute('INSERT Bookmark X: X title "hello", X path ' + '"/view?rql=Any X WHERE X is CWUser"') + eid = rset[0][0] + out = rest_publish(context, ':bookmark:`%s`' % eid) + self.assertEqual(out, u'

CWUser_plural

\n') + + def test_rqltable_nocontent(self): + with self.admin_access.web_request() as req: + context = self.context(req) + out = rest_publish(context, """.. rql-table::""") + self.assertIn("System Message: ERROR", out) + self.assertIn("Content block expected for the "rql-table" " + "directive; none found" , out) + + def test_rqltable_norset(self): + with self.admin_access.web_request() as req: + context = self.context(req) + rql = "Any X WHERE X is CWUser, X firstname 'franky'" + out = rest_publish( + context, """\ +.. rql-table:: + + %(rql)s""" % {'rql': rql}) + self.assertIn("System Message: WARNING", out) + self.assertIn("empty result set", out) + + def test_rqltable_nooptions(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + out = rest_publish( + self.context(req), """\ +.. rql-table:: + + %(rql)s + """ % {'rql': rql}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + self.assertEqual(view.render(w=None)[49:], out[49:]) + + def test_rqltable_vid(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + vid = 'mytable' + out = rest_publish( + self.context(req), """\ +.. rql-table:: + :vid: %(vid)s + + %(rql)s + """ % {'rql': rql, 'vid': vid}) + view = self.vreg['views'].select(vid, req, rset=req.execute(rql)) + self.assertEqual(view.render(w=None)[49:], out[49:]) + self.assertIn(vid, out[:49]) + + def test_rqltable_badvid(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + vid = 'mytabel' + out = rest_publish( + self.context(req), """\ +.. rql-table:: + :vid: %(vid)s + + %(rql)s + """ % {'rql': rql, 'vid': vid}) + self.assertIn("fail to select '%s' view" % vid, out) + + def test_rqltable_headers(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + headers = ["nom", "prenom", "identifiant"] + out = rest_publish( + self.context(req), """\ +.. rql-table:: + :headers: %(headers)s + + %(rql)s + """ % {'rql': rql, 'headers': ', '.join(headers)}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.headers = headers + self.assertEqual(view.render(w=None)[49:], out[49:]) + + def test_rqltable_headers_missing(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + headers = ["nom", "", "identifiant"] + out = rest_publish( + self.context(req), """\ +.. rql-table:: + :headers: %(headers)s + + %(rql)s + """ % {'rql': rql, 'headers': ', '.join(headers)}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.headers = [headers[0], None, headers[2]] + self.assertEqual(view.render(w=None)[49:], out[49:]) + + def test_rqltable_headers_missing_edges(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + headers = [" ", "prenom", ""] + out = rest_publish( + self.context(req), """\ +.. 
rql-table:: + :headers: %(headers)s + + %(rql)s + """ % {'rql': rql, 'headers': ', '.join(headers)}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.headers = [None, headers[1], None] + self.assertEqual(view.render(w=None)[49:], out[49:]) + + def test_rqltable_colvids(self): + with self.admin_access.web_request() as req: + rql = "Any X,S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + colvids = {0: "oneline"} + out = rest_publish( + self.context(req), """\ +.. rql-table:: + :colvids: %(colvids)s + + %(rql)s + """ % {'rql': rql, + 'colvids': ', '.join(["%d=%s" % (k, v) + for k, v in colvids.items()]) + }) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.cellvids = colvids + self.assertEqual(view.render(w=None)[49:], out[49:]) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,84 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""core hooks registering some maintainance tasks as server startup time""" + +__docformat__ = "restructuredtext en" + +from datetime import timedelta, datetime + +from cubicweb.server import hook + +class TransactionsCleanupStartupHook(hook.Hook): + """start task to cleanup transaction data""" + __regid__ = 'cw.looping-tasks.transactions-cleanup' + events = ('server_startup',) + + def __call__(self): + # XXX use named args and inner functions to avoid referencing globals + # which may cause reloading pb + lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime']) + def cleanup_old_transactions(repo=self.repo, lifetime=lifetime): + mindate = datetime.utcnow() - lifetime + with repo.internal_cnx() as cnx: + cnx.system_sql( + 'DELETE FROM transactions WHERE tx_time < %(time)s', + {'time': mindate}) + cnx.commit() + if self.repo.config['undo-enabled']: + self.repo.looping_task(60*60*24, cleanup_old_transactions, + self.repo) + +class UpdateFeedsStartupHook(hook.Hook): + """start task to update datafeed based sources""" + __regid__ = 'cw.looping-tasks.update-feeds' + events = ('server_startup',) + + def __call__(self): + def update_feeds(repo): + # take a list to avoid iterating on a dictionary whose size may + # change + for uri, source in list(repo.sources_by_uri.items()): + if (uri == 'system' + or not repo.config.source_enabled(source) + or not source.config['synchronize']): + continue + with repo.internal_cnx() as cnx: + try: + source.pull_data(cnx) + except Exception as exc: + cnx.exception('while trying to update feed %s', source) + self.repo.looping_task(60, update_feeds, self.repo) + + +class DataImportsCleanupStartupHook(hook.Hook): + """start task to cleanup old data imports (ie datafeed import logs)""" + __regid__ = 'cw.looping-tasks.dataimports-cleanup' + events = ('server_startup',) + + def __call__(self): + def expire_dataimports(repo=self.repo): + for uri, source in repo.sources_by_uri.items(): + if (uri == 'system' + or not repo.config.source_enabled(source)): + continue + with repo.internal_cnx() as cnx: + mindate = datetime.utcnow() - timedelta(seconds=source.config['logs-lifetime']) + cnx.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s', + {'time': mindate}) + cnx.commit() + self.repo.looping_task(60*60*24, expire_dataimports, self.repo) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/bookmark.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/bookmark.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,42 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""bookmark related hooks""" + +__docformat__ = "restructuredtext en" + +from cubicweb.server import hook + + +class AutoDeleteBookmarkOp(hook.Operation): + bookmark = None # make pylint happy + def precommit_event(self): + if not self.cnx.deleted_in_transaction(self.bookmark.eid): + if not self.bookmark.bookmarked_by: + self.bookmark.cw_delete() + + +class DelBookmarkedByHook(hook.Hook): + """ensure user logins are stripped""" + __regid__ = 'autodelbookmark' + __select__ = hook.Hook.__select__ & hook.match_rtype('bookmarked_by',) + category = 'bookmark' + events = ('after_delete_relation',) + + def __call__(self): + AutoDeleteBookmarkOp(self._cw, + bookmark=self._cw.entity_from_eid(self.eidfrom)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/email.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/email.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,80 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""hooks to ensure use_email / primary_email relations consistency""" + +__docformat__ = "restructuredtext en" + +from cubicweb.server import hook + + +class SetUseEmailRelationOp(hook.Operation): + """delay this operation to commit to avoid conflict with a late rql query + already setting the relation + """ + rtype = 'use_email' + entity = email = None # make pylint happy + + def condition(self): + """check entity has use_email set for the email address""" + return not any(e for e in self.entity.use_email + if self.email.eid == e.eid) + + def precommit_event(self): + if self.cnx.deleted_in_transaction(self.entity.eid): + return + if self.cnx.deleted_in_transaction(self.email.eid): + return + if self.condition(): + self.cnx.execute( + 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, + {'x': self.entity.eid, 'y': self.email.eid}) + + +class SetPrimaryEmailRelationOp(SetUseEmailRelationOp): + rtype = 'primary_email' + + def condition(self): + """check entity has no primary_email set""" + return not self.entity.primary_email + + +class SetPrimaryEmailHook(hook.Hook): + """notify when a bug or story or version has its state modified""" + __regid__ = 'setprimaryemail' + __select__ = hook.Hook.__select__ & hook.match_rtype('use_email') + category = 'email' + events = ('after_add_relation',) + + def __call__(self): + entity = self._cw.entity_from_eid(self.eidfrom) + if 'primary_email' in entity.e_schema.subject_relations(): + SetPrimaryEmailRelationOp(self._cw, entity=entity, + email=self._cw.entity_from_eid(self.eidto)) + +class SetUseEmailHook(hook.Hook): + """notify when a bug or story or version has its state modified""" + __regid__ = 'setprimaryemail' + __select__ = hook.Hook.__select__ & hook.match_rtype('primary_email') + category = 'email' + events = ('after_add_relation',) + + def __call__(self): + entity = 
self._cw.entity_from_eid(self.eidfrom) + if 'use_email' in entity.e_schema.subject_relations(): + SetUseEmailRelationOp(self._cw, entity=entity, + email=self._cw.entity_from_eid(self.eidto)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/integrity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/integrity.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,347 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Core hooks: check for data integrity according to the instance'schema +validity +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from threading import Lock + +from six import text_type + +from cubicweb import validation_error, neg_role +from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES, + RQLConstraint, RQLUniqueConstraint) +from cubicweb.predicates import is_instance, composite_etype +from cubicweb.uilib import soup2xhtml +from cubicweb.server import hook + +# special relations that don't have to be checked for integrity, usually +# because they are handled internally by hooks (so we trust ourselves) +DONT_CHECK_RTYPES_ON_ADD = META_RTYPES | WORKFLOW_RTYPES +DONT_CHECK_RTYPES_ON_DEL = META_RTYPES | WORKFLOW_RTYPES + +_UNIQUE_CONSTRAINTS_LOCK = Lock() +_UNIQUE_CONSTRAINTS_HOLDER = None + + +def _acquire_unique_cstr_lock(cnx): + """acquire the _UNIQUE_CONSTRAINTS_LOCK for the cnx. 
+ + This lock used to avoid potential integrity pb when checking + RQLUniqueConstraint in two different transactions, as explained in + https://extranet.logilab.fr/3577926 + """ + if 'uniquecstrholder' in cnx.transaction_data: + return + _UNIQUE_CONSTRAINTS_LOCK.acquire() + cnx.transaction_data['uniquecstrholder'] = True + # register operation responsible to release the lock on commit/rollback + _ReleaseUniqueConstraintsOperation(cnx) + +def _release_unique_cstr_lock(cnx): + if 'uniquecstrholder' in cnx.transaction_data: + del cnx.transaction_data['uniquecstrholder'] + _UNIQUE_CONSTRAINTS_LOCK.release() + +class _ReleaseUniqueConstraintsOperation(hook.Operation): + def postcommit_event(self): + _release_unique_cstr_lock(self.cnx) + def rollback_event(self): + _release_unique_cstr_lock(self.cnx) + + +class _CheckRequiredRelationOperation(hook.DataOperationMixIn, + hook.LateOperation): + """checking relation cardinality has to be done after commit in case the + relation is being replaced + """ + containercls = list + role = key = base_rql = None + + def precommit_event(self): + cnx = self.cnx + pendingeids = cnx.transaction_data.get('pendingeids', ()) + pendingrtypes = cnx.transaction_data.get('pendingrtypes', ()) + for eid, rtype in self.get_data(): + # recheck pending eids / relation types + if eid in pendingeids: + continue + if rtype in pendingrtypes: + continue + if not cnx.execute(self.base_rql % rtype, {'x': eid}): + etype = cnx.entity_metas(eid)['type'] + msg = _('at least one relation %(rtype)s is required on ' + '%(etype)s (%(eid)s)') + raise validation_error(eid, {(rtype, self.role): msg}, + {'rtype': rtype, 'etype': etype, 'eid': eid}, + ['rtype', 'etype']) + + +class _CheckSRelationOp(_CheckRequiredRelationOperation): + """check required subject relation""" + role = 'subject' + base_rql = 'Any O WHERE S eid %%(x)s, S %s O' + +class _CheckORelationOp(_CheckRequiredRelationOperation): + """check required object relation""" + role = 'object' + base_rql = 'Any S WHERE O eid %%(x)s, S %s O' + + +class IntegrityHook(hook.Hook): + __abstract__ = True + category = 'integrity' + + +class _EnsureSymmetricRelationsAdd(hook.Hook): + """ ensure X r Y => Y r X iff r is symmetric """ + __regid__ = 'cw.add_ensure_symmetry' + __abstract__ = True + category = 'activeintegrity' + events = ('after_add_relation',) + # __select__ is set in the registration callback + + def __call__(self): + self._cw.repo.system_source.add_relation(self._cw, self.eidto, + self.rtype, self.eidfrom) + + +class _EnsureSymmetricRelationsDelete(hook.Hook): + """ ensure X r Y => Y r X iff r is symmetric """ + __regid__ = 'cw.delete_ensure_symmetry' + __abstract__ = True + category = 'activeintegrity' + events = ('after_delete_relation',) + # __select__ is set in the registration callback + + def __call__(self): + self._cw.repo.system_source.delete_relation(self._cw, self.eidto, + self.rtype, self.eidfrom) + + +class CheckCardinalityHookBeforeDeleteRelation(IntegrityHook): + """check cardinalities are satisfied""" + __regid__ = 'checkcard_before_delete_relation' + events = ('before_delete_relation',) + + def __call__(self): + rtype = self.rtype + if rtype in DONT_CHECK_RTYPES_ON_DEL: + return + cnx = self._cw + eidfrom, eidto = self.eidfrom, self.eidto + rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto) + if (rdef.subject, rtype, rdef.object) in cnx.transaction_data.get('pendingrdefs', ()): + return + card = rdef.cardinality + if card[0] in '1+' and not cnx.deleted_in_transaction(eidfrom): + 
_CheckSRelationOp.get_instance(cnx).add_data((eidfrom, rtype)) + if card[1] in '1+' and not cnx.deleted_in_transaction(eidto): + _CheckORelationOp.get_instance(cnx).add_data((eidto, rtype)) + + +class CheckCardinalityHookAfterAddEntity(IntegrityHook): + """check cardinalities are satisfied""" + __regid__ = 'checkcard_after_add_entity' + events = ('after_add_entity',) + + def __call__(self): + eid = self.entity.eid + eschema = self.entity.e_schema + for rschema, targetschemas, role in eschema.relation_definitions(): + # skip automatically handled relations + if rschema.type in DONT_CHECK_RTYPES_ON_ADD: + continue + rdef = rschema.role_rdef(eschema, targetschemas[0], role) + if rdef.role_cardinality(role) in '1+': + if role == 'subject': + op = _CheckSRelationOp.get_instance(self._cw) + else: + op = _CheckORelationOp.get_instance(self._cw) + op.add_data((eid, rschema.type)) + + +class _CheckConstraintsOp(hook.DataOperationMixIn, hook.LateOperation): + """ check a new relation satisfy its constraints """ + containercls = list + def precommit_event(self): + cnx = self.cnx + for values in self.get_data(): + eidfrom, rtype, eidto, constraints = values + # first check related entities have not been deleted in the same + # transaction + if cnx.deleted_in_transaction(eidfrom): + continue + if cnx.deleted_in_transaction(eidto): + continue + for constraint in constraints: + # XXX + # * lock RQLConstraint as well? + # * use a constraint id to use per constraint lock and avoid + # unnecessary commit serialization ? + if isinstance(constraint, RQLUniqueConstraint): + _acquire_unique_cstr_lock(cnx) + try: + constraint.repo_check(cnx, eidfrom, rtype, eidto) + except NotImplementedError: + self.critical('can\'t check constraint %s, not supported', + constraint) + + +class CheckConstraintHook(IntegrityHook): + """check the relation satisfy its constraints + + this is delayed to a precommit time operation since other relation which + will make constraint satisfied (or unsatisfied) may be added later. + """ + __regid__ = 'checkconstraint' + events = ('after_add_relation',) + + def __call__(self): + # XXX get only RQL[Unique]Constraints? + rdef = self._cw.rtype_eids_rdef(self.rtype, self.eidfrom, self.eidto) + constraints = rdef.constraints + if constraints: + _CheckConstraintsOp.get_instance(self._cw).add_data( + (self.eidfrom, self.rtype, self.eidto, constraints)) + + +class CheckAttributeConstraintHook(IntegrityHook): + """check the attribute relation satisfy its constraints + + this is delayed to a precommit time operation since other relation which + will make constraint satisfied (or unsatisfied) may be added later. 
+ """ + __regid__ = 'checkattrconstraint' + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + eschema = self.entity.e_schema + for attr in self.entity.cw_edited: + if eschema.subjrels[attr].final: + constraints = [c for c in eschema.rdef(attr).constraints + if isinstance(c, (RQLUniqueConstraint, RQLConstraint))] + if constraints: + _CheckConstraintsOp.get_instance(self._cw).add_data( + (self.entity.eid, attr, None, constraints)) + + +class CheckUniqueHook(IntegrityHook): + __regid__ = 'checkunique' + events = ('before_add_entity', 'before_update_entity') + + def __call__(self): + entity = self.entity + eschema = entity.e_schema + for attr, val in entity.cw_edited.items(): + if eschema.subjrels[attr].final and eschema.has_unique_values(attr): + if val is None: + continue + rql = '%s X WHERE X %s %%(val)s' % (entity.e_schema, attr) + rset = self._cw.execute(rql, {'val': val}) + if rset and rset[0][0] != entity.eid: + msg = _('the value "%s" is already used, use another one') + raise validation_error(entity, {(attr, 'subject'): msg}, + (val,)) + + +class DontRemoveOwnersGroupHook(IntegrityHook): + """delete the composed of a composite relation when this relation is deleted + """ + __regid__ = 'checkownersgroup' + __select__ = IntegrityHook.__select__ & is_instance('CWGroup') + events = ('before_delete_entity', 'before_update_entity') + + def __call__(self): + entity = self.entity + if self.event == 'before_delete_entity' and entity.name == 'owners': + raise validation_error(entity, {None: _("can't be deleted")}) + elif self.event == 'before_update_entity' \ + and 'name' in entity.cw_edited: + oldname, newname = entity.cw_edited.oldnewvalue('name') + if oldname == 'owners' and newname != oldname: + raise validation_error(entity, {('name', 'subject'): _("can't be changed")}) + + +class TidyHtmlFields(IntegrityHook): + """tidy HTML in rich text strings""" + __regid__ = 'htmltidy' + events = ('before_add_entity', 'before_update_entity') + + def __call__(self): + entity = self.entity + metaattrs = entity.e_schema.meta_attributes() + edited = entity.cw_edited + for metaattr, (metadata, attr) in metaattrs.items(): + if metadata == 'format' and attr in edited: + try: + value = edited[attr] + except KeyError: + continue # no text to tidy + if isinstance(value, text_type): # filter out None and Binary + if getattr(entity, str(metaattr)) == 'text/html': + edited[attr] = soup2xhtml(value, self._cw.encoding) + + +class StripCWUserLoginHook(IntegrityHook): + """ensure user logins are stripped""" + __regid__ = 'stripuserlogin' + __select__ = IntegrityHook.__select__ & is_instance('CWUser') + events = ('before_add_entity', 'before_update_entity',) + + def __call__(self): + login = self.entity.cw_edited.get('login') + if login: + self.entity.cw_edited['login'] = login.strip() + + +class DeleteCompositeOrphanHook(hook.Hook): + """Delete the composed of a composite relation when the composite is + deleted (this is similar to the cascading ON DELETE CASCADE + semantics of sql). 
+ """ + __regid__ = 'deletecomposite' + __select__ = hook.Hook.__select__ & composite_etype() + events = ('before_delete_entity',) + category = 'activeintegrity' + # give the application's before_delete_entity hooks a chance to run before we cascade + order = 99 + + def __call__(self): + eid = self.entity.eid + for rdef, role in self.entity.e_schema.composite_rdef_roles: + rtype = rdef.rtype.type + target = getattr(rdef, neg_role(role)) + expr = ('C %s X' % rtype) if role == 'subject' else ('X %s C' % rtype) + self._cw.execute('DELETE %s X WHERE C eid %%(c)s, %s' % (target, expr), + {'c': eid}) + + +def registration_callback(vreg): + vreg.register_all(globals().values(), __name__) + symmetric_rtypes = [rschema.type for rschema in vreg.schema.relations() + if rschema.symmetric] + class EnsureSymmetricRelationsAdd(_EnsureSymmetricRelationsAdd): + __select__ = _EnsureSymmetricRelationsAdd.__select__ & hook.match_rtype(*symmetric_rtypes) + vreg.register(EnsureSymmetricRelationsAdd) + class EnsureSymmetricRelationsDelete(_EnsureSymmetricRelationsDelete): + __select__ = _EnsureSymmetricRelationsDelete.__select__ & hook.match_rtype(*symmetric_rtypes) + vreg.register(EnsureSymmetricRelationsDelete) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/logstats.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/logstats.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,59 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +"""looping task for dumping instance's stats in a file +""" + +__docformat__ = "restructuredtext en" + +from datetime import datetime +import json + +from cubicweb.server import hook + +class LogStatsStartHook(hook.Hook): + """register task to regularly dump instance's stats in a file + + data are stored as one json entry per row + """ + __regid__ = 'cubicweb.hook.logstats.start' + events = ('server_startup',) + + def __call__(self): + interval = self.repo.config.get('logstat-interval', 0) + if interval <= 0: + return + + def dump_stats(repo): + statsfile = repo.config.get('logstat-file') + with repo.internal_cnx() as cnx: + stats = cnx.call_service('repo_stats') + gcstats = cnx.call_service('repo_gc_stats', nmax=5) + + allstats = {'resources': stats, + 'memory': gcstats, + 'timestamp': datetime.utcnow().isoformat(), + } + try: + with open(statsfile, 'ab') as ofile: + json.dump(allstats, ofile) + ofile.write('\n') + except IOError: + repo.warning('Cannot open stats file for writing: %s', statsfile) + + self.repo.looping_task(interval, dump_stats, self.repo) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/metadata.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/metadata.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,219 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Core hooks: set generic metadata""" + +__docformat__ = "restructuredtext en" + +from datetime import datetime +from base64 import b64encode + +from pytz import utc + +from cubicweb.predicates import is_instance +from cubicweb.server import hook +from cubicweb.server.edition import EditedEntity + + +class MetaDataHook(hook.Hook): + __abstract__ = True + category = 'metadata' + + +class InitMetaAttrsHook(MetaDataHook): + """before create a new entity -> set creation and modification date + + this is a conveniency hook, you shouldn't have to disable it + """ + __regid__ = 'metaattrsinit' + events = ('before_add_entity',) + + def __call__(self): + timestamp = datetime.now(utc) + edited = self.entity.cw_edited + if not edited.get('creation_date'): + edited['creation_date'] = timestamp + if not edited.get('modification_date'): + edited['modification_date'] = timestamp + if not self._cw.transaction_data.get('do-not-insert-cwuri'): + cwuri = u'%s%s' % (self._cw.base_url(), self.entity.eid) + edited.setdefault('cwuri', cwuri) + + +class UpdateMetaAttrsHook(MetaDataHook): + """update an entity -> set modification date""" + __regid__ = 'metaattrsupdate' + events = ('before_update_entity',) + + def __call__(self): + # repairing is true during c-c upgrade/shell and similar commands. We + # usually don't want to update modification date in such cases. + # + # XXX to be really clean, we should turn off modification_date update + # explicitly on each command where we do not want that behaviour. 
+ if not self._cw.vreg.config.repairing: + self.entity.cw_edited.setdefault('modification_date', datetime.now(utc)) + + +class SetCreatorOp(hook.DataOperationMixIn, hook.Operation): + + def precommit_event(self): + cnx = self.cnx + relations = [(eid, cnx.user.eid) for eid in self.get_data() + # don't consider entities that have been created and deleted in + # the same transaction, nor ones where created_by has been + # explicitly set + if not cnx.deleted_in_transaction(eid) and \ + not cnx.entity_from_eid(eid).created_by] + cnx.add_relations([('created_by', relations)]) + + +class SetOwnershipHook(MetaDataHook): + """create a new entity -> set owner and creator metadata""" + __regid__ = 'setowner' + events = ('after_add_entity',) + + def __call__(self): + if not self._cw.is_internal_session: + self._cw.add_relation(self.entity.eid, 'owned_by', self._cw.user.eid) + SetCreatorOp.get_instance(self._cw).add_data(self.entity.eid) + + +class SyncOwnersOp(hook.DataOperationMixIn, hook.Operation): + def precommit_event(self): + for compositeeid, composedeid in self.get_data(): + if self.cnx.deleted_in_transaction(compositeeid): + continue + if self.cnx.deleted_in_transaction(composedeid): + continue + self.cnx.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' + 'NOT EXISTS(X owned_by U, X eid %(x)s)', + {'c': compositeeid, 'x': composedeid}) + + +class SyncCompositeOwner(MetaDataHook): + """when adding composite relation, the composed should have the same owners + has the composite + """ + __regid__ = 'synccompositeowner' + events = ('after_add_relation',) + + def __call__(self): + if self.rtype == 'wf_info_for': + # skip this special composite relation # XXX (syt) why? + return + eidfrom, eidto = self.eidfrom, self.eidto + composite = self._cw.rtype_eids_rdef(self.rtype, eidfrom, eidto).composite + if composite == 'subject': + SyncOwnersOp.get_instance(self._cw).add_data( (eidfrom, eidto) ) + elif composite == 'object': + SyncOwnersOp.get_instance(self._cw).add_data( (eidto, eidfrom) ) + + +class FixUserOwnershipHook(MetaDataHook): + """when a user has been created, add owned_by relation on itself""" + __regid__ = 'fixuserowner' + __select__ = MetaDataHook.__select__ & is_instance('CWUser') + events = ('after_add_entity',) + + def __call__(self): + self._cw.add_relation(self.entity.eid, 'owned_by', self.entity.eid) + + +class UpdateFTIHook(MetaDataHook): + """sync fulltext index text index container when a relation with + fulltext_container set is added / removed + """ + __regid__ = 'updateftirel' + events = ('after_add_relation', 'after_delete_relation') + + def __call__(self): + rtype = self.rtype + cnx = self._cw + ftcontainer = cnx.vreg.schema.rschema(rtype).fulltext_container + if ftcontainer == 'subject': + cnx.repo.system_source.index_entity( + cnx, cnx.entity_from_eid(self.eidfrom)) + elif ftcontainer == 'object': + cnx.repo.system_source.index_entity( + cnx, cnx.entity_from_eid(self.eidto)) + + + +# entity source handling ####################################################### + +class ChangeEntitySourceUpdateCaches(hook.Operation): + oldsource = newsource = entity = None # make pylint happy + + def postcommit_event(self): + self.oldsource.reset_caches() + repo = self.cnx.repo + entity = self.entity + extid = entity.cw_metainformation()['extid'] + repo._type_source_cache[entity.eid] = ( + entity.cw_etype, None, self.newsource.uri) + repo._extid_cache[extid] = -entity.eid + + +class ChangeEntitySourceDeleteHook(MetaDataHook): + """support for moving an entity from an external 
source by watching 'Any + cw_source CWSource' relation + """ + + __regid__ = 'cw.metadata.source-change' + __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source') + events = ('before_delete_relation',) + + def __call__(self): + if (self._cw.deleted_in_transaction(self.eidfrom) + or self._cw.deleted_in_transaction(self.eidto)): + return + schange = self._cw.transaction_data.setdefault('cw_source_change', {}) + schange[self.eidfrom] = self.eidto + + +class ChangeEntitySourceAddHook(MetaDataHook): + __regid__ = 'cw.metadata.source-change' + __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source') + events = ('before_add_relation',) + + def __call__(self): + schange = self._cw.transaction_data.get('cw_source_change') + if schange is not None and self.eidfrom in schange: + newsource = self._cw.entity_from_eid(self.eidto) + if newsource.name != 'system': + raise Exception('changing source to something else than the ' + 'system source is unsupported') + syssource = newsource.repo_source + oldsource = self._cw.entity_from_eid(schange[self.eidfrom]) + entity = self._cw.entity_from_eid(self.eidfrom) + # we don't want the moved entity to be reimported later. To + # distinguish this state, move the record from the 'entities' table + # to 'moved_entities'. External source will then have consider + # case where `extid2eid` returns a negative eid as 'this entity was + # known but has been moved, ignore it'. + extid = self._cw.entity_metas(entity.eid)['extid'] + assert extid is not None + attrs = {'eid': entity.eid, 'extid': b64encode(extid).decode('ascii')} + self._cw.system_sql(syssource.sqlgen.insert('moved_entities', attrs), attrs) + attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None, + 'asource': 'system'} + self._cw.system_sql(syssource.sqlgen.update('entities', attrs, ['eid']), attrs) + # register an operation to update repository/sources caches + ChangeEntitySourceUpdateCaches(self._cw, entity=entity, + oldsource=oldsource.repo_source, + newsource=syssource) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/notification.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/notification.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,244 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
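The metadata hooks above rely on a recurring pattern: a lightweight Hook that merely records eids, and a DataOperationMixIn/Operation pair that performs the actual work once per transaction at precommit time (see SetCreatorOp and SyncOwnersOp). A minimal sketch of that pattern, assuming a hypothetical 'in_container' relation and regid that are not part of this changeset:

from cubicweb.server import hook


class _TouchContainerOp(hook.DataOperationMixIn, hook.Operation):
    """bump each recorded container's modification_date once per transaction"""

    def precommit_event(self):
        cnx = self.cnx
        for eid in self.get_data():
            # skip entities already deleted in this transaction
            if cnx.deleted_in_transaction(eid):
                continue
            cnx.execute('SET X modification_date NOW WHERE X eid %(x)s',
                        {'x': eid})


class TouchContainerHook(hook.Hook):
    """record the object of the (hypothetical) 'in_container' relation so the
    operation above updates it at most once per transaction"""
    __regid__ = 'myapp.touch_container'  # hypothetical
    __select__ = hook.Hook.__select__ & hook.match_rtype('in_container')  # hypothetical rtype
    events = ('after_add_relation',)
    category = 'metadata'

    def __call__(self):
        _TouchContainerOp.get_instance(self._cw).add_data(self.eidto)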
+"""some hooks to handle notification on entity's changes""" + +__docformat__ = "restructuredtext en" + +from logilab.common.textutils import normalize_text +from logilab.common.deprecation import deprecated + +from cubicweb import RegistryNotFound +from cubicweb.predicates import is_instance +from cubicweb.server import hook +from cubicweb.sobjects.supervising import SupervisionMailOp + + +@deprecated('[3.17] use notify_on_commit instead') +def RenderAndSendNotificationView(cnx, view, viewargs=None): + notify_on_commit(cnx, view, viewargs) + + +def notify_on_commit(cnx, view, viewargs=None): + """register a notification view (see + :class:`~cubicweb.sobjects.notification.NotificationView`) to be sent at + post-commit time, ie only if the transaction has succeeded. + + `viewargs` is an optional dictionary containing extra argument to be given + to :meth:`~cubicweb.sobjects.notification.NotificationView.render_and_send` + """ + if viewargs is None: + viewargs = {} + notif_op = _RenderAndSendNotificationOp.get_instance(cnx) + notif_op.add_data((view, viewargs)) + + +class _RenderAndSendNotificationOp(hook.DataOperationMixIn, hook.Operation): + """End of the notification chain. Do render and send views after commit + + All others Operations end up adding data to this Operation. + The notification are done on ``postcommit_event`` to make sure to prevent + sending notification about rolled back data. + """ + + containercls = list + + def postcommit_event(self): + deleted = self.cnx.deleted_in_transaction + for view, viewargs in self.get_data(): + if view.cw_rset is not None: + if not view.cw_rset: + # entity added and deleted in the same transaction + # (cache effect) + continue + elif deleted(view.cw_rset[view.cw_row or 0][view.cw_col or 0]): + # entity added and deleted in the same transaction + continue + try: + view.render_and_send(**viewargs) + except Exception: + # error in post commit are not propagated + # We keep this logic here to prevent a small notification error + # to prevent them all. + self.exception('Notification failed') + + +class NotificationHook(hook.Hook): + __abstract__ = True + category = 'notification' + + def select_view(self, vid, rset, row=0, col=0): + try: + return self._cw.vreg['views'].select_or_none(vid, self._cw, rset=rset, + row=row, col=col) + except RegistryNotFound: # can happen in some config + # (e.g. 
repo only config with no + # notification views registered by + # the instance's cubes) + return None + + +class StatusChangeHook(NotificationHook): + """notify when a workflowable entity has its state modified""" + __regid__ = 'notifystatuschange' + __select__ = NotificationHook.__select__ & is_instance('TrInfo') + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + if not entity.from_state: # not a transition + return + rset = entity.related('wf_info_for') + view = self.select_view('notif_status_change', rset=rset, row=0) + if view is None: + return + comment = entity.printable_value('comment', format='text/plain') + # XXX don't try to wrap rest until we've a proper transformation (see + # #103822) + if comment and entity.comment_format != 'text/rest': + comment = normalize_text(comment, 80) + viewargs = {'comment': comment, + 'previous_state': entity.previous_state.name, + 'current_state': entity.new_state.name} + notify_on_commit(self._cw, view, viewargs=viewargs) + +class RelationChangeHook(NotificationHook): + __regid__ = 'notifyrelationchange' + events = ('before_add_relation', 'after_add_relation', + 'before_delete_relation', 'after_delete_relation') + + def __call__(self): + """if a notification view is defined for the event, send notification + email defined by the view + """ + rset = self._cw.eid_rset(self.eidfrom) + view = self.select_view('notif_%s_%s' % (self.event, self.rtype), + rset=rset, row=0) + if view is None: + return + notify_on_commit(self._cw, view) + + +class EntityChangeHook(NotificationHook): + """if a notification view is defined for the event, send notification + email defined by the view + """ + __regid__ = 'notifyentitychange' + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + rset = self.entity.as_rset() + view = self.select_view('notif_%s' % self.event, rset=rset, row=0) + if view is None: + return + notify_on_commit(self._cw, view) + + +class EntityUpdatedNotificationOp(hook.SingleLastOperation): + """scrap all changed entity to prepare a Notification Operation for them""" + + def precommit_event(self): + # precommit event that creates postcommit operation + cnx = self.cnx + for eid in cnx.transaction_data['changes']: + view = cnx.vreg['views'].select('notif_entity_updated', cnx, + rset=cnx.eid_rset(eid), + row=0) + notify_on_commit(self.cnx, view, + viewargs={'changes': cnx.transaction_data['changes'][eid]}) + + +class EntityUpdateHook(NotificationHook): + __regid__ = 'notifentityupdated' + __abstract__ = True # do not register by default + __select__ = NotificationHook.__select__ & hook.issued_from_user_query() + events = ('before_update_entity',) + skip_attrs = set() + + def __call__(self): + cnx = self._cw + if cnx.added_in_transaction(self.entity.eid): + return # entity is being created + # then compute changes + attrs = [k for k in self.entity.cw_edited + if not k in self.skip_attrs] + if not attrs: + return + changes = cnx.transaction_data.setdefault('changes', {}) + thisentitychanges = changes.setdefault(self.entity.eid, set()) + rqlsel, rqlrestr = [], ['X eid %(x)s'] + for i, attr in enumerate(attrs): + var = chr(65+i) + rqlsel.append(var) + rqlrestr.append('X %s %s' % (attr, var)) + rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) + rset = cnx.execute(rql, {'x': self.entity.eid}) + for i, attr in enumerate(attrs): + oldvalue = rset[0][i] + newvalue = self.entity.cw_edited[attr] + if oldvalue != newvalue: + thisentitychanges.add((attr, oldvalue, newvalue)) + if 
thisentitychanges: + EntityUpdatedNotificationOp(cnx) + + +# supervising ################################################################## + +class SomethingChangedHook(NotificationHook): + __regid__ = 'supervising' + __select__ = NotificationHook.__select__ & hook.issued_from_user_query() + events = ('before_add_relation', 'before_delete_relation', + 'after_add_entity', 'before_update_entity') + + def __call__(self): + dest = self._cw.vreg.config['supervising-addrs'] + if not dest: # no supervisors, don't do this for nothing... + return + if self._call(): + SupervisionMailOp(self._cw) + + def _call(self): + event = self.event.split('_', 1)[1] + if event == 'update_entity': + if self._cw.added_in_transaction(self.entity.eid): + return False + if self.entity.e_schema == 'CWUser': + if not (frozenset(self.entity.cw_edited) + - frozenset(('eid', 'modification_date', + 'last_login_time'))): + # don't record last_login_time update which are done + # automatically at login time + return False + self._cw.transaction_data.setdefault('pendingchanges', []).append( + (event, self)) + return True + + +class EntityDeleteHook(SomethingChangedHook): + __regid__ = 'supervisingentitydel' + events = ('before_delete_entity',) + + def _call(self): + try: + title = self.entity.dc_title() + except Exception: + # may raise an error during deletion process, for instance due to + # missing required relation + title = '#%s' % self.entity.eid + self._cw.transaction_data.setdefault('pendingchanges', []).append( + ('delete_entity', (self.entity.eid, self.entity.cw_etype, title))) + return True diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/security.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/security.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,209 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
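For application code, the usual entry point into the notification machinery above is notify_on_commit(): select a notification view for the changed entity and hand it over, so that rendering and mail sending only happen if the transaction actually commits. A minimal sketch, assuming a hypothetical 'Ticket' entity type and 'notif_ticket_done' view that are not part of this changeset:

from cubicweb.predicates import is_instance
from cubicweb.hooks.notification import NotificationHook, notify_on_commit


class TicketDoneNotificationHook(NotificationHook):
    """send the (hypothetical) 'notif_ticket_done' view when a Ticket is updated"""
    __regid__ = 'myapp.notify_ticket_done'  # hypothetical
    __select__ = NotificationHook.__select__ & is_instance('Ticket')  # hypothetical etype
    events = ('after_update_entity',)

    def __call__(self):
        view = self.select_view('notif_ticket_done', rset=self.entity.as_rset(), row=0)
        if view is None:  # no such view registered, nothing to send
            return
        # rendering and sending are deferred to post-commit time
        notify_on_commit(self._cw, view, viewargs={'comment': u'ticket closed'})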
+"""Security hooks: check permissions to add/delete/update entities according to +the connected user +""" + +__docformat__ = "restructuredtext en" +from warnings import warn + +from logilab.common.registry import objectify_predicate + +from yams import buildobjs + +from cubicweb import Unauthorized +from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS, hook + + + +def check_entity_attributes(cnx, entity, action, editedattrs=None): + eid = entity.eid + eschema = entity.e_schema + if action == 'delete': + eschema.check_perm(session, action, eid=eid) + return + # ._cw_skip_security_attributes is there to bypass security for attributes + # set by hooks by modifying the entity's dictionary + if editedattrs is None: + editedattrs = entity.cw_edited + dontcheck = editedattrs.skip_security + etypechecked = False + for attr in editedattrs: + if attr in dontcheck: + continue + rdef = eschema.rdef(attr, takefirst=True) + if rdef.final: # non final relation are checked by standard hooks + perms = rdef.permissions.get(action) + # comparison below works because the default update perm is: + # + # ('managers', ERQLExpression(Any X WHERE U has_update_permission X, + # X eid %(x)s, U eid %(u)s)) + # + # is deserialized in this order (groups first), and ERQLExpression + # implements comparison by rql expression. + if perms == buildobjs.DEFAULT_ATTRPERMS[action]: + # The default rule is to delegate to the entity + # rule. This needs to be checked only once. + if not etypechecked: + entity.cw_check_perm(action) + etypechecked = True + continue + if perms == (): + # That means an immutable attribute; as an optimization, avoid + # going through check_perm. + raise Unauthorized(action, str(rdef)) + rdef.check_perm(cnx, action, eid=eid) + + if action == 'add' and not etypechecked: + # think about cnx.create_entity('Foo') + # the standard metadata were inserted by a hook + # with a bypass ... 
we conceptually need to check + # the eid attribute at *creation* time + entity.cw_check_perm(action) + + +class CheckEntityPermissionOp(hook.DataOperationMixIn, hook.LateOperation): + def precommit_event(self): + cnx = self.cnx + for eid, action, edited in self.get_data(): + entity = cnx.entity_from_eid(eid) + check_entity_attributes(cnx, entity, action, edited) + + +class CheckRelationPermissionOp(hook.DataOperationMixIn, hook.LateOperation): + def precommit_event(self): + cnx = self.cnx + for action, rschema, eidfrom, eidto in self.get_data(): + rdef = rschema.rdef(cnx.entity_metas(eidfrom)['type'], + cnx.entity_metas(eidto)['type']) + rdef.check_perm(cnx, action, fromeid=eidfrom, toeid=eidto) + + +@objectify_predicate +def write_security_enabled(cls, req, **kwargs): + if req is None or not req.write_security: + return 0 + return 1 + +class SecurityHook(hook.Hook): + __abstract__ = True + category = 'security' + __select__ = hook.Hook.__select__ & write_security_enabled() + + +class AfterAddEntitySecurityHook(SecurityHook): + __regid__ = 'securityafteraddentity' + events = ('after_add_entity',) + + def __call__(self): + CheckEntityPermissionOp.get_instance(self._cw).add_data( + (self.entity.eid, 'add', self.entity.cw_edited) ) + + +class AfterUpdateEntitySecurityHook(SecurityHook): + __regid__ = 'securityafterupdateentity' + events = ('after_update_entity',) + + def __call__(self): + # save back editedattrs in case the entity is reedited later in the + # same transaction, which will lead to cw_edited being + # overwritten + action = 'add' if self._cw.added_in_transaction(self.entity.eid) else 'update' + CheckEntityPermissionOp.get_instance(self._cw).add_data( + (self.entity.eid, action, self.entity.cw_edited) ) + + +class BeforeDelEntitySecurityHook(SecurityHook): + __regid__ = 'securitybeforedelentity' + events = ('before_delete_entity',) + + def __call__(self): + self.entity.cw_check_perm('delete') + + +def skip_inlined_relation_security(cnx, rschema, eid): + """return True if security for the given inlined relation should be skipped, + in case where the relation has been set through modification of + `entity.cw_edited` in a hook + """ + assert rschema.inlined + try: + entity = cnx.entity_cache(eid) + except KeyError: + return False + edited = getattr(entity, 'cw_edited', None) + if edited is None: + return False + return rschema.type in edited.skip_security + + +class BeforeAddRelationSecurityHook(SecurityHook): + __regid__ = 'securitybeforeaddrelation' + events = ('before_add_relation',) + + def __call__(self): + if self.rtype in BEFORE_ADD_RELATIONS: + nocheck = self._cw.transaction_data.get('skip-security', ()) + if (self.eidfrom, self.rtype, self.eidto) in nocheck: + return + rschema = self._cw.repo.schema[self.rtype] + if rschema.inlined and skip_inlined_relation_security( + self._cw, rschema, self.eidfrom): + return + rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], + self._cw.entity_metas(self.eidto)['type']) + rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) + + +class AfterAddRelationSecurityHook(SecurityHook): + __regid__ = 'securityafteraddrelation' + events = ('after_add_relation',) + + def __call__(self): + if self.rtype not in BEFORE_ADD_RELATIONS: + nocheck = self._cw.transaction_data.get('skip-security', ()) + if (self.eidfrom, self.rtype, self.eidto) in nocheck: + return + rschema = self._cw.repo.schema[self.rtype] + if rschema.inlined and skip_inlined_relation_security( + self._cw, rschema, self.eidfrom): + return + if 
self.rtype in ON_COMMIT_ADD_RELATIONS: + CheckRelationPermissionOp.get_instance(self._cw).add_data( + ('add', rschema, self.eidfrom, self.eidto) ) + else: + rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], + self._cw.entity_metas(self.eidto)['type']) + rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) + + +class BeforeDeleteRelationSecurityHook(SecurityHook): + __regid__ = 'securitybeforedelrelation' + events = ('before_delete_relation',) + + def __call__(self): + nocheck = self._cw.transaction_data.get('skip-security', ()) + if (self.eidfrom, self.rtype, self.eidto) in nocheck: + return + rschema = self._cw.repo.schema[self.rtype] + if rschema.inlined and skip_inlined_relation_security( + self._cw, rschema, self.eidfrom): + return + rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], + self._cw.entity_metas(self.eidto)['type']) + rdef.check_perm(self._cw, 'delete', fromeid=self.eidfrom, toeid=self.eidto) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/synccomputed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/synccomputed.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,227 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Hooks for synchronizing computed attributes""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from collections import defaultdict + +from rql import nodes + +from cubicweb.server import hook + + +class RecomputeAttributeOperation(hook.DataOperationMixIn, hook.Operation): + """Operation to recompute caches of computed attribute at commit time, + depending on what's have been modified in the transaction and avoiding to + recompute twice the same attribute + """ + containercls = dict + def add_data(self, computed_attribute, eid=None): + try: + self._container[computed_attribute].add(eid) + except KeyError: + self._container[computed_attribute] = set((eid,)) + + def precommit_event(self): + for computed_attribute_rdef, eids in self.get_data().items(): + attr = computed_attribute_rdef.rtype + formula = computed_attribute_rdef.formula + select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0] + xvar = select.get_variable('X') + select.add_selected(xvar, index=0) + select.add_group_var(xvar, index=0) + if None in eids: + select.add_type_restriction(xvar, computed_attribute_rdef.subject) + else: + select.add_eid_restriction(xvar, eids) + update_rql = 'SET X %s %%(value)s WHERE X eid %%(x)s' % attr + for eid, value in self.cnx.execute(select.as_string()): + self.cnx.execute(update_rql, {'value': value, 'x': eid}) + + +class EntityWithCACreatedHook(hook.Hook): + """When creating an entity that has some computed attribute, those + attributes have to be computed. 
+ + Concret class of this hook are generated at registration time by + introspecting the schema. + """ + __abstract__ = True + events = ('after_add_entity',) + # list of computed attribute rdefs that have to be recomputed + computed_attributes = None + + def __call__(self): + for rdef in self.computed_attributes: + RecomputeAttributeOperation.get_instance(self._cw).add_data( + rdef, self.entity.eid) + + +class RelationInvolvedInCAModifiedHook(hook.Hook): + """When some relation used in a computed attribute is updated, those + attributes have to be recomputed. + + Concret class of this hook are generated at registration time by + introspecting the schema. + """ + __abstract__ = True + events = ('after_add_relation', 'before_delete_relation') + # list of (computed attribute rdef, optimize_on) that have to be recomputed + optimized_computed_attributes = None + + def __call__(self): + for rdef, optimize_on in self.optimized_computed_attributes: + if optimize_on is None: + eid = None + else: + eid = getattr(self, optimize_on) + RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef, eid) + + +class AttributeInvolvedInCAModifiedHook(hook.Hook): + """When some attribute used in a computed attribute is updated, those + attributes have to be recomputed. + + Concret class of this hook are generated at registration time by + introspecting the schema. + """ + __abstract__ = True + events = ('after_update_entity',) + # list of (computed attribute rdef, attributes of this entity type involved) + # that may have to be recomputed + attributes_computed_attributes = None + + def __call__(self): + edited_attributes = frozenset(self.entity.cw_edited) + for rdef, used_attributes in self.attributes_computed_attributes.items(): + if edited_attributes.intersection(used_attributes): + # XXX optimize if the modified attributes belong to the same + # entity as the computed attribute + RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef) + + +# code generation at registration time ######################################### + +def _optimize_on(formula_select, rtype): + """Given a formula and some rtype, tells whether on update of the given + relation, formula may be recomputed only for rhe relation's subject + ('eidfrom' returned), object ('eidto' returned) or None. + + Optimizing is only possible when X is used as direct subject/object of this + relation, else we may miss some necessary update. + """ + for rel in formula_select.get_nodes(nodes.Relation): + if rel.r_type == rtype: + sub = rel.get_variable_parts()[0] + obj = rel.get_variable_parts()[1] + if sub.name == 'X': + return 'eidfrom' + elif obj.name == 'X': + return 'eidto' + else: + return None + + +class _FormulaDependenciesMatrix(object): + """This class computes and represents the dependencies of computed attributes + towards relations and attributes + """ + + def __init__(self, schema): + """Analyzes the schema to compute the dependencies""" + # entity types holding some computed attribute {etype: [computed rdefs]} + self.computed_attribute_by_etype = defaultdict(list) + # depending entity types {dep. etype: {computed rdef: dep. etype attributes}} + self.computed_attribute_by_etype_attrs = defaultdict(lambda: defaultdict(set)) + # depending relations def {dep. 
rdef: [computed rdefs] + self.computed_attribute_by_relation = defaultdict(list) # by rdef + # Walk through all attributes definitions + for rdef in schema.iter_computed_attributes(): + self.computed_attribute_by_etype[rdef.subject.type].append(rdef) + # extract the relations it depends upon - `rdef.formula_select` is + # expected to have been set by finalize_computed_attributes + select = rdef.formula_select + for rel_node in select.get_nodes(nodes.Relation): + if rel_node.is_types_restriction(): + continue + rschema = schema.rschema(rel_node.r_type) + lhs, rhs = rel_node.get_variable_parts() + for sol in select.solutions: + subject_etype = sol[lhs.name] + if isinstance(rhs, nodes.VariableRef): + object_etypes = set(sol[rhs.name] for sol in select.solutions) + else: + object_etypes = rschema.objects(subject_etype) + for object_etype in object_etypes: + if rschema.final: + attr_for_computations = self.computed_attribute_by_etype_attrs[subject_etype] + attr_for_computations[rdef].add(rschema.type) + else: + depend_on_rdef = rschema.rdefs[subject_etype, object_etype] + self.computed_attribute_by_relation[depend_on_rdef].append(rdef) + + def generate_entity_creation_hooks(self): + for etype, computed_attributes in self.computed_attribute_by_etype.items(): + regid = 'computed_attribute.%s_created' % etype + selector = hook.is_instance(etype) + yield type('%sCreatedHook' % etype, + (EntityWithCACreatedHook,), + {'__regid__': regid, + '__select__': hook.Hook.__select__ & selector, + 'computed_attributes': computed_attributes}) + + def generate_relation_change_hooks(self): + for rdef, computed_attributes in self.computed_attribute_by_relation.items(): + regid = 'computed_attribute.%s_modified' % rdef.rtype + selector = hook.match_rtype(rdef.rtype.type, + frometypes=(rdef.subject.type,), + toetypes=(rdef.object.type,)) + optimized_computed_attributes = [] + for computed_rdef in computed_attributes: + optimized_computed_attributes.append( + (computed_rdef, + _optimize_on(computed_rdef.formula_select, rdef.rtype)) + ) + yield type('%sModifiedHook' % rdef.rtype, + (RelationInvolvedInCAModifiedHook,), + {'__regid__': regid, + '__select__': hook.Hook.__select__ & selector, + 'optimized_computed_attributes': optimized_computed_attributes}) + + def generate_entity_update_hooks(self): + for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.items(): + regid = 'computed_attribute.%s_updated' % etype + selector = hook.is_instance(etype) + yield type('%sModifiedHook' % etype, + (AttributeInvolvedInCAModifiedHook,), + {'__regid__': regid, + '__select__': hook.Hook.__select__ & selector, + 'attributes_computed_attributes': attributes_computed_attributes}) + + +def registration_callback(vreg): + vreg.register_all(globals().values(), __name__) + dependencies = _FormulaDependenciesMatrix(vreg.schema) + for hook_class in dependencies.generate_entity_creation_hooks(): + vreg.register(hook_class) + for hook_class in dependencies.generate_relation_change_hooks(): + vreg.register(hook_class) + for hook_class in dependencies.generate_entity_update_hooks(): + vreg.register(hook_class) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/syncschema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/syncschema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1417 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
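The registration_callback of the computed-attribute module above illustrates another recurring trick: concrete hook classes are generated at registration time from schema introspection, then registered explicitly rather than through register_all(). A minimal sketch of that pattern with hypothetical names (a standalone illustrative module, not part of this changeset):

from cubicweb.server import hook


class _AuditEntityHook(hook.Hook):
    """abstract base: log creation of the watched entity types"""
    __abstract__ = True  # never registered directly
    events = ('after_add_entity',)

    def __call__(self):
        self._cw.info('audited creation of %s #%s',
                      self.entity.cw_etype, self.entity.eid)


def registration_callback(vreg):
    # in real code the etype list would come from introspecting vreg.schema
    for etype in ('CWUser', 'CWGroup'):
        cls = type(str('%sAuditHook' % etype), (_AuditEntityHook,), {
            '__regid__': 'myapp.audit.%s' % etype.lower(),  # hypothetical regid
            '__select__': hook.Hook.__select__ & hook.is_instance(etype),
        })
        vreg.register(cls)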
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""schema hooks: + +- synchronize the living schema object with the persistent schema +- perform physical update on the source when necessary + +checking for schema consistency is done in hooks.py +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import json +from copy import copy +from hashlib import md5 + +from yams.schema import (BASE_TYPES, BadSchemaDefinition, + RelationSchema, RelationDefinitionSchema) +from yams import buildobjs as ybo, convert_default_value + +from logilab.common.decorators import clear_cache + +from cubicweb import validation_error +from cubicweb.predicates import is_instance +from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES, + CONSTRAINTS, UNIQUE_CONSTRAINTS, ETYPE_NAME_MAP) +from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.hooks.synccomputed import RecomputeAttributeOperation + +# core entity and relation types which can't be removed +CORE_TYPES = BASE_TYPES | SCHEMA_TYPES | META_RTYPES | set( + ('CWUser', 'CWGroup','login', 'upassword', 'name', 'in_group')) + + +def get_constraints(cnx, entity): + constraints = [] + for cstreid in cnx.transaction_data.get(entity.eid, ()): + cstrent = cnx.entity_from_eid(cstreid) + cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value) + cstr.eid = cstreid + constraints.append(cstr) + return constraints + +def group_mapping(cw): + try: + return cw.transaction_data['groupmap'] + except KeyError: + cw.transaction_data['groupmap'] = gmap = ss.group_mapping(cw) + return gmap + +def add_inline_relation_column(cnx, etype, rtype): + """add necessary column and index for an inlined relation""" + attrkey = '%s.%s' % (etype, rtype) + createdattrs = cnx.transaction_data.setdefault('createdattrs', set()) + if attrkey in createdattrs: + return + createdattrs.add(attrkey) + table = SQL_PREFIX + etype + column = SQL_PREFIX + rtype + try: + cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)' % (table, column)), + rollback_on_failure=False) + cnx.info('added column %s to table %s', column, table) + except Exception: + # silent exception here, if this error has not been raised because the + # column already exists, index creation will fail anyway + cnx.exception('error while adding column %s to table %s', + table, column) + # create index before alter table which may expectingly fail during test + # (sqlite) while index creation should never fail (test for index existence + # is done by the dbhelper) + cnx.repo.system_source.create_index(cnx, table, column) + cnx.info('added index on %s(%s)', table, column) + + +def insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props): + # XXX 'infered': True/False, not clear actually + props.update({'constraints': rdefdef.constraints, + 'description': rdefdef.description, + 'cardinality': rdefdef.cardinality, + 
'permissions': rdefdef.get_permissions(), + 'order': rdefdef.order, + 'infered': False, 'eid': None + }) + cstrtypemap = ss.cstrtype_mapping(cnx) + groupmap = group_mapping(cnx) + object = rschema.schema.eschema(rdefdef.object) + for specialization in eschema.specialized_by(False): + if (specialization, rdefdef.object) in rschema.rdefs: + continue + sperdef = RelationDefinitionSchema(specialization, rschema, + object, None, values=props) + ss.execschemarql(cnx.execute, sperdef, + ss.rdef2rql(sperdef, cstrtypemap, groupmap)) + + +def check_valid_changes(cnx, entity, ro_attrs=('name', 'final')): + errors = {} + # don't use getattr(entity, attr), we would get the modified value if any + for attr in entity.cw_edited: + if attr in ro_attrs: + origval, newval = entity.cw_edited.oldnewvalue(attr) + if newval != origval: + errors[attr] = _("can't change this attribute") + if errors: + raise validation_error(entity, errors) + + +class _MockEntity(object): # XXX use a named tuple with python 2.6 + def __init__(self, eid): + self.eid = eid + + +class SyncSchemaHook(hook.Hook): + """abstract class for schema synchronization hooks (in the `syncschema` + category) + """ + __abstract__ = True + category = 'syncschema' + + +# operations for low-level database alteration ################################ + +class DropTable(hook.Operation): + """actually remove a database from the instance's schema""" + table = None # make pylint happy + def precommit_event(self): + dropped = self.cnx.transaction_data.setdefault('droppedtables', + set()) + if self.table in dropped: + return # already processed + dropped.add(self.table) + self.cnx.system_sql('DROP TABLE %s' % self.table) + self.info('dropped table %s', self.table) + + # XXX revertprecommit_event + + +class DropRelationTable(DropTable): + def __init__(self, cnx, rtype): + super(DropRelationTable, self).__init__( + cnx, table='%s_relation' % rtype) + cnx.transaction_data.setdefault('pendingrtypes', set()).add(rtype) + + +class DropColumn(hook.DataOperationMixIn, hook.Operation): + """actually remove the attribut's column from entity table in the system + database + """ + def precommit_event(self): + cnx = self.cnx + for etype, attr in self.get_data(): + table = SQL_PREFIX + etype + column = SQL_PREFIX + attr + source = cnx.repo.system_source + # drop index if any + source.drop_index(cnx, table, column) + if source.dbhelper.alter_column_support: + cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column), + rollback_on_failure=False) + self.info('dropped column %s from table %s', column, table) + else: + # not supported by sqlite for instance + self.error('dropping column not supported by the backend, handle ' + 'it yourself (%s.%s)', table, column) + + # XXX revertprecommit_event + + +# base operations for in-memory schema synchronization ######################## + +class MemSchemaNotifyChanges(hook.SingleLastOperation): + """the update schema operation: + + special operation which should be called once and after all other schema + operations. It will trigger internal structures rebuilding to consider + schema changes. 
+ """ + + def __init__(self, cnx): + hook.SingleLastOperation.__init__(self, cnx) + + def precommit_event(self): + for eschema in self.cnx.repo.schema.entities(): + if not eschema.final: + clear_cache(eschema, 'ordered_relations') + + def postcommit_event(self): + repo = self.cnx.repo + # commit event should not raise error, while set_schema has chances to + # do so because it triggers full vreg reloading + try: + repo.schema.rebuild_infered_relations() + # trigger vreg reload + repo.set_schema(repo.schema) + # CWUser class might have changed, update current session users + cwuser_cls = self.cnx.vreg['etypes'].etype_class('CWUser') + for session in repo._sessions.values(): + session.user.__class__ = cwuser_cls + except Exception: + self.critical('error while setting schema', exc_info=True) + + def rollback_event(self): + self.precommit_event() + + +class MemSchemaOperation(hook.Operation): + """base class for schema operations""" + def __init__(self, cnx, **kwargs): + hook.Operation.__init__(self, cnx, **kwargs) + # every schema operation is triggering a schema update + MemSchemaNotifyChanges(cnx) + + +# operations for high-level source database alteration ######################## + +class CWETypeAddOp(MemSchemaOperation): + """after adding a CWEType entity: + * add it to the instance's schema + * create the necessary table + * set creation_date and modification_date by creating the necessary + CWAttribute entities + * add relation by creating the necessary CWRelation entity + """ + entity = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + entity = self.entity + schema = cnx.vreg.schema + etype = ybo.EntityType(eid=entity.eid, name=entity.name, + description=entity.description) + eschema = schema.add_entity_type(etype) + # create the necessary table + tablesql = y2sql.eschema2sql(cnx.repo.system_source.dbhelper, + eschema, prefix=SQL_PREFIX) + for sql in tablesql.split(';'): + if sql.strip(): + cnx.system_sql(sql) + # add meta relations + gmap = group_mapping(cnx) + cmap = ss.cstrtype_mapping(cnx) + for rtype in (META_RTYPES - VIRTUAL_RTYPES): + try: + rschema = schema[rtype] + except KeyError: + self.critical('rtype %s was not handled at cwetype creation time', rtype) + continue + if not rschema.rdefs: + self.warning('rtype %s has no relation definition yet', rtype) + continue + sampletype = rschema.subjects()[0] + desttype = rschema.objects()[0] + try: + rdef = copy(rschema.rdef(sampletype, desttype)) + except KeyError: + # this combo does not exist because this is not a universal META_RTYPE + continue + rdef.subject = _MockEntity(eid=entity.eid) + mock = _MockEntity(eid=None) + ss.execschemarql(cnx.execute, mock, ss.rdef2rql(rdef, cmap, gmap)) + + def revertprecommit_event(self): + # revert changes on in memory schema + self.cnx.vreg.schema.del_entity_type(self.entity.name) + # revert changes on database + self.cnx.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name)) + + +class CWETypeRenameOp(MemSchemaOperation): + """this operation updates physical storage accordingly""" + oldname = newname = None # make pylint happy + + def rename(self, oldname, newname): + self.cnx.vreg.schema.rename_entity_type(oldname, newname) + # we need sql to operate physical changes on the system database + sqlexec = self.cnx.system_sql + dbhelper = self.cnx.repo.system_source.dbhelper + sql = dbhelper.sql_rename_table(SQL_PREFIX+oldname, + SQL_PREFIX+newname) + sqlexec(sql) + self.info('renamed table %s to %s', oldname, newname) + sqlexec('UPDATE entities SET 
type=%(newname)s WHERE type=%(oldname)s', + {'newname': newname, 'oldname': oldname}) + for eid, (etype, extid, auri) in self.cnx.repo._type_source_cache.items(): + if etype == oldname: + self.cnx.repo._type_source_cache[eid] = (newname, extid, auri) + # XXX transaction records + + def precommit_event(self): + self.rename(self.oldname, self.newname) + + def revertprecommit_event(self): + self.rename(self.newname, self.oldname) + + +class CWRTypeUpdateOp(MemSchemaOperation): + """actually update some properties of a relation definition""" + rschema = entity = values = None # make pylint happy + oldvalues = None + + def precommit_event(self): + rschema = self.rschema + if rschema.final: + return # watched changes to final relation type are unexpected + cnx = self.cnx + if 'fulltext_container' in self.values: + op = UpdateFTIndexOp.get_instance(cnx) + for subjtype, objtype in rschema.rdefs: + if self.values['fulltext_container'] == 'subject': + op.add_data(subjtype) + op.add_data(objtype) + else: + op.add_data(objtype) + op.add_data(subjtype) + # update the in-memory schema first + self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values) + self.rschema.__dict__.update(self.values) + # then make necessary changes to the system source database + if 'inlined' not in self.values: + return # nothing to do + inlined = self.values['inlined'] + # check in-lining is possible when inlined + if inlined: + self.entity.check_inlined_allowed() + # inlined changed, make necessary physical changes! + sqlexec = self.cnx.system_sql + rtype = rschema.type + eidcolumn = SQL_PREFIX + 'eid' + if not inlined: + # need to create the relation if it has not been already done by + # another event of the same transaction + if not rschema.type in cnx.transaction_data.get('createdtables', ()): + tablesql = y2sql.rschema2sql(rschema) + # create the necessary table + for sql in tablesql.split(';'): + if sql.strip(): + sqlexec(sql) + cnx.transaction_data.setdefault('createdtables', []).append( + rschema.type) + # copy existant data + column = SQL_PREFIX + rtype + for etype in rschema.subjects(): + table = SQL_PREFIX + str(etype) + sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL' + % (rtype, eidcolumn, column, table, column)) + # drop existant columns + #if cnx.repo.system_source.dbhelper.alter_column_support: + for etype in rschema.subjects(): + DropColumn.get_instance(cnx).add_data((str(etype), rtype)) + else: + for etype in rschema.subjects(): + try: + add_inline_relation_column(cnx, str(etype), rtype) + except Exception as ex: + # the column probably already exists. this occurs when the + # entity's type has just been added or if the column has not + # been previously dropped (eg sqlite) + self.error('error while altering table %s: %s', etype, ex) + # copy existant data. 
+ # XXX don't use, it's not supported by sqlite (at least at when i tried it) + #sqlexec('UPDATE %(etype)s SET %(rtype)s=eid_to ' + # 'FROM %(rtype)s_relation ' + # 'WHERE %(etype)s.eid=%(rtype)s_relation.eid_from' + # % locals()) + table = SQL_PREFIX + str(etype) + cursor = sqlexec('SELECT eid_from, eid_to FROM %(table)s, ' + '%(rtype)s_relation WHERE %(table)s.%(eidcolumn)s=' + '%(rtype)s_relation.eid_from' % locals()) + args = [{'val': eid_to, 'x': eid} for eid, eid_to in cursor.fetchall()] + if args: + column = SQL_PREFIX + rtype + cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE %s=%%(x)s' + % (table, column, eidcolumn), args) + # drop existant table + DropRelationTable(cnx, rtype) + + def revertprecommit_event(self): + # revert changes on in memory schema + self.rschema.__dict__.update(self.oldvalues) + # XXX revert changes on database + + +class CWComputedRTypeUpdateOp(MemSchemaOperation): + """actually update some properties of a computed relation definition""" + rschema = entity = rule = None # make pylint happy + old_rule = None + + def precommit_event(self): + # update the in-memory schema first + self.old_rule = self.rschema.rule + self.rschema.rule = self.rule + + def revertprecommit_event(self): + # revert changes on in memory schema + self.rschema.rule = self.old_rule + + +class CWAttributeAddOp(MemSchemaOperation): + """an attribute relation (CWAttribute) has been added: + * add the necessary column + * set default on this column if any and possible + * register an operation to add the relation definition to the + instance's schema on commit + + constraints are handled by specific hooks + """ + entity = None # make pylint happy + + def init_rdef(self, **kwargs): + entity = self.entity + fromentity = entity.stype + rdefdef = self.rdefdef = ybo.RelationDefinition( + str(fromentity.name), entity.rtype.name, str(entity.otype.name), + description=entity.description, cardinality=entity.cardinality, + constraints=get_constraints(self.cnx, entity), + order=entity.ordernum, eid=entity.eid, **kwargs) + try: + self.cnx.vreg.schema.add_relation_def(rdefdef) + except BadSchemaDefinition: + # rdef has been infered then explicitly added (current consensus is + # not clear at all versus infered relation handling (and much + # probably buggy) + rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object] + assert rdef.infered + else: + rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object] + + self.cnx.execute('SET X ordernum Y+1 ' + 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' + 'X ordernum >= %(order)s, NOT X eid %(x)s', + {'x': entity.eid, 'se': fromentity.eid, + 'order': entity.ordernum or 0}) + return rdefdef, rdef + + def precommit_event(self): + cnx = self.cnx + entity = self.entity + # entity.defaultval is a Binary or None, but we need a correctly typed + # value + default = entity.defaultval + if default is not None: + default = default.unzpickle() + props = {'default': default, + 'indexed': entity.indexed, + 'fulltextindexed': entity.fulltextindexed, + 'internationalizable': entity.internationalizable} + if entity.extra_props: + props.update(json.loads(entity.extra_props.getvalue().decode('ascii'))) + # entity.formula may not exist yet if we're migrating to 3.20 + if hasattr(entity, 'formula'): + props['formula'] = entity.formula + # update the in-memory schema first + rdefdef, rdef = self.init_rdef(**props) + # then make necessary changes to the system source database + syssource = cnx.repo.system_source 
+ attrtype = y2sql.type_from_rdef(syssource.dbhelper, rdef) + # XXX should be moved somehow into lgdb: sqlite doesn't support to + # add a new column with UNIQUE, it should be added after the ALTER TABLE + # using ADD INDEX + if syssource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: + extra_unique_index = True + attrtype = attrtype.replace(' UNIQUE', '') + else: + extra_unique_index = False + # added some str() wrapping query since some backend (eg psycopg) don't + # allow unicode queries + table = SQL_PREFIX + rdefdef.subject + column = SQL_PREFIX + rdefdef.name + try: + cnx.system_sql(str('ALTER TABLE %s ADD %s %s' + % (table, column, attrtype)), + rollback_on_failure=False) + self.info('added column %s to table %s', column, table) + except Exception as ex: + # the column probably already exists. this occurs when + # the entity's type has just been added or if the column + # has not been previously dropped + self.error('error while altering table %s: %s', table, ex) + if extra_unique_index or entity.indexed: + try: + syssource.create_index(cnx, table, column, + unique=extra_unique_index) + except Exception as ex: + self.error('error while creating index for %s.%s: %s', + table, column, ex) + # final relations are not infered, propagate + schema = cnx.vreg.schema + try: + eschema = schema.eschema(rdefdef.subject) + except KeyError: + return # entity type currently being added + # propagate attribute to children classes + rschema = schema.rschema(rdefdef.name) + # if relation type has been inserted in the same transaction, its final + # attribute is still set to False, so we've to ensure it's False + rschema.final = True + insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props) + # update existing entities with the default value of newly added attribute + if default is not None: + default = convert_default_value(self.rdefdef, default) + cnx.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), + {'default': default}) + # if attribute is computed, compute it + if getattr(entity, 'formula', None): + # add rtype attribute for RelationDefinitionSchema api compat, this + # is what RecomputeAttributeOperation expect + rdefdef.rtype = rdefdef.name + RecomputeAttributeOperation.get_instance(cnx).add_data(rdefdef) + + def revertprecommit_event(self): + # revert changes on in memory schema + if getattr(self, 'rdefdef', None) is None: + return + self.cnx.vreg.schema.del_relation_def( + self.rdefdef.subject, self.rdefdef.name, self.rdefdef.object) + # XXX revert changes on database + + +class CWRelationAddOp(CWAttributeAddOp): + """an actual relation has been added: + + * add the relation definition to the instance's schema + + * if this is an inlined relation, add the necessary column else if it's the + first instance of this relation type, add the necessary table and set + default permissions + + constraints are handled by specific hooks + """ + entity = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + entity = self.entity + # update the in-memory schema first + rdefdef, rdef = self.init_rdef(composite=entity.composite) + # then make necessary changes to the system source database + schema = cnx.vreg.schema + rtype = rdefdef.name + rschema = schema.rschema(rtype) + # this have to be done before permissions setting + if rschema.inlined: + # need to add a column if the relation is inlined and if this is the + # first occurence of "Subject relation Something" whatever Something + if len(rschema.objects(rdefdef.subject)) == 1: + 
add_inline_relation_column(cnx, rdefdef.subject, rtype) + eschema = schema[rdefdef.subject] + insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, + {'composite': entity.composite}) + else: + if rschema.symmetric: + # for symmetric relations, rdefs will store relation definitions + # in both ways (i.e. (subj -> obj) and (obj -> subj)) + relation_already_defined = len(rschema.rdefs) > 2 + else: + relation_already_defined = len(rschema.rdefs) > 1 + # need to create the relation if no relation definition in the + # schema and if it has not been added during other event of the same + # transaction + if not (relation_already_defined or + rtype in cnx.transaction_data.get('createdtables', ())): + rschema = schema.rschema(rtype) + # create the necessary table + for sql in y2sql.rschema2sql(rschema).split(';'): + if sql.strip(): + cnx.system_sql(sql) + cnx.transaction_data.setdefault('createdtables', []).append( + rtype) + + # XXX revertprecommit_event + + +class RDefDelOp(MemSchemaOperation): + """an actual relation has been removed""" + rdef = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + rdef = self.rdef + rschema = rdef.rtype + # make necessary changes to the system source database first + rdeftype = rschema.final and 'CWAttribute' or 'CWRelation' + execute = cnx.execute + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' + 'R eid %%(x)s' % rdeftype, {'x': rschema.eid}) + lastrel = rset[0][0] == 0 + # we have to update physical schema systematically for final and inlined + # relations, but only if it's the last instance for this relation type + # for other relations + if (rschema.final or rschema.inlined): + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' + 'R eid %%(r)s, X from_entity E, E eid %%(e)s' + % rdeftype, + {'r': rschema.eid, 'e': rdef.subject.eid}) + if rset[0][0] == 0 and not cnx.deleted_in_transaction(rdef.subject.eid): + ptypes = cnx.transaction_data.setdefault('pendingrtypes', set()) + ptypes.add(rschema.type) + DropColumn.get_instance(cnx).add_data((str(rdef.subject), str(rschema))) + elif rschema.inlined: + cnx.system_sql('UPDATE %s%s SET %s%s=NULL WHERE ' + 'EXISTS(SELECT 1 FROM entities ' + ' WHERE eid=%s%s AND type=%%(to_etype)s)' + % (SQL_PREFIX, rdef.subject, SQL_PREFIX, rdef.rtype, + SQL_PREFIX, rdef.rtype), + {'to_etype': rdef.object.type}) + elif lastrel: + DropRelationTable(cnx, str(rschema)) + else: + cnx.system_sql('DELETE FROM %s_relation WHERE ' + 'EXISTS(SELECT 1 FROM entities ' + ' WHERE eid=eid_from AND type=%%(from_etype)s)' + ' AND EXISTS(SELECT 1 FROM entities ' + ' WHERE eid=eid_to AND type=%%(to_etype)s)' + % rschema, + {'from_etype': rdef.subject.type, 'to_etype': rdef.object.type}) + # then update the in-memory schema + if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: + rschema.del_relation_def(rdef.subject, rdef.object) + # if this is the last relation definition of this type, drop associated + # relation type + if lastrel and not cnx.deleted_in_transaction(rschema.eid): + execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rschema.eid}) + + def revertprecommit_event(self): + # revert changes on in memory schema + # + # Note: add_relation_def takes a RelationDefinition, not a + # RelationDefinitionSchema, needs to fake it + rdef = self.rdef + rdef.name = str(rdef.rtype) + if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: + self.cnx.vreg.schema.add_relation_def(rdef) + + + +class RDefUpdateOp(MemSchemaOperation): + """actually update 
some properties of a relation definition""" + rschema = rdefkey = values = None # make pylint happy + rdef = oldvalues = None + indexed_changed = null_allowed_changed = False + + def precommit_event(self): + cnx = self.cnx + rdef = self.rdef = self.rschema.rdefs[self.rdefkey] + # update the in-memory schema first + self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values) + rdef.update(self.values) + # then make necessary changes to the system source database + syssource = cnx.repo.system_source + if 'indexed' in self.values: + syssource.update_rdef_indexed(cnx, rdef) + self.indexed_changed = True + if 'cardinality' in self.values and rdef.rtype.final \ + and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]: + syssource.update_rdef_null_allowed(self.cnx, rdef) + self.null_allowed_changed = True + if 'fulltextindexed' in self.values: + UpdateFTIndexOp.get_instance(cnx).add_data(rdef.subject) + if 'formula' in self.values: + RecomputeAttributeOperation.get_instance(cnx).add_data(rdef) + + def revertprecommit_event(self): + if self.rdef is None: + return + # revert changes on in memory schema + self.rdef.update(self.oldvalues) + # revert changes on database + syssource = self.cnx.repo.system_source + if self.indexed_changed: + syssource.update_rdef_indexed(self.cnx, self.rdef) + if self.null_allowed_changed: + syssource.update_rdef_null_allowed(self.cnx, self.rdef) + + +def _set_modifiable_constraints(rdef): + # for proper in-place modification of in-memory schema: if rdef.constraints + # is already a list, reuse it (we're updating multiple constraints of the + # same rdef in the same transaction) + if not isinstance(rdef.constraints, list): + rdef.constraints = list(rdef.constraints) + + +class CWConstraintDelOp(MemSchemaOperation): + """actually remove a constraint of a relation definition""" + rdef = oldcstr = newcstr = None # make pylint happy + size_cstr_changed = unique_changed = False + + def precommit_event(self): + cnx = self.cnx + rdef = self.rdef + # in-place modification of in-memory schema first + _set_modifiable_constraints(rdef) + if self.oldcstr in rdef.constraints: + rdef.constraints.remove(self.oldcstr) + else: + self.critical('constraint %s for rdef %s was missing or already removed', + self.oldcstr, rdef) + if cnx.deleted_in_transaction(rdef.eid): + # don't try to alter a table that's going away (or is already gone) + return + # then update database: alter the physical schema on size/unique + # constraint changes + syssource = cnx.repo.system_source + cstrtype = self.oldcstr.type() + if cstrtype == 'SizeConstraint': + # if the size constraint is being replaced with a new max size, we'll + # call update_rdef_column in CWConstraintAddOp, skip it here + for cstr in cnx.transaction_data.get('newsizecstr', ()): + rdefentity = cstr.reverse_constrained_by[0] + cstrrdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid) + if cstrrdef == rdef: + return + + # we found that the size constraint for this rdef is really gone, + # not just replaced by another + syssource.update_rdef_column(cnx, rdef) + self.size_cstr_changed = True + elif cstrtype == 'UniqueConstraint': + syssource.update_rdef_unique(cnx, rdef) + self.unique_changed = True + if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): + cstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype + + (self.oldcstr.serialize() or '')).encode('utf-8')).hexdigest() + cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % (SQL_PREFIX, 
rdef.subject.type, cstrname)) + + def revertprecommit_event(self): + # revert changes on in memory schema + if self.newcstr is not None: + self.rdef.constraints.remove(self.newcstr) + if self.oldcstr is not None: + self.rdef.constraints.append(self.oldcstr) + # revert changes on database + syssource = self.cnx.repo.system_source + if self.size_cstr_changed: + syssource.update_rdef_column(self.cnx, self.rdef) + if self.unique_changed: + syssource.update_rdef_unique(self.cnx, self.rdef) + + +class CWConstraintAddOp(CWConstraintDelOp): + """actually update constraint of a relation definition""" + entity = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + rdefentity = self.entity.reverse_constrained_by[0] + # when the relation is added in the same transaction, the constraint + # object is created by the operation adding the attribute or relation, + # so there is nothing to do here + if cnx.added_in_transaction(rdefentity.eid): + return + rdef = self.rdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid) + cstrtype = self.entity.type + if cstrtype in UNIQUE_CONSTRAINTS: + oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) + else: + oldcstr = None + newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) + # in-place modification of in-memory schema first + _set_modifiable_constraints(rdef) + newcstr.eid = self.entity.eid + if oldcstr is not None: + rdef.constraints.remove(oldcstr) + rdef.constraints.append(newcstr) + # then update database: alter the physical schema on size/unique + # constraint changes + syssource = cnx.repo.system_source + if cstrtype == 'SizeConstraint' and (oldcstr is None or + oldcstr.max != newcstr.max): + syssource.update_rdef_column(cnx, rdef) + self.size_cstr_changed = True + elif cstrtype == 'UniqueConstraint' and oldcstr is None: + syssource.update_rdef_unique(cnx, rdef) + self.unique_changed = True + if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): + if oldcstr is not None: + oldcstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype + + (self.oldcstr.serialize() or '')).encode('ascii')).hexdigest() + cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % + (SQL_PREFIX, rdef.subject.type, oldcstrname)) + cstrname, check = y2sql.check_constraint(rdef.subject, rdef.object, rdef.rtype.type, + newcstr, syssource.dbhelper, prefix=SQL_PREFIX) + cnx.system_sql('ALTER TABLE %s%s ADD CONSTRAINT %s CHECK(%s)' % + (SQL_PREFIX, rdef.subject.type, cstrname, check)) + + +class CWUniqueTogetherConstraintAddOp(MemSchemaOperation): + entity = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + prefix = SQL_PREFIX + entity = self.entity + table = '%s%s' % (prefix, entity.constraint_of[0].name) + cols = ['%s%s' % (prefix, r.name) for r in entity.relations] + dbhelper = cnx.repo.system_source.dbhelper + sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, entity.name) + for sql in sqls: + cnx.system_sql(sql) + + def postcommit_event(self): + entity = self.entity + eschema = self.cnx.vreg.schema.schema_by_eid(entity.constraint_of[0].eid) + attrs = [r.name for r in entity.relations] + eschema._unique_together.append(attrs) + + +class CWUniqueTogetherConstraintDelOp(MemSchemaOperation): + entity = cstrname = None # for pylint + cols = () # for pylint + + def insert_index(self): + # We need to run before CWConstraintDelOp: if a size constraint is + # removed and the column is part of a unique_together constraint, we + # remove the 
unique_together index before changing the column's type. + # SQL Server does not support unique indices on unlimited text columns. + return 0 + + def precommit_event(self): + cnx = self.cnx + prefix = SQL_PREFIX + table = '%s%s' % (prefix, self.entity.type) + dbhelper = cnx.repo.system_source.dbhelper + cols = ['%s%s' % (prefix, c) for c in self.cols] + sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols, self.cstrname) + for sql in sqls: + cnx.system_sql(sql) + + def postcommit_event(self): + eschema = self.cnx.vreg.schema.schema_by_eid(self.entity.eid) + cols = set(self.cols) + unique_together = [ut for ut in eschema._unique_together + if set(ut) != cols] + eschema._unique_together = unique_together + + +# operations for in-memory schema synchronization ############################# + +class MemSchemaCWETypeDel(MemSchemaOperation): + """actually remove the entity type from the instance's schema""" + etype = None # make pylint happy + + def postcommit_event(self): + # del_entity_type also removes entity's relations + self.cnx.vreg.schema.del_entity_type(self.etype) + + +class MemSchemaCWRTypeAdd(MemSchemaOperation): + """actually add the relation type to the instance's schema""" + rtypedef = None # make pylint happy + + def precommit_event(self): + self.cnx.vreg.schema.add_relation_type(self.rtypedef) + + def revertprecommit_event(self): + self.cnx.vreg.schema.del_relation_type(self.rtypedef.name) + + +class MemSchemaCWRTypeDel(MemSchemaOperation): + """actually remove the relation type from the instance's schema""" + rtype = None # make pylint happy + + def postcommit_event(self): + try: + self.cnx.vreg.schema.del_relation_type(self.rtype) + except KeyError: + # s/o entity type have already been deleted + pass + + +class MemSchemaPermissionAdd(MemSchemaOperation): + """synchronize schema when a *_permission relation has been added on a group + """ + eid = action = group_eid = expr = None # make pylint happy + + def precommit_event(self): + """the observed connections.cnxset has been commited""" + try: + erschema = self.cnx.vreg.schema.schema_by_eid(self.eid) + except KeyError: + # duh, schema not found, log error and skip operation + self.warning('no schema for %s', self.eid) + return + perms = list(erschema.action_permissions(self.action)) + if self.group_eid is not None: + perm = self.cnx.entity_from_eid(self.group_eid).name + else: + perm = erschema.rql_expression(self.expr) + try: + perms.index(perm) + self.warning('%s already in permissions for %s on %s', + perm, self.action, erschema) + except ValueError: + perms.append(perm) + erschema.set_action_permissions(self.action, perms) + + # XXX revertprecommit_event + + +class MemSchemaPermissionDel(MemSchemaPermissionAdd): + """synchronize schema when a *_permission relation has been deleted from a + group + """ + + def precommit_event(self): + """the observed connections set has been commited""" + try: + erschema = self.cnx.vreg.schema.schema_by_eid(self.eid) + except KeyError: + # duh, schema not found, log error and skip operation + self.warning('no schema for %s', self.eid) + return + perms = list(erschema.action_permissions(self.action)) + if self.group_eid is not None: + perm = self.cnx.entity_from_eid(self.group_eid).name + else: + perm = erschema.rql_expression(self.expr) + try: + perms.remove(perm) + erschema.set_action_permissions(self.action, perms) + except ValueError: + self.error('can\'t remove permission %s for %s on %s', + perm, self.action, erschema) + + # XXX revertprecommit_event + + +class 
MemSchemaSpecializesAdd(MemSchemaOperation): + etypeeid = parentetypeeid = None # make pylint happy + + def precommit_event(self): + eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid) + parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid) + eschema._specialized_type = parenteschema.type + parenteschema._specialized_by.append(eschema.type) + + # XXX revertprecommit_event + + +class MemSchemaSpecializesDel(MemSchemaOperation): + etypeeid = parentetypeeid = None # make pylint happy + + def precommit_event(self): + try: + eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid) + parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid) + except KeyError: + # etype removed, nothing to do + return + eschema._specialized_type = None + parenteschema._specialized_by.remove(eschema.type) + + # XXX revertprecommit_event + + +# CWEType hooks ################################################################ + +class DelCWETypeHook(SyncSchemaHook): + """before deleting a CWEType entity: + * check that we don't remove a core entity type + * cascade to delete related CWAttribute and CWRelation entities + * instantiate an operation to delete the entity type on commit + """ + __regid__ = 'syncdelcwetype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWEType') + events = ('before_delete_entity',) + + def __call__(self): + # final entities can't be deleted, don't care about that + name = self.entity.name + if name in CORE_TYPES: + raise validation_error(self.entity, {None: _("can't be deleted")}) + # delete every entities of this type + if name not in ETYPE_NAME_MAP: + MemSchemaCWETypeDel(self._cw, etype=name) + DropTable(self._cw, table=SQL_PREFIX + name) + + +class AfterDelCWETypeHook(DelCWETypeHook): + __regid__ = 'wfcleanup' + events = ('after_delete_entity',) + + def __call__(self): + # workflow cleanup + self._cw.execute('DELETE Workflow X WHERE NOT X workflow_of Y') + + +class AfterAddCWETypeHook(DelCWETypeHook): + """after adding a CWEType entity: + * create the necessary table + * set creation_date and modification_date by creating the necessary + CWAttribute entities + * add owned_by relation by creating the necessary CWRelation entity + * register an operation to add the entity type to the instance's + schema on commit + """ + __regid__ = 'syncaddcwetype' + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + if entity.cw_edited.get('final'): + # final entity types don't need a table in the database and are + # systematically added by yams at schema initialization time so + # there is no need to do further processing. Simply assign its eid. 
+ self._cw.vreg.schema[entity.name].eid = entity.eid + return + CWETypeAddOp(self._cw, entity=entity) + + +class BeforeUpdateCWETypeHook(DelCWETypeHook): + """check name change, handle final""" + __regid__ = 'syncupdatecwetype' + events = ('before_update_entity',) + + def __call__(self): + entity = self.entity + check_valid_changes(self._cw, entity, ro_attrs=('final',)) + # don't use getattr(entity, attr), we would get the modified value if any + if 'name' in entity.cw_edited: + oldname, newname = entity.cw_edited.oldnewvalue('name') + if newname.lower() != oldname.lower(): + CWETypeRenameOp(self._cw, oldname=oldname, newname=newname) + + +# CWRType hooks ################################################################ + +class DelCWRTypeHook(SyncSchemaHook): + """before deleting a CWRType entity: + * check that we don't remove a core relation type + * cascade to delete related CWAttribute and CWRelation entities + * instantiate an operation to delete the relation type on commit + """ + __regid__ = 'syncdelcwrtype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') + events = ('before_delete_entity',) + + def __call__(self): + name = self.entity.name + if name in CORE_TYPES: + raise validation_error(self.entity, {None: _("can't be deleted")}) + # delete relation definitions using this relation type + self._cw.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s', + {'x': self.entity.eid}) + self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s', + {'x': self.entity.eid}) + MemSchemaCWRTypeDel(self._cw, rtype=name) + + +class AfterAddCWComputedRTypeHook(SyncSchemaHook): + """after a CWComputedRType entity has been added: + * register an operation to add the relation type to the instance's + schema on commit + + We don't know yet this point if a table is necessary + """ + __regid__ = 'syncaddcwcomputedrtype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType') + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + rtypedef = ybo.ComputedRelation(name=entity.name, + eid=entity.eid, + rule=entity.rule) + MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef) + + +class AfterAddCWRTypeHook(SyncSchemaHook): + """after a CWRType entity has been added: + * register an operation to add the relation type to the instance's + schema on commit + + We don't know yet this point if a table is necessary + """ + __regid__ = 'syncaddcwrtype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + rtypedef = ybo.RelationType(name=entity.name, + description=entity.description, + inlined=entity.cw_edited.get('inlined', False), + symmetric=entity.cw_edited.get('symmetric', False), + eid=entity.eid) + MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef) + + +class BeforeUpdateCWRTypeHook(SyncSchemaHook): + """check name change, handle final""" + __regid__ = 'syncupdatecwrtype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') + events = ('before_update_entity',) + + def __call__(self): + entity = self.entity + check_valid_changes(self._cw, entity) + newvalues = {} + for prop in ('symmetric', 'inlined', 'fulltext_container'): + if prop in entity.cw_edited: + old, new = entity.cw_edited.oldnewvalue(prop) + if old != new: + newvalues[prop] = new + if newvalues: + rschema = self._cw.vreg.schema.rschema(entity.name) + CWRTypeUpdateOp(self._cw, rschema=rschema, entity=entity, + values=newvalues) + + +class 
BeforeUpdateCWComputedRTypeHook(SyncSchemaHook): + """check name change, handle final""" + __regid__ = 'syncupdatecwcomputedrtype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType') + events = ('before_update_entity',) + + def __call__(self): + entity = self.entity + check_valid_changes(self._cw, entity) + if 'rule' in entity.cw_edited: + old, new = entity.cw_edited.oldnewvalue('rule') + if old != new: + rschema = self._cw.vreg.schema.rschema(entity.name) + CWComputedRTypeUpdateOp(self._cw, rschema=rschema, + entity=entity, rule=new) + + +class AfterDelRelationTypeHook(SyncSchemaHook): + """before deleting a CWAttribute or CWRelation entity: + * if this is a final or inlined relation definition, instantiate an + operation to drop necessary column, else if this is the last instance + of a non final relation, instantiate an operation to drop necessary + table + * instantiate an operation to delete the relation definition on commit + * delete the associated relation type when necessary + """ + __regid__ = 'syncdelrelationtype' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('relation_type') + events = ('after_delete_relation',) + + def __call__(self): + cnx = self._cw + try: + rdef = cnx.vreg.schema.schema_by_eid(self.eidfrom) + except KeyError: + self.critical('cant get schema rdef associated to %s', self.eidfrom) + return + subjschema, rschema, objschema = rdef.as_triple() + pendingrdefs = cnx.transaction_data.setdefault('pendingrdefs', set()) + # first delete existing relation if necessary + if rschema.final: + rdeftype = 'CWAttribute' + pendingrdefs.add((subjschema, rschema)) + else: + rdeftype = 'CWRelation' + pendingrdefs.add((subjschema, rschema, objschema)) + RDefDelOp(cnx, rdef=rdef) + + +# CWComputedRType hooks ####################################################### + +class DelCWComputedRTypeHook(SyncSchemaHook): + """before deleting a CWComputedRType entity: + * check that we don't remove a core relation type + * instantiate an operation to delete the relation type on commit + """ + __regid__ = 'syncdelcwcomputedrtype' + __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType') + events = ('before_delete_entity',) + + def __call__(self): + name = self.entity.name + if name in CORE_TYPES: + raise validation_error(self.entity, {None: _("can't be deleted")}) + MemSchemaCWRTypeDel(self._cw, rtype=name) + + +# CWAttribute / CWRelation hooks ############################################### + +class AfterAddCWAttributeHook(SyncSchemaHook): + __regid__ = 'syncaddcwattribute' + __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute') + events = ('after_add_entity',) + + def __call__(self): + CWAttributeAddOp(self._cw, entity=self.entity) + + +class AfterAddCWRelationHook(AfterAddCWAttributeHook): + __regid__ = 'syncaddcwrelation' + __select__ = SyncSchemaHook.__select__ & is_instance('CWRelation') + + def __call__(self): + CWRelationAddOp(self._cw, entity=self.entity) + + +class AfterUpdateCWRDefHook(SyncSchemaHook): + __regid__ = 'syncaddcwattribute' + __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute', + 'CWRelation') + events = ('before_update_entity',) + + def __call__(self): + entity = self.entity + if self._cw.deleted_in_transaction(entity.eid): + return + subjtype = entity.stype.name + objtype = entity.otype.name + if subjtype in ETYPE_NAME_MAP or objtype in ETYPE_NAME_MAP: + return + rschema = self._cw.vreg.schema[entity.rtype.name] + # note: do not access schema rdef here, it may be added later by an + # 
operation + newvalues = {} + for prop in RelationDefinitionSchema.rproperty_defs(objtype): + if prop == 'constraints': + continue + if prop == 'order': + attr = 'ordernum' + else: + attr = prop + if attr in entity.cw_edited: + old, new = entity.cw_edited.oldnewvalue(attr) + if old != new: + newvalues[prop] = new + if newvalues: + RDefUpdateOp(self._cw, rschema=rschema, rdefkey=(subjtype, objtype), + values=newvalues) + + +# constraints synchronization hooks ############################################ + +class AfterAddCWConstraintHook(SyncSchemaHook): + __regid__ = 'syncaddcwconstraint' + __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + if self.entity.cstrtype[0].name == 'SizeConstraint': + txdata = self._cw.transaction_data + if 'newsizecstr' not in txdata: + txdata['newsizecstr'] = set() + txdata['newsizecstr'].add(self.entity) + CWConstraintAddOp(self._cw, entity=self.entity) + + +class AfterAddConstrainedByHook(SyncSchemaHook): + __regid__ = 'syncaddconstrainedby' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by') + events = ('after_add_relation',) + + def __call__(self): + if self._cw.added_in_transaction(self.eidfrom): + # used by get_constraints() which is called in CWAttributeAddOp + self._cw.transaction_data.setdefault(self.eidfrom, []).append(self.eidto) + + +class BeforeDeleteCWConstraintHook(SyncSchemaHook): + __regid__ = 'syncdelcwconstraint' + __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') + events = ('before_delete_entity',) + + def __call__(self): + entity = self.entity + schema = self._cw.vreg.schema + try: + # KeyError, e.g. composite chain deletion + rdef = schema.schema_by_eid(entity.reverse_constrained_by[0].eid) + # IndexError + cstr = rdef.constraint_by_eid(entity.eid) + except (KeyError, IndexError): + self._cw.critical('constraint type no more accessible') + else: + CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr) + +# unique_together constraints +# XXX: use setoperations and before_add_relation here (on constraint_of and relations) +class AfterAddCWUniqueTogetherConstraintHook(SyncSchemaHook): + __regid__ = 'syncadd_cwuniquetogether_constraint' + __select__ = SyncSchemaHook.__select__ & is_instance('CWUniqueTogetherConstraint') + events = ('after_add_entity',) + + def __call__(self): + CWUniqueTogetherConstraintAddOp(self._cw, entity=self.entity) + + +class BeforeDeleteConstraintOfHook(SyncSchemaHook): + __regid__ = 'syncdelconstraintof' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constraint_of') + events = ('before_delete_relation',) + + def __call__(self): + if self._cw.deleted_in_transaction(self.eidto): + return + schema = self._cw.vreg.schema + cstr = self._cw.entity_from_eid(self.eidfrom) + entity = schema.schema_by_eid(self.eidto) + cols = tuple(r.name for r in cstr.relations) + CWUniqueTogetherConstraintDelOp(self._cw, entity=entity, + cstrname=cstr.name, cols=cols) + + +# permissions synchronization hooks ############################################ + +class AfterAddPermissionHook(SyncSchemaHook): + """added entity/relation *_permission, need to update schema""" + __regid__ = 'syncaddperm' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype( + 'read_permission', 'add_permission', 'delete_permission', + 'update_permission') + events = ('after_add_relation',) + + def __call__(self): + action = self.rtype.split('_', 1)[0] + if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup': 
+ MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, + group_eid=self.eidto) + else: # RQLExpression + expr = self._cw.entity_from_eid(self.eidto).expression + MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, + expr=expr) + + +class BeforeDelPermissionHook(AfterAddPermissionHook): + """delete entity/relation *_permission, need to update schema + + skip the operation if the related type is being deleted + """ + __regid__ = 'syncdelperm' + events = ('before_delete_relation',) + + def __call__(self): + if self._cw.deleted_in_transaction(self.eidfrom): + return + action = self.rtype.split('_', 1)[0] + if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup': + MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, + group_eid=self.eidto) + else: # RQLExpression + expr = self._cw.entity_from_eid(self.eidto).expression + MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, + expr=expr) + + + +class UpdateFTIndexOp(hook.DataOperationMixIn, hook.SingleLastOperation): + """operation to update full text indexation of entity whose schema change + + We wait after the commit to as the schema in memory is only updated after + the commit. + """ + containercls = list + + def postcommit_event(self): + cnx = self.cnx + source = cnx.repo.system_source + schema = cnx.repo.vreg.schema + to_reindex = self.get_data() + self.info('%i etypes need full text indexed reindexation', + len(to_reindex)) + for etype in to_reindex: + rset = cnx.execute('Any X WHERE X is %s' % etype) + self.info('Reindexing full text index for %i entity of type %s', + len(rset), etype) + still_fti = list(schema[etype].indexable_attributes()) + for entity in rset.entities(): + source.fti_unindex_entities(cnx, [entity]) + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(): + if still_fti or container is not entity: + source.fti_unindex_entities(cnx, [container]) + source.fti_index_entities(cnx, [container]) + if to_reindex: + # Transaction has already been committed + cnx.cnxset.commit() + + + + +# specializes synchronization hooks ############################################ + + +class AfterAddSpecializesHook(SyncSchemaHook): + __regid__ = 'syncaddspecializes' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes') + events = ('after_add_relation',) + + def __call__(self): + MemSchemaSpecializesAdd(self._cw, etypeeid=self.eidfrom, + parentetypeeid=self.eidto) + + +class AfterDelSpecializesHook(SyncSchemaHook): + __regid__ = 'syncdelspecializes' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes') + events = ('after_delete_relation',) + + def __call__(self): + MemSchemaSpecializesDel(self._cw, etypeeid=self.eidfrom, + parentetypeeid=self.eidto) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/syncsession.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/syncsession.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,255 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Core hooks: synchronize living session on persistent data changes""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from cubicweb import UnknownProperty, BadConnectionId, validation_error +from cubicweb.predicates import is_instance +from cubicweb.server import hook + + +def get_user_sessions(repo, ueid): + for session in repo._sessions.values(): + if ueid == session.user.eid: + yield session + + +class SyncSessionHook(hook.Hook): + __abstract__ = True + category = 'syncsession' + + +# user/groups synchronisation ################################################# + +class _GroupOperation(hook.Operation): + """base class for group operation""" + cnxuser = None # make pylint happy + + def __init__(self, cnx, *args, **kwargs): + """override to get the group name before actual groups manipulation: + + we may temporarily loose right access during a commit event, so + no query should be emitted while comitting + """ + rql = 'Any N WHERE G eid %(x)s, G name N' + result = cnx.execute(rql, {'x': kwargs['geid']}, build_descr=False) + hook.Operation.__init__(self, cnx, *args, **kwargs) + self.group = result[0][0] + + +class _DeleteGroupOp(_GroupOperation): + """synchronize user when a in_group relation has been deleted""" + + def postcommit_event(self): + """the observed connections set has been commited""" + groups = self.cnxuser.groups + try: + groups.remove(self.group) + except KeyError: + self.error('user %s not in group %s', self.cnxuser, self.group) + + +class _AddGroupOp(_GroupOperation): + """synchronize user when a in_group relation has been added""" + def postcommit_event(self): + """the observed connections set has been commited""" + groups = self.cnxuser.groups + if self.group in groups: + self.warning('user %s already in group %s', self.cnxuser, + self.group) + else: + groups.add(self.group) + + +class SyncInGroupHook(SyncSessionHook): + __regid__ = 'syncingroup' + __select__ = SyncSessionHook.__select__ & hook.match_rtype('in_group') + events = ('after_delete_relation', 'after_add_relation') + + def __call__(self): + if self.event == 'after_delete_relation': + opcls = _DeleteGroupOp + else: + opcls = _AddGroupOp + for session in get_user_sessions(self._cw.repo, self.eidfrom): + opcls(self._cw, cnxuser=session.user, geid=self.eidto) + + +class _DelUserOp(hook.Operation): + """close associated user's session when it is deleted""" + def __init__(self, cnx, sessionid): + self.sessionid = sessionid + hook.Operation.__init__(self, cnx) + + def postcommit_event(self): + """the observed connections set has been commited""" + try: + self.cnx.repo.close(self.sessionid) + except BadConnectionId: + pass # already closed + + +class CloseDeletedUserSessionsHook(SyncSessionHook): + __regid__ = 'closession' + __select__ = SyncSessionHook.__select__ & is_instance('CWUser') + events = ('after_delete_entity',) + + def __call__(self): + """modify user permission, need to update users""" + for session in get_user_sessions(self._cw.repo, self.entity.eid): + _DelUserOp(self._cw, session.sessionid) + + +# CWProperty hooks ############################################################# + +class _DelCWPropertyOp(hook.Operation): 
+ """a user's custom properties has been deleted""" + cwpropdict = key = None # make pylint happy + + def postcommit_event(self): + """the observed connections set has been commited""" + try: + del self.cwpropdict[self.key] + except KeyError: + self.error('%s has no associated value', self.key) + + +class _ChangeCWPropertyOp(hook.Operation): + """a user's custom properties has been added/changed""" + cwpropdict = key = value = None # make pylint happy + + def postcommit_event(self): + """the observed connections set has been commited""" + self.cwpropdict[self.key] = self.value + + +class _AddCWPropertyOp(hook.Operation): + """a user's custom properties has been added/changed""" + cwprop = None # make pylint happy + + def postcommit_event(self): + """the observed connections set has been commited""" + cwprop = self.cwprop + if not cwprop.for_user: + self.cnx.vreg['propertyvalues'][cwprop.pkey] = cwprop.value + # if for_user is set, update is handled by a ChangeCWPropertyOp operation + + +class AddCWPropertyHook(SyncSessionHook): + __regid__ = 'addcwprop' + __select__ = SyncSessionHook.__select__ & is_instance('CWProperty') + events = ('after_add_entity',) + + def __call__(self): + key, value = self.entity.pkey, self.entity.value + if key.startswith('sources.'): + return + cnx = self._cw + try: + value = cnx.vreg.typed_value(key, value) + except UnknownProperty: + msg = _('unknown property key %s') + raise validation_error(self.entity, {('pkey', 'subject'): msg}, (key,)) + except ValueError as ex: + raise validation_error(self.entity, + {('value', 'subject'): str(ex)}) + if not cnx.user.matching_groups('managers'): + cnx.add_relation(self.entity.eid, 'for_user', cnx.user.eid) + else: + _AddCWPropertyOp(cnx, cwprop=self.entity) + + +class UpdateCWPropertyHook(AddCWPropertyHook): + __regid__ = 'updatecwprop' + events = ('after_update_entity',) + + def __call__(self): + entity = self.entity + if not ('pkey' in entity.cw_edited or + 'value' in entity.cw_edited): + return + key, value = entity.pkey, entity.value + if key.startswith('sources.'): + return + cnx = self._cw + try: + value = cnx.vreg.typed_value(key, value) + except UnknownProperty: + return + except ValueError as ex: + raise validation_error(entity, {('value', 'subject'): str(ex)}) + if entity.for_user: + for session in get_user_sessions(cnx.repo, entity.for_user[0].eid): + _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties, + key=key, value=value) + else: + # site wide properties + _ChangeCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'], + key=key, value=value) + + +class DeleteCWPropertyHook(AddCWPropertyHook): + __regid__ = 'delcwprop' + events = ('before_delete_entity',) + + def __call__(self): + eid = self.entity.eid + cnx = self._cw + for eidfrom, rtype, eidto in cnx.transaction_data.get('pendingrelations', ()): + if rtype == 'for_user' and eidfrom == self.entity.eid: + # if for_user was set, delete has already been handled + break + else: + _DelCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'], + key=self.entity.pkey) + + +class AddForUserRelationHook(SyncSessionHook): + __regid__ = 'addcwpropforuser' + __select__ = SyncSessionHook.__select__ & hook.match_rtype('for_user') + events = ('after_add_relation',) + + def __call__(self): + cnx = self._cw + eidfrom = self.eidfrom + if not cnx.entity_metas(eidfrom)['type'] == 'CWProperty': + return + key, value = cnx.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', + {'x': eidfrom})[0] + if cnx.vreg.property_info(key)['sitewide']: + msg = _("site-wide 
property can't be set for user") + raise validation_error(eidfrom, {('for_user', 'subject'): msg}) + for session in get_user_sessions(cnx.repo, self.eidto): + _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties, + key=key, value=value) + + +class DelForUserRelationHook(AddForUserRelationHook): + __regid__ = 'delcwpropforuser' + events = ('after_delete_relation',) + + def __call__(self): + cnx = self._cw + key = cnx.execute('Any K WHERE P eid %(x)s, P pkey K', + {'x': self.eidfrom})[0][0] + cnx.transaction_data.setdefault('pendingrelations', []).append( + (self.eidfrom, self.rtype, self.eidto)) + for session in get_user_sessions(cnx.repo, self.eidto): + _DelCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/syncsources.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/syncsources.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,208 @@ +# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""hooks for repository sources synchronization""" + +from cubicweb import _ + +from socket import gethostname + +from logilab.common.decorators import clear_cache + +from cubicweb import validation_error +from cubicweb.predicates import is_instance +from cubicweb.server import SOURCE_TYPES, hook + +class SourceHook(hook.Hook): + __abstract__ = True + category = 'cw.sources' + + +# repo sources synchronization ################################################# + +class SourceAddedOp(hook.Operation): + entity = None # make pylint happy + def postcommit_event(self): + self.cnx.repo.add_source(self.entity) + +class SourceAddedHook(SourceHook): + __regid__ = 'cw.sources.added' + __select__ = SourceHook.__select__ & is_instance('CWSource') + events = ('after_add_entity',) + def __call__(self): + try: + sourcecls = SOURCE_TYPES[self.entity.type] + except KeyError: + msg = _('Unknown source type') + raise validation_error(self.entity, {('type', 'subject'): msg}) + # ignore creation of the system source done during database + # initialisation, as config for this source is in a file and handling + # is done separatly (no need for the operation either) + if self.entity.name != 'system': + sourcecls.check_conf_dict(self.entity.eid, self.entity.host_config, + fail_if_unknown=not self._cw.vreg.config.repairing) + SourceAddedOp(self._cw, entity=self.entity) + + +class SourceRemovedOp(hook.Operation): + uri = None # make pylint happy + def postcommit_event(self): + self.cnx.repo.remove_source(self.uri) + +class SourceRemovedHook(SourceHook): + __regid__ = 'cw.sources.removed' + __select__ = SourceHook.__select__ & is_instance('CWSource') + events = ('before_delete_entity',) + def __call__(self): + if self.entity.name == 'system': + msg = _("You cannot remove the system source") + raise 
validation_error(self.entity, {None: msg}) + SourceRemovedOp(self._cw, uri=self.entity.name) + + +class SourceConfigUpdatedOp(hook.DataOperationMixIn, hook.Operation): + + def precommit_event(self): + self.__processed = [] + for source in self.get_data(): + if not self.cnx.deleted_in_transaction(source.eid): + conf = source.repo_source.check_config(source) + self.__processed.append( (source, conf) ) + + def postcommit_event(self): + for source, conf in self.__processed: + source.repo_source.update_config(source, conf) + + +class SourceRenamedOp(hook.LateOperation): + oldname = newname = None # make pylint happy + + def precommit_event(self): + source = self.cnx.repo.sources_by_uri[self.oldname] + sql = 'UPDATE entities SET asource=%(newname)s WHERE asource=%(oldname)s' + self.cnx.system_sql(sql, {'oldname': self.oldname, + 'newname': self.newname}) + + def postcommit_event(self): + repo = self.cnx.repo + # XXX race condition + source = repo.sources_by_uri.pop(self.oldname) + source.uri = self.newname + source.public_config['uri'] = self.newname + repo.sources_by_uri[self.newname] = source + repo._type_source_cache.clear() + clear_cache(repo, 'source_defs') + + +class SourceUpdatedHook(SourceHook): + __regid__ = 'cw.sources.configupdate' + __select__ = SourceHook.__select__ & is_instance('CWSource') + events = ('before_update_entity',) + def __call__(self): + if 'name' in self.entity.cw_edited: + oldname, newname = self.entity.cw_edited.oldnewvalue('name') + if oldname == 'system': + msg = _("You cannot rename the system source") + raise validation_error(self.entity, {('name', 'subject'): msg}) + SourceRenamedOp(self._cw, oldname=oldname, newname=newname) + if 'config' in self.entity.cw_edited or 'url' in self.entity.cw_edited: + if self.entity.name == 'system' and self.entity.config: + msg = _("Configuration of the system source goes to " + "the 'sources' file, not in the database") + raise validation_error(self.entity, {('config', 'subject'): msg}) + SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity) + + +class SourceHostConfigUpdatedHook(SourceHook): + __regid__ = 'cw.sources.hostconfigupdate' + __select__ = SourceHook.__select__ & is_instance('CWSourceHostConfig') + events = ('after_add_entity', 'after_update_entity', 'before_delete_entity',) + def __call__(self): + if self.entity.match(gethostname()): + if self.event == 'after_update_entity' and \ + not 'config' in self.entity.cw_edited: + return + try: + SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource) + except IndexError: + # XXX no source linked to the host config yet + pass + + +# source mapping synchronization ############################################### +# +# Expect cw_for_source/cw_schema are immutable relations (i.e. can't change from +# a source or schema to another). + +class SourceMappingImmutableHook(SourceHook): + """check cw_for_source and cw_schema are immutable relations + + XXX empty delete perms would be enough? 
+ """ + __regid__ = 'cw.sources.mapping.immutable' + __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source', 'cw_schema') + events = ('before_add_relation',) + def __call__(self): + if not self._cw.added_in_transaction(self.eidfrom): + msg = _("You can't change this relation") + raise validation_error(self.eidfrom, {self.rtype: msg}) + + +class SourceMappingChangedOp(hook.DataOperationMixIn, hook.Operation): + def check_or_update(self, checkonly): + cnx = self.cnx + # take care, can't call get_data() twice + try: + data = self.__data + except AttributeError: + data = self.__data = self.get_data() + for schemacfg, source in data: + if source is None: + source = schemacfg.cwsource.repo_source + if cnx.added_in_transaction(schemacfg.eid): + if not cnx.deleted_in_transaction(schemacfg.eid): + source.add_schema_config(schemacfg, checkonly=checkonly) + elif cnx.deleted_in_transaction(schemacfg.eid): + source.del_schema_config(schemacfg, checkonly=checkonly) + else: + source.update_schema_config(schemacfg, checkonly=checkonly) + + def precommit_event(self): + self.check_or_update(True) + + def postcommit_event(self): + self.check_or_update(False) + + +class SourceMappingChangedHook(SourceHook): + __regid__ = 'cw.sources.schemaconfig' + __select__ = SourceHook.__select__ & is_instance('CWSourceSchemaConfig') + events = ('after_add_entity', 'after_update_entity') + def __call__(self): + if self.event == 'after_add_entity' or ( + self.event == 'after_update_entity' and 'options' in self.entity.cw_edited): + SourceMappingChangedOp.get_instance(self._cw).add_data( + (self.entity, None) ) + +class SourceMappingDeleteHook(SourceHook): + __regid__ = 'cw.sources.delschemaconfig' + __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source') + events = ('before_delete_relation',) + def __call__(self): + SourceMappingChangedOp.get_instance(self._cw).add_data( + (self._cw.entity_from_eid(self.eidfrom), + self._cw.entity_from_eid(self.eidto).repo_source) ) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/data-computed/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/data-computed/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,46 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+from yams.buildobjs import EntityType, String, Int, SubjectRelation, RelationDefinition + +THISYEAR = 2014 + +class Person(EntityType): + name = String() + salaire = Int() + birth_year = Int(required=True) + travaille = SubjectRelation('Societe') + age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR) + +class Societe(EntityType): + nom = String() + salaire_total = Int(formula='Any SUM(SA) GROUPBY X WHERE P travaille X, P salaire SA') + + +class Agent(EntityType): + asalae_id = String(formula='Any E WHERE M mirror_of X, M extid E') + +class MirrorEntity(EntityType): + extid = String(required=True, unique=True, + description=_('external identifier of the object')) + + +class mirror_of(RelationDefinition): + subject = 'MirrorEntity' + object = ('Agent', 'Societe') + cardinality = '?*' + inlined = True diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/data/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,85 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from yams.buildobjs import (RelationDefinition, RelationType, EntityType, + String, Datetime, Int) +from yams.reader import context + +from cubicweb.schema import ERQLExpression + +from cubicweb import _ + +class friend(RelationDefinition): + subject = ('CWUser', 'CWGroup') + object = ('CWUser', 'CWGroup') + symmetric = True + +class Folder(EntityType): + name = String() + +class parent(RelationDefinition): + subject = 'Folder' + object = 'Folder' + composite = 'object' + cardinality = '?*' + +class children(RelationDefinition): + subject = 'Folder' + object = 'Folder' + composite = 'subject' + + +class Email(EntityType): + """electronic mail""" + subject = String(fulltextindexed=True) + date = Datetime(description=_('UTC time on which the mail was sent')) + messageid = String(required=True, indexed=True) + headers = String(description=_('raw headers')) + + + +class EmailPart(EntityType): + """an email attachment""" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), # XXX if E parts X, U has_read_permission E + 'add': ('managers', ERQLExpression('E parts X, U has_update_permission E'),), + 'delete': ('managers', ERQLExpression('E parts X, U has_update_permission E')), + 'update': ('managers', 'owners',), + } + + content = String(fulltextindexed=True) + content_format = String(required=True, maxsize=50) + ordernum = Int(required=True) + + +class parts(RelationType): + subject = 'Email' + object = 'EmailPart' + cardinality = '*1' + composite = 'subject' + fulltext_container = 'subject' + +class sender(RelationDefinition): + subject = 'Email' + object = 'EmailAddress' + cardinality = '?*' + inlined = True + +class recipients(RelationDefinition): + subject = 'Email' + object = 'EmailAddress' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +psycopg2 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_bookmarks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_bookmarks.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,38 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+from logilab.common.testlib import unittest_main +from cubicweb.devtools.testlib import CubicWebTC + +class BookmarkHooksTC(CubicWebTC): + + + def test_auto_delete_bookmarks(self): + with self.admin_access.repo_cnx() as cnx: + beid = cnx.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U ' + 'WHERE U login "admin"')[0][0] + cnx.execute('SET X bookmarked_by U WHERE U login "anon"') + cnx.commit() + cnx.execute('DELETE X bookmarked_by U WHERE U login "admin"') + cnx.commit() + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid})) + cnx.execute('DELETE X bookmarked_by U WHERE U login "anon"') + cnx.commit() + self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid})) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_hooks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_hooks.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,221 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""functional tests for core hooks + +Note: + syncschema.py hooks are mostly tested in server/test/unittest_migrations.py +""" + +from datetime import datetime + +from six import text_type + +from pytz import utc +from cubicweb import ValidationError, AuthenticationError, BadConnectionId +from cubicweb.devtools.testlib import CubicWebTC + + +class CoreHooksTC(CubicWebTC): + + def test_inlined(self): + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(self.repo.schema['sender'].inlined, True) + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + eeid = cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", ' + 'X sender Y, X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart')[0][0] + cnx.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') + rset = cnx.execute('Any S WHERE X sender S, X eid %s' % eeid) + self.assertEqual(len(rset), 1) + + def test_symmetric(self): + with self.admin_access.repo_cnx() as cnx: + u1 = self.create_user(cnx, u'1') + u2 = self.create_user(cnx, u'2') + u3 = self.create_user(cnx, u'3') + ga = cnx.create_entity('CWGroup', name=u'A') + gb = cnx.create_entity('CWGroup', name=u'B') + u1.cw_set(friend=u2) + u2.cw_set(friend=u3) + ga.cw_set(friend=gb) + ga.cw_set(friend=u1) + cnx.commit() + for l1, l2 in ((u'1', u'2'), + (u'2', u'3')): + self.assertTrue(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + self.assertTrue(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + self.assertTrue(cnx.execute('Any GA,GB WHERE GA friend GB, GA name "A", GB name "B"')) + self.assertTrue(cnx.execute('Any GA,GB WHERE GB friend GA, GA name "A", GB name "B"')) + self.assertTrue(cnx.execute('Any GA,U1 WHERE GA friend U1, GA name "A", U1 login "1"')) + self.assertTrue(cnx.execute('Any GA,U1 WHERE U1 friend GA, GA name "A", U1 login "1"')) + self.assertFalse(cnx.execute('Any GA,U WHERE GA friend U, GA name "A", U login "2"')) + for l1, l2 in ((u'1', u'3'), + (u'3', u'1')): + self.assertFalse(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + self.assertFalse(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + + def test_html_tidy_hook(self): + with self.admin_access.client_cnx() as cnx: + entity = cnx.create_entity('Workflow', name=u'wf1', + description_format=u'text/html', + description=u'yo') + self.assertEqual(u'yo', entity.description) + entity = cnx.create_entity('Workflow', name=u'wf2', + description_format=u'text/html', + description=u'yo') + self.assertEqual(u'yo', entity.description) + entity = cnx.create_entity('Workflow', name=u'wf3', + description_format=u'text/html', + description=u'yo') + self.assertEqual(u'yo', entity.description) + entity = cnx.create_entity('Workflow', name=u'wf4', + description_format=u'text/html', + description=u'R&D') + self.assertEqual(u'R&D', entity.description, ) + entity = cnx.create_entity('Workflow', name=u'wf5', + description_format=u'text/html', + description=u"
<div>c&apos;est <b>l'été") + self.assertEqual(u"<div>c'est <b>l'été</b></div>", entity.description) + + def test_nonregr_html_tidy_hook_no_update(self): + with self.admin_access.client_cnx() as cnx: + entity = cnx.create_entity('Workflow', name=u'wf1', + description_format=u'text/html', + description=u'yo') + entity.cw_set(name=u'wf2') + self.assertEqual(entity.description, u'yo') + entity.cw_set(description=u'R&D<p>yo') + self.assertEqual(entity.description, u'R&amp;D<p>yo</p>
') + + def test_metadata_cwuri(self): + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('Workflow', name=u'wf1') + self.assertEqual(entity.cwuri, self.repo.config['base-url'] + str(entity.eid)) + + def test_metadata_creation_modification_date(self): + with self.admin_access.repo_cnx() as cnx: + _now = datetime.now(utc) + entity = cnx.create_entity('Workflow', name=u'wf1') + self.assertEqual((entity.creation_date - _now).seconds, 0) + self.assertEqual((entity.modification_date - _now).seconds, 0) + + def test_metadata_created_by(self): + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view') + cnx.commit() # fire operations + self.assertEqual(len(entity.created_by), 1) # make sure we have only one creator + self.assertEqual(entity.created_by[0].eid, cnx.user.eid) + + def test_metadata_owned_by(self): + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view') + cnx.commit() # fire operations + self.assertEqual(len(entity.owned_by), 1) # make sure we have only one owner + self.assertEqual(entity.owned_by[0].eid, cnx.user.eid) + + def test_user_login_stripped(self): + with self.admin_access.repo_cnx() as cnx: + u = self.create_user(cnx, ' joe ') + tname = cnx.execute('Any L WHERE E login L, E eid %(e)s', + {'e': u.eid})[0][0] + self.assertEqual(tname, 'joe') + cnx.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) + tname = cnx.execute('Any L WHERE E login L, E eid %(e)s', + {'e': u.eid})[0][0] + self.assertEqual(tname, 'jijoe') + + + +class UserGroupHooksTC(CubicWebTC): + + def test_user_group_synchronization(self): + with self.admin_access.repo_cnx() as cnx: + user = cnx.user + self.assertEqual(user.groups, set(('managers',))) + cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) + self.assertEqual(user.groups, set(('managers',))) + cnx.commit() + self.assertEqual(user.groups, set(('managers', 'guests'))) + cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) + self.assertEqual(user.groups, set(('managers', 'guests'))) + cnx.commit() + self.assertEqual(user.groups, set(('managers',))) + + def test_user_composite_owner(self): + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'toto').eid + # composite of euser should be owned by the euser regardless of who created it + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' + 'WHERE U login "toto"') + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE X owned_by U, U use_email X,' + 'U login "toto", X address A')[0][0], + 'toto@logilab.fr') + + def test_user_composite_no_owner_on_deleted_entity(self): + with self.admin_access.repo_cnx() as cnx: + u = self.create_user(cnx, 'toto').eid + cnx.commit() + e = cnx.create_entity('EmailAddress', address=u'toto@logilab.fr', reverse_use_email=u) + e.cw_delete() + cnx.commit() + self.assertFalse(cnx.system_sql( + 'SELECT * FROM owned_by_relation ' + 'WHERE eid_from NOT IN (SELECT eid FROM entities)').fetchall()) + + def test_no_created_by_on_deleted_entity(self): + with self.admin_access.repo_cnx() as cnx: + eid = cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0] + cnx.execute('DELETE EmailAddress X WHERE X eid %s' % eid) + cnx.commit() + self.assertFalse(cnx.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid})) + + + +class SchemaHooksTC(CubicWebTC): + + def test_duplicate_etype_error(self): + with self.admin_access.repo_cnx() 
as cnx: + # check we can't add a CWEType or CWRType entity if it already exists one + # with the same name + self.assertRaises(ValidationError, + cnx.execute, 'INSERT CWEType X: X name "CWUser"') + cnx.rollback() + self.assertRaises(ValidationError, + cnx.execute, 'INSERT CWRType X: X name "in_group"') + + def test_validation_unique_constraint(self): + with self.admin_access.repo_cnx() as cnx: + with self.assertRaises(ValidationError) as cm: + cnx.execute('INSERT CWUser X: X login "admin"') + ex = cm.exception + ex.translate(text_type) + self.assertIsInstance(ex.entity, int) + self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_integrity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_integrity.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""functional tests for integrity hooks""" + +from cubicweb import ValidationError +from cubicweb.devtools.testlib import CubicWebTC + +class CoreHooksTC(CubicWebTC): + + def test_delete_internal_entities(self): + with self.admin_access.repo_cnx() as cnx: + self.assertRaises(ValidationError, cnx.execute, + 'DELETE CWEType X WHERE X name "CWEType"') + cnx.rollback() + self.assertRaises(ValidationError, cnx.execute, + 'DELETE CWRType X WHERE X name "relation_type"') + cnx.rollback() + self.assertRaises(ValidationError, cnx.execute, + 'DELETE CWGroup X WHERE X name "owners"') + + def test_delete_required_relations_subject(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y ' + 'WHERE Y name "users"') + cnx.commit() + cnx.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"') + self.assertRaises(ValidationError, cnx.commit) + cnx.rollback() + cnx.execute('DELETE X in_group Y WHERE X login "toto"') + cnx.execute('SET X in_group Y WHERE X login "toto", Y name "guests"') + cnx.commit() + + def test_static_vocabulary_check(self): + with self.admin_access.repo_cnx() as cnx: + self.assertRaises(ValidationError, + cnx.execute, + 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", ' + 'X relation_type RT, RT name "in_group"') + + def test_missing_required_relations_subject_inline(self): + with self.admin_access.repo_cnx() as cnx: + # missing in_group relation + cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop"') + self.assertRaises(ValidationError, cnx.commit) + + def test_composite_1(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' + 'X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + self.assertTrue(cnx.execute('Email X WHERE X sender Y')) + cnx.commit() + cnx.execute('DELETE Email X') + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 0) + cnx.commit() + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 0) + + def test_composite_2(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' + 'X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + cnx.commit() + cnx.execute('DELETE Email X') + cnx.execute('DELETE EmailPart X') + cnx.commit() + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 0) + + def test_composite_redirection(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' + 'X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + cnx.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, ' + 'X recipients Y ' + 'WHERE Y is EmailAddress') + cnx.commit() + cnx.execute('DELETE X 
parts Y WHERE X messageid "<1234>"') + cnx.execute('SET X parts Y WHERE X messageid "<2345>"') + cnx.commit() + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') + + def test_composite_object_relation_deletion(self): + with self.admin_access.repo_cnx() as cnx: + root = cnx.create_entity('Folder', name=u'root') + a = cnx.create_entity('Folder', name=u'a', parent=root) + cnx.create_entity('Folder', name=u'b', parent=a) + cnx.create_entity('Folder', name=u'c', parent=root) + cnx.commit() + cnx.execute('DELETE Folder F WHERE F name "a"') + cnx.execute('DELETE F parent R WHERE R name "root"') + cnx.commit() + self.assertEqual([['root'], ['c']], + cnx.execute('Any NF WHERE F is Folder, F name NF').rows) + self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows) + + def test_composite_subject_relation_deletion(self): + with self.admin_access.repo_cnx() as cnx: + root = cnx.create_entity('Folder', name=u'root') + a = cnx.create_entity('Folder', name=u'a') + b = cnx.create_entity('Folder', name=u'b') + c = cnx.create_entity('Folder', name=u'c') + root.cw_set(children=(a, c)) + a.cw_set(children=b) + cnx.commit() + cnx.execute('DELETE Folder F WHERE F name "a"') + cnx.execute('DELETE R children F WHERE R name "root"') + cnx.commit() + self.assertEqual([['root'], ['c']], + cnx.execute('Any NF WHERE F is Folder, F name NF').rows) + self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows) + + def test_unsatisfied_constraints(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0] + with self.assertRaises(ValidationError) as cm: + cnx.commit() + self.assertEqual(cm.exception.errors, + {'in_group-object': u'RQLConstraint NOT O name "owners" failed'}) + + def test_unique_constraint(self): + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('CWGroup', name=u'trout') + cnx.commit() + self.assertRaises(ValidationError, cnx.create_entity, 'CWGroup', name=u'trout') + cnx.rollback() + cnx.execute('SET X name "trout" WHERE X eid %(x)s', {'x': entity.eid}) + cnx.commit() + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_security.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_security.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,56 @@ +# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
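The integrity tests above and the security test that follows share the same CubicWebTC idiom: open a repository connection through `admin_access`, run RQL with `cnx.execute()`, and expect constraint violations to surface as ValidationError. A minimal sketch of that idiom, using only entity types visible in this changeset (the test name and values are illustrative, not part of the patch):

    from cubicweb import ValidationError
    from cubicweb.devtools.testlib import CubicWebTC

    class UniqueGroupNameTC(CubicWebTC):

        def test_duplicate_group_name_rejected(self):
            with self.admin_access.repo_cnx() as cnx:
                cnx.create_entity('CWGroup', name=u'example')
                cnx.commit()
                # a second group with the same name violates the unique
                # constraint on CWGroup.name, as in test_unique_constraint above
                self.assertRaises(ValidationError, cnx.create_entity,
                                  'CWGroup', name=u'example')
                cnx.rollback()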
+ +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server import hook +from cubicweb.predicates import is_instance + + +class SecurityHooksTC(CubicWebTC): + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.add_eid = cnx.create_entity('EmailAddress', + address=u'hop@perdu.com', + reverse_use_email=cnx.user.eid).eid + cnx.commit() + + def test_inlined_cw_edited_relation(self): + """modification of cw_edited to add an inlined relation shouldn't trigger a security error. + + Test for https://www.cubicweb.org/ticket/5477315 + """ + sender = self.repo.schema['Email'].rdef('sender') + with self.temporary_permissions((sender, {'add': ()})): + + class MyHook(hook.Hook): + __regid__ = 'test.pouet' + __select__ = hook.Hook.__select__ & is_instance('Email') + events = ('before_add_entity',) + + def __call__(self): + self.entity.cw_edited['sender'] = self._cw.user.primary_email[0].eid + + with self.temporary_appobjects(MyHook): + with self.admin_access.repo_cnx() as cnx: + email = cnx.create_entity('Email', messageid=u'1234') + cnx.commit() + self.assertEqual(email.sender[0].eid, self.add_eid) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_synccomputed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_synccomputed.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,146 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
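The security test above registers a throw-away hook with `temporary_appobjects` so that it only exists for the duration of the test. The anatomy of such a hook, declared at module level in a cube, would look roughly like this (the regid and the attribute being filled are illustrative, not part of this changeset):

    from cubicweb.server import hook
    from cubicweb.predicates import is_instance

    class DefaultSubjectHook(hook.Hook):
        """illustrative hook: fill an attribute before an Email entity is added"""
        __regid__ = 'example.default_subject'
        __select__ = hook.Hook.__select__ & is_instance('Email')
        events = ('before_add_entity',)

        def __call__(self):
            # cw_edited may still be modified from a before_add_entity hook,
            # as the security test above does for the inlined 'sender' relation
            if 'subject' not in self.entity.cw_edited:
                self.entity.cw_edited['subject'] = u'(no subject)'

In a test it can be enabled locally with `with self.temporary_appobjects(DefaultSubjectHook): ...`, exactly as done for MyHook above.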
+"""unit tests for computed attributes/relations hooks""" + +from unittest import TestCase + +from yams.buildobjs import EntityType, String, Int, SubjectRelation + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.schema import build_schema_from_namespace + + +class FormulaDependenciesMatrixTC(TestCase): + + def simple_schema(self): + THISYEAR = 2014 + + class Person(EntityType): + name = String() + salary = Int() + birth_year = Int(required=True) + works_for = SubjectRelation('Company') + age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR) + + class Company(EntityType): + name = String() + total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA') + + schema = build_schema_from_namespace(vars().items()) + return schema + + def setUp(self): + from cubicweb.hooks.synccomputed import _FormulaDependenciesMatrix + self.schema = self.simple_schema() + self.dependencies = _FormulaDependenciesMatrix(self.schema) + + def test_computed_attributes_by_etype(self): + comp_by_etype = self.dependencies.computed_attribute_by_etype + self.assertEqual(len(comp_by_etype), 2) + values = comp_by_etype['Person'] + self.assertEqual(len(values), 1) + self.assertEqual(values[0].rtype, 'age') + values = comp_by_etype['Company'] + self.assertEqual(len(values), 1) + self.assertEqual(values[0].rtype, 'total_salary') + + def test_computed_attribute_by_relation(self): + comp_by_rdef = self.dependencies.computed_attribute_by_relation + self.assertEqual(len(comp_by_rdef), 1) + key, values = next(iter(comp_by_rdef.items())) + self.assertEqual(key.rtype, 'works_for') + self.assertEqual(len(values), 1) + self.assertEqual(values[0].rtype, 'total_salary') + + def test_computed_attribute_by_etype_attrs(self): + comp_by_attr = self.dependencies.computed_attribute_by_etype_attrs + self.assertEqual(len(comp_by_attr), 1) + values = comp_by_attr['Person'] + self.assertEqual(len(values), 2) + values = set((rdef.formula, tuple(v)) + for rdef, v in values.items()) + self.assertEquals(values, + set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))), + ('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',))))) + ) + + +class ComputedAttributeTC(CubicWebTC): + appid = 'data-computed' + + def setup_entities(self, req): + self.societe = req.create_entity('Societe', nom=u'Foo') + req.create_entity('Person', name=u'Titi', salaire=1000, + travaille=self.societe, birth_year=2001) + self.tata = req.create_entity('Person', name=u'Tata', salaire=2000, + travaille=self.societe, birth_year=1990) + + + def test_update_on_add_remove_relation(self): + """check the rewriting of a computed attribute""" + with self.admin_access.web_request() as req: + self.setup_entities(req) + req.cnx.commit() + rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') + self.assertEqual(rset[0][0], 3000) + # Add relation. + toto = req.create_entity('Person', name=u'Toto', salaire=1500, + travaille=self.societe, birth_year=1988) + req.cnx.commit() + rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') + self.assertEqual(rset[0][0], 4500) + # Delete relation. 
+ toto.cw_set(travaille=None) + req.cnx.commit() + rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') + self.assertEqual(rset[0][0], 3000) + + def test_recompute_on_attribute_update(self): + """check the modification of an attribute triggers the update of the + computed attributes that depend on it""" + with self.admin_access.web_request() as req: + self.setup_entities(req) + req.cnx.commit() + rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') + self.assertEqual(rset[0][0], 3000) + # Update attribute. + self.tata.cw_set(salaire=1000) + req.cnx.commit() + rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') + self.assertEqual(rset[0][0], 2000) + + def test_init_on_entity_creation(self): + """check the computed attribute is initialized on entity creation""" + with self.admin_access.web_request() as req: + p = req.create_entity('Person', name=u'Tata', salaire=2000, + birth_year=1990) + req.cnx.commit() + rset = req.execute('Any A, X WHERE X age A, X name "Tata"') + self.assertEqual(rset[0][0], 2014 - 1990) + + + def test_recompute_on_ambiguous_relation(self): + # check we don't end up with TypeResolverException as in #4901163 + with self.admin_access.client_cnx() as cnx: + societe = cnx.create_entity('Societe', nom=u'Foo') + cnx.create_entity('MirrorEntity', mirror_of=societe, extid=u'1') + cnx.commit() + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_syncschema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_syncschema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,405 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
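The ComputedAttributeTC tests above exercise the synccomputed hooks: an attribute declared with a `formula` is recomputed whenever the relations or attributes it depends on change, and initialised when the entity is created. In a cube's schema.py the declaration is the same as in the test schema used by FormulaDependenciesMatrixTC, roughly:

    from yams.buildobjs import EntityType, String, Int, SubjectRelation

    class Person(EntityType):
        name = String()
        salary = Int()
        works_for = SubjectRelation('Company')

    class Company(EntityType):
        name = String()
        # computed attribute: the synccomputed hooks keep it up to date when
        # a works_for relation is (un)set or a salary attribute changes
        total_salary = Int(formula='Any SUM(SA) GROUPBY X '
                                   'WHERE P works_for X, P salary SA')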
+"""cubicweb.server.hooks.syncschema unit and functional tests""" + +from logilab.common.testlib import unittest_main + +from yams.constraints import BoundaryConstraint +from cubicweb import ValidationError, Binary +from cubicweb.schema import META_RTYPES +from cubicweb.devtools import startpgcluster, stoppgcluster, PostgresApptestConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.devtools.repotest import schema_eids_idx + + +def setUpModule(): + startpgcluster(__file__) + + +def tearDownModule(*args): + stoppgcluster(__file__) + del SchemaModificationHooksTC.schema_eids + + +class SchemaModificationHooksTC(CubicWebTC): + configcls = PostgresApptestConfiguration + + def setUp(self): + super(SchemaModificationHooksTC, self).setUp() + self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) + self.__class__.schema_eids = schema_eids_idx(self.repo.schema) + + def index_exists(self, cnx, etype, attr, unique=False): + dbhelper = self.repo.system_source.dbhelper + sqlcursor = cnx.cnxset.cu + return dbhelper.index_exists(sqlcursor, + SQL_PREFIX + etype, + SQL_PREFIX + attr, + unique=unique) + + def _set_perms(self, cnx, eid): + cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', + {'x': eid}) + cnx.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, ' + 'G name "managers"', {'x': eid}) + cnx.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, ' + 'G name "owners"', {'x': eid}) + + def _set_attr_perms(self, cnx, eid): + cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', + {'x': eid}) + cnx.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', + {'x': eid}) + + def test_base(self): + with self.admin_access.repo_cnx() as cnx: + schema = self.repo.schema + self.assertFalse(schema.has_entity('Societe2')) + self.assertFalse(schema.has_entity('concerne2')) + # schema should be update on insertion (after commit) + eeid = cnx.execute('INSERT CWEType X: X name "Societe2", ' + 'X description "", X final FALSE')[0][0] + self._set_perms(cnx, eeid) + cnx.execute('INSERT CWRType X: X name "concerne2", X description "", ' + 'X final FALSE, X symmetric FALSE') + self.assertFalse(schema.has_entity('Societe2')) + self.assertFalse(schema.has_entity('concerne2')) + # have to commit before adding definition relations + cnx.commit() + self.assertTrue(schema.has_entity('Societe2')) + self.assertTrue(schema.has_relation('concerne2')) + attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", ' + 'X defaultval %(default)s, X indexed TRUE, ' + 'X relation_type RT, X from_entity E, X to_entity F ' + 'WHERE RT name "name", E name "Societe2", ' + 'F name "String"', + {'default': Binary.zpickle('noname')})[0][0] + self._set_attr_perms(cnx, attreid) + concerne2_rdef_eid = cnx.execute( + 'INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + 'X from_entity E, X to_entity E ' + 'WHERE RT name "concerne2", E name "Societe2"')[0][0] + self._set_perms(cnx, concerne2_rdef_eid) + self.assertNotIn('name', schema['Societe2'].subject_relations()) + self.assertNotIn('concerne2', schema['Societe2'].subject_relations()) + self.assertFalse(self.index_exists(cnx, 'Societe2', 'name')) + cnx.commit() + self.assertIn('name', schema['Societe2'].subject_relations()) + self.assertIn('concerne2', schema['Societe2'].subject_relations()) + self.assertTrue(self.index_exists(cnx, 'Societe2', 'name')) + # now we should be able to insert and query 
Societe2 + s2eid = cnx.execute('INSERT Societe2 X: X name "logilab"')[0][0] + cnx.execute('Societe2 X WHERE X name "logilab"') + cnx.execute('SET X concerne2 X WHERE X name "logilab"') + rset = cnx.execute('Any X WHERE X concerne2 Y') + self.assertEqual(rset.rows, [[s2eid]]) + # check that when a relation definition is deleted, existing relations are deleted + rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + ' X from_entity E, X to_entity E ' + 'WHERE RT name "concerne2", E name "CWUser"')[0][0] + self._set_perms(cnx, rdefeid) + cnx.commit() + cnx.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}) + cnx.commit() + self.assertIn('concerne2', schema['CWUser'].subject_relations()) + self.assertNotIn('concerne2', schema['Societe2'].subject_relations()) + self.assertFalse(cnx.execute('Any X WHERE X concerne2 Y')) + # schema should be cleaned on delete (after commit) + cnx.execute('DELETE CWEType X WHERE X name "Societe2"') + cnx.execute('DELETE CWRType X WHERE X name "concerne2"') + self.assertTrue(self.index_exists(cnx, 'Societe2', 'name')) + self.assertTrue(schema.has_entity('Societe2')) + self.assertTrue(schema.has_relation('concerne2')) + cnx.commit() + self.assertFalse(self.index_exists(cnx, 'Societe2', 'name')) + self.assertFalse(schema.has_entity('Societe2')) + self.assertFalse(schema.has_entity('concerne2')) + self.assertNotIn('concerne2', schema['CWUser'].subject_relations()) + + def test_metartype_with_nordefs(self): + with self.admin_access.repo_cnx() as cnx: + META_RTYPES.add('custom_meta') + cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", ' + 'X final FALSE, X symmetric FALSE') + cnx.commit() + eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", ' + 'X description "", X final FALSE')[0][0] + self._set_perms(cnx, eeid) + cnx.commit() + META_RTYPES.remove('custom_meta') + + def test_metartype_with_somerdefs(self): + with self.admin_access.repo_cnx() as cnx: + META_RTYPES.add('custom_meta') + cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", ' + 'X final FALSE, X symmetric FALSE') + cnx.commit() + rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + ' X from_entity E, X to_entity E ' + 'WHERE RT name "custom_meta", E name "CWUser"')[0][0] + self._set_perms(cnx, rdefeid) + cnx.commit() + eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", ' + 'X description "", X final FALSE')[0][0] + self._set_perms(cnx, eeid) + cnx.commit() + META_RTYPES.remove('custom_meta') + + def test_is_instance_of_insertions(self): + with self.admin_access.repo_cnx() as cnx: + seid = cnx.execute('INSERT Transition T: T name "subdiv"')[0][0] + is_etypes = [etype for etype, in cnx.execute('Any ETN WHERE X eid %s, ' + 'X is ET, ET name ETN' % seid)] + self.assertEqual(is_etypes, ['Transition']) + instanceof_etypes = [etype + for etype, in cnx.execute('Any ETN WHERE X eid %s, ' + 'X is_instance_of ET, ET name ETN' + % seid)] + self.assertEqual(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) + snames = [name for name, in cnx.execute('Any N WHERE S is BaseTransition, S name N')] + self.assertNotIn('subdiv', snames) + snames = [name for name, in cnx.execute('Any N WHERE S is_instance_of BaseTransition, ' + 'S name N')] + self.assertIn('subdiv', snames) + + + def test_perms_synchronization_1(self): + with self.admin_access.repo_cnx() as cnx: + schema = self.repo.schema + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) + 
self.assertTrue(cnx.execute('Any X, Y WHERE X is CWEType, X name "CWUser", ' + 'Y is CWGroup, Y name "users"')[0]) + cnx.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) + cnx.commit() + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',))) + cnx.execute('SET X read_permission Y WHERE X is CWEType, ' + 'X name "CWUser", Y name "users"') + cnx.commit() + self.assertEqual(schema['CWUser'].get_groups('read'), + set(('managers', 'users',))) + + def test_perms_synchronization_2(self): + with self.admin_access.repo_cnx() as cnx: + schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')] + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users', 'guests'))) + cnx.execute('DELETE X read_permission Y WHERE X relation_type RT, ' + 'RT name "in_group", Y name "guests"') + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users', 'guests'))) + cnx.commit() + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users'))) + cnx.execute('SET X read_permission Y WHERE X relation_type RT, ' + 'RT name "in_group", Y name "guests"') + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users'))) + cnx.commit() + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users', 'guests'))) + + def test_nonregr_user_edit_itself(self): + with self.admin_access.repo_cnx() as cnx: + ueid = cnx.user.eid + groupeids = [eid for eid, in cnx.execute('CWGroup G WHERE G name ' + 'in ("managers", "users")')] + cnx.execute('DELETE X in_group Y WHERE X eid %s' % ueid) + cnx.execute('SET X surname "toto" WHERE X eid %s' % ueid) + cnx.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid) + cnx.commit() + eeid = cnx.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0] + cnx.execute('DELETE X read_permission Y WHERE X eid %s' % eeid) + cnx.execute('SET X final FALSE WHERE X eid %s' % eeid) + cnx.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)' + % (eeid, groupeids[0], groupeids[1])) + cnx.commit() + cnx.execute('Any X WHERE X is CWEType, X name "CWEType"') + + # schema modification hooks tests ######################################### + + def test_uninline_relation(self): + with self.admin_access.repo_cnx() as cnx: + try: + self.assertTrue(self.schema['state_of'].inlined) + cnx.execute('SET X inlined FALSE WHERE X name "state_of"') + self.assertTrue(self.schema['state_of'].inlined) + cnx.commit() + self.assertFalse(self.schema['state_of'].inlined) + self.assertFalse(self.index_exists(cnx, 'State', 'state_of')) + rset = cnx.execute('Any X, Y WHERE X state_of Y') + self.assertEqual(len(rset), 2) # user states + finally: + cnx.execute('SET X inlined TRUE WHERE X name "state_of"') + self.assertFalse(self.schema['state_of'].inlined) + cnx.commit() + self.assertTrue(self.schema['state_of'].inlined) + self.assertTrue(self.index_exists(cnx, 'State', 'state_of')) + rset = cnx.execute('Any X, Y WHERE X state_of Y') + self.assertEqual(len(rset), 2) + + def test_indexed_change(self): + with self.admin_access.repo_cnx() as cnx: + try: + cnx.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"') + self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertTrue(self.index_exists(cnx, 'Workflow', 'name')) + cnx.commit() + self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertFalse(self.index_exists(cnx, 'Workflow', 
'name')) + finally: + cnx.execute('SET X indexed TRUE WHERE X relation_type R, R name "name"') + self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name')) + cnx.commit() + self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertTrue(self.index_exists(cnx, 'Workflow', 'name')) + + def test_unique_change(self): + with self.admin_access.repo_cnx() as cnx: + try: + eid = cnx.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X ' + 'WHERE CT name "UniqueConstraint", DEF relation_type RT, ' + 'DEF from_entity E, RT name "name", ' + 'E name "Workflow"').rows[0][0] + self.assertFalse(self.schema['Workflow'].has_unique_values('name')) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True)) + cnx.commit() + self.assertTrue(self.schema['Workflow'].has_unique_values('name')) + self.assertTrue(self.index_exists(cnx, 'Workflow', 'name', unique=True)) + finally: + cnx.execute('DELETE CWConstraint C WHERE C eid %(eid)s', {'eid': eid}) + cnx.commit() + self.assertFalse(self.schema['Workflow'].has_unique_values('name')) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True)) + + def test_required_change_1(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET DEF cardinality "?1" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "title", E name "Bookmark"') + cnx.commit() + # should now be able to add bookmark without title + cnx.execute('INSERT Bookmark X: X path "/view"') + cnx.commit() + + def test_required_change_2(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET DEF cardinality "11" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "surname", E name "CWUser"') + cnx.execute('SET U surname "Doe" WHERE U surname NULL') + cnx.commit() + # should not be able anymore to add cwuser without surname + self.assertRaises(ValidationError, self.create_user, cnx, "toto") + cnx.rollback() + cnx.execute('SET DEF cardinality "?1" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "surname", E name "CWUser"') + cnx.commit() + + def test_add_attribute_to_base_class(self): + with self.admin_access.repo_cnx() as cnx: + attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, ' + 'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' + 'WHERE RT name "messageid", E name "BaseTransition", F name "String"', + {'default': Binary.zpickle('noname')})[0][0] + assert cnx.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', + {'x': attreid}) + cnx.commit() + self.schema.rebuild_infered_relations() + self.assertIn('Transition', self.schema['messageid'].subjects()) + self.assertIn('WorkflowTransition', self.schema['messageid'].subjects()) + cnx.execute('Any X WHERE X is_instance_of BaseTransition, X messageid "hop"') + + def test_change_fulltextindexed(self): + with self.admin_access.repo_cnx() as cnx: + target = cnx.create_entity(u'Email', messageid=u'1234', + subject=u'rick.roll@dance.com') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(target.eid, [item[0] for item in rset]) + assert cnx.execute('SET A fulltextindexed FALSE ' + 'WHERE E is CWEType, E name "Email", A is CWAttribute,' + 'A from_entity E, A relation_type R, R name "subject"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertFalse(rset) + assert cnx.execute('SET A fulltextindexed TRUE ' 
+ 'WHERE A from_entity E, A relation_type R, ' + 'E name "Email", R name "subject"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(target.eid, [item[0] for item in rset]) + + def test_change_fulltext_container(self): + with self.admin_access.repo_cnx() as cnx: + target = cnx.create_entity(u'EmailAddress', address=u'rick.roll@dance.com') + target.cw_set(reverse_use_email=cnx.user) + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(cnx.user.eid, [item[0] for item in rset]) + assert cnx.execute('SET R fulltext_container NULL ' + 'WHERE R name "use_email"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(target.eid, [item[0] for item in rset]) + assert cnx.execute('SET R fulltext_container "subject" ' + 'WHERE R name "use_email"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(cnx.user.eid, [item[0] for item in rset]) + + def test_update_constraint(self): + with self.admin_access.repo_cnx() as cnx: + rdef = self.schema['Transition'].rdef('type') + cstr = rdef.constraint_by_type('StaticVocabularyConstraint') + if not getattr(cstr, 'eid', None): + # bug in schema reloading, constraint's eid not restored + self.skipTest('start me alone') + cnx.execute('SET X value %(v)s WHERE X eid %(x)s', + {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) + cnx.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, ' + 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', + {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) + cnx.commit() + cstr = rdef.constraint_by_type('StaticVocabularyConstraint') + self.assertEqual(cstr.values, (u'normal', u'auto', u'new')) + cnx.execute('INSERT Transition T: T name "hop", T type "new"') + + def test_add_constraint(self): + with self.admin_access.repo_cnx() as cnx: + rdef = self.schema['EmailPart'].rdef('ordernum') + cstr = BoundaryConstraint('>=', 0) + cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, ' + 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', + {'ct': cstr.__class__.__name__, 'v': cstr.serialize(), 'x': rdef.eid}) + cnx.commit() + cstr2 = rdef.constraint_by_type('BoundaryConstraint') + self.assertEqual(cstr, cstr2) + cstr3 = BoundaryConstraint('<=', 1000) + cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, ' + 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', + {'ct': cstr3.__class__.__name__, 'v': cstr3.serialize(), 'x': rdef.eid}) + cnx.commit() + self.assertCountEqual(rdef.constraints, [cstr, cstr3]) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/test/unittest_syncsession.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/test/unittest_syncsession.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""functional tests for core hooks + +Note: + syncschema.py hooks are mostly tested in server/test/unittest_migrations.py +""" + +from six import text_type + +from cubicweb import ValidationError +from cubicweb.devtools.testlib import CubicWebTC + +class CWPropertyHooksTC(CubicWebTC): + + def test_unexistant_cwproperty(self): + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "bla.bla", ' + 'X value "hop", X for_user U') + cm.exception.translate(text_type) + self.assertEqual(cm.exception.errors, + {'pkey-subject': 'unknown property key bla.bla'}) + + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"') + cm.exception.translate(text_type) + self.assertEqual(cm.exception.errors, + {'pkey-subject': 'unknown property key bla.bla'}) + + def test_site_wide_cwproperty(self): + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "ui.site-title", ' + 'X value "hop", X for_user U') + self.assertEqual(cm.exception.errors, + {'for_user-subject': "site-wide property can't be set for user"}) + + def test_system_cwproperty(self): + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "system.version.cubicweb", ' + 'X value "hop", X for_user U') + self.assertEqual(cm.exception.errors, + {'for_user-subject': "site-wide property can't be set for user"}) + + def test_bad_type_cwproperty(self): + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "ui.language", ' + 'X value "hop", X for_user U') + self.assertEqual(cm.exception.errors, + {'value-subject': u'unauthorized value'}) + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"') + self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'}) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/workflow.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/workflow.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,357 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
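The workflow hooks added below follow the usual pairing of hooks and operations: the hook fires on a repository event and only records what has to be done, while the operation's precommit_event performs the actual check or state change once, at commit time. A minimal sketch of that pairing (entity type, regid and the deferred work are illustrative):

    from cubicweb.server import hook
    from cubicweb.predicates import is_instance

    class _SetDefaultTitleOp(hook.Operation):
        """illustrative operation: runs at precommit, once per scheduling"""
        entity = None  # make pylint happy

        def precommit_event(self):
            cnx = self.cnx
            if cnx.deleted_in_transaction(self.entity.eid):
                return  # entity is going away, nothing to do
            if not self.entity.title:
                self.entity.cw_set(title=u'untitled')

    class BookmarkAddedHook(hook.Hook):
        __regid__ = 'example.bookmark_added'
        __select__ = hook.Hook.__select__ & is_instance('Bookmark')
        events = ('after_add_entity',)

        def __call__(self):
            # defer the real work to commit time, as the workflow hooks below do
            _SetDefaultTitleOp(self._cw, entity=self.entity)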
+"""Core hooks: workflow related hooks""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from datetime import datetime + + +from cubicweb import RepositoryError, validation_error +from cubicweb.predicates import is_instance, adaptable +from cubicweb.server import hook + + +def _change_state(cnx, x, oldstate, newstate): + nocheck = cnx.transaction_data.setdefault('skip-security', set()) + nocheck.add((x, 'in_state', oldstate)) + nocheck.add((x, 'in_state', newstate)) + # delete previous state first + cnx.delete_relation(x, 'in_state', oldstate) + cnx.add_relation(x, 'in_state', newstate) + + +# operations ################################################################### + +class _SetInitialStateOp(hook.Operation): + """make initial state be a default state""" + entity = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + entity = self.entity + iworkflowable = entity.cw_adapt_to('IWorkflowable') + # if there is an initial state and the entity's state is not set, + # use the initial state as a default state + if not (cnx.deleted_in_transaction(entity.eid) or entity.in_state) \ + and iworkflowable.current_workflow: + state = iworkflowable.current_workflow.initial + if state: + cnx.add_relation(entity.eid, 'in_state', state.eid) + _FireAutotransitionOp(cnx, entity=entity) + +class _FireAutotransitionOp(hook.Operation): + """try to fire auto transition after state changes""" + entity = None # make pylint happy + + def precommit_event(self): + entity = self.entity + iworkflowable = entity.cw_adapt_to('IWorkflowable') + autotrs = list(iworkflowable.possible_transitions('auto')) + if autotrs: + assert len(autotrs) == 1 + iworkflowable.fire_transition(autotrs[0]) + + +class _WorkflowChangedOp(hook.Operation): + """fix entity current state when changing its workflow""" + eid = wfeid = None # make pylint happy + + def precommit_event(self): + # notice that enforcement that new workflow apply to the entity's type is + # done by schema rule, no need to check it here + cnx = self.cnx + pendingeids = cnx.transaction_data.get('pendingeids', ()) + if self.eid in pendingeids: + return + entity = cnx.entity_from_eid(self.eid) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + # check custom workflow has not been rechanged to another one in the same + # transaction + mainwf = iworkflowable.main_workflow + if mainwf.eid == self.wfeid: + deststate = mainwf.initial + if not deststate: + msg = _('workflow has no initial state') + raise validation_error(entity, {('custom_workflow', 'subject'): msg}) + if mainwf.state_by_eid(iworkflowable.current_state.eid): + # nothing to do + return + # if there are no history, simply go to new workflow's initial state + if not iworkflowable.workflow_history: + if iworkflowable.current_state.eid != deststate.eid: + _change_state(cnx, entity.eid, + iworkflowable.current_state.eid, deststate.eid) + _FireAutotransitionOp(cnx, entity=entity) + return + msg = cnx._('workflow changed to "%s"') + msg %= cnx._(mainwf.name) + cnx.transaction_data[(entity.eid, 'customwf')] = self.wfeid + iworkflowable.change_state(deststate, msg, u'text/plain') + + +class _CheckTrExitPoint(hook.Operation): + treid = None # make pylint happy + + def precommit_event(self): + tr = self.cnx.entity_from_eid(self.treid) + outputs = set() + for ep in tr.subworkflow_exit: + if ep.subwf_state.eid in outputs: + msg = _("can't have multiple exits on the same state") + raise validation_error(self.treid, {('subworkflow_exit', 'subject'): msg}) + 
outputs.add(ep.subwf_state.eid) + + +class _SubWorkflowExitOp(hook.Operation): + forentity = trinfo = None # make pylint happy + + def precommit_event(self): + cnx = self.cnx + forentity = self.forentity + iworkflowable = forentity.cw_adapt_to('IWorkflowable') + trinfo = self.trinfo + # we're in a subworkflow, check if we've reached an exit point + wftr = iworkflowable.subworkflow_input_transition() + if wftr is None: + # inconsistency detected + msg = _("state doesn't belong to entity's current workflow") + raise validation_error(self.trinfo, {('to_state', 'subject'): msg}) + tostate = wftr.get_exit_point(forentity, trinfo.cw_attr_cache['to_state']) + if tostate is not None: + # reached an exit point + msg = _('exiting from subworkflow %s') + msg %= cnx._(iworkflowable.current_workflow.name) + cnx.transaction_data[(forentity.eid, 'subwfentrytr')] = True + iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr) + + +# hooks ######################################################################## + +class WorkflowHook(hook.Hook): + __abstract__ = True + category = 'metadata' + + +class SetInitialStateHook(WorkflowHook): + __regid__ = 'wfsetinitial' + __select__ = WorkflowHook.__select__ & adaptable('IWorkflowable') + events = ('after_add_entity',) + + def __call__(self): + _SetInitialStateOp(self._cw, entity=self.entity) + + +class FireTransitionHook(WorkflowHook): + """check the transition is allowed and add missing information into the + TrInfo entity. + + Expect that: + * wf_info_for inlined relation is set + * by_transition or to_state (managers only) inlined relation is set + + Check for automatic transition to be fired at the end + """ + __regid__ = 'wffiretransition' + __select__ = WorkflowHook.__select__ & is_instance('TrInfo') + events = ('before_add_entity',) + + def __call__(self): + cnx = self._cw + entity = self.entity + # first retreive entity to which the state change apply + try: + foreid = entity.cw_attr_cache['wf_info_for'] + except KeyError: + msg = _('mandatory relation') + raise validation_error(entity, {('wf_info_for', 'subject'): msg}) + forentity = cnx.entity_from_eid(foreid) + # see comment in the TrInfo entity definition + entity.cw_edited['tr_count']=len(forentity.reverse_wf_info_for) + iworkflowable = forentity.cw_adapt_to('IWorkflowable') + # then check it has a workflow set, unless we're in the process of changing + # entity's workflow + if cnx.transaction_data.get((forentity.eid, 'customwf')): + wfeid = cnx.transaction_data[(forentity.eid, 'customwf')] + wf = cnx.entity_from_eid(wfeid) + else: + wf = iworkflowable.current_workflow + if wf is None: + msg = _('related entity has no workflow set') + raise validation_error(entity, {None: msg}) + # then check it has a state set + fromstate = iworkflowable.current_state + if fromstate is None: + msg = _('related entity has no state') + raise validation_error(entity, {None: msg}) + # True if we are coming back from subworkflow + swtr = cnx.transaction_data.pop((forentity.eid, 'subwfentrytr'), None) + cowpowers = (cnx.user.is_in_group('managers') + or not cnx.write_security) + # no investigate the requested state change... 
+ try: + treid = entity.cw_attr_cache['by_transition'] + except KeyError: + # no transition set, check user is a manager and destination state + # is specified (and valid) + if not cowpowers: + msg = _('mandatory relation') + raise validation_error(entity, {('by_transition', 'subject'): msg}) + deststateeid = entity.cw_attr_cache.get('to_state') + if not deststateeid: + msg = _('mandatory relation') + raise validation_error(entity, {('by_transition', 'subject'): msg}) + deststate = wf.state_by_eid(deststateeid) + if deststate is None: + msg = _("state doesn't belong to entity's workflow") + raise validation_error(entity, {('to_state', 'subject'): msg}) + else: + # check transition is valid and allowed, unless we're coming back + # from subworkflow + tr = cnx.entity_from_eid(treid) + if swtr is None: + qname = ('by_transition', 'subject') + if tr is None: + msg = _("transition doesn't belong to entity's workflow") + raise validation_error(entity, {qname: msg}) + if not tr.has_input_state(fromstate): + msg = _("transition %(tr)s isn't allowed from %(st)s") + raise validation_error(entity, {qname: msg}, { + 'tr': tr.name, 'st': fromstate.name}, ['tr', 'st']) + if not tr.may_be_fired(foreid): + msg = _("transition may not be fired") + raise validation_error(entity, {qname: msg}) + deststateeid = entity.cw_attr_cache.get('to_state') + if deststateeid is not None: + if not cowpowers and deststateeid != tr.destination(forentity).eid: + msg = _("transition isn't allowed") + raise validation_error(entity, {('by_transition', 'subject'): msg}) + if swtr is None: + deststate = cnx.entity_from_eid(deststateeid) + if not cowpowers and deststate is None: + msg = _("state doesn't belong to entity's workflow") + raise validation_error(entity, {('to_state', 'subject'): msg}) + else: + deststateeid = tr.destination(forentity).eid + # everything is ok, add missing information on the trinfo entity + entity.cw_edited['from_state'] = fromstate.eid + entity.cw_edited['to_state'] = deststateeid + nocheck = cnx.transaction_data.setdefault('skip-security', set()) + nocheck.add((entity.eid, 'from_state', fromstate.eid)) + nocheck.add((entity.eid, 'to_state', deststateeid)) + _FireAutotransitionOp(cnx, entity=forentity) + + +class FiredTransitionHook(WorkflowHook): + """change related entity state and handle exit of subworkflow""" + __regid__ = 'wffiretransition' + __select__ = WorkflowHook.__select__ & is_instance('TrInfo') + events = ('after_add_entity',) + + def __call__(self): + trinfo = self.entity + rcache = trinfo.cw_attr_cache + _change_state(self._cw, rcache['wf_info_for'], rcache['from_state'], + rcache['to_state']) + forentity = self._cw.entity_from_eid(rcache['wf_info_for']) + iworkflowable = forentity.cw_adapt_to('IWorkflowable') + assert iworkflowable.current_state.eid == rcache['to_state'] + if iworkflowable.main_workflow.eid != iworkflowable.current_workflow.eid: + _SubWorkflowExitOp(self._cw, forentity=forentity, trinfo=trinfo) + + +class CheckInStateChangeAllowed(WorkflowHook): + """check state apply, in case of direct in_state change using unsafe execute + """ + __regid__ = 'wfcheckinstate' + __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') + events = ('before_add_relation',) + category = 'integrity' + + def __call__(self): + cnx = self._cw + nocheck = cnx.transaction_data.get('skip-security', ()) + if (self.eidfrom, 'in_state', self.eidto) in nocheck: + # state changed through TrInfo insertion, so we already know it's ok + return + entity = cnx.entity_from_eid(self.eidfrom) + 
iworkflowable = entity.cw_adapt_to('IWorkflowable') + mainwf = iworkflowable.main_workflow + if mainwf is None: + msg = _('entity has no workflow set') + raise validation_error(entity, {None: msg}) + for wf in mainwf.iter_workflows(): + if wf.state_by_eid(self.eidto): + break + else: + msg = _("state doesn't belong to entity's workflow. You may " + "want to set a custom workflow for this entity first.") + raise validation_error(self.eidfrom, {('in_state', 'subject'): msg}) + if iworkflowable.current_workflow and wf.eid != iworkflowable.current_workflow.eid: + msg = _("state doesn't belong to entity's current workflow") + raise validation_error(self.eidfrom, {('in_state', 'subject'): msg}) + + +class SetModificationDateOnStateChange(WorkflowHook): + """update entity's modification date after changing its state""" + __regid__ = 'wfsyncmdate' + __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') + events = ('after_add_relation',) + + def __call__(self): + if self._cw.added_in_transaction(self.eidfrom): + # new entity, not needed + return + entity = self._cw.entity_from_eid(self.eidfrom) + try: + entity.cw_set(modification_date=datetime.utcnow()) + except RepositoryError as ex: + # usually occurs if entity is coming from a read-only source + # (eg ldap user) + self.warning('cant change modification date for %s: %s', entity, ex) + + +class CheckWorkflowTransitionExitPoint(WorkflowHook): + """check that there is no multiple exits from the same state""" + __regid__ = 'wfcheckwftrexit' + __select__ = WorkflowHook.__select__ & hook.match_rtype('subworkflow_exit') + events = ('after_add_relation',) + + def __call__(self): + _CheckTrExitPoint(self._cw, treid=self.eidfrom) + + +class SetCustomWorkflow(WorkflowHook): + __regid__ = 'wfsetcustom' + __select__ = WorkflowHook.__select__ & hook.match_rtype('custom_workflow') + events = ('after_add_relation',) + + def __call__(self): + _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=self.eidto) + + +class DelCustomWorkflow(SetCustomWorkflow): + __regid__ = 'wfdelcustom' + events = ('after_delete_relation',) + + def __call__(self): + entity = self._cw.entity_from_eid(self.eidfrom) + typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow() + if typewf is not None: + _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/hooks/zmq.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/hooks/zmq.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
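Unlike the entity and relation hooks earlier in this changeset, the zmq hooks below are server-event hooks: they declare `server_startup` / `server_shutdown` events, need no `__select__`, and work on the repository as `self.repo` rather than on a connection. A hedged sketch of such a hook (regid and log message are illustrative):

    from cubicweb.server import hook

    class LogStartupHook(hook.Hook):
        """illustrative hook run once when the repository starts"""
        __regid__ = 'example.log_startup'
        events = ('server_startup',)

        def __call__(self):
            # server_* hooks have access to the repository and its config
            self.debug('repository started, instance config: %s', self.repo.config)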
+ +from cubicweb.server import hook + +class ZMQStopHook(hook.Hook): + __regid__ = 'zmqstop' + events = ('server_shutdown',) + + def __call__(self): + self.repo.app_instances_bus.stop() + +class ZMQStartHook(hook.Hook): + __regid__ = 'zmqstart' + events = ('server_startup',) + order = -1 + + def __call__(self): + config = self.repo.config + address_pub = config.get('zmq-address-pub') + address_sub = config.get('zmq-address-sub') + if not address_pub and not address_sub: + return + from cubicweb.server import cwzmq + self.repo.app_instances_bus = cwzmq.ZMQComm() + if address_pub: + self.repo.app_instances_bus.add_publisher(address_pub) + def clear_cache_callback(msg): + self.debug('clear_caches: %s', ' '.join(msg)) + self.repo.clear_caches(msg[1:]) + self.repo.app_instances_bus.add_subscription('delete', clear_cache_callback) + for address in address_sub: + self.repo.app_instances_bus.add_subscriber(address) + self.repo.app_instances_bus.start() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/i18n.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/i18n.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,117 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
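The i18n helper module below writes .pot entries and then merges and compiles catalogs with the standard gettext tools (msgcat, msgfmt). A small usage sketch of `add_msg`, assuming Python 3 so that io.StringIO accepts the text it writes; the message ids are illustrative and the expected output is reconstructed from the code:

    from io import StringIO
    from cubicweb.i18n import add_msg

    buf = StringIO()
    add_msg(buf.write, u'bookmarks')                      # plain message
    add_msg(buf.write, u'description', msgctx='CWEType')  # disambiguated message
    print(buf.getvalue())
    # msgid "bookmarks"
    # msgstr ""
    #
    # msgctxt "CWEType"
    # msgid "description"
    # msgstr ""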
+"""Some i18n/gettext utilities.""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import re +import os +from os.path import join, basename, splitext, exists +from glob import glob + +from six import PY2 + +from cubicweb.toolsutils import create_dir + +def extract_from_tal(files, output_file): + """extract i18n strings from tal and write them into the given output file + using standard python gettext marker (_) + """ + output = open(output_file, 'w') + for filepath in files: + for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()): + output.write('_("%s")' % match.group(2)) + output.close() + + +def add_msg(w, msgid, msgctx=None): + """write an empty pot msgid definition""" + if PY2 and isinstance(msgid, unicode): + msgid = msgid.encode('utf-8') + if msgctx: + if PY2 and isinstance(msgctx, unicode): + msgctx = msgctx.encode('utf-8') + w('msgctxt "%s"\n' % msgctx) + msgid = msgid.replace('"', r'\"').splitlines() + if len(msgid) > 1: + w('msgid ""\n') + for line in msgid: + w('"%s"' % line.replace('"', r'\"')) + else: + w('msgid "%s"\n' % msgid[0]) + w('msgstr ""\n\n') + +def execute2(args): + # XXX replace this with check_output in Python 2.7 + from subprocess import Popen, PIPE, CalledProcessError + p = Popen(args, stdout=PIPE, stderr=PIPE) + out, err = p.communicate() + if p.returncode != 0: + exc = CalledProcessError(p.returncode, args[0]) + exc.cmd = args + exc.data = (out, err) + raise exc + +def available_catalogs(i18ndir=None): + if i18ndir is None: + wildcard = '*.po' + else: + wildcard = join(i18ndir, '*.po') + for popath in glob(wildcard): + lang = splitext(basename(popath))[0] + yield lang, popath + + +def compile_i18n_catalogs(sourcedirs, destdir, langs): + """generate .mo files for a set of languages into the `destdir` i18n directory + """ + from subprocess import CalledProcessError + from logilab.common.fileutils import ensure_fs_mode + print('-> compiling message catalogs to %s' % destdir) + errors = [] + for lang in langs: + langdir = join(destdir, lang, 'LC_MESSAGES') + if not exists(langdir): + create_dir(langdir) + pofiles = [join(path, '%s.po' % lang) for path in sourcedirs] + pofiles = [pof for pof in pofiles if exists(pof)] + mergedpo = join(destdir, '%s_merged.po' % lang) + try: + # merge instance/cubes messages catalogs with the stdlib's one + cmd = ['msgcat', '--use-first', '--sort-output', '--strict', + '-o', mergedpo] + pofiles + execute2(cmd) + # make sure the .mo file is writeable and compiles with *msgfmt* + applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo') + try: + ensure_fs_mode(applmo) + except OSError: + pass # suppose not exists + execute2(['msgfmt', mergedpo, '-o', applmo]) + except CalledProcessError as exc: + errors.append(u'while handling language %s:\ncmd:\n%s\nstdout:\n%s\nstderr:\n%s\n' % + (lang, exc.cmd, repr(exc.data[0]), repr(exc.data[1]))) + except Exception as exc: + errors.append(u'while handling language %s: %s' % (lang, exc)) + try: + # clean everything + os.unlink(mergedpo) + except Exception: + continue + return errors diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/i18n/de.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/i18n/de.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4690 @@ +# cubicweb i18n catalog +# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# Logilab +msgid "" +msgstr "" +"Project-Id-Version: 2.0\n" +"POT-Creation-Date: 2006-01-12 17:35+CET\n" +"PO-Revision-Date: 2010-09-15 14:55+0200\n" +"Last-Translator: Dr. 
Leo \n" +"Language-Team: English \n" +"Language: de\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#, python-format +msgid "" +"\n" +"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " +"entity\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" +msgstr "" +"\n" +"%(user)s hat den Zustand geändert von <%(previous_state)s> in <" +"%(current_state)s> für die Entität\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" + +#, python-format +msgid " from state %(fromstate)s to state %(tostate)s\n" +msgstr " aus dem Zustand %(fromstate)s in den Zustand %(tostate)s\n" + +msgid " :" +msgstr " :" + +#, python-format +msgid "\"action\" must be specified in options; allowed values are %s" +msgstr "" + +msgid "\"role=subject\" or \"role=object\" must be specified in options" +msgstr "" + +#, python-format +msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" +msgstr "" + +#, python-format +msgid "%(KEY-rtype)s is part of violated unicity constraint" +msgstr "" + +#, python-format +msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" +msgstr "" + +#, python-format +msgid "%(attr)s set to %(newvalue)s" +msgstr "%(attr)s geändert in %(newvalue)s" + +#, python-format +msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" +msgstr "%(attr)s geändert von %(oldvalue)s in %(newvalue)s" + +#, python-format +msgid "%(etype)s by %(author)s" +msgstr "" + +#, python-format +msgid "%(firstname)s %(surname)s" +msgstr "%(firstname)s %(surname)s" + +#, python-format +msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" +msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" + +#, python-format +msgid "%d days" +msgstr "%d Tage" + +#, python-format +msgid "%d hours" +msgstr "%d Stunden" + +#, python-format +msgid "%d minutes" +msgstr "%d Minuten" + +#, python-format +msgid "%d months" +msgstr "%d Monate" + +#, python-format +msgid "%d seconds" +msgstr "%d Sekunden" + +#, python-format +msgid "%d weeks" +msgstr "%d Wochen" + +#, python-format +msgid "%d years" +msgstr "%d Jahre" + +#, python-format +msgid "%s could be supported" +msgstr "" + +#, python-format +msgid "%s error report" +msgstr "%s Fehlerbericht" + +#, python-format +msgid "%s software version of the database" +msgstr "Software-Version der Datenbank %s" + +#, python-format +msgid "%s updated" +msgstr "%s aktualisiert" + +#, python-format +msgid "'%s' action doesn't take any options" +msgstr "" + +#, python-format +msgid "" +"'%s' action for in_state relation should at least have 'linkattr=name' option" +msgstr "" + +#, python-format +msgid "'%s' action requires 'linkattr' option" +msgstr "" + +msgid "(UNEXISTANT EID)" +msgstr "(EID nicht gefunden)" + +#, python-format +msgid "(suppressed) entity #%d" +msgstr "" + +msgid "**" +msgstr "0..n 0..n" + +msgid "*+" +msgstr "0..n 1..n" + +msgid "*1" +msgstr "0..n 1" + +msgid "*?" +msgstr "0..n 0..1" + +msgid "+*" +msgstr "1..n 0..n" + +msgid "++" +msgstr "1..n 1..n" + +msgid "+1" +msgstr "1..n 1" + +msgid "+?" +msgstr "1..n 0..1" + +msgid "1*" +msgstr "1 0..n" + +msgid "1+" +msgstr "1 1..n" + +msgid "11" +msgstr "1 1" + +msgid "1?" +msgstr "1 0..1" + +#, python-format +msgid "<%s not specified>" +msgstr "<%s nicht spezifiziert>" + +#, python-format +msgid "" +"
This schema of the data model excludes the meta-data, but you " +"can also display a complete schema with meta-data.
" +msgstr "" +"
Dieses Schema des Datenmodells enthält<>keine Meta-Daten, aber Sie " +"können ein vollständiges Schema mit Meta-Daten anzeigen." + +msgid "" +msgstr "" + +msgid "" +msgstr "" + +msgid "?*" +msgstr "0..1 0..n" + +msgid "?+" +msgstr "0..1 1..n" + +msgid "?1" +msgstr "0..1 1" + +msgid "??" +msgstr "0..1 0..1" + +msgid "AND" +msgstr "UND" + +msgid "About this site" +msgstr "Über diese Seite" + +#, python-format +msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "" + +msgid "Attributes permissions:" +msgstr "Rechte der Attribute" + +# schema pot file, generated on 2009-09-16 16:46:55 +# +# singular and plural forms for each entity type +msgid "BaseTransition" +msgstr "Übergang (abstrakt)" + +msgid "BaseTransition_plural" +msgstr "Übergänge (abstrakt)" + +msgid "BigInt" +msgstr "" + +msgid "BigInt_plural" +msgstr "" + +msgid "Bookmark" +msgstr "Lesezeichen" + +msgid "Bookmark_plural" +msgstr "Lesezeichen" + +msgid "Boolean" +msgstr "Boolean" + +msgid "Boolean_plural" +msgstr "Booleans" + +msgid "BoundConstraint" +msgstr "gebundene Einschränkung" + +msgid "BoundaryConstraint" +msgstr "Rand-einschränkung" + +msgid "Browse by entity type" +msgstr "nach Identitätstyp navigieren" + +#, python-format +msgid "By %(user)s on %(dt)s [%(undo_link)s]" +msgstr "" + +msgid "Bytes" +msgstr "Bytes" + +msgid "Bytes_plural" +msgstr "Bytes" + +msgid "CWAttribute" +msgstr "Attribut" + +msgid "CWAttribute_plural" +msgstr "Attribute" + +msgid "CWCache" +msgstr "Cache" + +msgid "CWCache_plural" +msgstr "Caches" + +msgid "CWComputedRType" +msgstr "" + +msgid "CWComputedRType_plural" +msgstr "" + +msgid "CWConstraint" +msgstr "Einschränkung" + +msgid "CWConstraintType" +msgstr "Einschränkungstyp" + +msgid "CWConstraintType_plural" +msgstr "Einschränkungstypen" + +msgid "CWConstraint_plural" +msgstr "Einschränkungen" + +msgid "CWDataImport" +msgstr "" + +msgid "CWDataImport_plural" +msgstr "" + +msgid "CWEType" +msgstr "Entitätstyp" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "CWEType" +msgstr "Entitätstyp" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "CWEType" +msgstr "Entitätstyp" + +msgid "CWEType_plural" +msgstr "Entitätstypen" + +msgid "CWGroup" +msgstr "Gruppe" + +msgid "CWGroup_plural" +msgstr "Gruppen" + +msgid "CWProperty" +msgstr "Eigenschaft" + +msgid "CWProperty_plural" +msgstr "Eigenschaften" + +msgid "CWRType" +msgstr "Relationstyp" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "CWRType" +msgstr "Relationstyp" + +msgid "CWRType_plural" +msgstr "Relationstypen" + +msgid "CWRelation" +msgstr "Relation" + +msgid "CWRelation_plural" +msgstr "Relationen" + +msgid "CWSource" +msgstr "" + +msgid "CWSourceHostConfig" +msgstr "" + +msgid "CWSourceHostConfig_plural" +msgstr "" + +msgid "CWSourceSchemaConfig" +msgstr "" + +msgid "CWSourceSchemaConfig_plural" +msgstr "" + +msgid "CWSource_plural" +msgstr "" + +msgid "CWUniqueTogetherConstraint" +msgstr "unique-together-Einschränkung" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "unique-together-Einschränkungen" + +msgid "CWUser" +msgstr "Nutzer" + +msgid "CWUser_plural" +msgstr "Nutzer" + +#, python-format +msgid "" +"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " +"linked using this relation." +msgstr "" +"Kann die Relation %(role)s %(rtype)s zu einer Entität %(eid)s nicht wieder " +"herstellen, die durch diese Relation bereits mit einer anderen Entität " +"verbunden ist." 
+ +#, python-format +msgid "" +"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " +"does not exists anymore in the schema." +msgstr "" +"Kann die Relation %(rtype)s zwischen %(subj)s und %(obj)s nicht wieder " +"herstellen, diese Relation existiert nicht mehr in dem Schema." + +#, python-format +msgid "" +"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " +"anymore." +msgstr "" +"Kann die Relation %(rtype)s nicht wieder herstellen, die Entität %(role)s " +"%(eid)s existiert nicht mehr." + +#, python-format +msgid "" +"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " +"exist anymore" +msgstr "" +"Kann das Hinzufügen der Relation %(rtype)s von %(subj)s zu %(obj)s nicht " +"rückgängig machen, diese Relation existiert nicht mehr." + +#, python-format +msgid "" +"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " +"supported" +msgstr "" +"Kann die Erstellung der Entität %(eid)s vom Typ %(etype)s nicht rückgängig " +"machen, dieser Typ existiert nicht mehr." + +msgid "Click to sort on this column" +msgstr "" + +msgid "" +"Configuration of the system source goes to the 'sources' file, not in the " +"database" +msgstr "" + +#, python-format +msgid "Created %(etype)s : %(entity)s" +msgstr "" + +msgid "DEBUG" +msgstr "" + +msgid "Date" +msgstr "Datum" + +msgid "Date_plural" +msgstr "Daten" + +msgid "Datetime" +msgstr "Datum und Uhrzeit" + +msgid "Datetime_plural" +msgstr "Daten und Uhrzeiten" + +msgid "Decimal" +msgstr "Dezimalzahl" + +msgid "Decimal_plural" +msgstr "Dezimalzahlen" + +#, python-format +msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "" + +#, python-format +msgid "Deleted %(etype)s : %(entity)s" +msgstr "" + +msgid "Detected problems" +msgstr "" + +msgid "Do you want to delete the following element(s)?" +msgstr "Wollen Sie das/die folgende(n) Element(e) löschen?" + +msgid "Download schema as OWL" +msgstr "Herunterladen des Schemas im OWL-Format" + +msgid "ERROR" +msgstr "" + +msgid "EmailAddress" +msgstr "Email-Adresse" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "EmailAddress" +msgstr "Email-Adresse" + +msgid "EmailAddress_plural" +msgstr "Email-Adressen" + +msgid "Entities" +msgstr "Entitäten" + +#, python-format +msgid "" +"Entity %(eid)s has changed since you started to edit it. Reload the page and " +"reapply your changes."
+msgstr "" + +msgid "Entity and relation supported by this source" +msgstr "" + +msgid "ExternalUri" +msgstr "Externer Uri" + +msgid "ExternalUri_plural" +msgstr "Externe Uris" + +msgid "FATAL" +msgstr "" + +msgid "Float" +msgstr "Gleitkommazahl" + +msgid "Float_plural" +msgstr "Gleitkommazahlen" + +# schema pot file, generated on 2009-12-03 09:22:35 +# +# singular and plural forms for each entity type +msgid "FormatConstraint" +msgstr "Format-Einschränkung" + +msgid "Garbage collection information" +msgstr "Information zur Speicherbereinigung" + +msgid "Help" +msgstr "Hilfe" + +msgid "INFO" +msgstr "" + +msgid "Instance" +msgstr "Instanz" + +msgid "Int" +msgstr "Ganzzahl" + +msgid "Int_plural" +msgstr "Ganzzahlen" + +msgid "Interval" +msgstr "Zeitraum" + +msgid "IntervalBoundConstraint" +msgstr "interval-Einschränkung" + +msgid "Interval_plural" +msgstr "Intervalle" + +msgid "Link:" +msgstr "" + +msgid "Looked up classes" +msgstr "gesuchte Klassen" + +msgid "Manage" +msgstr "" + +msgid "Manage security" +msgstr "Sicherheitsverwaltung" + +msgid "Message threshold" +msgstr "" + +msgid "Most referenced classes" +msgstr "meist-referenzierte Klassen" + +msgid "New BaseTransition" +msgstr "neuer Übergang (abstrakt)" + +msgid "New Bookmark" +msgstr "Neues Lesezeichen" + +msgid "New CWAttribute" +msgstr "Neue finale Relationsdefinition" + +msgid "New CWCache" +msgstr "Neuer Anwendungs-Cache" + +msgid "New CWComputedRType" +msgstr "" + +msgid "New CWConstraint" +msgstr "Neue Einschränkung" + +msgid "New CWConstraintType" +msgstr "Neuer Einschränkungstyp" + +msgid "New CWDataImport" +msgstr "" + +msgid "New CWEType" +msgstr "Neuer Entitätstyp" + +msgid "New CWGroup" +msgstr "Neue Gruppe" + +msgid "New CWProperty" +msgstr "Neue Eigenschaft" + +msgid "New CWRType" +msgstr "Neuer Relationstyp" + +msgid "New CWRelation" +msgstr "Neue Relation" + +msgid "New CWSource" +msgstr "" + +msgid "New CWSourceHostConfig" +msgstr "" + +msgid "New CWSourceSchemaConfig" +msgstr "" + +msgid "New CWUniqueTogetherConstraint" +msgstr "Neue unique-together-Einschränkung" + +msgid "New CWUser" +msgstr "Neuer Nutzer" + +msgid "New EmailAddress" +msgstr "Neue Email-Adresse" + +msgid "New ExternalUri" +msgstr "Neuer externer URI" + +msgid "New RQLExpression" +msgstr "Neuer RQL Ausdruck" + +msgid "New State" +msgstr "Neuer Zustand" + +msgid "New SubWorkflowExitPoint" +msgstr "Neuer subworkflow-Endpunkt" + +msgid "New TrInfo" +msgstr "Neue Übergangsinformation" + +msgid "New Transition" +msgstr "Neuer Übergang" + +msgid "New Workflow" +msgstr "Neuer workflow" + +msgid "New WorkflowTransition" +msgstr "Neuer workflow-Übergang" + +msgid "No result matching query" +msgstr "Ihre Suche ergab keine Treffer." + +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "" +"nicht abschließende Liste von Ansichten, die auf Entitäten dieses Typs " +"Anwendung finden" + +msgid "OR" +msgstr "oder" + +msgid "Ownership" +msgstr "Eigentum" + +msgid "Parent class:" +msgstr "Elternklasse" + +msgid "Password" +msgstr "Passwort" + +msgid "Password_plural" +msgstr "Passwörter" + +msgid "Please note that this is only a shallow copy" +msgstr "Achtung: dies ist nur eine flache Kopie!" + +msgid "Powered by CubicWeb" +msgstr "Powered by CubicWeb" + +msgid "RQLConstraint" +msgstr "RQL-Einschränkung" + +msgid "RQLExpression" +msgstr "RQL-Ausdruck" + +msgid "RQLExpression_plural" +msgstr "RQL-Ausdrücke" + +msgid "RQLUniqueConstraint" +msgstr "RQL Einschränkung bzgl. 
Eindeutigkeit" + +msgid "RQLVocabularyConstraint" +msgstr "RQL Wortschatz-Einschränkung" + +msgid "RegexpConstraint" +msgstr "regulärer Ausdruck Einschränkung" + +msgid "Registry's content" +msgstr "Inhalt der Registry" + +msgid "Relations" +msgstr "Relationen" + +msgid "Repository" +msgstr "Ablage" + +#, python-format +msgid "Schema %s" +msgstr "Schema %s" + +msgid "Schema's permissions definitions" +msgstr "Im Schema definierte Rechte" + +msgid "Search for" +msgstr "Suchen" + +msgid "Site information" +msgstr "" + +msgid "SizeConstraint" +msgstr "Größeneinschränkung" + +msgid "" +"Source's configuration for a particular host. One key=value per line, " +"authorized keys depending on the source's type, overriding values defined on " +"the source." +msgstr "" + +msgid "Startup views" +msgstr "Startansichten" + +msgid "State" +msgstr "Zustand" + +msgid "State_plural" +msgstr "Zustände" + +msgid "StaticVocabularyConstraint" +msgstr "Wortschatz-Einschränkung" + +msgid "String" +msgstr "String" + +msgid "String_plural" +msgstr "Strings" + +msgid "Sub-classes:" +msgstr "Unterklassen" + +msgid "SubWorkflowExitPoint" +msgstr "Subworkflow Endpunkt" + +msgid "SubWorkflowExitPoint_plural" +msgstr "subworkflow Endpunkte" + +msgid "Submit bug report" +msgstr "Fehlerbericht senden" + +msgid "Submit bug report by mail" +msgstr "Diesen Bericht als E-Mail senden" + +msgid "TZDatetime" +msgstr "" + +msgid "TZDatetime_plural" +msgstr "" + +msgid "TZTime" +msgstr "" + +msgid "TZTime_plural" +msgstr "" + +#, python-format +msgid "The view %s can not be applied to this query" +msgstr "Die Ansicht %s ist auf diese Anfrage nicht anwendbar." + +#, python-format +msgid "The view %s could not be found" +msgstr "Die Ansicht %s konnte nicht gefunden werden." + +msgid "There is no default workflow" +msgstr "Dieser Entitätstyp hat standardmäßig keinen Workflow." + +msgid "This BaseTransition:" +msgstr "Diese abstracte Transition:" + +msgid "This Bookmark:" +msgstr "Dieses Lesezeichen:" + +msgid "This CWAttribute:" +msgstr "diese finale Relationsdefinition:" + +msgid "This CWCache:" +msgstr "Dieser Anwendungs-Cache:" + +msgid "This CWComputedRType:" +msgstr "" + +msgid "This CWConstraint:" +msgstr "diese Einschränkung:" + +msgid "This CWConstraintType:" +msgstr "Dieser Einschränkungstyp:" + +msgid "This CWDataImport:" +msgstr "" + +msgid "This CWEType:" +msgstr "Dieser Entitätstyp:" + +msgid "This CWGroup:" +msgstr "Diese Gruppe:" + +msgid "This CWProperty:" +msgstr "Diese Eigenschaft:" + +msgid "This CWRType:" +msgstr "Dieser Relationstyp:" + +msgid "This CWRelation:" +msgstr "Diese Relation:" + +msgid "This CWSource:" +msgstr "" + +msgid "This CWSourceHostConfig:" +msgstr "" + +msgid "This CWSourceSchemaConfig:" +msgstr "" + +msgid "This CWUniqueTogetherConstraint:" +msgstr "Diese unique-together-Einschränkung:" + +msgid "This CWUser:" +msgstr "Dieser Nutzer:" + +msgid "This EmailAddress:" +msgstr "Diese E-Mail-Adresse:" + +msgid "This ExternalUri:" +msgstr "dieser externe URI:" + +msgid "This RQLExpression:" +msgstr "Dieser RQL-Ausdruck:" + +msgid "This State:" +msgstr "Dieser Zustand:" + +msgid "This SubWorkflowExitPoint:" +msgstr "Dieser Subworkflow Endpunkt:" + +msgid "This TrInfo:" +msgstr "Diese Übergangs-Information:" + +msgid "This Transition:" +msgstr "Dieser Übergang:" + +msgid "This Workflow:" +msgstr "Dieser Workflow:" + +msgid "This WorkflowTransition:" +msgstr "Dieser Workflow-Übergang:" + +msgid "" +"This action is forbidden. If you think it should be allowed, please contact " +"the site administrator." 
+msgstr "" + +msgid "This entity type permissions:" +msgstr "Berechtigungen für diesen Entitätstyp" + +msgid "Time" +msgstr "Zeit" + +msgid "Time_plural" +msgstr "Zeiten" + +msgid "TrInfo" +msgstr "Übergangs-Information" + +msgid "TrInfo_plural" +msgstr "Übergangs-Informationen" + +msgid "Transition" +msgstr "Übergang" + +msgid "Transition_plural" +msgstr "Übergänge" + +msgid "URLs from which content will be imported. You can put one url per line" +msgstr "" + +msgid "Undoable actions" +msgstr "" + +msgid "Undoing" +msgstr "" + +msgid "UniqueConstraint" +msgstr "eindeutige Einschränkung" + +msgid "Unknown source type" +msgstr "" + +msgid "Unreachable objects" +msgstr "unzugängliche Objekte" + +#, python-format +msgid "Updated %(etype)s : %(entity)s" +msgstr "" + +msgid "Used by:" +msgstr "benutzt von:" + +msgid "Users and groups management" +msgstr "" + +msgid "WARNING" +msgstr "" + +msgid "Web server" +msgstr "Web-Server" + +msgid "Workflow" +msgstr "Workflow" + +msgid "Workflow history" +msgstr "Workflow-Chronik" + +msgid "WorkflowTransition" +msgstr "Workflow-Übergang" + +msgid "WorkflowTransition_plural" +msgstr "Workflow-Übergänge" + +msgid "Workflow_plural" +msgstr "Workflows" + +msgid "" +"You can either submit a new file using the browse button above, or choose to " +"remove already uploaded file by checking the \"detach attached file\" check-" +"box, or edit file content online with the widget below." +msgstr "" +"Sie können entweder mit dem bouton\n" +"\"Durchsuchen\" oberhalb eine neue Datei hochladen, eine bereits " +"hochgeladene Datei durch anklicken des Kästchens \"angehängte Datei abtrennen" +"\" entfernen, oder den Datei-Inhalt mit dem Widget unterhalb editieren." + +msgid "" +"You can either submit a new file using the browse button above, or edit file " +"content online with the widget below." +msgstr "" +"Sie können entweder mit dem bouton\n" +"\"Durchsuchen\" oberhalb eine neue Datei hochladen, oder den Datei-Inhalt " +"mit dem Widget unterhalb editieren." + +msgid "You can't change this relation" +msgstr "" + +msgid "You cannot remove the system source" +msgstr "" + +msgid "You cannot rename the system source" +msgstr "" + +msgid "" +"You have no access to this view or it can not be used to display the current " +"data." +msgstr "" +"Sie haben entweder keinen Zugriff auf diese Ansicht, oder die Ansicht kann " +"nicht zur Anzeite dieser Daten verwendet werden." + +msgid "" +"You're not authorized to access this page. If you think you should, please " +"contact the site administrator." +msgstr "" +"Sie haben keinen Zugriff auf diese Seite.Bitte wenden Sie sich ggfs. an " +"Ihren Administrator." + +#, python-format +msgid "[%s supervision] changes summary" +msgstr "[%s supervision] Beschreibung der Änderungen" + +msgid "" +"a RQL expression which should return some results, else the transition won't " +"be available. This query may use X and U variables that will respectivly " +"represents the current entity and the current user." +msgstr "" +"ein RQL-Ausdruck, der einige Treffer liefern sollte, sonst wird der Übergang " +"nicht verfügbar sein. Diese Abfrage kann X und U Variable benutzen, die " +"jeweils die aktuelle Entität und den aktuellen Nutzer repräsentieren." + +msgid "a URI representing an object in external data store" +msgstr "ein URI, der ein Objekt in einem externen Data-Store repräsentiert" + +msgid "a float is expected" +msgstr "Eine Dezimalzahl (float) wird erwartet." 
+ +msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" +msgstr "" + +msgid "" +"a simple cache entity characterized by a name and a validity date. The " +"target application is responsible for updating timestamp when necessary to " +"invalidate the cache (typically in hooks). Also, checkout the AppObject." +"get_cache() method." +msgstr "" + +msgid "abstract base class for transitions" +msgstr "abstrakte Basisklasse für Übergänge" + +msgid "action menu" +msgstr "" + +msgid "action(s) on this selection" +msgstr "Aktionen(en) bei dieser Auswahl" + +msgid "actions" +msgstr "Aktionen" + +msgid "activate" +msgstr "aktivieren" + +msgid "activated" +msgstr "aktiviert" + +msgid "add" +msgstr "hinzufügen" + +msgid "add Bookmark bookmarked_by CWUser object" +msgstr "Lesezeichen" + +msgid "add CWAttribute add_permission RQLExpression subject" +msgstr "" + +msgid "add CWAttribute constrained_by CWConstraint subject" +msgstr "Einschränkung" + +msgid "add CWAttribute read_permission RQLExpression subject" +msgstr "RQL-Ausdruck zum lesen" + +msgid "add CWAttribute relation_type CWRType object" +msgstr "Attributdefinition" + +msgid "add CWAttribute update_permission RQLExpression subject" +msgstr "RQL-Ausdruck für Berechtigung zum Aktualisieren" + +msgid "add CWEType add_permission RQLExpression subject" +msgstr "RQL-Ausdruck für Berechtigung zum Hinzufügen" + +msgid "add CWEType delete_permission RQLExpression subject" +msgstr "RQL-Ausdruck für Berechtigung zum Löschen" + +msgid "add CWEType read_permission RQLExpression subject" +msgstr "RQL-Ausdruck für Berechtigung zum Lesen" + +msgid "add CWEType update_permission RQLExpression subject" +msgstr "RQL-Ausdruck für Berechtigung zum Aktualisieren" + +msgid "add CWProperty for_user CWUser object" +msgstr "Eigenschaft" + +msgid "add CWRelation add_permission RQLExpression subject" +msgstr "RQL-Ausdruck hinzufügen" + +msgid "add CWRelation constrained_by CWConstraint subject" +msgstr "Einschränkung" + +msgid "add CWRelation delete_permission RQLExpression subject" +msgstr "RQL-Ausdruck löschen" + +msgid "add CWRelation read_permission RQLExpression subject" +msgstr "RQL-Ausdruck lesen" + +msgid "add CWRelation relation_type CWRType object" +msgstr "Relationsdefinition" + +msgid "add CWSourceHostConfig cw_host_config_of CWSource object" +msgstr "" + +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "unique-together-Einschränkung hinzufügen" + +msgid "add CWUser in_group CWGroup object" +msgstr "Nutzer" + +msgid "add CWUser use_email EmailAddress subject" +msgstr "Email-Adresse" + +msgid "add State allowed_transition Transition object" +msgstr "Anfangszustand" + +msgid "add State allowed_transition Transition subject" +msgstr "erlaubter Übergang" + +msgid "add State allowed_transition WorkflowTransition subject" +msgstr "Workflow-Übergang" + +msgid "add State state_of Workflow object" +msgstr "Status" + +msgid "add Transition condition RQLExpression subject" +msgstr "Bedingung" + +msgid "add Transition destination_state State object" +msgstr "ausstehender Übergang" + +msgid "add Transition destination_state State subject" +msgstr "Zielstatus" + +msgid "add Transition transition_of Workflow object" +msgstr "Übergang" + +msgid "add WorkflowTransition condition RQLExpression subject" +msgstr "Workflow-Übergang" + +msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" +msgstr "Subworkflow Exit-Punkt" + +msgid "add WorkflowTransition transition_of Workflow object" +msgstr "Workflow-Übergang" + 
+msgid "add a BaseTransition" +msgstr "" + +msgid "add a Bookmark" +msgstr "" + +msgid "add a CWAttribute" +msgstr "" + +msgid "add a CWCache" +msgstr "" + +msgid "add a CWComputedRType" +msgstr "" + +msgid "add a CWConstraint" +msgstr "" + +msgid "add a CWConstraintType" +msgstr "" + +msgid "add a CWDataImport" +msgstr "" + +msgid "add a CWEType" +msgstr "" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "add a CWEType" +msgstr "einen Entitätstyp hinzufügen" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "add a CWEType" +msgstr "einen Entitätstyp hinzufügen" + +msgid "add a CWGroup" +msgstr "" + +msgid "add a CWProperty" +msgstr "" + +msgid "add a CWRType" +msgstr "" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "add a CWRType" +msgstr "einen Relationstyp hinzufügen" + +msgid "add a CWRelation" +msgstr "" + +msgid "add a CWSource" +msgstr "" + +msgid "add a CWSourceHostConfig" +msgstr "" + +msgid "add a CWSourceSchemaConfig" +msgstr "" + +msgid "add a CWUniqueTogetherConstraint" +msgstr "" + +msgid "add a CWUser" +msgstr "" + +msgid "add a EmailAddress" +msgstr "" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "add a EmailAddress" +msgstr "Email-Adresse hinzufügen" + +msgid "add a ExternalUri" +msgstr "" + +msgid "add a RQLExpression" +msgstr "" + +msgid "add a State" +msgstr "" + +msgid "add a SubWorkflowExitPoint" +msgstr "" + +msgid "add a TrInfo" +msgstr "" + +msgid "add a Transition" +msgstr "" + +msgid "add a Workflow" +msgstr "" + +msgid "add a WorkflowTransition" +msgstr "" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgid "add_permission" +msgstr "kann hinzugefügt werden durch" + +msgctxt "CWAttribute" +msgid "add_permission" +msgstr "" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgctxt "CWEType" +msgid "add_permission" +msgstr "Berechtigung hinzufügen" + +msgctxt "CWRelation" +msgid "add_permission" +msgstr "Berechtigung hinzufügen" + +msgid "add_permission_object" +msgstr "hat die Berechtigung zum Hinzufügen" + +msgctxt "CWGroup" +msgid "add_permission_object" +msgstr "kann hinzufügen" + +msgctxt "RQLExpression" +msgid "add_permission_object" +msgstr "benutzt, um die Hinzufüge-Berechtigung zu festzulegen für" + +msgid "add_relation" +msgstr "hinzufügen" + +#, python-format +msgid "added %(etype)s #%(eid)s (%(title)s)" +msgstr "Hinzufügen der Entität %(etype)s #%(eid)s (%(title)s)" + +#, python-format +msgid "" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" +"Die Relation %(rtype)s von %(frometype)s #%(eidfrom)s zu %(toetype)s #" +"%(eidto)s wurde hinzugefügt." 
+ +msgid "additional type specific properties" +msgstr "" + +msgid "addrelated" +msgstr "hinzufügen" + +msgid "address" +msgstr "Adresse" + +msgctxt "EmailAddress" +msgid "address" +msgstr "Adresse" + +msgid "alias" +msgstr "Alias" + +msgctxt "EmailAddress" +msgid "alias" +msgstr "Alias" + +msgid "allow to set a specific workflow for an entity" +msgstr "erlaube, einen bestimmten Workflow für eine Entität zu setzen" + +msgid "allowed options depends on the source type" +msgstr "" + +msgid "allowed transitions from this state" +msgstr "erlaubte Übergänge von diesem Zustand" + +#, python-format +msgid "allowed values for \"action\" are %s" +msgstr "" + +msgid "allowed_transition" +msgstr "erlaubter Übergang" + +msgctxt "State" +msgid "allowed_transition" +msgstr "erlaubter Übergang" + +msgid "allowed_transition_object" +msgstr "ausstehende Zustände" + +msgctxt "BaseTransition" +msgid "allowed_transition_object" +msgstr "ausstehende Zustände" + +msgctxt "Transition" +msgid "allowed_transition_object" +msgstr "ausstehende Zustände" + +msgctxt "WorkflowTransition" +msgid "allowed_transition_object" +msgstr "ausstehende Zustände" + +msgid "an electronic mail address associated to a short alias" +msgstr "Eine E-Mail-Adresse wurde mit einem Alias verknüpft." + +msgid "an error occurred" +msgstr "Es ist ein Fehler aufgetreten." + +msgid "an error occurred while processing your request" +msgstr "Während der Bearbeitung Ihrer Anfrage ist ein Fehler aufgetreten." + +msgid "an error occurred, the request cannot be fulfilled" +msgstr "" +"Es ist ein Fehler aufgetreten, Ihre Anfrage kann nicht bearbeitet werden." + +msgid "an integer is expected" +msgstr "Ganze Zahl (integer) erwartet." + +msgid "and linked" +msgstr "und verknüpft" + +msgid "and/or between different values" +msgstr "und/oder zwischen verschiedenen Werten" + +msgid "anyrsetview" +msgstr "" + +msgid "april" +msgstr "April" + +#, python-format +msgid "archive for %(author)s" +msgstr "" + +#, python-format +msgid "archive for %(month)s/%(year)s" +msgstr "" + +#, python-format +msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" +msgstr "" +"Die Entität %(eid)s ´vom Typ %(etype)s muss mindestens mit einer \n" +"anderen durch die Relation %(rtype)s verknüpft sein." + +msgid "attribute" +msgstr "Attribut" + +msgid "august" +msgstr "August" + +msgid "authentication failure" +msgstr "Nutzername oder Passwort falsch" + +msgid "auto" +msgstr "automatisch" + +msgid "autocomputed attribute used to ensure transition coherency" +msgstr "" + +msgid "automatic" +msgstr "automatisch" + +#, python-format +msgid "back to pagination (%s results)" +msgstr "" + +msgid "bad value" +msgstr "Unzulässiger Wert" + +msgid "badly formatted url" +msgstr "" + +msgid "base url" +msgstr "Basis-URL" + +msgid "bookmark has been removed" +msgstr "Das Lesezeichen wurde gelöscht." 
+ +msgid "bookmark this page" +msgstr "diese Seite merken" + +msgid "bookmark this search" +msgstr "diese Suche merken" + +msgid "bookmarked_by" +msgstr "Lesezeichen angelegt durch" + +msgctxt "Bookmark" +msgid "bookmarked_by" +msgstr "Lesezeichen angelegt durch" + +msgid "bookmarked_by_object" +msgstr "hat Lesezeichen" + +msgctxt "CWUser" +msgid "bookmarked_by_object" +msgstr "verwendet Lesezeichen" + +msgid "bookmarks" +msgstr "Lesezeichen" + +msgid "bookmarks are used to have user's specific internal links" +msgstr "Lesezeichen werden für nutzer-spezifische interne Links verwendet" + +msgid "boxes" +msgstr "Boxen" + +msgid "bug report sent" +msgstr "Fehlerbericht gesendet" + +msgid "button_apply" +msgstr "Anwenden" + +msgid "button_cancel" +msgstr "Abbrechen" + +msgid "button_delete" +msgstr "Löschen" + +msgid "button_ok" +msgstr "OK" + +msgid "by" +msgstr "durch" + +msgid "by relation" +msgstr "durch die Relation" + +msgid "by_transition" +msgstr "je Übergang" + +msgctxt "TrInfo" +msgid "by_transition" +msgstr "je Übergang" + +msgid "by_transition_object" +msgstr "Übergangsinformation" + +msgctxt "BaseTransition" +msgid "by_transition_object" +msgstr "Übergangsinformation" + +msgctxt "Transition" +msgid "by_transition_object" +msgstr "Übergangsinformation" + +msgctxt "WorkflowTransition" +msgid "by_transition_object" +msgstr "Übergangsinformation" + +msgid "calendar" +msgstr "Kalender anzeigen" + +msgid "can not resolve entity types:" +msgstr "Die Typen konnten nicht ermittelt werden:" + +msgid "can only have one url" +msgstr "" + +msgid "can't be changed" +msgstr "kann nicht geändert werden" + +msgid "can't be deleted" +msgstr "kann nicht entfernt werden" + +msgid "can't change this attribute" +msgstr "" + +#, python-format +msgid "can't display data, unexpected error: %s" +msgstr "Kann die Daten aufgrund des folgenden Fehlers nicht anzeigen: %s" + +msgid "can't have multiple exits on the same state" +msgstr "Mehrere Ausgänge aus demselben Zustand nicht möglich." + +#, python-format +msgid "can't parse %(value)r (expected %(format)s)" +msgstr "" +"Kann den Wert %(value)r nicht analysieren (erwartetes Format: %(format)s)" + +#, python-format +msgid "" +"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " +"%(value)s) does not exist any longer" +msgstr "" + +#, python-format +msgid "" +"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " +"exist in the schema anymore." 
+msgstr "" + +#, python-format +msgid "can't restore state of entity %s, it has been deleted inbetween" +msgstr "" + +#, python-format +msgid "" +"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" +"%(card)s" +msgstr "" + +msgid "cancel" +msgstr "" + +msgid "cancel select" +msgstr "Auswahl aufheben" + +msgid "cancel this insert" +msgstr "diese Einfügung aufheben" + +msgid "cardinality" +msgstr "Kardinalität" + +msgctxt "CWAttribute" +msgid "cardinality" +msgstr "Kardinalität" + +msgctxt "CWRelation" +msgid "cardinality" +msgstr "Kardinalität" + +msgid "category" +msgstr "Kategorie" + +#, python-format +msgid "changed state of %(etype)s #%(eid)s (%(title)s)" +msgstr "Änderung des Zustands von %(etype)s #%(eid)s (%(title)s)" + +msgid "changes applied" +msgstr "Änderungen übernommen" + +msgid "click here to see created entity" +msgstr "Hier klicken, um die angelegte Entität anzusehen" + +msgid "click here to see edited entity" +msgstr "" + +msgid "click on the box to cancel the deletion" +msgstr "Klicken Sie die Box an, um das Löschen rückgängig zu machen." + +msgid "click to add a value" +msgstr "Klicken Sie, um einen Wert hinzuzufügen" + +msgid "click to delete this value" +msgstr "Klicken Sie, um diesen Wert zu löschen" + +msgid "click to edit this field" +msgstr "Klicken Sie, um dieses Feld zu editieren" + +msgid "close all" +msgstr "" + +msgid "comment" +msgstr "Kommentar" + +msgctxt "TrInfo" +msgid "comment" +msgstr "Kommentar" + +msgid "comment_format" +msgstr "Format" + +msgctxt "TrInfo" +msgid "comment_format" +msgstr "Format" + +msgid "components" +msgstr "Komponenten" + +msgid "components_navigation" +msgstr "Seitennavigation" + +msgid "components_navigation_description" +msgstr "Paginierungs-Komponente für große Ergebnismengen" + +msgid "components_rqlinput" +msgstr "rql Eingabefeld" + +msgid "components_rqlinput_description" +msgstr "das rql-Eingabefeld im Seitenkopf" + +msgid "composite" +msgstr "" + +msgctxt "CWRelation" +msgid "composite" +msgstr "composite" + +msgid "condition" +msgstr "Bedingung" + +msgctxt "BaseTransition" +msgid "condition" +msgstr "Bedingung" + +msgctxt "Transition" +msgid "condition" +msgstr "Bedingung" + +msgctxt "WorkflowTransition" +msgid "condition" +msgstr "Bedingung" + +msgid "condition_object" +msgstr "Bedingung von" + +msgctxt "RQLExpression" +msgid "condition_object" +msgstr "Bedingung von" + +msgid "conditions" +msgstr "Bedingungen" + +msgid "config" +msgstr "" + +msgctxt "CWSource" +msgid "config" +msgstr "" + +msgctxt "CWSourceHostConfig" +msgid "config" +msgstr "" + +msgid "config mode" +msgstr "Konfigurationsmodus" + +msgid "config type" +msgstr "Konfigurationstyp" + +msgid "confirm password" +msgstr "Passwort bestätigen" + +msgid "constrained_by" +msgstr "eingeschränkt durch" + +msgctxt "CWAttribute" +msgid "constrained_by" +msgstr "eingeschränkt durch" + +msgctxt "CWRelation" +msgid "constrained_by" +msgstr "eingeschränkt durch" + +msgid "constrained_by_object" +msgstr "Einschränkungen" + +msgctxt "CWConstraint" +msgid "constrained_by_object" +msgstr "Einschränkungen" + +msgid "constraint factory" +msgstr "Einschränkungs-Factory" + +msgid "constraint_of" +msgstr "" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "" + +msgid "constraint_of_object" +msgstr "" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "" + +msgid "constraints" +msgstr "Einschränkungen" + +msgid "constraints applying on this relation" +msgstr "auf diese Relation angewandte Einschränkung" + +msgid "content type" 
+msgstr "MIME-Typ" + +msgid "context" +msgstr "Kontext" + +msgid "context where this box should be displayed" +msgstr "Kontext, in dem diese Box angezeigt werden soll" + +msgid "context where this component should be displayed" +msgstr "Kontext, in dem diese Komponente angezeigt werden soll" + +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" +"Kontext, wo diese Nachricht angezeigt werden soll; für beides: frei lassen." + +msgid "control subject entity's relations order" +msgstr "" + +msgid "copy" +msgstr "kopieren" + +msgid "core relation indicating a user's groups" +msgstr "Kernrelation für die Gruppen eines Nutzers" + +msgid "" +"core relation indicating owners of an entity. This relation implicitly put " +"the owner into the owners group for the entity" +msgstr "" + +msgid "core relation indicating the original creator of an entity" +msgstr "Kernrelation für den Urheber einer Entität" + +msgid "core relation indicating the type of an entity" +msgstr "Kernrelation für den Identitätstyp" + +msgid "" +"core relation indicating the types (including specialized types) of an entity" +msgstr "" + +msgid "could not connect to the SMTP server" +msgstr "Keine Verbindung mit dem SMTP-Server" + +msgid "create an index for quick search on this attribute" +msgstr "Erstelle einen Index zur schnellen Suche über dieses Attribut" + +msgid "created on" +msgstr "angelegt am" + +msgid "created_by" +msgstr "erstellt von" + +msgid "created_by_object" +msgstr "hat erstellt" + +msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" +msgstr "Erstelle Lesezeichen für %(linkto)s" + +msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" +msgstr "Erstelle Attribut %(linkto)s" + +msgid "" +"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" +msgstr "Erstelle Einschränkung für attribute %(linkto)s" + +msgid "" +"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" +msgstr "Erstelle Einschränkung für Relation %(linkto)s" + +msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" +msgstr "Erstelle Eigenschaft für Nutzer %(linkto)s" + +msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" +msgstr "Erstelle Relation %(linkto)s" + +msgid "" +"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " +"%(linkto)s)" +msgstr "" + +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "" + +msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" +msgstr "Erstelle neuen Nutzer in Gruppe %(linkto)s" + +msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" +msgstr "Erstelle E-Mail-Adresse für Nutzer %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" +msgstr "" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" +msgstr "RQL-Ausdruck für Leseberechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s update_permission " +"RQLExpression)" +msgstr "RQL Ausdruck für Aktualisierungs-Berechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" +msgstr "Erstelle rql-Ausdruck für Hinzufüge-Berechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" +msgstr "Erstelle rql-Ausdruck für Lösch-Berechtigung für %(linkto)s" + +msgid "" +"creating 
RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" +msgstr "Erstelle rql-Ausdruck für Lese-Berechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" +msgstr "Erstelle rql-Ausdruck für Aktualisierungs-Berechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" +msgstr "RQL-Ausdruck zur Vergabe der Hinzufüge-Berechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s delete_permission " +"RQLExpression)" +msgstr "RQL-Ausdruck zur Vergabe der Lösch-Berechtigung für %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" +msgstr "RQL-Ausdruck zur Vergabe der Lese-Berechtigung für %(linkto)s" + +msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" +msgstr "Erstelle RQL-Ausdruck für Übergang %(linkto)s" + +msgid "" +"creating RQLExpression (WorkflowTransition %(linkto)s condition " +"RQLExpression)" +msgstr "Erstelle RQL-Ausdruck für Workflow-Übergang %(linkto)s" + +msgid "creating State (State allowed_transition Transition %(linkto)s)" +msgstr "Erstelle einen zustand, der den Übergang %(linkto)s auslösen kann." + +msgid "creating State (State state_of Workflow %(linkto)s)" +msgstr "Erstelle Zustand des Workflows %(linkto)s" + +msgid "creating State (Transition %(linkto)s destination_state State)" +msgstr "Erstelle Zielzustand für Übergang %(linkto)s" + +msgid "" +"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " +"subworkflow_exit SubWorkflowExitPoint)" +msgstr "Erstelle Subworkflow Exit-Punkt für Workflow-Übergang %(linkto)s" + +msgid "creating Transition (State %(linkto)s allowed_transition Transition)" +msgstr "Erstelle auslösbaren Übergang für Zustand %(linkto)s" + +msgid "creating Transition (Transition destination_state State %(linkto)s)" +msgstr "Erstelle Übergang, der zu Zustand %(linkto)s führt." + +msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" +msgstr "Erstelle Übergang des Workflows %(linkto)s" + +msgid "" +"creating WorkflowTransition (State %(linkto)s allowed_transition " +"WorkflowTransition)" +msgstr "Erstelle Workflow-Übergang, der zum Zustand %(linkto)s führt." 
+ +msgid "" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" +msgstr "Erstelle Workflow-Übergang des Workflows %(linkto)s" + +msgid "creation" +msgstr "Erstellung" + +msgid "creation date" +msgstr "Erstellungsdatum" + +msgid "creation time of an entity" +msgstr "Erstellungszeitpunkt einer Entität" + +msgid "creation_date" +msgstr "Erstellungsdatum" + +msgid "cstrtype" +msgstr "Typ der Einschränkung" + +msgctxt "CWConstraint" +msgid "cstrtype" +msgstr "Einschränkungstyp" + +msgid "cstrtype_object" +msgstr "benutzt von" + +msgctxt "CWConstraintType" +msgid "cstrtype_object" +msgstr "Einschränkungstyp von" + +msgid "csv export" +msgstr "CSV-Export" + +msgid "csv export (entities)" +msgstr "" + +msgid "ctxcomponents" +msgstr "Kontext-Komponenten" + +msgid "ctxcomponents_anonuserlink" +msgstr "" + +msgid "ctxcomponents_anonuserlink_description" +msgstr "" + +msgid "ctxcomponents_appliname" +msgstr "" + +msgid "ctxcomponents_appliname_description" +msgstr "" + +msgid "ctxcomponents_bookmarks_box" +msgstr "Lesezeichen-Box" + +msgid "ctxcomponents_bookmarks_box_description" +msgstr "Box mit einer Liste der Lesezeichen des Nutzers" + +msgid "ctxcomponents_breadcrumbs" +msgstr "Brotkrumen" + +msgid "ctxcomponents_breadcrumbs_description" +msgstr "" +"Anzeigen eines Pfads zur Lokalisierung der aktuellen Seite innerhalb der Site" + +msgid "ctxcomponents_download_box" +msgstr "Download-Box" + +msgid "ctxcomponents_download_box_description" +msgstr "" + +msgid "ctxcomponents_edit_box" +msgstr "Aktionsbox" + +msgid "ctxcomponents_edit_box_description" +msgstr "Box mit verfügbaren Aktionen für die angezeigten Daten" + +msgid "ctxcomponents_facet.filterbox" +msgstr "Filter" + +msgid "ctxcomponents_facet.filterbox_description" +msgstr "Box mit Filter für aktuelle Suchergebnis-Funktionalität" + +msgid "ctxcomponents_logo" +msgstr "Icon" + +msgid "ctxcomponents_logo_description" +msgstr "Das Anwendungs-Ikon angezeigt im Bildschirmkopf" + +msgid "ctxcomponents_metadata" +msgstr "Metadaten für Entität metadata" + +msgid "ctxcomponents_metadata_description" +msgstr "" + +msgid "ctxcomponents_possible_views_box" +msgstr "Box mit möglichen Ansichten" + +msgid "ctxcomponents_possible_views_box_description" +msgstr "Box mit möglichen Ansichten für die angezeigten Daten" + +msgid "ctxcomponents_prevnext" +msgstr "vorherige/nächste Entität" + +msgid "ctxcomponents_prevnext_description" +msgstr "" +"display link to go from one entity to another on entities implementing the " +"\"previous/next\" interface." + +msgid "ctxcomponents_rss" +msgstr "RSS-Box" + +msgid "ctxcomponents_rss_description" +msgstr "RSS icon um die angezeigten Daten als RSS-Thread zu erhalten" + +msgid "ctxcomponents_search_box" +msgstr "Suchbox" + +msgid "ctxcomponents_search_box_description" +msgstr "Suchbox" + +msgid "ctxcomponents_startup_views_box" +msgstr "Box für Start-Ansicht" + +msgid "ctxcomponents_startup_views_box_description" +msgstr "Box mit möglichen Start-Ansichten" + +msgid "ctxcomponents_userstatus" +msgstr "" + +msgid "ctxcomponents_userstatus_description" +msgstr "" + +msgid "ctxcomponents_wfhistory" +msgstr "Workflow-Chronik" + +msgid "ctxcomponents_wfhistory_description" +msgstr "Zeite die Workflow-Chronik." 
+ +msgid "ctxtoolbar" +msgstr "Werkzeugleiste" + +msgid "custom_workflow" +msgstr "angepasster Workflow" + +msgid "custom_workflow_object" +msgstr "angepasster Workflow von" + +msgid "cw.groups-management" +msgstr "" + +msgid "cw.users-management" +msgstr "" + +msgid "cw_for_source" +msgstr "" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_for_source" +msgstr "" + +msgid "cw_for_source_object" +msgstr "" + +msgctxt "CWSource" +msgid "cw_for_source_object" +msgstr "" + +msgid "cw_host_config_of" +msgstr "" + +msgctxt "CWSourceHostConfig" +msgid "cw_host_config_of" +msgstr "" + +msgid "cw_host_config_of_object" +msgstr "" + +msgctxt "CWSource" +msgid "cw_host_config_of_object" +msgstr "" + +msgid "cw_import_of" +msgstr "" + +msgctxt "CWDataImport" +msgid "cw_import_of" +msgstr "" + +msgid "cw_import_of_object" +msgstr "" + +msgctxt "CWSource" +msgid "cw_import_of_object" +msgstr "" + +msgid "cw_schema" +msgstr "" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_schema" +msgstr "" + +msgid "cw_schema_object" +msgstr "" + +msgctxt "CWEType" +msgid "cw_schema_object" +msgstr "" + +msgctxt "CWRType" +msgid "cw_schema_object" +msgstr "" + +msgctxt "CWRelation" +msgid "cw_schema_object" +msgstr "" + +msgid "cw_source" +msgstr "" + +msgid "cw_source_object" +msgstr "" + +msgid "cwetype-box" +msgstr "Box-Ansicht" + +msgid "cwetype-description" +msgstr "Beschreibung" + +msgid "cwetype-permissions" +msgstr "Berechtigungen" + +msgid "cwetype-views" +msgstr "Ansichten" + +msgid "cwetype-workflow" +msgstr "Workflow" + +msgid "cwgroup-main" +msgstr "Beschreibung" + +msgid "cwgroup-permissions" +msgstr "Berechtigungen" + +msgid "cwrtype-description" +msgstr "Beschreibung" + +msgid "cwrtype-permissions" +msgstr "Berechtigungen" + +msgid "cwsource-imports" +msgstr "" + +msgid "cwsource-main" +msgstr "" + +msgid "cwsource-mapping" +msgstr "" + +msgid "cwuri" +msgstr "interner URI" + +msgid "data directory url" +msgstr "URL des Daten-Pools" + +msgid "data model schema" +msgstr "Schema der Website" + +msgid "data sources" +msgstr "" + +msgid "data sources management" +msgstr "" + +msgid "date" +msgstr "Datum" + +msgid "deactivate" +msgstr "deaktivieren" + +msgid "deactivated" +msgstr "deaktiviert" + +msgid "december" +msgstr "Dezember" + +msgid "default" +msgstr "Standardwert" + +msgid "default text format for rich text fields." +msgstr "Standardformat für Textfelder" + +msgid "default user workflow" +msgstr "Standard-Workflow für Nutzer" + +msgid "default value" +msgstr "Standardwert" + +msgid "default value as gziped pickled python object" +msgstr "" + +msgid "default workflow for an entity type" +msgstr "Standard-Workflow eines Entitätstyps" + +msgid "default_workflow" +msgstr "Standard-Workflow" + +msgctxt "CWEType" +msgid "default_workflow" +msgstr "Standard-Workflow" + +msgid "default_workflow_object" +msgstr "Standard-Workflow von" + +msgctxt "Workflow" +msgid "default_workflow_object" +msgstr "Standard-Workflow von" + +msgid "defaultval" +msgstr "Standard-Wert" + +msgctxt "CWAttribute" +msgid "defaultval" +msgstr "Standard-Wert" + +msgid "define a CubicWeb user" +msgstr "Einen CubicWeb-Nutzer definieren" + +msgid "define a CubicWeb users group" +msgstr "Eine CubicWeb-Nutzergruppe definieren" + +msgid "" +"define a final relation: link a final relation type from a non final entity " +"to a final entity type. used to build the instance schema" +msgstr "" + +msgid "" +"define a non final relation: link a non final relation type from a non final " +"entity to a non final entity type. 
used to build the instance schema" +msgstr "" + +msgid "define a relation type, used to build the instance schema" +msgstr "" +"Definieren eines Relationstyps, der zur Erstellung des Instanz-Schemas " +"benutzt wird." + +msgid "define a rql expression used to define permissions" +msgstr "Definieren eines RQL-Ausdrucks zur Festlegung von Berechtigungen." + +msgid "define a schema constraint" +msgstr "Eine Schema-Einschränkung definieren" + +msgid "define a schema constraint type" +msgstr "den Typ einer Schema-Einschränkung definieren" + +msgid "define a virtual relation type, used to build the instance schema" +msgstr "" + +msgid "define an entity type, used to build the instance schema" +msgstr "definieren eines Entitätstyps zur Erstellung des Instanz-Schemas" + +msgid "define how we get out from a sub-workflow" +msgstr "Definieren, wie man aus einem Sub-Workflow herauskommt" + +msgid "defines a sql-level multicolumn unique index" +msgstr "definiert auf SQL-Ebene einen eindeutigen Index über mehrere Spalten" + +msgid "" +"defines what's the property is applied for. You must select this first to be " +"able to set value" +msgstr "" +"definiert, worauf die Eigenschaft angewendet wird. Sie müssen dies zunächst " +"markieren,um den Wert zuzuweisen." + +msgid "delete" +msgstr "löschen" + +msgid "delete this bookmark" +msgstr "dieses Lesezeichen löschen" + +msgid "delete this relation" +msgstr "diese Relation löschen" + +msgid "delete_permission" +msgstr "kann gelöscht werden durch" + +msgctxt "CWEType" +msgid "delete_permission" +msgstr "Lösch-Berechtigung" + +msgctxt "CWRelation" +msgid "delete_permission" +msgstr "Lösch-Berechtigung" + +msgid "delete_permission_object" +msgstr "hat Lösch-Berechtigung" + +msgctxt "CWGroup" +msgid "delete_permission_object" +msgstr "hat Lösch-Berechtigung für" + +msgctxt "RQLExpression" +msgid "delete_permission_object" +msgstr "hat die Berechtigung, zu löschen" + +#, python-format +msgid "deleted %(etype)s #%(eid)s (%(title)s)" +msgstr "Löschen der Entität %(etype)s #%(eid)s (%(title)s)" + +#, python-format +msgid "" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" +"Relation %(rtype)s von %(frometype)s #%(eidfrom)s zu %(toetype)s #%(eidto)s " +"gelöscht" + +msgid "depends on the constraint type" +msgstr "hängt vom Einschränkungsgyp ab" + +msgid "description" +msgstr "Beschreibung" + +msgctxt "BaseTransition" +msgid "description" +msgstr "Beschreibung" + +msgctxt "CWAttribute" +msgid "description" +msgstr "Beschreibung" + +msgctxt "CWComputedRType" +msgid "description" +msgstr "" + +msgctxt "CWEType" +msgid "description" +msgstr "Beschreibung" + +msgctxt "CWRType" +msgid "description" +msgstr "Beschreibung" + +msgctxt "CWRelation" +msgid "description" +msgstr "Beschreibung" + +msgctxt "State" +msgid "description" +msgstr "Beschreibung" + +msgctxt "Transition" +msgid "description" +msgstr "Beschreibung" + +msgctxt "Workflow" +msgid "description" +msgstr "Beschreibung" + +msgctxt "WorkflowTransition" +msgid "description" +msgstr "Beschreibung" + +msgid "description_format" +msgstr "Format" + +msgctxt "BaseTransition" +msgid "description_format" +msgstr "Format" + +msgctxt "CWAttribute" +msgid "description_format" +msgstr "Format" + +msgctxt "CWComputedRType" +msgid "description_format" +msgstr "" + +msgctxt "CWEType" +msgid "description_format" +msgstr "Format" + +msgctxt "CWRType" +msgid "description_format" +msgstr "Format" + +msgctxt "CWRelation" +msgid "description_format" +msgstr "Format" + +msgctxt 
"State" +msgid "description_format" +msgstr "Format" + +msgctxt "Transition" +msgid "description_format" +msgstr "Format" + +msgctxt "Workflow" +msgid "description_format" +msgstr "Format" + +msgctxt "WorkflowTransition" +msgid "description_format" +msgstr "Format" + +msgid "destination state for this transition" +msgstr "Zielzustand dieses Übergangs" + +msgid "destination state must be in the same workflow as our parent transition" +msgstr "Zielzustand muss im selben Workflow sein wie unser Parent-Übergang" + +msgid "destination state of a transition" +msgstr "Zielzustand eines Übergangs" + +msgid "" +"destination state. No destination state means that transition should go back " +"to the state from which we've entered the subworkflow." +msgstr "" +"Zielzustand. Kein Zielzustand bedeutet, dass der Übergang in den Zustand " +"zurückführen soll, von dem aus der Subworkflow erreicht wurde." + +msgid "destination_state" +msgstr "Zielzustand" + +msgctxt "SubWorkflowExitPoint" +msgid "destination_state" +msgstr "Zielzustand" + +msgctxt "Transition" +msgid "destination_state" +msgstr "Zielzustand" + +msgid "destination_state_object" +msgstr "Ziel von" + +msgctxt "State" +msgid "destination_state_object" +msgstr "Ziel von" + +msgid "detach attached file" +msgstr "angehängte Datei abtrennen" + +msgid "display order of the box" +msgstr "angezeigte Reihenfolge der Boxen" + +msgid "display order of the component" +msgstr "angezeigte Reihenfolge der Komponenten" + +msgid "display order of the facet" +msgstr "angezeigte Reihenfolge der Facetten" + +msgid "display the box or not" +msgstr "die Box anzeigen oder nicht" + +msgid "display the component or not" +msgstr "die Komponente anzeigen oder nicht" + +msgid "display the facet or not" +msgstr "die Facette anzeigen oder nicht" + +msgid "download" +msgstr "Herunterladen" + +#, python-format +msgid "download %s" +msgstr "Herunterladen von %s" + +msgid "download icon" +msgstr "Ikone 'herunterladen'" + +msgid "download schema as owl" +msgstr "Schema als OWL herunterladen" + +msgid "edit bookmarks" +msgstr "Lesezeichen bearbeiten" + +msgid "edit canceled" +msgstr "Änderungen verwerfen" + +msgid "editable-table" +msgstr "bearbeitbare Tabelle" + +msgid "eid" +msgstr "" + +msgid "embedded html" +msgstr "HTML-Inhalt" + +msgid "end_timestamp" +msgstr "" + +msgctxt "CWDataImport" +msgid "end_timestamp" +msgstr "" + +msgid "entities deleted" +msgstr "Entitäten gelöscht" + +msgid "entity and relation types can't be mapped, only attributes or relations" +msgstr "" + +msgid "entity copied" +msgstr "Entität kopiert" + +msgid "entity created" +msgstr "Entität erstellt" + +msgid "entity creation" +msgstr "Erstellung der Entität" + +msgid "entity deleted" +msgstr "Entität gelöscht" + +msgid "entity deletion" +msgstr "Löschen der Entität" + +msgid "entity edited" +msgstr "Entität bearbeitet" + +msgid "entity has no workflow set" +msgstr "Entität hat keinen Workflow" + +msgid "entity linked" +msgstr "Entität verknüpft" + +msgid "entity type" +msgstr "Entitätstyp" + +msgid "entity types which may use this workflow" +msgstr "Entitätstypen, die diesen Workflow benutzen können." + +msgid "entity update" +msgstr "Aktualisierung der Entität" + +msgid "entityview" +msgstr "" + +msgid "error" +msgstr "" + +msgid "error while publishing ReST text" +msgstr "Fehler beim Übersetzen von reST" + +msgid "exit state must be a subworkflow state" +msgstr "Exit-Zustand muss ein Subworkflow-Zustand sein." 
+ +msgid "exit_point" +msgstr "Exit-Punkt " + +msgid "exit_point_object" +msgstr "Exit-Punkt für" + +#, python-format +msgid "exiting from subworkflow %s" +msgstr "verlasse Subworkflow %s" + +msgid "expression" +msgstr "Ausdruck" + +msgctxt "RQLExpression" +msgid "expression" +msgstr "Ausdruck" + +msgid "exprtype" +msgstr "Typ des Ausdrucks" + +msgctxt "RQLExpression" +msgid "exprtype" +msgstr "Typ des Ausdrucks" + +msgid "extra_props" +msgstr "" + +msgctxt "CWAttribute" +msgid "extra_props" +msgstr "" + +msgid "facet-loading-msg" +msgstr "" + +msgid "facet.filters" +msgstr "" + +msgid "facetbox" +msgstr "Facetten-Box" + +msgid "facets_created_by-facet" +msgstr "\"erstellt durch\" facet" + +msgid "facets_created_by-facet_description" +msgstr "" + +msgid "facets_cw_source-facet" +msgstr "" + +msgid "facets_cw_source-facet_description" +msgstr "" + +msgid "facets_cwfinal-facet" +msgstr "\"finaler Entitäts- oder Relationstyp\" facet" + +msgid "facets_cwfinal-facet_description" +msgstr "" + +msgid "facets_datafeed.dataimport.status" +msgstr "" + +msgid "facets_datafeed.dataimport.status_description" +msgstr "" + +msgid "facets_etype-facet" +msgstr "\"Entitätstyp\" facet" + +msgid "facets_etype-facet_description" +msgstr "" + +msgid "facets_has_text-facet" +msgstr "\"hat Text\" facet" + +msgid "facets_has_text-facet_description" +msgstr "" + +msgid "facets_in_group-facet" +msgstr "\"in Gruppe\" facet" + +msgid "facets_in_group-facet_description" +msgstr "" + +msgid "facets_in_state-facet" +msgstr "\"in Zustand\" facet" + +msgid "facets_in_state-facet_description" +msgstr "" + +msgid "failed" +msgstr "" + +#, python-format +msgid "failed to uniquify path (%s, %s)" +msgstr "Konnte keinen eindeutigen Dateinamen erzeugen (%s, %s)" + +msgid "february" +msgstr "Februar" + +msgid "file tree view" +msgstr "Baumansicht (Dateien)" + +msgid "final" +msgstr "" + +msgctxt "CWEType" +msgid "final" +msgstr "" + +msgctxt "CWRType" +msgid "final" +msgstr "" + +msgid "first name" +msgstr "Vorname" + +msgid "firstname" +msgstr "Vorname" + +msgctxt "CWUser" +msgid "firstname" +msgstr "Vorname" + +msgid "foaf" +msgstr "FOAF" + +msgid "focus on this selection" +msgstr "" + +msgid "follow" +msgstr "dem Link folgen" + +#, python-format +msgid "follow this link for more information on this %s" +msgstr "Folgend Sie dem Link für mehr Informationen über %s" + +msgid "for_user" +msgstr "für den Nutzer" + +msgctxt "CWProperty" +msgid "for_user" +msgstr "für Nutzer" + +msgid "for_user_object" +msgstr "hat als Eigenschaft" + +msgctxt "CWUser" +msgid "for_user_object" +msgstr "verwendet die Eigenschaften" + +msgid "formula" +msgstr "" + +msgctxt "CWAttribute" +msgid "formula" +msgstr "" + +msgid "friday" +msgstr "Freitag" + +msgid "from" +msgstr "von" + +#, python-format +msgid "from %(date)s" +msgstr "vom %(date)s" + +msgid "from_entity" +msgstr "der Entität" + +msgctxt "CWAttribute" +msgid "from_entity" +msgstr "Attribut der Entität" + +msgctxt "CWRelation" +msgid "from_entity" +msgstr "Relation der Entität" + +msgid "from_entity_object" +msgstr "der Entität" + +msgctxt "CWEType" +msgid "from_entity_object" +msgstr "Subjektrelation" + +msgid "from_interval_start" +msgstr "Von" + +msgid "from_state" +msgstr "des Zustands" + +msgctxt "TrInfo" +msgid "from_state" +msgstr "Anfangszustand" + +msgid "from_state_object" +msgstr "Übergänge aus diesem Zustand" + +msgctxt "State" +msgid "from_state_object" +msgstr "Anfangszustand von" + +msgid "full text or RQL query" +msgstr "Volltextsuche oder RQL-Anfrage" + +msgid 
"fulltext_container" +msgstr "Container des indizierten Textes" + +msgctxt "CWRType" +msgid "fulltext_container" +msgstr "zu indizierendes Objekt" + +msgid "fulltextindexed" +msgstr "Indizierung des Textes" + +msgctxt "CWAttribute" +msgid "fulltextindexed" +msgstr "indizierter Text" + +msgid "gc" +msgstr "" + +msgid "generic plot" +msgstr "generischer Plot" + +msgid "generic relation to link one entity to another" +msgstr "generische Relation zur Verbindung einer Entität mit einer anderen" + +msgid "" +"generic relation to specify that an external entity represent the same " +"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" +msgstr "" +"generische Relation, die anzeigt, dass eine Entität mit einer anderen Web-" +"Ressource identisch ist (siehe http://www.w3.org/TR/owl-ref/#sameAs-def)." + +msgid "granted to groups" +msgstr "an Gruppen gewährt" + +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "graphische Darstellung des Datenmodells von %(appid)s" + +#, fuzzy, python-format +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" +"graphische Darstellung des Datenmodells des Entitätstyps (etype)s von " +"%(appid)s" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" +"graphische Darstellung des Datenmodells des Relationstyps %(rtype)s von " +"%(appid)s" + +msgid "group in which a user should be to be allowed to pass this transition" +msgstr "" +"Gruppe, zu welcher der Nutzer gehören muss, um die Transaktion durchzuführen" + +msgid "groups" +msgstr "Gruppen" + +msgid "groups allowed to add entities/relations of this type" +msgstr "" + +msgid "groups allowed to delete entities/relations of this type" +msgstr "" + +msgid "groups allowed to read entities/relations of this type" +msgstr "" + +msgid "groups allowed to update entities/relations of this type" +msgstr "" + +msgid "groups grant permissions to the user" +msgstr "die Gruppen geben dem Nutzer Rechte" + +msgid "guests" +msgstr "Gäste" + +msgid "hCalendar" +msgstr "hCalendar" + +msgid "has_text" +msgstr "enthält Text" + +msgid "header-center" +msgstr "" + +msgid "header-left" +msgstr "" + +msgid "header-right" +msgstr "" + +msgid "hide filter form" +msgstr "Filter verbergen" + +msgid "" +"how to format date and time in the ui (see this page for format " +"description)" +msgstr "" +"Wie formatiert man das Datum im Interface (Beschreibung des Formats)" + +msgid "" +"how to format date in the ui (see this page for format " +"description)" +msgstr "" +"Wie formatiert man das Datum im Interface (Beschreibung des Formats)" + +msgid "how to format float numbers in the ui" +msgstr "Wie man Dezimalzahlen (float) im Interface formatiert" + +msgid "" +"how to format time in the ui (see this page for format " +"description)" +msgstr "" +"Wie formatiert man die Uhrzeit im Interface (Beschreibung des " +"Formats)" + +msgid "i18n_bookmark_url_fqs" +msgstr "Parameter" + +msgid "i18n_bookmark_url_path" +msgstr "Pfad" + +msgid "i18n_login_popup" +msgstr "Anmelden" + +msgid "i18ncard_*" +msgstr "0..n" + +msgid "i18ncard_+" +msgstr "1..n" + +msgid "i18ncard_1" +msgstr "1" + +msgid "i18ncard_?" 
+msgstr "0..1" + +msgid "i18nprevnext_next" +msgstr "Weiter" + +msgid "i18nprevnext_previous" +msgstr "Zurück" + +msgid "i18nprevnext_up" +msgstr "eine Ebene auf" + +msgid "iCalendar" +msgstr "iCalendar" + +msgid "id of main template used to render pages" +msgstr "id der Hauptvorlage" + +msgid "identical to" +msgstr "identisch mit" + +msgid "identical_to" +msgstr "identisch mit" + +msgid "identity" +msgstr "ist identisch mit" + +msgid "identity_object" +msgstr "ist identisch mit" + +msgid "" +"if full text content of subject/object entity should be added to other side " +"entity (the container)." +msgstr "" +"falls der indizierte Text der Subjekt/Objekt-Entität der anderen Seite der " +"Relation (dem Container) hinzugefügt werden muss" + +msgid "image" +msgstr "Bild" + +msgid "in progress" +msgstr "" + +msgid "in_group" +msgstr "in der Gruppe" + +msgctxt "CWUser" +msgid "in_group" +msgstr "gehört zu der Gruppe" + +msgid "in_group_object" +msgstr "Mitglieder" + +msgctxt "CWGroup" +msgid "in_group_object" +msgstr "enthält die Nutzer" + +msgid "in_state" +msgstr "Zustand" + +msgid "in_state_object" +msgstr "Zustand von" + +msgid "in_synchronization" +msgstr "" + +msgctxt "CWSource" +msgid "in_synchronization" +msgstr "" + +msgid "incontext" +msgstr "im Kontext" + +msgid "incorrect captcha value" +msgstr "Unzulässiger Wert für Überschrift" + +#, python-format +msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" +msgstr "" + +msgid "index this attribute's value in the plain text index" +msgstr "indizieren des Wertes dieses Attributs im Volltext-Index" + +msgid "indexed" +msgstr "Index" + +msgctxt "CWAttribute" +msgid "indexed" +msgstr "indiziert" + +msgid "indicate the current state of an entity" +msgstr "zeigt den aktuellen Zustand einer Entität an" + +msgid "" +"indicate which state should be used by default when an entity using states " +"is created" +msgstr "" +"zeigt an, welcher Zustand standardmäßig benutzt werden soll, wenn eine " +"Entität erstellt wird" + +msgid "indifferent" +msgstr "gleichgültig" + +msgid "info" +msgstr "Information" + +msgid "initial state for this workflow" +msgstr "Anfangszustand für diesen Workflow" + +msgid "initial_state" +msgstr "Anfangszustand" + +msgctxt "Workflow" +msgid "initial_state" +msgstr "Anfangszustand" + +msgid "initial_state_object" +msgstr "Anfangszustand von" + +msgctxt "State" +msgid "initial_state_object" +msgstr "Anfangszustand von" + +msgid "inlined" +msgstr "eingereiht" + +msgctxt "CWRType" +msgid "inlined" +msgstr "eingereiht" + +msgid "instance home" +msgstr "Startseite der Instanz" + +msgid "internal entity uri" +msgstr "interner URI" + +msgid "internationalizable" +msgstr "internationalisierbar" + +msgctxt "CWAttribute" +msgid "internationalizable" +msgstr "internationalisierbar" + +#, python-format +msgid "invalid action %r" +msgstr "Ungültige Aktion %r" + +#, python-format +msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" +msgstr "" + +msgid "is" +msgstr "vom Typ" + +msgid "is object of:" +msgstr "ist Objekt von" + +msgid "is subject of:" +msgstr "ist Subjekt von" + +msgid "" +"is the subject/object entity of the relation composed of the other ? This " +"implies that when the composite is deleted, composants are also deleted." +msgstr "" +"Ist die Subjekt/Objekt-Entität der Relation aus der anderen Seite " +"zusammengesetzt?Falls ja, werden beim Löschen der Entität auch deren " +"Bausteine gelöscht." 
+ +msgid "is this attribute's value translatable" +msgstr "Ist der Wert dieses Attributs übersetzbar?" + +msgid "is this relation equivalent in both direction ?" +msgstr "Ist diese Relation in beiden Richtungen äquivalent?" + +msgid "" +"is this relation physically inlined? you should know what you're doing if " +"you are changing this!" +msgstr "" +"Ist diese Relation in die Datenbank eingereiht? Sie sollten wissen, was Sie " +"tun, wenn Sie dies ändern." + +msgid "is_instance_of" +msgstr "ist eine Instanz von" + +msgid "is_instance_of_object" +msgstr "Typ von" + +msgid "is_object" +msgstr "hat als Instanz" + +msgid "january" +msgstr "Januar" + +msgid "json-entities-export-view" +msgstr "" + +msgid "json-export-view" +msgstr "" + +msgid "july" +msgstr "Juli" + +msgid "june" +msgstr "Juni" + +msgid "language of the user interface" +msgstr "Sprache der Nutzer-Schnittstelle" + +msgid "last connection date" +msgstr "Datum der letzten Verbindung" + +msgid "last login time" +msgstr "Datum der letzten Verbindung" + +msgid "last name" +msgstr "Name" + +msgid "last usage" +msgstr "letzte Benutzung" + +msgid "last_login_time" +msgstr "Datum der letzten Verbindung" + +msgctxt "CWUser" +msgid "last_login_time" +msgstr "Datum der letzten Verbindung" + +msgid "latest import" +msgstr "" + +msgid "latest modification time of an entity" +msgstr "Datum der letzten Änderung einer Entität" + +msgid "latest synchronization time" +msgstr "" + +msgid "latest update on" +msgstr "letzte Änderung am" + +msgid "latest_retrieval" +msgstr "" + +msgctxt "CWSource" +msgid "latest_retrieval" +msgstr "" + +msgid "left" +msgstr "links" + +msgid "line" +msgstr "" + +msgid "" +"link a property to the user which want this property customization. Unless " +"you're a site manager, this relation will be handled automatically." +msgstr "" +"verknüpft eine Eigenschaft mit einem Nutzer, der diese Personalisierung " +"wünscht. Sofern Sie nicht Site-Manager sind, wird diese Relation automatisch " +"behandelt." 
+ +msgid "link a relation definition to its object entity type" +msgstr "verknüpft eine Relationsdefinition mit dem Entitätstyp ihres Objekts" + +msgid "link a relation definition to its relation type" +msgstr "verknüpft eine Relationsdefinition mit ihrem Relationstyp" + +msgid "link a relation definition to its subject entity type" +msgstr "verknüpft eine Relationsdefinition mit dem Entitätstyp ihres Subjekts" + +msgid "link a state to one or more workflow" +msgstr "verknüpft einen Zustand mit einem oder mehreren Workflows" + +msgid "link a transition information to its object" +msgstr "verknüpft eine Übergangsinformation mit ihrem Objekt" + +msgid "link a transition to one or more workflow" +msgstr "verknüpft einen Übergang mit einem oder mehreren Workflows" + +msgid "link a workflow to one or more entity type" +msgstr "verknüpft einen Workflow mit einem oder mehreren Entitätstypen" + +msgid "list" +msgstr "Liste" + +msgid "log" +msgstr "" + +msgctxt "CWDataImport" +msgid "log" +msgstr "" + +msgid "log in" +msgstr "anmelden" + +msgid "login" +msgstr "Anmeldung" + +msgctxt "CWUser" +msgid "login" +msgstr "Anmeldung" + +msgid "login / password" +msgstr "" + +msgid "login or email" +msgstr "Nutzername oder E-Mail-Adresse" + +msgid "login_action" +msgstr "Melden Sie sich an" + +msgid "logout" +msgstr "Abmelden" + +#, python-format +msgid "loop in %(rel)s relation (%(eid)s)" +msgstr "" +"Endlosschleife gefunden in der Relation %(rel)s von der Entität #%(eid)s" + +msgid "main informations" +msgstr "Allgemeine Informationen" + +msgid "main_tab" +msgstr "" + +msgid "mainvars" +msgstr "Hauptvariablen" + +msgctxt "RQLExpression" +msgid "mainvars" +msgstr "Hauptvariablen" + +msgid "manage" +msgstr "Verwalten" + +msgid "manage bookmarks" +msgstr "Lesezeichen verwalten" + +msgid "manage permissions" +msgstr "Rechte verwalten" + +msgid "managers" +msgstr "Administratoren" + +msgid "mandatory relation" +msgstr "obligatorische Relation" + +msgid "march" +msgstr "März" + +msgid "match_host" +msgstr "" + +msgctxt "CWSourceHostConfig" +msgid "match_host" +msgstr "" + +msgid "maximum number of characters in short description" +msgstr "Maximale Anzahl von Zeichen in der Kurzbeschreibung" + +msgid "maximum number of entities to display in related combo box" +msgstr "maximale Anzahl Entitäten zur Anzeige im Listenfeld" + +msgid "maximum number of objects displayed by page of results" +msgstr "maximale Anzahl pro Seite angezeigter Objekte" + +msgid "maximum number of related entities to display in the primary view" +msgstr "maximale anzahl verknüpfter Entitäten zur Anzeige auf der hauptseite" + +msgid "may" +msgstr "Mai" + +msgid "memory leak debugging" +msgstr "Fehlersuche bei Speicherlöschern" + +msgid "message" +msgstr "" + +#, python-format +msgid "missing parameters for entity %s" +msgstr "Fehlende Parameter für Entität %s" + +msgid "modification" +msgstr "Änderung" + +msgid "modification_date" +msgstr "Datum der Änderung" + +msgid "modify" +msgstr "ändern" + +msgid "monday" +msgstr "Montag" + +msgid "more actions" +msgstr "weitere Aktionen" + +msgid "more info about this workflow" +msgstr "mehr Informationen zu diesem Workflow" + +msgid "multiple edit" +msgstr "mehrfache Bearbeitung" + +msgid "my custom search" +msgstr "meine personalisierte Suche" + +msgid "name" +msgstr "Name" + +msgctxt "BaseTransition" +msgid "name" +msgstr "Name" + +msgctxt "CWCache" +msgid "name" +msgstr "Name" + +msgctxt "CWComputedRType" +msgid "name" +msgstr "" + +msgctxt "CWConstraintType" +msgid "name" +msgstr "Name" + +msgctxt 
"CWEType" +msgid "name" +msgstr "Name" + +msgctxt "CWGroup" +msgid "name" +msgstr "Name" + +msgctxt "CWRType" +msgid "name" +msgstr "Name" + +msgctxt "CWSource" +msgid "name" +msgstr "" + +msgctxt "CWUniqueTogetherConstraint" +msgid "name" +msgstr "" + +msgctxt "State" +msgid "name" +msgstr "Name" + +msgctxt "Transition" +msgid "name" +msgstr "Name" + +msgctxt "Workflow" +msgid "name" +msgstr "Name" + +msgctxt "WorkflowTransition" +msgid "name" +msgstr "Name" + +msgid "name of the cache" +msgstr "Name des Caches" + +msgid "" +"name of the main variables which should be used in the selection if " +"necessary (comma separated)" +msgstr "" +"Name der Hauptvariablen, die in der Auswahl benutzt werden sollten (mehrere " +"Variablen durch ',' trennen!)" + +msgid "name of the source" +msgstr "" + +msgid "navbottom" +msgstr "zum Seitenende" + +msgid "navcontentbottom" +msgstr "zum Hauptinhalt" + +msgid "navcontenttop" +msgstr "zum Seitenanfang" + +msgid "navigation" +msgstr "Navigation" + +msgid "navigation.combobox-limit" +msgstr "Anzahl Entitäten pro Listenfeld" + +msgid "navigation.page-size" +msgstr "Anzahl Suchergebnisse" + +msgid "navigation.related-limit" +msgstr "Anzahl Entitäten in der Hauptansicht" + +msgid "navigation.short-line-size" +msgstr "Kurzbeschreibung" + +msgid "navtop" +msgstr "zum Hauptinhalt springen" + +msgid "new" +msgstr "neu" + +msgid "next page" +msgstr "" + +msgid "next_results" +msgstr "weitere Ergebnisse" + +msgid "no" +msgstr "Nein" + +msgid "no content next link" +msgstr "" + +msgid "no content prev link" +msgstr "" + +msgid "no edited fields specified" +msgstr "" + +msgid "no log to display" +msgstr "" + +msgid "no related entity" +msgstr "keine verknüpfte Entität" + +msgid "no repository sessions found" +msgstr "keine Datenbank-Sitzung gefunden" + +msgid "no selected entities" +msgstr "keine Entitäten ausgewählt" + +#, python-format +msgid "no such entity type %s" +msgstr "Der Entitätstyp '%s' existiert nicht." + +msgid "no version information" +msgstr "Keine Versionsangaben." + +msgid "no web sessions found" +msgstr "Keine Sitzung gefunden." + +msgid "normal" +msgstr "normal" + +msgid "not authorized" +msgstr "nicht authrisiert" + +msgid "not selected" +msgstr "nicht ausgewählt" + +msgid "november" +msgstr "November" + +msgid "num. users" +msgstr "" + +msgid "object" +msgstr "Objekt" + +msgid "object type" +msgstr "Objekttyp" + +msgid "october" +msgstr "Oktober" + +msgid "one month" +msgstr "ein Monat" + +msgid "one week" +msgstr "eine Woche" + +msgid "oneline" +msgstr "eine Zeile" + +msgid "only select queries are authorized" +msgstr "Nur Auswahl-Anfragen sind erlaubt." + +msgid "open all" +msgstr "alle öffnen" + +msgid "opened sessions" +msgstr "offene Sitzungen" + +msgid "opened web sessions" +msgstr "offene Web-Sitzungen" + +msgid "options" +msgstr "Optionen" + +msgctxt "CWSourceSchemaConfig" +msgid "options" +msgstr "" + +msgid "order" +msgstr "Reihenfolge" + +msgid "ordernum" +msgstr "Reihenfolge" + +msgctxt "CWAttribute" +msgid "ordernum" +msgstr "Ordnungszahl" + +msgctxt "CWRelation" +msgid "ordernum" +msgstr "Ordnungszahl" + +msgid "owl" +msgstr "OWL" + +msgid "owlabox" +msgstr "OWL ABox" + +msgid "owned_by" +msgstr "gehört zu" + +msgid "owned_by_object" +msgstr "besitzt" + +msgid "owners" +msgstr "Besitzer" + +msgid "ownerships have been changed" +msgstr "Die Eigentumsrechte sind geändert worden." + +msgid "pageid-not-found" +msgstr "" +"Notwendige Daten scheinen nicht mehr gültig zu sein. Bitte laden Sie die " +"Seite neu und beginnen Sie von vorn." 
+ +msgid "parser" +msgstr "" + +msgctxt "CWSource" +msgid "parser" +msgstr "" + +msgid "parser to use to extract entities from content retrieved at given URLs." +msgstr "" + +msgid "password" +msgstr "Passwort" + +msgid "password and confirmation don't match" +msgstr "Das Passwort stimmt nicht mit der Bestätigung überein." + +msgid "path" +msgstr "Pfad" + +msgctxt "Bookmark" +msgid "path" +msgstr "Pfad" + +msgid "permalink to this message" +msgstr "" + +msgid "permission" +msgstr "Recht" + +msgid "permissions" +msgstr "Rechte" + +msgid "pick existing bookmarks" +msgstr "Wählen Sie aus den bestehenden lesezeichen aus" + +msgid "pkey" +msgstr "Schlüssel" + +msgctxt "CWProperty" +msgid "pkey" +msgstr "code der Eigenschaft" + +msgid "please correct errors below" +msgstr "Bitte die nachstehenden Fehler korrigieren" + +msgid "please correct the following errors:" +msgstr "Bitte korrigieren Sie die folgenden Fehler:" + +msgid "possible views" +msgstr "Mögliche Ansichten" + +msgid "prefered_form" +msgstr "bevorzugte form" + +msgctxt "EmailAddress" +msgid "prefered_form" +msgstr "bevorzugte form" + +msgid "prefered_form_object" +msgstr "bevorzugte form vor" + +msgctxt "EmailAddress" +msgid "prefered_form_object" +msgstr "bevorzugte form von" + +msgid "preferences" +msgstr "Einstellungen" + +msgid "previous page" +msgstr "" + +msgid "previous_results" +msgstr "vorige Ergebnisse" + +msgid "primary" +msgstr "primär" + +msgid "primary_email" +msgstr "primäre E-Mail-Adresse" + +msgctxt "CWUser" +msgid "primary_email" +msgstr "primäre E-Mail-Adresse" + +msgid "primary_email_object" +msgstr "Objekt der primären E-Mail-Adresse" + +msgctxt "EmailAddress" +msgid "primary_email_object" +msgstr "primäre E-Mail-Adresse von" + +msgid "profile" +msgstr "Profil" + +msgid "rdef-description" +msgstr "Beschreibung" + +msgid "rdef-permissions" +msgstr "Rechte" + +msgid "rdf export" +msgstr "" + +msgid "read" +msgstr "Lesen" + +msgid "read_permission" +msgstr "Leseberechtigung" + +msgctxt "CWAttribute" +msgid "read_permission" +msgstr "Leseberechtigung" + +msgctxt "CWEType" +msgid "read_permission" +msgstr "Leseberechtigung" + +msgctxt "CWRelation" +msgid "read_permission" +msgstr "Leseberechtigung" + +msgid "read_permission_object" +msgstr "hat eine Leseberechtigung" + +msgctxt "CWGroup" +msgid "read_permission_object" +msgstr "kann lesen" + +msgctxt "RQLExpression" +msgid "read_permission_object" +msgstr "kann lesen" + +msgid "regexp matching host(s) to which this config applies" +msgstr "" + +msgid "registry" +msgstr "Registratur" + +msgid "related entity has no state" +msgstr "Verknüpfte Entität hat keinen Zustand" + +msgid "related entity has no workflow set" +msgstr "Verknüpfte Entität hat keinen Workflow" + +msgid "relation" +msgstr "Relation" + +#, python-format +msgid "relation %(relname)s of %(ent)s" +msgstr "Relation %(relname)s von %(ent)s" + +#, python-format +msgid "" +"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " +"type supported" +msgstr "" + +#, python-format +msgid "" +"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " +"mandatory but not supported" +msgstr "" + +#, python-format +msgid "" +"relation %s is supported but none of its definitions matches supported " +"entities" +msgstr "" + +msgid "relation add" +msgstr "Relation hinzufügen" + +msgid "relation removal" +msgstr "Relation entfernen" + +msgid "relation_type" +msgstr "Relationstyp" + +msgctxt "CWAttribute" +msgid "relation_type" +msgstr "Relationstyp" + +msgctxt "CWRelation" +msgid 
"relation_type" +msgstr "Relationstyp" + +msgid "relation_type_object" +msgstr "Definition" + +msgctxt "CWRType" +msgid "relation_type_object" +msgstr "definition" + +msgid "relations" +msgstr "Relationen" + +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "Relationen" + +msgid "relations deleted" +msgstr "Relationen entfernt" + +msgid "relations_object" +msgstr "Relationen von" + +msgctxt "CWRType" +msgid "relations_object" +msgstr "Relationen von" + +msgid "relative url of the bookmarked page" +msgstr "URL relativ zu der Seite" + +msgid "remove-inlined-entity-form" +msgstr "Entfernen" + +msgid "require_group" +msgstr "benötigt die Gruppe" + +msgctxt "BaseTransition" +msgid "require_group" +msgstr "auf Gruppe beschränkt" + +msgctxt "Transition" +msgid "require_group" +msgstr "auf Gruppe beschränkt" + +msgctxt "WorkflowTransition" +msgid "require_group" +msgstr "auf Gruppe beschränkt" + +msgid "require_group_object" +msgstr "hat die Rechte" + +msgctxt "CWGroup" +msgid "require_group_object" +msgstr "hat die Rechte" + +msgid "required" +msgstr "erforderlich" + +msgid "required attribute" +msgstr "erforderliches Attribut" + +msgid "required field" +msgstr "Pflichtfeld" + +msgid "resources usage" +msgstr "genutzte Ressourcen" + +msgid "" +"restriction part of a rql query. For entity rql expression, X and U are " +"predefined respectivly to the current object and to the request user. For " +"relation rql expression, S, O and U are predefined respectivly to the " +"current relation'subject, object and to the request user. " +msgstr "" +"Restriktionsteil einer RQL-Abfrage. Für einen Ausdruck, der für eine Entität " +"gilt,X und U sind jeweils für die Entität und den Nutzer vordefiniert." +"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur " + +msgid "revert changes" +msgstr "Änderungen rückgängig machen" + +msgid "right" +msgstr "rechts" + +msgid "rql expression allowing to add entities/relations of this type" +msgstr "" + +msgid "rql expression allowing to delete entities/relations of this type" +msgstr "" + +msgid "rql expression allowing to read entities/relations of this type" +msgstr "" + +msgid "rql expression allowing to update entities/relations of this type" +msgstr "" + +msgid "rql expressions" +msgstr "RQL-Ausdrücke" + +msgid "rss export" +msgstr "" + +msgid "rule" +msgstr "" + +msgctxt "CWComputedRType" +msgid "rule" +msgstr "" + +msgid "same_as" +msgstr "identisch mit" + +msgid "sample format" +msgstr "Beispiel" + +msgid "saturday" +msgstr "Samstag" + +msgid "schema-diagram" +msgstr "Diagramm" + +msgid "schema-entity-types" +msgstr "Entitätstypen" + +msgid "schema-relation-types" +msgstr "Relationstypen" + +msgid "search" +msgstr "suchen" + +msgid "search for association" +msgstr "nach verwandten Ergebnissen suchen" + +msgid "searching for" +msgstr "Suche nach" + +msgid "security" +msgstr "Sicherheit" + +msgid "see more" +msgstr "" + +msgid "see them all" +msgstr "Alle ansehen" + +msgid "see_also" +msgstr "Siehe auch" + +msgid "select" +msgstr "auswählen" + +msgid "select a" +msgstr "wählen Sie einen" + +msgid "select a key first" +msgstr "Wählen Sie zuerst einen Schlüssel." + +msgid "select a relation" +msgstr "Wählen Sie eine Relation." 
+ +msgid "select this entity" +msgstr "Wählen Sie diese Entität" + +msgid "selected" +msgstr "ausgewählt" + +msgid "semantic description of this attribute" +msgstr "Semantische Beschreibung dieses Attributs" + +msgid "semantic description of this entity type" +msgstr "Semantische Beschreibung dieses Entitätstyps" + +msgid "semantic description of this relation" +msgstr "Semantische Beschreibung dieser Relation" + +msgid "semantic description of this relation type" +msgstr "Semantische Beschreibung dieses Relationstyps" + +msgid "semantic description of this state" +msgstr "Semantische Beschreibung dieses Zustands" + +msgid "semantic description of this transition" +msgstr "Semantische Beschreibung dieses Übergangs" + +msgid "semantic description of this workflow" +msgstr "Semantische Beschreibung dieses Workflows" + +msgid "september" +msgstr "September" + +msgid "server information" +msgstr "Server-Informationen" + +msgid "severity" +msgstr "" + +msgid "" +"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " +"You should also select text/html as default text format to actually get " +"fckeditor." +msgstr "" +"Bestimmt, ob HTML-Felder mit fckeditor (ein WYSIWYG-HTML-Editor)\n" +"bearbeitet werden müssen. Es wird auch empfohlen, Text/HTML\n" +"als Standard-Textformat festzulegen, um Text mit fckeditor zu bearbeiten." + +#, python-format +msgid "show %s results" +msgstr "Zeige %s Ergebnisse" + +msgid "show advanced fields" +msgstr "Zeige detaillierte Felder" + +msgid "show filter form" +msgstr "Filter zeigen" + +msgid "site configuration" +msgstr "Konfiguration der Website" + +msgid "site documentation" +msgstr "Dokumentation der Website" + +msgid "site title" +msgstr "Titel der Website" + +msgid "site-wide property can't be set for user" +msgstr "" +"Eine Eigenschaft für die gesamte Website kann nicht für einen Nutzer gesetzt " +"werden." + +msgid "some later transaction(s) touch entity, undo them first" +msgstr "" +"Eine oder mehrere frühere Transaktion(en) betreffen die Tntität. Machen Sie " +"sie zuerst rückgängig." + +msgid "some relations violate a unicity constraint" +msgstr "" + +msgid "sorry, the server is unable to handle this query" +msgstr "Der Server kann diese Anfrage leider nicht bearbeiten." + +msgid "" +"source's configuration. One key=value per line, authorized keys depending on " +"the source's type" +msgstr "" + +msgid "sparql xml" +msgstr "Sparql XML" + +msgid "special transition allowing to go through a sub-workflow" +msgstr "Spezieller Übergang, um in einen Subworkflow hineinzugehen" + +msgid "specializes" +msgstr "leitet sich ab von" + +msgctxt "CWEType" +msgid "specializes" +msgstr "spezialisiert" + +msgid "specializes_object" +msgstr "Vorgänger von" + +msgctxt "CWEType" +msgid "specializes_object" +msgstr "Vorgänger von" + +#, python-format +msgid "specifying %s is mandatory" +msgstr "" + +msgid "" +"start timestamp of the currently in synchronization, or NULL when no " +"synchronization in progress." +msgstr "" + +msgid "start_timestamp" +msgstr "" + +msgctxt "CWDataImport" +msgid "start_timestamp" +msgstr "" + +msgid "startup views" +msgstr "Start-Ansichten" + +msgid "startupview" +msgstr "" + +msgid "state" +msgstr "Zustand" + +msgid "state and transition don't belong the the same workflow" +msgstr "Zustand und Übergang gehören nicht zum selben Workflow." + +msgid "state doesn't apply to this entity's type" +msgstr "Zustand gilt nicht für diesen Entitätstyp." 
+ +msgid "state doesn't belong to entity's current workflow" +msgstr "Der Zustand gehört nicht zum aktuellen Workflow der Entität." + +msgid "state doesn't belong to entity's workflow" +msgstr "Der Zustand gehört nicht zum Workflow der Entität." + +msgid "" +"state doesn't belong to entity's workflow. You may want to set a custom " +"workflow for this entity first." +msgstr "" +"Der Zustand gehört nicht zum Workflow der Entität.Bitte bestimmen Sie zuerst " +"einen Workflow für diese Entität." + +msgid "state doesn't belong to this workflow" +msgstr "Zustand gehört nicht zu diesem Workflow." + +msgid "state_of" +msgstr "Zustand von" + +msgctxt "State" +msgid "state_of" +msgstr "Zustand von" + +msgid "state_of_object" +msgstr "hat als Zustand" + +msgctxt "Workflow" +msgid "state_of_object" +msgstr "enthält die Zustände" + +msgid "status" +msgstr "" + +msgctxt "CWDataImport" +msgid "status" +msgstr "" + +msgid "status change" +msgstr "Zustand ändern" + +msgid "status changed" +msgstr "Zustand geändert" + +#, python-format +msgid "status will change from %(st1)s to %(st2)s" +msgstr "Entität wird vom Zustand %(st1)s in zustand %(st2)s übergehen." + +msgid "subject" +msgstr "Subjekt" + +msgid "subject type" +msgstr "Subjekttyp" + +msgid "subject/object cardinality" +msgstr "Subjekt/Objekt Kardinalität" + +msgid "subworkflow" +msgstr "Subworkflow" + +msgctxt "WorkflowTransition" +msgid "subworkflow" +msgstr "Subworkflow" + +msgid "" +"subworkflow isn't a workflow for the same types as the transition's workflow" +msgstr "" +"Dieser Subworkflow gilt nicht für dieselben Typen wie der Workflow dieses " +"Übergangs." + +msgid "subworkflow state" +msgstr "Zustand des Subworkflows" + +msgid "subworkflow_exit" +msgstr "Ende des Subworkflows" + +msgctxt "WorkflowTransition" +msgid "subworkflow_exit" +msgstr "Ende des Subworkflows" + +msgid "subworkflow_exit_object" +msgstr "Endzustand" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_exit_object" +msgstr "Endzustände" + +msgid "subworkflow_object" +msgstr "verwendet vom Übergang" + +msgctxt "Workflow" +msgid "subworkflow_object" +msgstr "Subworkflow von" + +msgid "subworkflow_state" +msgstr "Zustand des Subworkflows" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_state" +msgstr "Zustand" + +msgid "subworkflow_state_object" +msgstr "Endzustand von" + +msgctxt "State" +msgid "subworkflow_state_object" +msgstr "Endzustand von" + +msgid "success" +msgstr "" + +msgid "sunday" +msgstr "Sonntag" + +msgid "surname" +msgstr "Name" + +msgctxt "CWUser" +msgid "surname" +msgstr "Nachname" + +msgid "symmetric" +msgstr "symmetrisch" + +msgctxt "CWRType" +msgid "symmetric" +msgstr "symmetrisch" + +msgid "synchronization-interval must be greater than 1 minute" +msgstr "" + +msgid "table" +msgstr "Tabelle" + +msgid "tablefilter" +msgstr "Tabellenfilter" + +msgid "text" +msgstr "Text" + +msgid "text/cubicweb-page-template" +msgstr "dynamischer Inhalt" + +msgid "text/html" +msgstr "html" + +msgid "text/markdown" +msgstr "" + +msgid "text/plain" +msgstr "Nur Text" + +msgid "text/rest" +msgstr "reST" + +msgid "the URI of the object" +msgstr "der URI des Objekts" + +msgid "the prefered email" +msgstr "primäre E-Mail-Adresse" + +msgid "the system source has its configuration stored on the file-system" +msgstr "" + +#, python-format +msgid "the value \"%s\" is already used, use another one" +msgstr "" +"Der Wert \"%s\" wird bereits benutzt, bitte verwenden Sie einen anderen Wert" + +msgid "there is no next page" +msgstr "" + +msgid "there is no previous page" +msgstr 
"" + +#, python-format +msgid "there is no transaction #%s" +msgstr "" + +msgid "this action is not reversible!" +msgstr "Achtung! Diese Aktion ist unumkehrbar." + +msgid "this entity is currently owned by" +msgstr "Diese Entität gehört:" + +msgid "this parser doesn't use a mapping" +msgstr "" + +msgid "this resource does not exist" +msgstr "cette ressource est introuvable" + +msgid "this source doesn't use a mapping" +msgstr "" + +msgid "thursday" +msgstr "Donnerstag" + +msgid "timestamp" +msgstr "Datum" + +msgctxt "CWCache" +msgid "timestamp" +msgstr "gültig seit" + +msgid "timetable" +msgstr "Zeitplan" + +msgid "title" +msgstr "titel" + +msgctxt "Bookmark" +msgid "title" +msgstr "bezeichnet" + +msgid "to" +msgstr "zu" + +#, python-format +msgid "to %(date)s" +msgstr "bis zum %(date)s" + +msgid "to associate with" +msgstr "zu verknüpfen mit" + +msgid "to_entity" +msgstr "zu der Entität" + +msgctxt "CWAttribute" +msgid "to_entity" +msgstr "für die Entität" + +msgctxt "CWRelation" +msgid "to_entity" +msgstr "für die Entität" + +msgid "to_entity_object" +msgstr "Objekt der Relation" + +msgctxt "CWEType" +msgid "to_entity_object" +msgstr "Objekt der Relation" + +msgid "to_interval_end" +msgstr "bis" + +msgid "to_state" +msgstr "zum Zustand" + +msgctxt "TrInfo" +msgid "to_state" +msgstr "Zielstatus" + +msgid "to_state_object" +msgstr "Übergänge zu dem Zustand" + +msgctxt "State" +msgid "to_state_object" +msgstr "Übergang zu diesem Zustand" + +msgid "toggle check boxes" +msgstr "Kontrollkästchen umkehren" + +msgid "tr_count" +msgstr "" + +msgctxt "TrInfo" +msgid "tr_count" +msgstr "" + +msgid "transaction undone" +msgstr "" + +#, python-format +msgid "transition %(tr)s isn't allowed from %(st)s" +msgstr "Der Übergang %(tr)s ist aus dem Zustand %(st)s nicht erlaubt." + +msgid "transition doesn't belong to entity's workflow" +msgstr "Übergang gehört nicht zum Workflow der Entität." + +msgid "transition isn't allowed" +msgstr "Der Übergang ist nicht erleubt." + +msgid "transition may not be fired" +msgstr "Der Übergang kann nicht ausgelöst werden." 
+ +msgid "transition_of" +msgstr "Übergang des/der" + +msgctxt "BaseTransition" +msgid "transition_of" +msgstr "Übergang des/der" + +msgctxt "Transition" +msgid "transition_of" +msgstr "Übergang des/der" + +msgctxt "WorkflowTransition" +msgid "transition_of" +msgstr "Übergang des/der" + +msgid "transition_of_object" +msgstr "hat als Übergang" + +msgctxt "Workflow" +msgid "transition_of_object" +msgstr "hat als Übergang" + +msgid "tree view" +msgstr "Baumansicht" + +msgid "tuesday" +msgstr "Dienstag" + +msgid "type" +msgstr "Typ" + +msgctxt "BaseTransition" +msgid "type" +msgstr "Typ" + +msgctxt "CWSource" +msgid "type" +msgstr "" + +msgctxt "Transition" +msgid "type" +msgstr "Typ" + +msgctxt "WorkflowTransition" +msgid "type" +msgstr "Typ" + +msgid "type here a sparql query" +msgstr "Geben sie eine sparql-Anfrage ein" + +msgid "type of the source" +msgstr "" + +msgid "ui" +msgstr "Allgemeinen Eigenschaften der Nutzerschnittstelle" + +msgid "ui.date-format" +msgstr "Datumsformat" + +msgid "ui.datetime-format" +msgstr "Format von Datum und Zeit" + +msgid "ui.default-text-format" +msgstr "Textformat" + +msgid "ui.encoding" +msgstr "Kodierung" + +msgid "ui.fckeditor" +msgstr "Editor" + +msgid "ui.float-format" +msgstr "Format von Dezimalzahlen (float)" + +msgid "ui.language" +msgstr "Sprache" + +msgid "ui.main-template" +msgstr "Hauptvorlage" + +msgid "ui.site-title" +msgstr "Titel der Website" + +msgid "ui.time-format" +msgstr "Zeitformat" + +msgid "unable to check captcha, please try again" +msgstr "Kann capcha nicht bestätigen. Bitte noch einmal versuchen." + +msgid "unaccessible" +msgstr "nicnt zugänglich" + +msgid "unauthorized value" +msgstr "ungültiger Wert" + +msgid "undefined user" +msgstr "" + +msgid "undo" +msgstr "rückgängig machen" + +msgid "unique identifier used to connect to the application" +msgstr "eindeutiger Bezeichner zur Verbindung mit der Anwendung" + +msgid "unknown external entity" +msgstr "(Externe) Entität nicht gefunden" + +#, python-format +msgid "unknown options %s" +msgstr "" + +#, python-format +msgid "unknown property key %s" +msgstr "Unbekannter Eigentumsschlüssel %s" + +msgid "unknown vocabulary:" +msgstr "Unbekanntes Wörterbuch : " + +msgid "unsupported protocol" +msgstr "" + +msgid "upassword" +msgstr "Passwort" + +msgctxt "CWUser" +msgid "upassword" +msgstr "Passwort" + +msgid "update" +msgstr "Aktualisierung" + +msgid "update_permission" +msgstr "Änderungsrecht" + +msgctxt "CWAttribute" +msgid "update_permission" +msgstr "Änderungsrecht" + +msgctxt "CWEType" +msgid "update_permission" +msgstr "Änderungsrecht" + +msgid "update_permission_object" +msgstr "hat die Änderungsberechtigung" + +msgctxt "CWGroup" +msgid "update_permission_object" +msgstr "kann ändern" + +msgctxt "RQLExpression" +msgid "update_permission_object" +msgstr "kann ändern" + +msgid "update_relation" +msgstr "aktualisieren" + +msgid "updated" +msgstr "aktualisiert" + +#, python-format +msgid "updated %(etype)s #%(eid)s (%(title)s)" +msgstr "Entität %(etype)s #%(eid)s (%(title)s) aktualisiert" + +msgid "uri" +msgstr "URI" + +msgctxt "ExternalUri" +msgid "uri" +msgstr "URI" + +msgid "url" +msgstr "" + +msgctxt "CWSource" +msgid "url" +msgstr "" + +msgid "" +"use to define a transition from one or multiple states to a destination " +"states in workflow's definitions. Transition without destination state will " +"go back to the state from which we arrived to the current state." 
+msgstr "" +"verwendet, um einen Übergang von einem oder mehreren Zuständenin einen " +"Zielzustand eines Workflows zu definieren.Ein Übergang ohne Zielzustand " +"führt in den Zustand zurück, der dem aktuellen zustand vorausgeht." + +msgid "use_email" +msgstr "E-Mail-Adresse" + +msgctxt "CWUser" +msgid "use_email" +msgstr "verwendet die E-Mail-Adresse" + +msgid "use_email_object" +msgstr "Adresse verwendet von" + +msgctxt "EmailAddress" +msgid "use_email_object" +msgstr "verwendet von" + +msgid "" +"used for cubicweb configuration. Once a property has been created you can't " +"change the key." +msgstr "" +"konfiguriert CubicWeb. Nachdem eine Eigenschafterstellt wurde, können Sie " +"den Schlüssel nicht mehr ändern." + +msgid "" +"used to associate simple states to an entity type and/or to define workflows" +msgstr "" +"assoziiert einfache Zustände mit einem Entitätstyp und/oder definiert " +"Workflows" + +msgid "user" +msgstr "Nutzer" + +#, python-format +msgid "" +"user %s has made the following change(s):\n" +"\n" +msgstr "" +"Nutzer %s hat die folgende(n) Änderung(en) vorgenommen:\n" +"\n" + +msgid "user interface encoding" +msgstr "Kodierung für die Nutzerschnittstelle" + +msgid "user preferences" +msgstr "Nutzereinstellungen" + +msgid "user's email account" +msgstr "" + +msgid "users" +msgstr "Nutzer" + +msgid "users and groups" +msgstr "" + +msgid "users using this bookmark" +msgstr "Nutzer, die dieses Lesezeichen verwenden" + +msgid "validate modifications on selected items" +msgstr "Überprüfen der Änderungen an den ausgewählten Elementen" + +msgid "validating..." +msgstr "Überprüfung läuft..." + +msgid "value" +msgstr "Wert" + +msgctxt "CWConstraint" +msgid "value" +msgstr "Einschränkung" + +msgctxt "CWProperty" +msgid "value" +msgstr "Wert" + +#, python-format +msgid "value %(KEY-value)s must be < %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be > %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" +msgstr "" + +msgid "value associated to this key is not editable manually" +msgstr "" +"Der mit diesem Schlüssele verbundene Wert kann n icht manuell geändert " +"werden." + +#, python-format +msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" +msgstr "" + +#, python-format +msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" +msgstr "" + +msgid "vcard" +msgstr "VCard" + +msgid "versions configuration" +msgstr "Versionskonfiguration" + +msgid "view" +msgstr "ansehen" + +msgid "view all" +msgstr "alle ansehen" + +msgid "view detail for this entity" +msgstr "Details für diese Entität ansehen" + +msgid "view history" +msgstr "Chronik ansehen" + +msgid "view identifier" +msgstr "Nutzername" + +msgid "view title" +msgstr "Titel" + +msgid "view workflow" +msgstr "mögliche Zustände ansehen" + +msgid "view_index" +msgstr "Index-Seite" + +msgid "visible" +msgstr "sichtbar" + +msgid "warning" +msgstr "" + +msgid "we are not yet ready to handle this query" +msgstr "Momentan können wir diese sparql-Anfrage noch nicht ausführen." + +msgid "wednesday" +msgstr "Mittwoch" + +#, python-format +msgid "welcome %s!" +msgstr "Willkommen %s!" 
+ +msgid "wf_info_for" +msgstr "Chronik von" + +msgid "wf_info_for_object" +msgstr "Chronik der Übergänge" + +msgid "wf_tab_info" +msgstr "Beschreibung" + +msgid "wfgraph" +msgstr "Grafik des Workflows" + +msgid "" +"when multiple addresses are equivalent (such as python-projects@logilab.org " +"and python-projects@lists.logilab.org), set this to indicate which is the " +"preferred form." +msgstr "" +"Wenn mehrere Adressen ähnlich sind (comme python-projects@logilab.org und " +"python-projects@lists.logilab.org), bestimmen Sie die bevorzugte Form." + +msgid "workflow" +msgstr "Workflow" + +#, python-format +msgid "workflow changed to \"%s\"" +msgstr "Workflow geändert in \"%s\"" + +msgid "workflow has no initial state" +msgstr "Workflow hat keinen Anfangszustand" + +msgid "workflow history item" +msgstr "Beginn der Chronik des Workflows" + +msgid "workflow isn't a workflow for this type" +msgstr "Der Workflow gilt nicht für diesen Entitätstyp." + +msgid "workflow to which this state belongs" +msgstr "Workflow, zu dem dieser Zustand gehört" + +msgid "workflow to which this transition belongs" +msgstr "Workflow, zu dem dieser Übergang gehört" + +msgid "workflow_of" +msgstr "Workflow von" + +msgctxt "Workflow" +msgid "workflow_of" +msgstr "Workflow von" + +msgid "workflow_of_object" +msgstr "hat als Workflow" + +msgctxt "CWEType" +msgid "workflow_of_object" +msgstr "hat als Workflow" + +#, python-format +msgid "wrong query parameter line %s" +msgstr "Falscher Anfrage-Parameter Zeile %s" + +msgid "xbel export" +msgstr "" + +msgid "xml export" +msgstr "XML-Export" + +msgid "xml export (entities)" +msgstr "" + +msgid "yes" +msgstr "Ja" + +msgid "you have been logged out" +msgstr "Sie sind jetzt abgemeldet." + +msgid "you should probably delete that property" +msgstr "Sie sollten diese Eigenschaft wahrscheinlich löschen." + +#~ msgid "Any" +#~ msgstr "irgendein" + +#~ msgid "Browse by category" +#~ msgstr "nach Kategorien navigieren" + +#~ msgid "No account? Try public access at %s" +#~ msgstr "Kein Konto? Zur öffentlichen Website: %s" + +#~ msgid "anonymous" +#~ msgstr "anonym" + +#~ msgid "can't connect to source %s, some data may be missing" +#~ msgstr "Keine Verbindung zu der Quelle %s, einige Daten könnten fehlen" + +#~ msgid "components_etypenavigation" +#~ msgstr "nach Typ filtern" + +#~ msgid "components_etypenavigation_description" +#~ msgstr "Erlaubt die Sortierung von Suchergebnissen nach Entitätstyp" + +#~ msgid "error while querying source %s, some data may be missing" +#~ msgstr "" +#~ "Fehler beim Zugriff auf Quelle %s, möglicherweise sind die Daten " +#~ "unvollständig." + +#~ msgid "no edited fields specified for entity %s" +#~ msgstr "kein Eingabefeld spezifiziert Für Entität %s" + +#~ msgid "timeline" +#~ msgstr "Zeitleiste" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/i18n/en.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/i18n/en.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4553 @@ +# cubicweb i18n catalog +# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# Logilab +msgid "" +msgstr "" +"Project-Id-Version: 2.0\n" +"POT-Creation-Date: 2006-01-12 17:35+CET\n" +"PO-Revision-Date: 2011-04-29 12:57+0200\n" +"Last-Translator: Sylvain Thenault \n" +"Language-Team: English \n" +"Language: en\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#, python-format +msgid "" +"\n" +"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " +"entity\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" +msgstr "" + +#, python-format +msgid " from state %(fromstate)s to state %(tostate)s\n" +msgstr "" + +msgid " :" +msgstr ":" + +#, python-format +msgid "\"action\" must be specified in options; allowed values are %s" +msgstr "" + +msgid "\"role=subject\" or \"role=object\" must be specified in options" +msgstr "" + +#, python-format +msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" +msgstr "" + +#, python-format +msgid "%(KEY-rtype)s is part of violated unicity constraint" +msgstr "" + +#, python-format +msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" +msgstr "" + +#, python-format +msgid "%(attr)s set to %(newvalue)s" +msgstr "" + +#, python-format +msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" +msgstr "" + +#, python-format +msgid "%(etype)s by %(author)s" +msgstr "" + +#, python-format +msgid "%(firstname)s %(surname)s" +msgstr "" + +#, python-format +msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" +msgstr "" + +#, python-format +msgid "%d days" +msgstr "" + +#, python-format +msgid "%d hours" +msgstr "" + +#, python-format +msgid "%d minutes" +msgstr "" + +#, python-format +msgid "%d months" +msgstr "" + +#, python-format +msgid "%d seconds" +msgstr "" + +#, python-format +msgid "%d weeks" +msgstr "" + +#, python-format +msgid "%d years" +msgstr "" + +#, python-format +msgid "%s could be supported" +msgstr "" + +#, python-format +msgid "%s error report" +msgstr "" + +#, python-format +msgid "%s software version of the database" +msgstr "" + +#, python-format +msgid "%s updated" +msgstr "" + +#, python-format +msgid "'%s' action doesn't take any options" +msgstr "" + +#, python-format +msgid "" +"'%s' action for in_state relation should at least have 'linkattr=name' option" +msgstr "" + +#, python-format +msgid "'%s' action requires 'linkattr' option" +msgstr "" + +msgid "(UNEXISTANT EID)" +msgstr "" + +#, python-format +msgid "(suppressed) entity #%d" +msgstr "" + +msgid "**" +msgstr "0..n 0..n" + +msgid "*+" +msgstr "0..n 1..n" + +msgid "*1" +msgstr "0..n 1" + +msgid "*?" +msgstr "0..n 0..1" + +msgid "+*" +msgstr "1..n 0..n" + +msgid "++" +msgstr "1..n 1..n" + +msgid "+1" +msgstr "1..n 1" + +msgid "+?" +msgstr "1..n 0..1" + +msgid "1*" +msgstr "1 0..n" + +msgid "1+" +msgstr "1 1..n" + +msgid "11" +msgstr "1 1" + +msgid "1?" +msgstr "1 0..1" + +#, python-format +msgid "<%s not specified>" +msgstr "" + +#, python-format +msgid "" +"
This schema of the data model excludes the meta-data, but you " +"can also display a complete schema with meta-data.
" +msgstr "" + +msgid "" +msgstr "" + +msgid "" +msgstr "" + +msgid "?*" +msgstr "0..1 0..n" + +msgid "?+" +msgstr "0..1 1..n" + +msgid "?1" +msgstr "0..1 1" + +msgid "??" +msgstr "0..1 0..1" + +msgid "AND" +msgstr "" + +msgid "About this site" +msgstr "" + +#, python-format +msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "" + +msgid "Attributes permissions:" +msgstr "" + +# schema pot file, generated on 2009-09-16 16:46:55 +# +# singular and plural forms for each entity type +msgid "BaseTransition" +msgstr "Transition (abstract)" + +msgid "BaseTransition_plural" +msgstr "Transitions (abstract)" + +msgid "BigInt" +msgstr "Big integer" + +msgid "BigInt_plural" +msgstr "Big integers" + +msgid "Bookmark" +msgstr "Bookmark" + +msgid "Bookmark_plural" +msgstr "Bookmarks" + +msgid "Boolean" +msgstr "Boolean" + +msgid "Boolean_plural" +msgstr "Booleans" + +msgid "BoundConstraint" +msgstr "bound constraint" + +msgid "BoundaryConstraint" +msgstr "" + +msgid "Browse by entity type" +msgstr "" + +#, python-format +msgid "By %(user)s on %(dt)s [%(undo_link)s]" +msgstr "" + +msgid "Bytes" +msgstr "Bytes" + +msgid "Bytes_plural" +msgstr "Bytes" + +msgid "CWAttribute" +msgstr "Attribute" + +msgid "CWAttribute_plural" +msgstr "Attributes" + +msgid "CWCache" +msgstr "CubicWeb Cache" + +msgid "CWCache_plural" +msgstr "CubicWeb Caches" + +msgid "CWComputedRType" +msgstr "Virtual relation" + +msgid "CWComputedRType_plural" +msgstr "Virtual relations" + +msgid "CWConstraint" +msgstr "Constraint" + +msgid "CWConstraintType" +msgstr "Constraint type" + +msgid "CWConstraintType_plural" +msgstr "Constraint types" + +msgid "CWConstraint_plural" +msgstr "Constraints" + +msgid "CWDataImport" +msgstr "Data import" + +msgid "CWDataImport_plural" +msgstr "Data imports" + +msgid "CWEType" +msgstr "Entity type" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "CWEType" +msgstr "Entity type" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "CWEType" +msgstr "Entity type" + +msgid "CWEType_plural" +msgstr "Entity types" + +msgid "CWGroup" +msgstr "Group" + +msgid "CWGroup_plural" +msgstr "Groups" + +msgid "CWProperty" +msgstr "Property" + +msgid "CWProperty_plural" +msgstr "Properties" + +msgid "CWRType" +msgstr "Relation type" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "CWRType" +msgstr "Relation type" + +msgid "CWRType_plural" +msgstr "Relation types" + +msgid "CWRelation" +msgstr "Relation" + +msgid "CWRelation_plural" +msgstr "Relations" + +msgid "CWSource" +msgstr "Data source" + +msgid "CWSourceHostConfig" +msgstr "Source host configuration" + +msgid "CWSourceHostConfig_plural" +msgstr "Source host configurations" + +msgid "CWSourceSchemaConfig" +msgstr "Source schema configuration" + +msgid "CWSourceSchemaConfig_plural" +msgstr "Source schema configurations" + +msgid "CWSource_plural" +msgstr "Data sources" + +msgid "CWUniqueTogetherConstraint" +msgstr "Unicity constraint" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "Unicity constraints" + +msgid "CWUser" +msgstr "User" + +msgid "CWUser_plural" +msgstr "Users" + +#, python-format +msgid "" +"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " +"linked using this relation." +msgstr "" + +#, python-format +msgid "" +"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " +"does not exists anymore in the schema." +msgstr "" + +#, python-format +msgid "" +"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " +"anymore." 
+msgstr "" + +#, python-format +msgid "" +"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " +"exist anymore" +msgstr "" + +#, python-format +msgid "" +"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " +"supported" +msgstr "" + +msgid "Click to sort on this column" +msgstr "" + +msgid "" +"Configuration of the system source goes to the 'sources' file, not in the " +"database" +msgstr "" + +#, python-format +msgid "Created %(etype)s : %(entity)s" +msgstr "" + +msgid "DEBUG" +msgstr "" + +msgid "Date" +msgstr "Date" + +msgid "Date_plural" +msgstr "Dates" + +msgid "Datetime" +msgstr "Date and time" + +msgid "Datetime_plural" +msgstr "Dates and times" + +msgid "Decimal" +msgstr "Decimal number" + +msgid "Decimal_plural" +msgstr "Decimal numbers" + +#, python-format +msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "" + +#, python-format +msgid "Deleted %(etype)s : %(entity)s" +msgstr "" + +msgid "Detected problems" +msgstr "" + +msgid "Do you want to delete the following element(s)?" +msgstr "" + +msgid "Download schema as OWL" +msgstr "" + +msgid "ERROR" +msgstr "" + +msgid "EmailAddress" +msgstr "Email address" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "EmailAddress" +msgstr "Email address" + +msgid "EmailAddress_plural" +msgstr "Email addresses" + +msgid "Entities" +msgstr "" + +#, python-format +msgid "" +"Entity %(eid)s has changed since you started to edit it. Reload the page and " +"reapply your changes." +msgstr "" + +msgid "Entity and relation supported by this source" +msgstr "" + +msgid "ExternalUri" +msgstr "External Uri" + +msgid "ExternalUri_plural" +msgstr "External Uris" + +msgid "FATAL" +msgstr "" + +msgid "Float" +msgstr "Float" + +msgid "Float_plural" +msgstr "Floats" + +# schema pot file, generated on 2009-12-03 09:22:35 +# +# singular and plural forms for each entity type +msgid "FormatConstraint" +msgstr "format constraint" + +msgid "Garbage collection information" +msgstr "" + +msgid "Help" +msgstr "" + +msgid "INFO" +msgstr "" + +msgid "Instance" +msgstr "" + +msgid "Int" +msgstr "Integer" + +msgid "Int_plural" +msgstr "Integers" + +msgid "Interval" +msgstr "Interval" + +msgid "IntervalBoundConstraint" +msgstr "Interval constraint" + +msgid "Interval_plural" +msgstr "Intervals" + +msgid "Link:" +msgstr "" + +msgid "Looked up classes" +msgstr "" + +msgid "Manage" +msgstr "" + +msgid "Manage security" +msgstr "" + +msgid "Message threshold" +msgstr "" + +msgid "Most referenced classes" +msgstr "" + +msgid "New BaseTransition" +msgstr "XXX" + +msgid "New Bookmark" +msgstr "New bookmark" + +msgid "New CWAttribute" +msgstr "New attribute" + +msgid "New CWCache" +msgstr "New cache" + +msgid "New CWComputedRType" +msgstr "New virtual relation" + +msgid "New CWConstraint" +msgstr "New constraint" + +msgid "New CWConstraintType" +msgstr "New constraint type" + +msgid "New CWDataImport" +msgstr "New data import" + +msgid "New CWEType" +msgstr "New entity type" + +msgid "New CWGroup" +msgstr "New group" + +msgid "New CWProperty" +msgstr "New property" + +msgid "New CWRType" +msgstr "New relation type" + +msgid "New CWRelation" +msgstr "New relation" + +msgid "New CWSource" +msgstr "New source" + +msgid "New CWSourceHostConfig" +msgstr "New source host configuration" + +msgid "New CWSourceSchemaConfig" +msgstr "New source schema configuration" + +msgid "New CWUniqueTogetherConstraint" +msgstr "New unicity constraint" + +msgid "New CWUser" +msgstr "New user" + +msgid "New EmailAddress" +msgstr 
"New email address" + +msgid "New ExternalUri" +msgstr "New external URI" + +msgid "New RQLExpression" +msgstr "New RQL expression" + +msgid "New State" +msgstr "New state" + +msgid "New SubWorkflowExitPoint" +msgstr "New subworkflow exit-point" + +msgid "New TrInfo" +msgstr "New transition information" + +msgid "New Transition" +msgstr "New transition" + +msgid "New Workflow" +msgstr "New workflow" + +msgid "New WorkflowTransition" +msgstr "New workflow-transition" + +msgid "No result matching query" +msgstr "" + +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "" + +msgid "OR" +msgstr "" + +msgid "Ownership" +msgstr "" + +msgid "Parent class:" +msgstr "" + +msgid "Password" +msgstr "Password" + +msgid "Password_plural" +msgstr "Passwords" + +msgid "Please note that this is only a shallow copy" +msgstr "" + +msgid "Powered by CubicWeb" +msgstr "" + +msgid "RQLConstraint" +msgstr "RQL constraint" + +msgid "RQLExpression" +msgstr "RQL expression" + +msgid "RQLExpression_plural" +msgstr "RQL expressions" + +msgid "RQLUniqueConstraint" +msgstr "RQL unique constraint" + +msgid "RQLVocabularyConstraint" +msgstr "RQL vocabulary constraint" + +msgid "RegexpConstraint" +msgstr "regular expression constrainte" + +msgid "Registry's content" +msgstr "" + +msgid "Relations" +msgstr "" + +msgid "Repository" +msgstr "" + +#, python-format +msgid "Schema %s" +msgstr "" + +msgid "Schema's permissions definitions" +msgstr "" + +msgid "Search for" +msgstr "" + +msgid "Site information" +msgstr "" + +msgid "SizeConstraint" +msgstr "size constraint" + +msgid "" +"Source's configuration for a particular host. One key=value per line, " +"authorized keys depending on the source's type, overriding values defined on " +"the source." +msgstr "" + +msgid "Startup views" +msgstr "" + +msgid "State" +msgstr "State" + +msgid "State_plural" +msgstr "States" + +msgid "StaticVocabularyConstraint" +msgstr "vocabulary constraint" + +msgid "String" +msgstr "String" + +msgid "String_plural" +msgstr "Strings" + +msgid "Sub-classes:" +msgstr "" + +msgid "SubWorkflowExitPoint" +msgstr "Subworkflow exit-point" + +msgid "SubWorkflowExitPoint_plural" +msgstr "subworkflow exit-points" + +msgid "Submit bug report" +msgstr "" + +msgid "Submit bug report by mail" +msgstr "" + +msgid "TZDatetime" +msgstr "International date and time" + +msgid "TZDatetime_plural" +msgstr "International dates and times" + +msgid "TZTime" +msgstr "International time" + +msgid "TZTime_plural" +msgstr "International times" + +#, python-format +msgid "The view %s can not be applied to this query" +msgstr "" + +#, python-format +msgid "The view %s could not be found" +msgstr "" + +msgid "There is no default workflow" +msgstr "" + +msgid "This BaseTransition:" +msgstr "This abstract transition:" + +msgid "This Bookmark:" +msgstr "This bookmark:" + +msgid "This CWAttribute:" +msgstr "This attribute:" + +msgid "This CWCache:" +msgstr "This cache:" + +msgid "This CWComputedRType:" +msgstr "This virtual relation:" + +msgid "This CWConstraint:" +msgstr "This constraint:" + +msgid "This CWConstraintType:" +msgstr "This constraint type:" + +msgid "This CWDataImport:" +msgstr "This data import:" + +msgid "This CWEType:" +msgstr "This entity type:" + +msgid "This CWGroup:" +msgstr "This group:" + +msgid "This CWProperty:" +msgstr "This property:" + +msgid "This CWRType:" +msgstr "This relation type:" + +msgid "This CWRelation:" +msgstr "This relation:" + +msgid "This CWSource:" +msgstr "This data source:" + +msgid "This 
CWSourceHostConfig:" +msgstr "This source host configuration:" + +msgid "This CWSourceSchemaConfig:" +msgstr "This source schema configuration:" + +msgid "This CWUniqueTogetherConstraint:" +msgstr "This unicity constraint:" + +msgid "This CWUser:" +msgstr "This user:" + +msgid "This EmailAddress:" +msgstr "This email address:" + +msgid "This ExternalUri:" +msgstr "This external URI:" + +msgid "This RQLExpression:" +msgstr "This RQL expression:" + +msgid "This State:" +msgstr "This state:" + +msgid "This SubWorkflowExitPoint:" +msgstr "This subworkflow exit-point:" + +msgid "This TrInfo:" +msgstr "This transition information:" + +msgid "This Transition:" +msgstr "This transition:" + +msgid "This Workflow:" +msgstr "This workflow:" + +msgid "This WorkflowTransition:" +msgstr "This workflow-transition:" + +msgid "" +"This action is forbidden. If you think it should be allowed, please contact " +"the site administrator." +msgstr "" + +msgid "This entity type permissions:" +msgstr "" + +msgid "Time" +msgstr "Time" + +msgid "Time_plural" +msgstr "Times" + +msgid "TrInfo" +msgstr "Transition information" + +msgid "TrInfo_plural" +msgstr "Workflow history" + +msgid "Transition" +msgstr "Transition" + +msgid "Transition_plural" +msgstr "Transitions" + +msgid "URLs from which content will be imported. You can put one url per line" +msgstr "" + +msgid "Undoable actions" +msgstr "" + +msgid "Undoing" +msgstr "" + +msgid "UniqueConstraint" +msgstr "unique constraint" + +msgid "Unknown source type" +msgstr "" + +msgid "Unreachable objects" +msgstr "" + +#, python-format +msgid "Updated %(etype)s : %(entity)s" +msgstr "" + +msgid "Used by:" +msgstr "" + +msgid "Users and groups management" +msgstr "" + +msgid "WARNING" +msgstr "" + +msgid "Web server" +msgstr "" + +msgid "Workflow" +msgstr "Workflow" + +msgid "Workflow history" +msgstr "" + +msgid "WorkflowTransition" +msgstr "Workflow-transition" + +msgid "WorkflowTransition_plural" +msgstr "Workflow-transitions" + +msgid "Workflow_plural" +msgstr "Workflows" + +msgid "" +"You can either submit a new file using the browse button above, or choose to " +"remove already uploaded file by checking the \"detach attached file\" check-" +"box, or edit file content online with the widget below." +msgstr "" + +msgid "" +"You can either submit a new file using the browse button above, or edit file " +"content online with the widget below." +msgstr "" + +msgid "You can't change this relation" +msgstr "" + +msgid "You cannot remove the system source" +msgstr "" + +msgid "You cannot rename the system source" +msgstr "" + +msgid "" +"You have no access to this view or it can not be used to display the current " +"data." +msgstr "" + +msgid "" +"You're not authorized to access this page. If you think you should, please " +"contact the site administrator." +msgstr "" + +#, python-format +msgid "[%s supervision] changes summary" +msgstr "" + +msgid "" +"a RQL expression which should return some results, else the transition won't " +"be available. This query may use X and U variables that will respectivly " +"represents the current entity and the current user." +msgstr "" + +msgid "a URI representing an object in external data store" +msgstr "" + +msgid "a float is expected" +msgstr "" + +msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" +msgstr "" + +msgid "" +"a simple cache entity characterized by a name and a validity date. 
The " +"target application is responsible for updating timestamp when necessary to " +"invalidate the cache (typically in hooks). Also, checkout the AppObject." +"get_cache() method." +msgstr "" + +msgid "abstract base class for transitions" +msgstr "" + +msgid "action menu" +msgstr "" + +msgid "action(s) on this selection" +msgstr "" + +msgid "actions" +msgstr "" + +msgid "activate" +msgstr "" + +msgid "activated" +msgstr "" + +msgid "add" +msgstr "" + +msgid "add Bookmark bookmarked_by CWUser object" +msgstr "bookmark" + +msgid "add CWAttribute add_permission RQLExpression subject" +msgstr "rql expression for add permission" + +msgid "add CWAttribute constrained_by CWConstraint subject" +msgstr "constraint" + +msgid "add CWAttribute read_permission RQLExpression subject" +msgstr "read rql expression" + +msgid "add CWAttribute relation_type CWRType object" +msgstr "attribute definition" + +msgid "add CWAttribute update_permission RQLExpression subject" +msgstr "rql expression for update permission" + +msgid "add CWEType add_permission RQLExpression subject" +msgstr "rql expression for add permission" + +msgid "add CWEType delete_permission RQLExpression subject" +msgstr "rql expression for delete permission" + +msgid "add CWEType read_permission RQLExpression subject" +msgstr "rql expression for read permission" + +msgid "add CWEType update_permission RQLExpression subject" +msgstr "rql expression for update permission" + +msgid "add CWProperty for_user CWUser object" +msgstr "property" + +msgid "add CWRelation add_permission RQLExpression subject" +msgstr "add rql expression" + +msgid "add CWRelation constrained_by CWConstraint subject" +msgstr "constraint" + +msgid "add CWRelation delete_permission RQLExpression subject" +msgstr "delete rql expression" + +msgid "add CWRelation read_permission RQLExpression subject" +msgstr "read rql expression" + +msgid "add CWRelation relation_type CWRType object" +msgstr "relation definition" + +msgid "add CWSourceHostConfig cw_host_config_of CWSource object" +msgstr "host configuration" + +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "unicity constraint" + +msgid "add CWUser in_group CWGroup object" +msgstr "user" + +msgid "add CWUser use_email EmailAddress subject" +msgstr "email address" + +msgid "add State allowed_transition Transition object" +msgstr "incoming state" + +msgid "add State allowed_transition Transition subject" +msgstr "allowed transition" + +msgid "add State allowed_transition WorkflowTransition subject" +msgstr "workflow-transition" + +msgid "add State state_of Workflow object" +msgstr "state" + +msgid "add Transition condition RQLExpression subject" +msgstr "condition" + +msgid "add Transition destination_state State object" +msgstr "incoming transition" + +msgid "add Transition destination_state State subject" +msgstr "destination state" + +msgid "add Transition transition_of Workflow object" +msgstr "transition" + +msgid "add WorkflowTransition condition RQLExpression subject" +msgstr "workflow-transition" + +msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" +msgstr "subworkflow exit-point" + +msgid "add WorkflowTransition transition_of Workflow object" +msgstr "workflow-transition" + +msgid "add a BaseTransition" +msgstr "" + +msgid "add a Bookmark" +msgstr "" + +msgid "add a CWAttribute" +msgstr "" + +msgid "add a CWCache" +msgstr "" + +msgid "add a CWComputedRType" +msgstr "" + +msgid "add a CWConstraint" +msgstr "" + +msgid "add a CWConstraintType" +msgstr "" + 
+msgid "add a CWDataImport" +msgstr "" + +msgid "add a CWEType" +msgstr "" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "add a CWEType" +msgstr "add an entity type" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "add a CWEType" +msgstr "add an entity type" + +msgid "add a CWGroup" +msgstr "" + +msgid "add a CWProperty" +msgstr "" + +msgid "add a CWRType" +msgstr "" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "add a CWRType" +msgstr "add a relation type" + +msgid "add a CWRelation" +msgstr "" + +msgid "add a CWSource" +msgstr "" + +msgid "add a CWSourceHostConfig" +msgstr "" + +msgid "add a CWSourceSchemaConfig" +msgstr "" + +msgid "add a CWUniqueTogetherConstraint" +msgstr "" + +msgid "add a CWUser" +msgstr "" + +msgid "add a EmailAddress" +msgstr "" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "add a EmailAddress" +msgstr "add an email address" + +msgid "add a ExternalUri" +msgstr "" + +msgid "add a RQLExpression" +msgstr "" + +msgid "add a State" +msgstr "" + +msgid "add a SubWorkflowExitPoint" +msgstr "" + +msgid "add a TrInfo" +msgstr "" + +msgid "add a Transition" +msgstr "" + +msgid "add a Workflow" +msgstr "" + +msgid "add a WorkflowTransition" +msgstr "" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgid "add_permission" +msgstr "can be added by" + +msgctxt "CWAttribute" +msgid "add_permission" +msgstr "add permission" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgctxt "CWEType" +msgid "add_permission" +msgstr "add permission" + +msgctxt "CWRelation" +msgid "add_permission" +msgstr "add permission" + +msgid "add_permission_object" +msgstr "has permission to add" + +msgctxt "CWGroup" +msgid "add_permission_object" +msgstr "can add" + +msgctxt "RQLExpression" +msgid "add_permission_object" +msgstr "used to define add permission on" + +msgid "add_relation" +msgstr "add" + +#, python-format +msgid "added %(etype)s #%(eid)s (%(title)s)" +msgstr "" + +#, python-format +msgid "" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" + +msgid "additional type specific properties" +msgstr "" + +msgid "addrelated" +msgstr "add" + +msgid "address" +msgstr "" + +msgctxt "EmailAddress" +msgid "address" +msgstr "address" + +msgid "alias" +msgstr "" + +msgctxt "EmailAddress" +msgid "alias" +msgstr "alias" + +msgid "allow to set a specific workflow for an entity" +msgstr "" + +msgid "allowed options depends on the source type" +msgstr "" + +msgid "allowed transitions from this state" +msgstr "" + +#, python-format +msgid "allowed values for \"action\" are %s" +msgstr "" + +msgid "allowed_transition" +msgstr "allowed transition" + +msgctxt "State" +msgid "allowed_transition" +msgstr "allowed transition" + +msgid "allowed_transition_object" +msgstr "incoming states" + +msgctxt "BaseTransition" +msgid "allowed_transition_object" +msgstr "incoming states" + +msgctxt "Transition" +msgid "allowed_transition_object" +msgstr "incoming states" + +msgctxt "WorkflowTransition" +msgid "allowed_transition_object" +msgstr "incoming states" + +msgid "an electronic mail address associated to a short alias" +msgstr "" + +msgid "an error occurred" +msgstr "" + +msgid "an error occurred while processing your request" +msgstr "" + +msgid "an error occurred, the request cannot be fulfilled" +msgstr "" + +msgid "an integer is expected" +msgstr "" + +msgid "and linked" +msgstr "" + +msgid "and/or between different 
values" +msgstr "" + +msgid "anyrsetview" +msgstr "rset views" + +msgid "april" +msgstr "" + +#, python-format +msgid "archive for %(author)s" +msgstr "" + +#, python-format +msgid "archive for %(month)s/%(year)s" +msgstr "" + +#, python-format +msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" +msgstr "" + +msgid "attribute" +msgstr "" + +msgid "august" +msgstr "" + +msgid "authentication failure" +msgstr "" + +msgid "auto" +msgstr "automatic" + +msgid "autocomputed attribute used to ensure transition coherency" +msgstr "" + +msgid "automatic" +msgstr "" + +#, python-format +msgid "back to pagination (%s results)" +msgstr "" + +msgid "bad value" +msgstr "" + +msgid "badly formatted url" +msgstr "" + +msgid "base url" +msgstr "" + +msgid "bookmark has been removed" +msgstr "" + +msgid "bookmark this page" +msgstr "" + +msgid "bookmark this search" +msgstr "" + +msgid "bookmarked_by" +msgstr "bookmarked by" + +msgctxt "Bookmark" +msgid "bookmarked_by" +msgstr "bookmarked by" + +msgid "bookmarked_by_object" +msgstr "has bookmarks" + +msgctxt "CWUser" +msgid "bookmarked_by_object" +msgstr "uses bookmarks" + +msgid "bookmarks" +msgstr "" + +msgid "bookmarks are used to have user's specific internal links" +msgstr "" + +msgid "boxes" +msgstr "" + +msgid "bug report sent" +msgstr "" + +msgid "button_apply" +msgstr "apply" + +msgid "button_cancel" +msgstr "cancel" + +msgid "button_delete" +msgstr "delete" + +msgid "button_ok" +msgstr "validate" + +msgid "by" +msgstr "" + +msgid "by relation" +msgstr "" + +msgid "by_transition" +msgstr "by transition" + +msgctxt "TrInfo" +msgid "by_transition" +msgstr "by transition" + +msgid "by_transition_object" +msgstr "transition information" + +msgctxt "BaseTransition" +msgid "by_transition_object" +msgstr "transition information" + +msgctxt "Transition" +msgid "by_transition_object" +msgstr "transition information" + +msgctxt "WorkflowTransition" +msgid "by_transition_object" +msgstr "transition information" + +msgid "calendar" +msgstr "" + +msgid "can not resolve entity types:" +msgstr "" + +msgid "can only have one url" +msgstr "" + +msgid "can't be changed" +msgstr "" + +msgid "can't be deleted" +msgstr "" + +msgid "can't change this attribute" +msgstr "" + +#, python-format +msgid "can't display data, unexpected error: %s" +msgstr "" + +msgid "can't have multiple exits on the same state" +msgstr "" + +#, python-format +msgid "can't parse %(value)r (expected %(format)s)" +msgstr "" + +#, python-format +msgid "" +"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " +"%(value)s) does not exist any longer" +msgstr "" + +#, python-format +msgid "" +"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " +"exist in the schema anymore." 
+msgstr "" + +#, python-format +msgid "can't restore state of entity %s, it has been deleted inbetween" +msgstr "" + +#, python-format +msgid "" +"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" +"%(card)s" +msgstr "" + +msgid "cancel" +msgstr "" + +msgid "cancel select" +msgstr "" + +msgid "cancel this insert" +msgstr "" + +msgid "cardinality" +msgstr "cardinality" + +msgctxt "CWAttribute" +msgid "cardinality" +msgstr "cardinality" + +msgctxt "CWRelation" +msgid "cardinality" +msgstr "cardinality" + +msgid "category" +msgstr "" + +#, python-format +msgid "changed state of %(etype)s #%(eid)s (%(title)s)" +msgstr "" + +msgid "changes applied" +msgstr "" + +msgid "click here to see created entity" +msgstr "" + +msgid "click here to see edited entity" +msgstr "" + +msgid "click on the box to cancel the deletion" +msgstr "" + +msgid "click to add a value" +msgstr "" + +msgid "click to delete this value" +msgstr "" + +msgid "click to edit this field" +msgstr "" + +msgid "close all" +msgstr "" + +msgid "comment" +msgstr "" + +msgctxt "TrInfo" +msgid "comment" +msgstr "comment" + +msgid "comment_format" +msgstr "format" + +msgctxt "TrInfo" +msgid "comment_format" +msgstr "format" + +msgid "components" +msgstr "" + +msgid "components_navigation" +msgstr "page navigation" + +msgid "components_navigation_description" +msgstr "pagination component for large resultsets" + +msgid "components_rqlinput" +msgstr "rql input box" + +msgid "components_rqlinput_description" +msgstr "the rql box in the page's header" + +msgid "composite" +msgstr "" + +msgctxt "CWRelation" +msgid "composite" +msgstr "composite" + +msgid "condition" +msgstr "condition" + +msgctxt "BaseTransition" +msgid "condition" +msgstr "condition" + +msgctxt "Transition" +msgid "condition" +msgstr "condition" + +msgctxt "WorkflowTransition" +msgid "condition" +msgstr "condition" + +msgid "condition_object" +msgstr "condition of" + +msgctxt "RQLExpression" +msgid "condition_object" +msgstr "condition of" + +msgid "conditions" +msgstr "" + +msgid "config" +msgstr "" + +msgctxt "CWSource" +msgid "config" +msgstr "configuration" + +msgctxt "CWSourceHostConfig" +msgid "config" +msgstr "configuration" + +msgid "config mode" +msgstr "" + +msgid "config type" +msgstr "" + +msgid "confirm password" +msgstr "" + +msgid "constrained_by" +msgstr "constrained by" + +msgctxt "CWAttribute" +msgid "constrained_by" +msgstr "constrained by" + +msgctxt "CWRelation" +msgid "constrained_by" +msgstr "constrained by" + +msgid "constrained_by_object" +msgstr "constraints" + +msgctxt "CWConstraint" +msgid "constrained_by_object" +msgstr "constraints" + +msgid "constraint factory" +msgstr "" + +msgid "constraint_of" +msgstr "constraint of" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "constraint of" + +msgid "constraint_of_object" +msgstr "constrained by" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "constrained by" + +msgid "constraints" +msgstr "" + +msgid "constraints applying on this relation" +msgstr "" + +msgid "content type" +msgstr "" + +msgid "context" +msgstr "" + +msgid "context where this box should be displayed" +msgstr "" + +msgid "context where this component should be displayed" +msgstr "" + +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" + +msgid "control subject entity's relations order" +msgstr "" + +msgid "copy" +msgstr "" + +msgid "core relation indicating a user's groups" +msgstr "" + +msgid "" +"core relation indicating owners of an entity. 
This relation implicitly put " +"the owner into the owners group for the entity" +msgstr "" + +msgid "core relation indicating the original creator of an entity" +msgstr "" + +msgid "core relation indicating the type of an entity" +msgstr "" + +msgid "" +"core relation indicating the types (including specialized types) of an entity" +msgstr "" + +msgid "could not connect to the SMTP server" +msgstr "" + +msgid "create an index for quick search on this attribute" +msgstr "" + +msgid "created on" +msgstr "" + +msgid "created_by" +msgstr "created by" + +msgid "created_by_object" +msgstr "has created" + +msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" +msgstr "creating bookmark for %(linkto)s" + +msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" +msgstr "creating attribute %(linkto)s" + +msgid "" +"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" +msgstr "creating constraint for attribute %(linkto)s" + +msgid "" +"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" +msgstr "creating constraint for relation %(linkto)s" + +msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" +msgstr "creating property for user %(linkto)s" + +msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" +msgstr "creating relation %(linkto)s" + +msgid "" +"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " +"%(linkto)s)" +msgstr "creating host configuration for source %(linkto)s" + +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "creating unique together constraint for entity type %(linkto)s" + +msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" +msgstr "creating a new user in group %(linkto)s" + +msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" +msgstr "creating email address for user %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" +msgstr "RQL expression granting add permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" +msgstr "RQL expression granting read permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s update_permission " +"RQLExpression)" +msgstr "RQL expression granting update permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" +msgstr "creating rql expression for add permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" +msgstr "creating rql expression for delete permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" +msgstr "creating rql expression for read permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" +msgstr "creating rql expression for update permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" +msgstr "RQL expression granting add permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s delete_permission " +"RQLExpression)" +msgstr "RQL expression granting delete permission on %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" +msgstr "RQL expression granting read permission on %(linkto)s" + +msgid 
"creating RQLExpression (Transition %(linkto)s condition RQLExpression)" +msgstr "creating rql expression for transition %(linkto)s" + +msgid "" +"creating RQLExpression (WorkflowTransition %(linkto)s condition " +"RQLExpression)" +msgstr "creating rql expression for workflow-transition %(linkto)s" + +msgid "creating State (State allowed_transition Transition %(linkto)s)" +msgstr "creating a state able to trigger transition %(linkto)s" + +msgid "creating State (State state_of Workflow %(linkto)s)" +msgstr "creating state of workflow %(linkto)s" + +msgid "creating State (Transition %(linkto)s destination_state State)" +msgstr "creating destination state for transition %(linkto)s" + +msgid "" +"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " +"subworkflow_exit SubWorkflowExitPoint)" +msgstr "creating subworkflow exit-point for workflow-transition %(linkto)s" + +msgid "creating Transition (State %(linkto)s allowed_transition Transition)" +msgstr "creating triggerable transition for state %(linkto)s" + +msgid "creating Transition (Transition destination_state State %(linkto)s)" +msgstr "creating transition leading to state %(linkto)s" + +msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" +msgstr "creating transition of workflow %(linkto)s" + +msgid "" +"creating WorkflowTransition (State %(linkto)s allowed_transition " +"WorkflowTransition)" +msgstr "creating workflow-transition leading to state %(linkto)s" + +msgid "" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" +msgstr "creating workflow-transition of workflow %(linkto)s" + +msgid "creation" +msgstr "" + +msgid "creation date" +msgstr "" + +msgid "creation time of an entity" +msgstr "" + +msgid "creation_date" +msgstr "creation date" + +msgid "cstrtype" +msgstr "constraint's type" + +msgctxt "CWConstraint" +msgid "cstrtype" +msgstr "constraint type" + +msgid "cstrtype_object" +msgstr "used by" + +msgctxt "CWConstraintType" +msgid "cstrtype_object" +msgstr "constraint type of" + +msgid "csv export" +msgstr "CSV export" + +msgid "csv export (entities)" +msgstr "CSV export (entities)" + +msgid "ctxcomponents" +msgstr "contextual components" + +msgid "ctxcomponents_anonuserlink" +msgstr "user link" + +msgid "ctxcomponents_anonuserlink_description" +msgstr "" +"for anonymous users, this is a link pointing to authentication form, for " +"logged in users, this is a link that makes a box appear and listing some " +"possible user actions" + +msgid "ctxcomponents_appliname" +msgstr "application title" + +msgid "ctxcomponents_appliname_description" +msgstr "display the application title in the page's header" + +msgid "ctxcomponents_bookmarks_box" +msgstr "bookmarks box" + +msgid "ctxcomponents_bookmarks_box_description" +msgstr "box listing the user's bookmarks" + +msgid "ctxcomponents_breadcrumbs" +msgstr "breadcrumb" + +msgid "ctxcomponents_breadcrumbs_description" +msgstr "breadcrumbs bar that display a path locating the page in the site" + +msgid "ctxcomponents_download_box" +msgstr "download box" + +msgid "ctxcomponents_download_box_description" +msgstr "" + +msgid "ctxcomponents_edit_box" +msgstr "actions box" + +msgid "ctxcomponents_edit_box_description" +msgstr "box listing the applicable actions on the displayed data" + +msgid "ctxcomponents_facet.filterbox" +msgstr "facets box" + +msgid "ctxcomponents_facet.filterbox_description" +msgstr "box providing filter within current search results functionality" + +msgid "ctxcomponents_logo" +msgstr "logo" + +msgid 
"ctxcomponents_logo_description" +msgstr "the application's icon displayed in the page's header" + +msgid "ctxcomponents_metadata" +msgstr "entity's metadata" + +msgid "ctxcomponents_metadata_description" +msgstr "" + +msgid "ctxcomponents_possible_views_box" +msgstr "possible views box" + +msgid "ctxcomponents_possible_views_box_description" +msgstr "box listing the possible views for the displayed data" + +msgid "ctxcomponents_prevnext" +msgstr "previous / next entity" + +msgid "ctxcomponents_prevnext_description" +msgstr "" +"display link to go from one entity to another on entities implementing the " +"\"previous/next\" interface." + +msgid "ctxcomponents_rss" +msgstr "rss box" + +msgid "ctxcomponents_rss_description" +msgstr "RSS icon to get displayed data as a RSS thread" + +msgid "ctxcomponents_search_box" +msgstr "search box" + +msgid "ctxcomponents_search_box_description" +msgstr "search box" + +msgid "ctxcomponents_startup_views_box" +msgstr "startup views box" + +msgid "ctxcomponents_startup_views_box_description" +msgstr "box listing the possible start pages" + +msgid "ctxcomponents_userstatus" +msgstr "" + +msgid "ctxcomponents_userstatus_description" +msgstr "" + +msgid "ctxcomponents_wfhistory" +msgstr "workflow history" + +msgid "ctxcomponents_wfhistory_description" +msgstr "show the workflow's history." + +msgid "ctxtoolbar" +msgstr "toolbar" + +msgid "custom_workflow" +msgstr "custom workflow" + +msgid "custom_workflow_object" +msgstr "custom workflow of" + +msgid "cw.groups-management" +msgstr "groups" + +msgid "cw.users-management" +msgstr "users" + +msgid "cw_for_source" +msgstr "for source" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_for_source" +msgstr "for source" + +msgid "cw_for_source_object" +msgstr "mapping" + +msgctxt "CWSource" +msgid "cw_for_source_object" +msgstr "mapping" + +msgid "cw_host_config_of" +msgstr "source" + +msgctxt "CWSourceHostConfig" +msgid "cw_host_config_of" +msgstr "source" + +msgid "cw_host_config_of_object" +msgstr "host configuration" + +msgctxt "CWSource" +msgid "cw_host_config_of_object" +msgstr "host configuration" + +msgid "cw_import_of" +msgstr "source" + +msgctxt "CWDataImport" +msgid "cw_import_of" +msgstr "source" + +msgid "cw_import_of_object" +msgstr "imports" + +msgctxt "CWSource" +msgid "cw_import_of_object" +msgstr "imports" + +msgid "cw_schema" +msgstr "maps" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_schema" +msgstr "maps" + +msgid "cw_schema_object" +msgstr "mapped by" + +msgctxt "CWEType" +msgid "cw_schema_object" +msgstr "mapped by" + +msgctxt "CWRType" +msgid "cw_schema_object" +msgstr "mapped by" + +msgctxt "CWRelation" +msgid "cw_schema_object" +msgstr "mapped by" + +msgid "cw_source" +msgstr "source" + +msgid "cw_source_object" +msgstr "contains entities" + +msgid "cwetype-box" +msgstr "\"box\" view" + +msgid "cwetype-description" +msgstr "description" + +msgid "cwetype-permissions" +msgstr "permissions" + +msgid "cwetype-views" +msgstr "views" + +msgid "cwetype-workflow" +msgstr "workflow" + +msgid "cwgroup-main" +msgstr "description" + +msgid "cwgroup-permissions" +msgstr "permissions" + +msgid "cwrtype-description" +msgstr "description" + +msgid "cwrtype-permissions" +msgstr "permissions" + +msgid "cwsource-imports" +msgstr "" + +msgid "cwsource-main" +msgstr "description" + +msgid "cwsource-mapping" +msgstr "mapping" + +msgid "cwuri" +msgstr "internal uri" + +msgid "data directory url" +msgstr "" + +msgid "data model schema" +msgstr "" + +msgid "data sources" +msgstr "" + +msgid "data sources management" 
+msgstr "" + +msgid "date" +msgstr "" + +msgid "deactivate" +msgstr "" + +msgid "deactivated" +msgstr "" + +msgid "december" +msgstr "" + +msgid "default" +msgstr "" + +msgid "default text format for rich text fields." +msgstr "" + +msgid "default user workflow" +msgstr "" + +msgid "default value" +msgstr "" + +msgid "default value as gziped pickled python object" +msgstr "" + +msgid "default workflow for an entity type" +msgstr "" + +msgid "default_workflow" +msgstr "default workflow" + +msgctxt "CWEType" +msgid "default_workflow" +msgstr "default workflow" + +msgid "default_workflow_object" +msgstr "default workflow of" + +msgctxt "Workflow" +msgid "default_workflow_object" +msgstr "default workflow of" + +msgid "defaultval" +msgstr "default value" + +msgctxt "CWAttribute" +msgid "defaultval" +msgstr "default value" + +msgid "define a CubicWeb user" +msgstr "" + +msgid "define a CubicWeb users group" +msgstr "" + +msgid "" +"define a final relation: link a final relation type from a non final entity " +"to a final entity type. used to build the instance schema" +msgstr "" + +msgid "" +"define a non final relation: link a non final relation type from a non final " +"entity to a non final entity type. used to build the instance schema" +msgstr "" + +msgid "define a relation type, used to build the instance schema" +msgstr "" + +msgid "define a rql expression used to define permissions" +msgstr "" + +msgid "define a schema constraint" +msgstr "" + +msgid "define a schema constraint type" +msgstr "" + +msgid "define a virtual relation type, used to build the instance schema" +msgstr "" + +msgid "define an entity type, used to build the instance schema" +msgstr "" + +msgid "define how we get out from a sub-workflow" +msgstr "" + +msgid "defines a sql-level multicolumn unique index" +msgstr "" + +msgid "" +"defines what's the property is applied for. 
You must select this first to be " +"able to set value" +msgstr "" + +msgid "delete" +msgstr "" + +msgid "delete this bookmark" +msgstr "" + +msgid "delete this relation" +msgstr "" + +msgid "delete_permission" +msgstr "can be deleted by" + +msgctxt "CWEType" +msgid "delete_permission" +msgstr "delete permission" + +msgctxt "CWRelation" +msgid "delete_permission" +msgstr "delete_permission" + +msgid "delete_permission_object" +msgstr "has permission to delete" + +msgctxt "CWGroup" +msgid "delete_permission_object" +msgstr "has permission to delete" + +msgctxt "RQLExpression" +msgid "delete_permission_object" +msgstr "has permission to delete" + +#, python-format +msgid "deleted %(etype)s #%(eid)s (%(title)s)" +msgstr "" + +#, python-format +msgid "" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" + +msgid "depends on the constraint type" +msgstr "" + +msgid "description" +msgstr "description" + +msgctxt "BaseTransition" +msgid "description" +msgstr "description" + +msgctxt "CWAttribute" +msgid "description" +msgstr "description" + +msgctxt "CWComputedRType" +msgid "description" +msgstr "description" + +msgctxt "CWEType" +msgid "description" +msgstr "description" + +msgctxt "CWRType" +msgid "description" +msgstr "description" + +msgctxt "CWRelation" +msgid "description" +msgstr "description" + +msgctxt "State" +msgid "description" +msgstr "description" + +msgctxt "Transition" +msgid "description" +msgstr "description" + +msgctxt "Workflow" +msgid "description" +msgstr "description" + +msgctxt "WorkflowTransition" +msgid "description" +msgstr "description" + +msgid "description_format" +msgstr "format" + +msgctxt "BaseTransition" +msgid "description_format" +msgstr "format" + +msgctxt "CWAttribute" +msgid "description_format" +msgstr "format" + +msgctxt "CWComputedRType" +msgid "description_format" +msgstr "format" + +msgctxt "CWEType" +msgid "description_format" +msgstr "format" + +msgctxt "CWRType" +msgid "description_format" +msgstr "format" + +msgctxt "CWRelation" +msgid "description_format" +msgstr "format" + +msgctxt "State" +msgid "description_format" +msgstr "format" + +msgctxt "Transition" +msgid "description_format" +msgstr "format" + +msgctxt "Workflow" +msgid "description_format" +msgstr "format" + +msgctxt "WorkflowTransition" +msgid "description_format" +msgstr "format" + +msgid "destination state for this transition" +msgstr "" + +msgid "destination state must be in the same workflow as our parent transition" +msgstr "" + +msgid "destination state of a transition" +msgstr "" + +msgid "" +"destination state. No destination state means that transition should go back " +"to the state from which we've entered the subworkflow." 
+msgstr "" + +msgid "destination_state" +msgstr "destination state" + +msgctxt "SubWorkflowExitPoint" +msgid "destination_state" +msgstr "destination state" + +msgctxt "Transition" +msgid "destination_state" +msgstr "destination state" + +msgid "destination_state_object" +msgstr "destination of" + +msgctxt "State" +msgid "destination_state_object" +msgstr "destination of" + +msgid "detach attached file" +msgstr "" + +msgid "display order of the box" +msgstr "" + +msgid "display order of the component" +msgstr "" + +msgid "display order of the facet" +msgstr "" + +msgid "display the box or not" +msgstr "" + +msgid "display the component or not" +msgstr "" + +msgid "display the facet or not" +msgstr "" + +msgid "download" +msgstr "" + +#, python-format +msgid "download %s" +msgstr "" + +msgid "download icon" +msgstr "" + +msgid "download schema as owl" +msgstr "" + +msgid "edit bookmarks" +msgstr "" + +msgid "edit canceled" +msgstr "" + +msgid "editable-table" +msgstr "" + +msgid "eid" +msgstr "" + +msgid "embedded html" +msgstr "" + +msgid "end_timestamp" +msgstr "end timestamp" + +msgctxt "CWDataImport" +msgid "end_timestamp" +msgstr "end timestamp" + +msgid "entities deleted" +msgstr "" + +msgid "entity and relation types can't be mapped, only attributes or relations" +msgstr "" + +msgid "entity copied" +msgstr "" + +msgid "entity created" +msgstr "" + +msgid "entity creation" +msgstr "" + +msgid "entity deleted" +msgstr "" + +msgid "entity deletion" +msgstr "" + +msgid "entity edited" +msgstr "" + +msgid "entity has no workflow set" +msgstr "" + +msgid "entity linked" +msgstr "" + +msgid "entity type" +msgstr "" + +msgid "entity types which may use this workflow" +msgstr "" + +msgid "entity update" +msgstr "" + +msgid "entityview" +msgstr "entity views" + +msgid "error" +msgstr "" + +msgid "error while publishing ReST text" +msgstr "" + +msgid "exit state must be a subworkflow state" +msgstr "" + +msgid "exit_point" +msgstr "exit point" + +msgid "exit_point_object" +msgstr "exit point of" + +#, python-format +msgid "exiting from subworkflow %s" +msgstr "" + +msgid "expression" +msgstr "" + +msgctxt "RQLExpression" +msgid "expression" +msgstr "expression" + +msgid "exprtype" +msgstr "expression's type" + +msgctxt "RQLExpression" +msgid "exprtype" +msgstr "expression type" + +msgid "extra_props" +msgstr "" + +msgctxt "CWAttribute" +msgid "extra_props" +msgstr "" + +msgid "facet-loading-msg" +msgstr "processing, please wait" + +msgid "facet.filters" +msgstr "filter" + +msgid "facetbox" +msgstr "facettes" + +msgid "facets_created_by-facet" +msgstr "\"created by\" facet" + +msgid "facets_created_by-facet_description" +msgstr "" + +msgid "facets_cw_source-facet" +msgstr "data source" + +msgid "facets_cw_source-facet_description" +msgstr "" + +msgid "facets_cwfinal-facet" +msgstr "\"final entity or relation type\" facet" + +msgid "facets_cwfinal-facet_description" +msgstr "" + +msgid "facets_datafeed.dataimport.status" +msgstr "" + +msgid "facets_datafeed.dataimport.status_description" +msgstr "" + +msgid "facets_etype-facet" +msgstr "\"entity type\" facet" + +msgid "facets_etype-facet_description" +msgstr "" + +msgid "facets_has_text-facet" +msgstr "\"has text\" facet" + +msgid "facets_has_text-facet_description" +msgstr "" + +msgid "facets_in_group-facet" +msgstr "\"in group\" facet" + +msgid "facets_in_group-facet_description" +msgstr "" + +msgid "facets_in_state-facet" +msgstr "\"in state\" facet" + +msgid "facets_in_state-facet_description" +msgstr "" + +msgid "failed" +msgstr "" + +#, 
python-format +msgid "failed to uniquify path (%s, %s)" +msgstr "" + +msgid "february" +msgstr "" + +msgid "file tree view" +msgstr "" + +msgid "final" +msgstr "" + +msgctxt "CWEType" +msgid "final" +msgstr "final" + +msgctxt "CWRType" +msgid "final" +msgstr "final" + +msgid "first name" +msgstr "" + +msgid "firstname" +msgstr "" + +msgctxt "CWUser" +msgid "firstname" +msgstr "firstname" + +msgid "foaf" +msgstr "" + +msgid "focus on this selection" +msgstr "" + +msgid "follow" +msgstr "" + +#, python-format +msgid "follow this link for more information on this %s" +msgstr "" + +msgid "for_user" +msgstr "for user" + +msgctxt "CWProperty" +msgid "for_user" +msgstr "for user" + +msgid "for_user_object" +msgstr "use properties" + +msgctxt "CWUser" +msgid "for_user_object" +msgstr "property of" + +msgid "formula" +msgstr "formula" + +msgctxt "CWAttribute" +msgid "formula" +msgstr "formula" + +msgid "friday" +msgstr "" + +msgid "from" +msgstr "" + +#, python-format +msgid "from %(date)s" +msgstr "" + +msgid "from_entity" +msgstr "from entity" + +msgctxt "CWAttribute" +msgid "from_entity" +msgstr "from entity" + +msgctxt "CWRelation" +msgid "from_entity" +msgstr "from entity" + +msgid "from_entity_object" +msgstr "subjet relation" + +msgctxt "CWEType" +msgid "from_entity_object" +msgstr "subjec relation" + +msgid "from_interval_start" +msgstr "from" + +msgid "from_state" +msgstr "from state" + +msgctxt "TrInfo" +msgid "from_state" +msgstr "from state" + +msgid "from_state_object" +msgstr "transitions from this state" + +msgctxt "State" +msgid "from_state_object" +msgstr "transitions from this state" + +msgid "full text or RQL query" +msgstr "" + +msgid "fulltext_container" +msgstr "fulltext container" + +msgctxt "CWRType" +msgid "fulltext_container" +msgstr "fulltext container" + +msgid "fulltextindexed" +msgstr "fulltext indexed" + +msgctxt "CWAttribute" +msgid "fulltextindexed" +msgstr "fulltext indexed" + +msgid "gc" +msgstr "memory leak" + +msgid "generic plot" +msgstr "" + +msgid "generic relation to link one entity to another" +msgstr "" + +msgid "" +"generic relation to specify that an external entity represent the same " +"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" +msgstr "" + +msgid "granted to groups" +msgstr "" + +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "" + +#, python-format +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" + +msgid "group in which a user should be to be allowed to pass this transition" +msgstr "" + +msgid "groups" +msgstr "" + +msgid "groups allowed to add entities/relations of this type" +msgstr "" + +msgid "groups allowed to delete entities/relations of this type" +msgstr "" + +msgid "groups allowed to read entities/relations of this type" +msgstr "" + +msgid "groups allowed to update entities/relations of this type" +msgstr "" + +msgid "groups grant permissions to the user" +msgstr "" + +msgid "guests" +msgstr "" + +msgid "hCalendar" +msgstr "" + +msgid "has_text" +msgstr "has text" + +msgid "header-center" +msgstr "" + +msgid "header-left" +msgstr "header (left)" + +msgid "header-right" +msgstr "header (right)" + +msgid "hide filter form" +msgstr "" + +msgid "" +"how to format date and time in the ui (see this page for format " +"description)" +msgstr "" + +msgid "" +"how to format date in the ui (see this page 
for format " +"description)" +msgstr "" + +msgid "how to format float numbers in the ui" +msgstr "" + +msgid "" +"how to format time in the ui (see this page for format " +"description)" +msgstr "" + +msgid "i18n_bookmark_url_fqs" +msgstr "parameters" + +msgid "i18n_bookmark_url_path" +msgstr "path" + +msgid "i18n_login_popup" +msgstr "login" + +msgid "i18ncard_*" +msgstr "0..n" + +msgid "i18ncard_+" +msgstr "1..n" + +msgid "i18ncard_1" +msgstr "1" + +msgid "i18ncard_?" +msgstr "0..1" + +msgid "i18nprevnext_next" +msgstr "next" + +msgid "i18nprevnext_previous" +msgstr "previous" + +msgid "i18nprevnext_up" +msgstr "up" + +msgid "iCalendar" +msgstr "" + +msgid "id of main template used to render pages" +msgstr "" + +msgid "identical to" +msgstr "" + +msgid "identical_to" +msgstr "identical to" + +msgid "identity" +msgstr "" + +msgid "identity_object" +msgstr "identity" + +msgid "" +"if full text content of subject/object entity should be added to other side " +"entity (the container)." +msgstr "" + +msgid "image" +msgstr "" + +msgid "in progress" +msgstr "" + +msgid "in_group" +msgstr "in group" + +msgctxt "CWUser" +msgid "in_group" +msgstr "in group" + +msgid "in_group_object" +msgstr "contains" + +msgctxt "CWGroup" +msgid "in_group_object" +msgstr "contains" + +msgid "in_state" +msgstr "in state" + +msgid "in_state_object" +msgstr "state of" + +msgid "in_synchronization" +msgstr "in synchronization" + +msgctxt "CWSource" +msgid "in_synchronization" +msgstr "in synchronization" + +msgid "incontext" +msgstr "in-context" + +msgid "incorrect captcha value" +msgstr "" + +#, python-format +msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" +msgstr "" + +msgid "index this attribute's value in the plain text index" +msgstr "" + +msgid "indexed" +msgstr "" + +msgctxt "CWAttribute" +msgid "indexed" +msgstr "indexed" + +msgid "indicate the current state of an entity" +msgstr "" + +msgid "" +"indicate which state should be used by default when an entity using states " +"is created" +msgstr "" + +msgid "indifferent" +msgstr "indifferent" + +msgid "info" +msgstr "" + +msgid "initial state for this workflow" +msgstr "" + +msgid "initial_state" +msgstr "initial state" + +msgctxt "Workflow" +msgid "initial_state" +msgstr "initial state" + +msgid "initial_state_object" +msgstr "initial state of" + +msgctxt "State" +msgid "initial_state_object" +msgstr "initial state of" + +msgid "inlined" +msgstr "" + +msgctxt "CWRType" +msgid "inlined" +msgstr "inlined" + +msgid "instance home" +msgstr "" + +msgid "internal entity uri" +msgstr "" + +msgid "internationalizable" +msgstr "" + +msgctxt "CWAttribute" +msgid "internationalizable" +msgstr "internationalizable" + +#, python-format +msgid "invalid action %r" +msgstr "" + +#, python-format +msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" +msgstr "" + +msgid "is" +msgstr "" + +msgid "is object of:" +msgstr "" + +msgid "is subject of:" +msgstr "" + +msgid "" +"is the subject/object entity of the relation composed of the other ? This " +"implies that when the composite is deleted, composants are also deleted." +msgstr "" + +msgid "is this attribute's value translatable" +msgstr "" + +msgid "is this relation equivalent in both direction ?" +msgstr "" + +msgid "" +"is this relation physically inlined? you should know what you're doing if " +"you are changing this!" 
+msgstr "" + +msgid "is_instance_of" +msgstr "is instance of" + +msgid "is_instance_of_object" +msgstr "instances" + +msgid "is_object" +msgstr "has instances" + +msgid "january" +msgstr "" + +msgid "json-entities-export-view" +msgstr "JSON export (entities)" + +msgid "json-export-view" +msgstr "JSON export" + +msgid "july" +msgstr "" + +msgid "june" +msgstr "" + +msgid "language of the user interface" +msgstr "" + +msgid "last connection date" +msgstr "" + +msgid "last login time" +msgstr "" + +msgid "last name" +msgstr "" + +msgid "last usage" +msgstr "" + +msgid "last_login_time" +msgstr "last login time" + +msgctxt "CWUser" +msgid "last_login_time" +msgstr "last login time" + +msgid "latest import" +msgstr "" + +msgid "latest modification time of an entity" +msgstr "" + +msgid "latest synchronization time" +msgstr "" + +msgid "latest update on" +msgstr "" + +msgid "latest_retrieval" +msgstr "latest retrieval" + +msgctxt "CWSource" +msgid "latest_retrieval" +msgstr "latest retrieval" + +msgid "left" +msgstr "" + +msgid "line" +msgstr "" + +msgid "" +"link a property to the user which want this property customization. Unless " +"you're a site manager, this relation will be handled automatically." +msgstr "" + +msgid "link a relation definition to its object entity type" +msgstr "" + +msgid "link a relation definition to its relation type" +msgstr "" + +msgid "link a relation definition to its subject entity type" +msgstr "" + +msgid "link a state to one or more workflow" +msgstr "" + +msgid "link a transition information to its object" +msgstr "" + +msgid "link a transition to one or more workflow" +msgstr "" + +msgid "link a workflow to one or more entity type" +msgstr "" + +msgid "list" +msgstr "" + +msgid "log" +msgstr "" + +msgctxt "CWDataImport" +msgid "log" +msgstr "" + +msgid "log in" +msgstr "" + +msgid "login" +msgstr "" + +msgctxt "CWUser" +msgid "login" +msgstr "login" + +msgid "login / password" +msgstr "" + +msgid "login or email" +msgstr "" + +msgid "login_action" +msgstr "log in" + +msgid "logout" +msgstr "" + +#, python-format +msgid "loop in %(rel)s relation (%(eid)s)" +msgstr "" + +msgid "main informations" +msgstr "" + +msgid "main_tab" +msgstr "description" + +msgid "mainvars" +msgstr "main vars" + +msgctxt "RQLExpression" +msgid "mainvars" +msgstr "main variables" + +msgid "manage" +msgstr "" + +msgid "manage bookmarks" +msgstr "" + +msgid "manage permissions" +msgstr "" + +msgid "managers" +msgstr "" + +msgid "mandatory relation" +msgstr "" + +msgid "march" +msgstr "" + +msgid "match_host" +msgstr "match host" + +msgctxt "CWSourceHostConfig" +msgid "match_host" +msgstr "match host" + +msgid "maximum number of characters in short description" +msgstr "" + +msgid "maximum number of entities to display in related combo box" +msgstr "" + +msgid "maximum number of objects displayed by page of results" +msgstr "" + +msgid "maximum number of related entities to display in the primary view" +msgstr "" + +msgid "may" +msgstr "" + +msgid "memory leak debugging" +msgstr "" + +msgid "message" +msgstr "" + +#, python-format +msgid "missing parameters for entity %s" +msgstr "" + +msgid "modification" +msgstr "" + +msgid "modification_date" +msgstr "modification date" + +msgid "modify" +msgstr "" + +msgid "monday" +msgstr "" + +msgid "more actions" +msgstr "" + +msgid "more info about this workflow" +msgstr "" + +msgid "multiple edit" +msgstr "" + +msgid "my custom search" +msgstr "" + +msgid "name" +msgstr "" + +msgctxt "BaseTransition" +msgid "name" +msgstr "name" + +msgctxt 
"CWCache" +msgid "name" +msgstr "name" + +msgctxt "CWComputedRType" +msgid "name" +msgstr "name" + +msgctxt "CWConstraintType" +msgid "name" +msgstr "name" + +msgctxt "CWEType" +msgid "name" +msgstr "name" + +msgctxt "CWGroup" +msgid "name" +msgstr "name" + +msgctxt "CWRType" +msgid "name" +msgstr "name" + +msgctxt "CWSource" +msgid "name" +msgstr "name" + +msgctxt "CWUniqueTogetherConstraint" +msgid "name" +msgstr "" + +msgctxt "State" +msgid "name" +msgstr "name" + +msgctxt "Transition" +msgid "name" +msgstr "name" + +msgctxt "Workflow" +msgid "name" +msgstr "name" + +msgctxt "WorkflowTransition" +msgid "name" +msgstr "name" + +msgid "name of the cache" +msgstr "" + +msgid "" +"name of the main variables which should be used in the selection if " +"necessary (comma separated)" +msgstr "" + +msgid "name of the source" +msgstr "" + +msgid "navbottom" +msgstr "page bottom" + +msgid "navcontentbottom" +msgstr "page bottom, within main content" + +msgid "navcontenttop" +msgstr "page top, within main content" + +msgid "navigation" +msgstr "" + +msgid "navigation.combobox-limit" +msgstr "\"related\" combo-box" + +msgid "navigation.page-size" +msgstr "number of results" + +msgid "navigation.related-limit" +msgstr "number of entities in the primary view " + +msgid "navigation.short-line-size" +msgstr "short description" + +msgid "navtop" +msgstr "page top" + +msgid "new" +msgstr "" + +msgid "next page" +msgstr "" + +msgid "next_results" +msgstr "next results" + +msgid "no" +msgstr "" + +msgid "no content next link" +msgstr "" + +msgid "no content prev link" +msgstr "" + +msgid "no edited fields specified" +msgstr "" + +msgid "no log to display" +msgstr "" + +msgid "no related entity" +msgstr "" + +msgid "no repository sessions found" +msgstr "" + +msgid "no selected entities" +msgstr "" + +#, python-format +msgid "no such entity type %s" +msgstr "" + +msgid "no version information" +msgstr "" + +msgid "no web sessions found" +msgstr "" + +msgid "normal" +msgstr "" + +msgid "not authorized" +msgstr "" + +msgid "not selected" +msgstr "" + +msgid "november" +msgstr "" + +msgid "num. users" +msgstr "" + +msgid "object" +msgstr "" + +msgid "object type" +msgstr "" + +msgid "october" +msgstr "" + +msgid "one month" +msgstr "" + +msgid "one week" +msgstr "" + +msgid "oneline" +msgstr "one-line" + +msgid "only select queries are authorized" +msgstr "" + +msgid "open all" +msgstr "" + +msgid "opened sessions" +msgstr "" + +msgid "opened web sessions" +msgstr "" + +msgid "options" +msgstr "" + +msgctxt "CWSourceSchemaConfig" +msgid "options" +msgstr "options" + +msgid "order" +msgstr "" + +msgid "ordernum" +msgstr "order" + +msgctxt "CWAttribute" +msgid "ordernum" +msgstr "order" + +msgctxt "CWRelation" +msgid "ordernum" +msgstr "order" + +msgid "owl" +msgstr "" + +msgid "owlabox" +msgstr "" + +msgid "owned_by" +msgstr "owned by" + +msgid "owned_by_object" +msgstr "owns" + +msgid "owners" +msgstr "" + +msgid "ownerships have been changed" +msgstr "" + +msgid "pageid-not-found" +msgstr "" +"some necessary data seem expired, please reload the page and try again." + +msgid "parser" +msgstr "" + +msgctxt "CWSource" +msgid "parser" +msgstr "parser" + +msgid "parser to use to extract entities from content retrieved at given URLs." 
+msgstr "" + +msgid "password" +msgstr "" + +msgid "password and confirmation don't match" +msgstr "" + +msgid "path" +msgstr "" + +msgctxt "Bookmark" +msgid "path" +msgstr "path" + +msgid "permalink to this message" +msgstr "" + +msgid "permission" +msgstr "" + +msgid "permissions" +msgstr "" + +msgid "pick existing bookmarks" +msgstr "" + +msgid "pkey" +msgstr "key" + +msgctxt "CWProperty" +msgid "pkey" +msgstr "key" + +msgid "please correct errors below" +msgstr "" + +msgid "please correct the following errors:" +msgstr "" + +msgid "possible views" +msgstr "" + +msgid "prefered_form" +msgstr "prefered form" + +msgctxt "EmailAddress" +msgid "prefered_form" +msgstr "prefered form" + +msgid "prefered_form_object" +msgstr "prefered over" + +msgctxt "EmailAddress" +msgid "prefered_form_object" +msgstr "prefered over" + +msgid "preferences" +msgstr "" + +msgid "previous page" +msgstr "" + +msgid "previous_results" +msgstr "previous results" + +msgid "primary" +msgstr "" + +msgid "primary_email" +msgstr "primary email" + +msgctxt "CWUser" +msgid "primary_email" +msgstr "primary email" + +msgid "primary_email_object" +msgstr "primary email of" + +msgctxt "EmailAddress" +msgid "primary_email_object" +msgstr "primary email of" + +msgid "profile" +msgstr "" + +msgid "rdef-description" +msgstr "description" + +msgid "rdef-permissions" +msgstr "permissions" + +msgid "rdf export" +msgstr "RDF export" + +msgid "read" +msgstr "" + +msgid "read_permission" +msgstr "read permission" + +msgctxt "CWAttribute" +msgid "read_permission" +msgstr "read permission" + +msgctxt "CWEType" +msgid "read_permission" +msgstr "read permission" + +msgctxt "CWRelation" +msgid "read_permission" +msgstr "read permission" + +msgid "read_permission_object" +msgstr "has permission to read" + +msgctxt "CWGroup" +msgid "read_permission_object" +msgstr "has permission to read" + +msgctxt "RQLExpression" +msgid "read_permission_object" +msgstr "has permission to read" + +msgid "regexp matching host(s) to which this config applies" +msgstr "" + +msgid "registry" +msgstr "" + +msgid "related entity has no state" +msgstr "" + +msgid "related entity has no workflow set" +msgstr "" + +msgid "relation" +msgstr "" + +#, python-format +msgid "relation %(relname)s of %(ent)s" +msgstr "" + +#, python-format +msgid "" +"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " +"type supported" +msgstr "" + +#, python-format +msgid "" +"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " +"mandatory but not supported" +msgstr "" + +#, python-format +msgid "" +"relation %s is supported but none of its definitions matches supported " +"entities" +msgstr "" + +msgid "relation add" +msgstr "" + +msgid "relation removal" +msgstr "" + +msgid "relation_type" +msgstr "relation type" + +msgctxt "CWAttribute" +msgid "relation_type" +msgstr "relation type" + +msgctxt "CWRelation" +msgid "relation_type" +msgstr "relation type" + +msgid "relation_type_object" +msgstr "relation definitions" + +msgctxt "CWRType" +msgid "relation_type_object" +msgstr "relation definitions" + +msgid "relations" +msgstr "" + +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "relations" + +msgid "relations deleted" +msgstr "" + +msgid "relations_object" +msgstr "constrained by" + +msgctxt "CWRType" +msgid "relations_object" +msgstr "constrained by" + +msgid "relative url of the bookmarked page" +msgstr "" + +msgid "remove-inlined-entity-form" +msgstr "remove" + +msgid "require_group" +msgstr "require the group" + 
+msgctxt "BaseTransition" +msgid "require_group" +msgstr "require group" + +msgctxt "Transition" +msgid "require_group" +msgstr "require group" + +msgctxt "WorkflowTransition" +msgid "require_group" +msgstr "require group" + +msgid "require_group_object" +msgstr "required by" + +msgctxt "CWGroup" +msgid "require_group_object" +msgstr "required by" + +msgid "required" +msgstr "" + +msgid "required attribute" +msgstr "" + +msgid "required field" +msgstr "" + +msgid "resources usage" +msgstr "" + +msgid "" +"restriction part of a rql query. For entity rql expression, X and U are " +"predefined respectivly to the current object and to the request user. For " +"relation rql expression, S, O and U are predefined respectivly to the " +"current relation'subject, object and to the request user. " +msgstr "" + +msgid "revert changes" +msgstr "" + +msgid "right" +msgstr "" + +msgid "rql expression allowing to add entities/relations of this type" +msgstr "" + +msgid "rql expression allowing to delete entities/relations of this type" +msgstr "" + +msgid "rql expression allowing to read entities/relations of this type" +msgstr "" + +msgid "rql expression allowing to update entities/relations of this type" +msgstr "" + +msgid "rql expressions" +msgstr "" + +msgid "rss export" +msgstr "RSS export" + +msgid "rule" +msgstr "rule" + +msgctxt "CWComputedRType" +msgid "rule" +msgstr "rule" + +msgid "same_as" +msgstr "same as" + +msgid "sample format" +msgstr "" + +msgid "saturday" +msgstr "" + +msgid "schema-diagram" +msgstr "diagram" + +msgid "schema-entity-types" +msgstr "entities" + +msgid "schema-relation-types" +msgstr "relations" + +msgid "search" +msgstr "" + +msgid "search for association" +msgstr "" + +msgid "searching for" +msgstr "" + +msgid "security" +msgstr "" + +msgid "see more" +msgstr "" + +msgid "see them all" +msgstr "" + +msgid "see_also" +msgstr "see also" + +msgid "select" +msgstr "" + +msgid "select a" +msgstr "" + +msgid "select a key first" +msgstr "" + +msgid "select a relation" +msgstr "" + +msgid "select this entity" +msgstr "" + +msgid "selected" +msgstr "" + +msgid "semantic description of this attribute" +msgstr "" + +msgid "semantic description of this entity type" +msgstr "" + +msgid "semantic description of this relation" +msgstr "" + +msgid "semantic description of this relation type" +msgstr "" + +msgid "semantic description of this state" +msgstr "" + +msgid "semantic description of this transition" +msgstr "" + +msgid "semantic description of this workflow" +msgstr "" + +msgid "september" +msgstr "" + +msgid "server information" +msgstr "" + +msgid "severity" +msgstr "" + +msgid "" +"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " +"You should also select text/html as default text format to actually get " +"fckeditor." +msgstr "" + +#, python-format +msgid "show %s results" +msgstr "" + +msgid "show advanced fields" +msgstr "" + +msgid "show filter form" +msgstr "" + +msgid "site configuration" +msgstr "" + +msgid "site documentation" +msgstr "" + +msgid "site title" +msgstr "" + +msgid "site-wide property can't be set for user" +msgstr "" + +msgid "some later transaction(s) touch entity, undo them first" +msgstr "" + +msgid "some relations violate a unicity constraint" +msgstr "" + +msgid "sorry, the server is unable to handle this query" +msgstr "" + +msgid "" +"source's configuration. 
One key=value per line, authorized keys depending on " +"the source's type" +msgstr "" + +msgid "sparql xml" +msgstr "" + +msgid "special transition allowing to go through a sub-workflow" +msgstr "" + +msgid "specializes" +msgstr "specializes" + +msgctxt "CWEType" +msgid "specializes" +msgstr "specializes" + +msgid "specializes_object" +msgstr "specialized by" + +msgctxt "CWEType" +msgid "specializes_object" +msgstr "specialized by" + +#, python-format +msgid "specifying %s is mandatory" +msgstr "" + +msgid "" +"start timestamp of the currently in synchronization, or NULL when no " +"synchronization in progress." +msgstr "" + +msgid "start_timestamp" +msgstr "start timestamp" + +msgctxt "CWDataImport" +msgid "start_timestamp" +msgstr "start timestamp" + +msgid "startup views" +msgstr "" + +msgid "startupview" +msgstr "startup views" + +msgid "state" +msgstr "" + +msgid "state and transition don't belong the the same workflow" +msgstr "" + +msgid "state doesn't apply to this entity's type" +msgstr "" + +msgid "state doesn't belong to entity's current workflow" +msgstr "" + +msgid "state doesn't belong to entity's workflow" +msgstr "" + +msgid "" +"state doesn't belong to entity's workflow. You may want to set a custom " +"workflow for this entity first." +msgstr "" + +msgid "state doesn't belong to this workflow" +msgstr "" + +msgid "state_of" +msgstr "state of" + +msgctxt "State" +msgid "state_of" +msgstr "state of" + +msgid "state_of_object" +msgstr "use states" + +msgctxt "Workflow" +msgid "state_of_object" +msgstr "use states" + +msgid "status" +msgstr "" + +msgctxt "CWDataImport" +msgid "status" +msgstr "status" + +msgid "status change" +msgstr "" + +msgid "status changed" +msgstr "" + +#, python-format +msgid "status will change from %(st1)s to %(st2)s" +msgstr "" + +msgid "subject" +msgstr "" + +msgid "subject type" +msgstr "" + +msgid "subject/object cardinality" +msgstr "" + +msgid "subworkflow" +msgstr "" + +msgctxt "WorkflowTransition" +msgid "subworkflow" +msgstr "subworkflow" + +msgid "" +"subworkflow isn't a workflow for the same types as the transition's workflow" +msgstr "" + +msgid "subworkflow state" +msgstr "" + +msgid "subworkflow_exit" +msgstr "subworkflow exit" + +msgctxt "WorkflowTransition" +msgid "subworkflow_exit" +msgstr "subworkflow exit" + +msgid "subworkflow_exit_object" +msgstr "subworkflow exit of" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_exit_object" +msgstr "subworkflow exit of" + +msgid "subworkflow_object" +msgstr "subworkflow of" + +msgctxt "Workflow" +msgid "subworkflow_object" +msgstr "subworkflow of" + +msgid "subworkflow_state" +msgstr "subworkflow state" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_state" +msgstr "subworkflow state" + +msgid "subworkflow_state_object" +msgstr "exit point" + +msgctxt "State" +msgid "subworkflow_state_object" +msgstr "exit point" + +msgid "success" +msgstr "" + +msgid "sunday" +msgstr "" + +msgid "surname" +msgstr "" + +msgctxt "CWUser" +msgid "surname" +msgstr "" + +msgid "symmetric" +msgstr "symmetric" + +msgctxt "CWRType" +msgid "symmetric" +msgstr "symmetric" + +msgid "synchronization-interval must be greater than 1 minute" +msgstr "" + +msgid "table" +msgstr "" + +msgid "tablefilter" +msgstr "table filter" + +msgid "text" +msgstr "" + +msgid "text/cubicweb-page-template" +msgstr "cubicweb page template" + +msgid "text/html" +msgstr "html" + +msgid "text/markdown" +msgstr "markdown formatted text" + +msgid "text/plain" +msgstr "plain text" + +msgid "text/rest" +msgstr "ReST text" + +msgid 
"the URI of the object" +msgstr "" + +msgid "the prefered email" +msgstr "" + +msgid "the system source has its configuration stored on the file-system" +msgstr "" + +#, python-format +msgid "the value \"%s\" is already used, use another one" +msgstr "" + +msgid "there is no next page" +msgstr "" + +msgid "there is no previous page" +msgstr "" + +#, python-format +msgid "there is no transaction #%s" +msgstr "" + +msgid "this action is not reversible!" +msgstr "" + +msgid "this entity is currently owned by" +msgstr "" + +msgid "this parser doesn't use a mapping" +msgstr "" + +msgid "this resource does not exist" +msgstr "" + +msgid "this source doesn't use a mapping" +msgstr "" + +msgid "thursday" +msgstr "" + +msgid "timestamp" +msgstr "" + +msgctxt "CWCache" +msgid "timestamp" +msgstr "timestamp" + +msgid "timetable" +msgstr "" + +msgid "title" +msgstr "" + +msgctxt "Bookmark" +msgid "title" +msgstr "title" + +msgid "to" +msgstr "" + +#, python-format +msgid "to %(date)s" +msgstr "" + +msgid "to associate with" +msgstr "" + +msgid "to_entity" +msgstr "to entity" + +msgctxt "CWAttribute" +msgid "to_entity" +msgstr "to entity" + +msgctxt "CWRelation" +msgid "to_entity" +msgstr "to entity" + +msgid "to_entity_object" +msgstr "object relations" + +msgctxt "CWEType" +msgid "to_entity_object" +msgstr "object relations" + +msgid "to_interval_end" +msgstr "to" + +msgid "to_state" +msgstr "to state" + +msgctxt "TrInfo" +msgid "to_state" +msgstr "to state" + +msgid "to_state_object" +msgstr "transitions to this state" + +msgctxt "State" +msgid "to_state_object" +msgstr "transitions to this state" + +msgid "toggle check boxes" +msgstr "" + +msgid "tr_count" +msgstr "transition number" + +msgctxt "TrInfo" +msgid "tr_count" +msgstr "transition number" + +msgid "transaction undone" +msgstr "" + +#, python-format +msgid "transition %(tr)s isn't allowed from %(st)s" +msgstr "" + +msgid "transition doesn't belong to entity's workflow" +msgstr "" + +msgid "transition isn't allowed" +msgstr "" + +msgid "transition may not be fired" +msgstr "" + +msgid "transition_of" +msgstr "transition of" + +msgctxt "BaseTransition" +msgid "transition_of" +msgstr "transition of" + +msgctxt "Transition" +msgid "transition_of" +msgstr "transition of" + +msgctxt "WorkflowTransition" +msgid "transition_of" +msgstr "transition of" + +msgid "transition_of_object" +msgstr "use transitions" + +msgctxt "Workflow" +msgid "transition_of_object" +msgstr "use transitions" + +msgid "tree view" +msgstr "" + +msgid "tuesday" +msgstr "" + +msgid "type" +msgstr "" + +msgctxt "BaseTransition" +msgid "type" +msgstr "type" + +msgctxt "CWSource" +msgid "type" +msgstr "type" + +msgctxt "Transition" +msgid "type" +msgstr "type" + +msgctxt "WorkflowTransition" +msgid "type" +msgstr "type" + +msgid "type here a sparql query" +msgstr "" + +msgid "type of the source" +msgstr "" + +msgid "ui" +msgstr "" + +msgid "ui.date-format" +msgstr "date format" + +msgid "ui.datetime-format" +msgstr "date and time format" + +msgid "ui.default-text-format" +msgstr "text format" + +msgid "ui.encoding" +msgstr "encoding" + +msgid "ui.fckeditor" +msgstr "content editor" + +msgid "ui.float-format" +msgstr "float format" + +msgid "ui.language" +msgstr "language" + +msgid "ui.main-template" +msgstr "main template" + +msgid "ui.site-title" +msgstr "site title" + +msgid "ui.time-format" +msgstr "time format" + +msgid "unable to check captcha, please try again" +msgstr "" + +msgid "unaccessible" +msgstr "" + +msgid "unauthorized value" +msgstr "" + +msgid "undefined user" 
+msgstr "" + +msgid "undo" +msgstr "" + +msgid "unique identifier used to connect to the application" +msgstr "" + +msgid "unknown external entity" +msgstr "" + +#, python-format +msgid "unknown options %s" +msgstr "" + +#, python-format +msgid "unknown property key %s" +msgstr "" + +msgid "unknown vocabulary:" +msgstr "" + +msgid "unsupported protocol" +msgstr "" + +msgid "upassword" +msgstr "password" + +msgctxt "CWUser" +msgid "upassword" +msgstr "password" + +msgid "update" +msgstr "" + +msgid "update_permission" +msgstr "can be updated by" + +msgctxt "CWAttribute" +msgid "update_permission" +msgstr "can be updated by" + +msgctxt "CWEType" +msgid "update_permission" +msgstr "can be updated by" + +msgid "update_permission_object" +msgstr "has permission to update" + +msgctxt "CWGroup" +msgid "update_permission_object" +msgstr "has permission to update" + +msgctxt "RQLExpression" +msgid "update_permission_object" +msgstr "has permission to update" + +msgid "update_relation" +msgstr "update" + +msgid "updated" +msgstr "" + +#, python-format +msgid "updated %(etype)s #%(eid)s (%(title)s)" +msgstr "" + +msgid "uri" +msgstr "" + +msgctxt "ExternalUri" +msgid "uri" +msgstr "uri" + +msgid "url" +msgstr "" + +msgctxt "CWSource" +msgid "url" +msgstr "url" + +msgid "" +"use to define a transition from one or multiple states to a destination " +"states in workflow's definitions. Transition without destination state will " +"go back to the state from which we arrived to the current state." +msgstr "" + +msgid "use_email" +msgstr "use email" + +msgctxt "CWUser" +msgid "use_email" +msgstr "use email" + +msgid "use_email_object" +msgstr "used by" + +msgctxt "EmailAddress" +msgid "use_email_object" +msgstr "used by" + +msgid "" +"used for cubicweb configuration. Once a property has been created you can't " +"change the key." +msgstr "" + +msgid "" +"used to associate simple states to an entity type and/or to define workflows" +msgstr "" + +msgid "user" +msgstr "" + +#, python-format +msgid "" +"user %s has made the following change(s):\n" +"\n" +msgstr "" + +msgid "user interface encoding" +msgstr "" + +msgid "user preferences" +msgstr "" + +msgid "user's email account" +msgstr "" + +msgid "users" +msgstr "" + +msgid "users and groups" +msgstr "" + +msgid "users using this bookmark" +msgstr "" + +msgid "validate modifications on selected items" +msgstr "" + +msgid "validating..." 
+msgstr "" + +msgid "value" +msgstr "" + +msgctxt "CWConstraint" +msgid "value" +msgstr "" + +msgctxt "CWProperty" +msgid "value" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be < %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be > %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" +msgstr "" + +msgid "value associated to this key is not editable manually" +msgstr "" + +#, python-format +msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" +msgstr "" + +#, python-format +msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" +msgstr "" + +msgid "vcard" +msgstr "" + +msgid "versions configuration" +msgstr "" + +msgid "view" +msgstr "" + +msgid "view all" +msgstr "" + +msgid "view detail for this entity" +msgstr "" + +msgid "view history" +msgstr "" + +msgid "view identifier" +msgstr "" + +msgid "view title" +msgstr "" + +msgid "view workflow" +msgstr "" + +msgid "view_index" +msgstr "index" + +msgid "visible" +msgstr "" + +msgid "warning" +msgstr "" + +msgid "we are not yet ready to handle this query" +msgstr "" + +msgid "wednesday" +msgstr "" + +#, python-format +msgid "welcome %s!" +msgstr "" + +msgid "wf_info_for" +msgstr "record for" + +msgid "wf_info_for_object" +msgstr "workflow history" + +msgid "wf_tab_info" +msgstr "states and transitions" + +msgid "wfgraph" +msgstr "graph" + +msgid "" +"when multiple addresses are equivalent (such as python-projects@logilab.org " +"and python-projects@lists.logilab.org), set this to indicate which is the " +"preferred form." +msgstr "" + +msgid "workflow" +msgstr "" + +#, python-format +msgid "workflow changed to \"%s\"" +msgstr "" + +msgid "workflow has no initial state" +msgstr "" + +msgid "workflow history item" +msgstr "" + +msgid "workflow isn't a workflow for this type" +msgstr "" + +msgid "workflow to which this state belongs" +msgstr "" + +msgid "workflow to which this transition belongs" +msgstr "" + +msgid "workflow_of" +msgstr "workflow of" + +msgctxt "Workflow" +msgid "workflow_of" +msgstr "workflow of" + +msgid "workflow_of_object" +msgstr "may use workflow" + +msgctxt "CWEType" +msgid "workflow_of_object" +msgstr "may use workflow" + +#, python-format +msgid "wrong query parameter line %s" +msgstr "" + +msgid "xbel export" +msgstr "XBEL export" + +msgid "xml export" +msgstr "XML export" + +msgid "xml export (entities)" +msgstr "XML export (entities)" + +msgid "yes" +msgstr "" + +msgid "you have been logged out" +msgstr "" + +msgid "you should probably delete that property" +msgstr "" + +#~ msgid "components_etypenavigation" +#~ msgstr "filtering by type" + +#~ msgid "components_etypenavigation_description" +#~ msgstr "permit to filter search results by entity type" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/i18n/es.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/i18n/es.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4796 @@ +# cubicweb i18n catalog +# Copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# Logilab +# Translators: +# CreaLibre , 2014 +msgid "" +msgstr "" +"Project-Id-Version: Cubicweb\n" +"POT-Creation-Date: 2006-01-12 17:35+CET\n" +"PO-Revision-Date: 2014-03-04 08:10+0000\n" +"Last-Translator: CreaLibre \n" +"Language-Team: Spanish (http://www.transifex.com/projects/p/cubicweb/" +"language/es/)\n" +"Language: es\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#, python-format +msgid "" +"\n" +"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " +"entity\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" +msgstr "" +"\n" +"%(user)s ha cambiado su estado de <%(previous_state)s> hacia <" +"%(current_state)s> en la entidad\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" + +#, python-format +msgid " from state %(fromstate)s to state %(tostate)s\n" +msgstr " del estado %(fromstate)s hacia el estado %(tostate)s\n" + +msgid " :" +msgstr ":" + +#, python-format +msgid "\"action\" must be specified in options; allowed values are %s" +msgstr "" +"\"action\" debe estar especificada en opciones; los valores permitidos son : " +"%s" + +msgid "\"role=subject\" or \"role=object\" must be specified in options" +msgstr "" +"\"role=subject\" o \"role=object\" debe ser especificado en las opciones" + +#, python-format +msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" +msgstr "%(KEY-cstr)s restricción errónea para el valor %(KEY-value)r" + +#, python-format +msgid "%(KEY-rtype)s is part of violated unicity constraint" +msgstr "%(KEY-rtype)s pertenece a una restricción de unidad no respectada" + +#, python-format +msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" +msgstr "%(KEY-value)r no corresponde a la expresión regular %(KEY-regexp)r" + +#, python-format +msgid "%(attr)s set to %(newvalue)s" +msgstr "%(attr)s modificado a %(newvalue)s" + +#, python-format +msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" +msgstr "%(attr)s modificado de %(oldvalue)s a %(newvalue)s" + +#, python-format +msgid "%(etype)s by %(author)s" +msgstr "%(etype)s por %(author)s" + +#, python-format +msgid "%(firstname)s %(surname)s" +msgstr "%(firstname)s %(surname)s" + +#, python-format +msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" +msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" + +#, python-format +msgid "%d days" +msgstr "%d días" + +#, python-format +msgid "%d hours" +msgstr "%d horas" + +#, python-format +msgid "%d minutes" +msgstr "%d minutos" + +#, python-format +msgid "%d months" +msgstr "%d meses" + +#, python-format +msgid "%d seconds" +msgstr "%d segundos" + +#, python-format +msgid "%d weeks" +msgstr "%d semanas" + +#, python-format +msgid "%d years" +msgstr "%d años" + +#, python-format +msgid "%s could be supported" +msgstr "%s podría ser mantenido" + +#, python-format +msgid "%s error report" +msgstr "%s reporte de errores" + +#, python-format +msgid "%s software version of the database" +msgstr "versión sistema de la base para %s" + +#, python-format +msgid "%s updated" +msgstr "%s actualizado" + +#, python-format +msgid "'%s' action doesn't take any options" +msgstr "la acción '%s' no acepta opciones" + +#, python-format +msgid "" +"'%s' action for in_state relation should at least have 'linkattr=name' option" +msgstr "" +"'%s' acción en la relación in_state debe por lo menos tener la opción " +"'linkattr=name'" + +#, python-format +msgid 
"'%s' action requires 'linkattr' option" +msgstr "la acción '%s' requiere una opción 'linkattr'" + +msgid "(UNEXISTANT EID)" +msgstr "(EID INEXISTENTE" + +#, python-format +msgid "(suppressed) entity #%d" +msgstr "(eliminada) entidad #%d" + +msgid "**" +msgstr "0..n 0..n" + +msgid "*+" +msgstr "0..n 1..n" + +msgid "*1" +msgstr "0..n 1" + +msgid "*?" +msgstr "0..n 0..1" + +msgid "+*" +msgstr "1..n 0..n" + +msgid "++" +msgstr "1..n 1..n" + +msgid "+1" +msgstr "1..n 1" + +msgid "+?" +msgstr "1..n 0..1" + +msgid "1*" +msgstr "1 0..n" + +msgid "1+" +msgstr "1 1..n" + +msgid "11" +msgstr "1 1" + +msgid "1?" +msgstr "1 0..1" + +#, python-format +msgid "<%s not specified>" +msgstr "<%s no especificado>" + +#, python-format +msgid "" +"
<div>This schema of the data model <em>excludes</em> the meta-data, but you " +"can also display a <a href=\"%s\">complete schema with meta-data</a>.</div>
" +msgstr "" +"
Este esquema del modelo de datos no incluye los meta-datos, " +"pero se puede ver a un modelo completo con meta-datos." + +msgid "" +msgstr "" + +msgid "" +msgstr "" + +msgid "?*" +msgstr "0..1 0..n" + +msgid "?+" +msgstr "0..1 1..n" + +msgid "?1" +msgstr "0..1 1" + +msgid "??" +msgstr "0..1 0..1" + +msgid "AND" +msgstr "Y" + +msgid "About this site" +msgstr "Información del Sistema" + +#, python-format +msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "Relación agregada : %(entity_from)s %(rtype)s %(entity_to)s" + +msgid "Attributes permissions:" +msgstr "Permisos de atributos:" + +# schema pot file, generated on 2009-09-16 16:46:55 +# +# singular and plural forms for each entity type +msgid "BaseTransition" +msgstr "Transición (abstracta)" + +msgid "BaseTransition_plural" +msgstr "Transiciones (abstractas)" + +msgid "BigInt" +msgstr "Big integer" + +msgid "BigInt_plural" +msgstr "Big integers" + +msgid "Bookmark" +msgstr "Favorito" + +msgid "Bookmark_plural" +msgstr "Favoritos" + +msgid "Boolean" +msgstr "Booleano" + +msgid "Boolean_plural" +msgstr "Booleanos" + +msgid "BoundConstraint" +msgstr "Restricción de límite" + +msgid "BoundaryConstraint" +msgstr "Restricción de límite" + +msgid "Browse by entity type" +msgstr "Busca por tipo de entidad" + +#, python-format +msgid "By %(user)s on %(dt)s [%(undo_link)s]" +msgstr "Por %(user)s en %(dt)s [%(undo_link)s]" + +msgid "Bytes" +msgstr "Bytes" + +msgid "Bytes_plural" +msgstr "Bytes" + +msgid "CWAttribute" +msgstr "Atributo" + +msgid "CWAttribute_plural" +msgstr "Atributos" + +msgid "CWCache" +msgstr "Cache" + +msgid "CWCache_plural" +msgstr "Caches" + +msgid "CWComputedRType" +msgstr "" + +msgid "CWComputedRType_plural" +msgstr "" + +msgid "CWConstraint" +msgstr "Restricción" + +msgid "CWConstraintType" +msgstr "Tipo de Restricción" + +msgid "CWConstraintType_plural" +msgstr "Tipos de Restricción" + +msgid "CWConstraint_plural" +msgstr "Restricciones" + +msgid "CWDataImport" +msgstr "Importación de Datos" + +msgid "CWDataImport_plural" +msgstr "Importaciones de Datos" + +msgid "CWEType" +msgstr "Tipo de entidad" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "CWEType" +msgstr "Tipo de entidad" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "CWEType" +msgstr "Tipo de entidad" + +msgid "CWEType_plural" +msgstr "Tipos de entidades" + +msgid "CWGroup" +msgstr "Groupo" + +msgid "CWGroup_plural" +msgstr "Grupos" + +msgid "CWProperty" +msgstr "Propiedad" + +msgid "CWProperty_plural" +msgstr "Propiedades" + +msgid "CWRType" +msgstr "Tipo de relación" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "CWRType" +msgstr "Tipo de relación" + +msgid "CWRType_plural" +msgstr "Tipos de relación" + +msgid "CWRelation" +msgstr "Relación" + +msgid "CWRelation_plural" +msgstr "Relaciones" + +msgid "CWSource" +msgstr "Fuente de datos" + +msgid "CWSourceHostConfig" +msgstr "Configuración de Fuente" + +msgid "CWSourceHostConfig_plural" +msgstr "Configuraciones de fuente" + +msgid "CWSourceSchemaConfig" +msgstr "Configuraciones de Esquema de Fuente" + +msgid "CWSourceSchemaConfig_plural" +msgstr "Configuraciones de Esquema de Fuente" + +msgid "CWSource_plural" +msgstr "Fuentes de Datos" + +msgid "CWUniqueTogetherConstraint" +msgstr "Restricción de Singularidad" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "Restricciones de Singularidad" + +msgid "CWUser" +msgstr "Usuario" + +msgid "CWUser_plural" +msgstr "Usuarios" + +#, python-format +msgid "" +"Can't restore %(role)s relation %(rtype)s to entity 
%(eid)s which is already " +"linked using this relation." +msgstr "" +"No puede restaurar la relación %(role)s %(rtype)s en la entidad %(eid)s pues " +"ya esta ligada a otra entidad usando esa relación." + +#, python-format +msgid "" +"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " +"does not exists anymore in the schema." +msgstr "" +"No puede restaurar la relación %(rtype)s entre %(subj)s y %(obj)s, esta " +"relación ya no existe en el esquema." + +#, python-format +msgid "" +"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " +"anymore." +msgstr "" +"No puede restaurar la relación %(rtype)s, la entidad %(role)s %(eid)s ya no " +"existe." + +#, python-format +msgid "" +"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " +"exist anymore" +msgstr "" +"No puede anular el agregar la relación %(rtype)s de %(subj)s a %(obj)s, esta " +"relación ya no existe" + +#, python-format +msgid "" +"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " +"supported" +msgstr "" +"No puede anular la creación de la entidad %(eid)s de tipo %(etype)s, este " +"tipo ya no existe" + +msgid "Click to sort on this column" +msgstr "Seleccione para ordenar columna" + +msgid "" +"Configuration of the system source goes to the 'sources' file, not in the " +"database" +msgstr "" +"La configuración de la fuente sistema va en el archivo \"Sources\"/Fuentes, " +"y no en la base de datos." + +#, python-format +msgid "Created %(etype)s : %(entity)s" +msgstr "Se creó %(etype)s : %(entity)s" + +msgid "DEBUG" +msgstr "DEPURAR" + +msgid "Date" +msgstr "Fecha" + +msgid "Date_plural" +msgstr "Fechas" + +msgid "Datetime" +msgstr "Fecha y hora" + +msgid "Datetime_plural" +msgstr "Fechas y horas" + +msgid "Decimal" +msgstr "Decimal" + +msgid "Decimal_plural" +msgstr "Decimales" + +#, python-format +msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "Eliminar relación : %(entity_from)s %(rtype)s %(entity_to)s" + +#, python-format +msgid "Deleted %(etype)s : %(entity)s" +msgstr "Se eliminó %(etype)s : %(entity)s" + +msgid "Detected problems" +msgstr "Problemas detectados" + +msgid "Do you want to delete the following element(s)?" +msgstr "Desea eliminar el/los elemento(s) a continuación?" + +msgid "Download schema as OWL" +msgstr "Descargar el esquema en formato OWL" + +msgid "ERROR" +msgstr "ERROR" + +msgid "EmailAddress" +msgstr "Correo Electrónico" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "EmailAddress" +msgstr "Correo Electrónico" + +msgid "EmailAddress_plural" +msgstr "Direcciones de Correo Electrónico" + +msgid "Entities" +msgstr "Entidades" + +#, python-format +msgid "" +"Entity %(eid)s has changed since you started to edit it. Reload the page and " +"reapply your changes." 
+msgstr "" + +msgid "Entity and relation supported by this source" +msgstr "Entidades y relaciones aceptadas por esta fuente" + +msgid "ExternalUri" +msgstr "Uri externo" + +msgid "ExternalUri_plural" +msgstr "Uris externos" + +msgid "FATAL" +msgstr "FATAL" + +msgid "Float" +msgstr "Número flotante" + +msgid "Float_plural" +msgstr "Números flotantes" + +# schema pot file, generated on 2009-12-03 09:22:35 +# +# singular and plural forms for each entity type +msgid "FormatConstraint" +msgstr "Restricción de Formato" + +msgid "Garbage collection information" +msgstr "Recolector de basura en memoria" + +msgid "Help" +msgstr "Ayuda" + +msgid "INFO" +msgstr "INFO" + +msgid "Instance" +msgstr "Instancia" + +msgid "Int" +msgstr "Número entero" + +msgid "Int_plural" +msgstr "Números enteros" + +msgid "Interval" +msgstr "Duración" + +msgid "IntervalBoundConstraint" +msgstr "Restricción de intervalo" + +msgid "Interval_plural" +msgstr "Duraciones" + +msgid "Link:" +msgstr "Liga:" + +msgid "Looked up classes" +msgstr "Clases buscadas" + +msgid "Manage" +msgstr "Administración" + +msgid "Manage security" +msgstr "Gestión de seguridad" + +msgid "Message threshold" +msgstr "Límite de mensajes" + +msgid "Most referenced classes" +msgstr "Clases más referenciadas" + +msgid "New BaseTransition" +msgstr "XXX" + +msgid "New Bookmark" +msgstr "Agregar a Favoritos" + +msgid "New CWAttribute" +msgstr "Nueva definición de relación final" + +msgid "New CWCache" +msgstr "Agregar Caché" + +msgid "New CWComputedRType" +msgstr "" + +msgid "New CWConstraint" +msgstr "Agregar Restricción" + +msgid "New CWConstraintType" +msgstr "Agregar tipo de Restricción" + +msgid "New CWDataImport" +msgstr "Nueva importación de datos" + +msgid "New CWEType" +msgstr "Agregar tipo de entidad" + +msgid "New CWGroup" +msgstr "Nuevo grupo" + +msgid "New CWProperty" +msgstr "Agregar Propiedad" + +msgid "New CWRType" +msgstr "Agregar tipo de relación" + +msgid "New CWRelation" +msgstr "Nueva definición de relación final" + +msgid "New CWSource" +msgstr "Nueva fuente" + +msgid "New CWSourceHostConfig" +msgstr "Nueva configuración de fuente" + +msgid "New CWSourceSchemaConfig" +msgstr "Nueva parte de mapeo de fuente" + +msgid "New CWUniqueTogetherConstraint" +msgstr "Nueva restricción de singularidad" + +msgid "New CWUser" +msgstr "Agregar usuario" + +msgid "New EmailAddress" +msgstr "Agregar Email" + +msgid "New ExternalUri" +msgstr "Agregar Uri externa" + +msgid "New RQLExpression" +msgstr "Agregar expresión rql" + +msgid "New State" +msgstr "Agregar Estado" + +msgid "New SubWorkflowExitPoint" +msgstr "Agregar salida de sub-Workflow" + +msgid "New TrInfo" +msgstr "Agregar Información de Transición" + +msgid "New Transition" +msgstr "Agregar transición" + +msgid "New Workflow" +msgstr "Agregar Workflow" + +msgid "New WorkflowTransition" +msgstr "Agregar transición de Workflow" + +msgid "No result matching query" +msgstr "Ningún resultado corresponde a su búsqueda" + +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "Lista no exhaustiva de vistas aplicables a este tipo de entidad" + +msgid "OR" +msgstr "O" + +msgid "Ownership" +msgstr "Propiedad" + +msgid "Parent class:" +msgstr "Clase padre:" + +msgid "Password" +msgstr "Contraseña" + +msgid "Password_plural" +msgstr "Contraseñas" + +msgid "Please note that this is only a shallow copy" +msgstr "Recuerde que sólo es una copia superficial" + +msgid "Powered by CubicWeb" +msgstr "Potenciado en CubicWeb" + +msgid "RQLConstraint" +msgstr "Restricción RQL" + 
+msgid "RQLExpression" +msgstr "Expresión RQL" + +msgid "RQLExpression_plural" +msgstr "Expresiones RQL" + +msgid "RQLUniqueConstraint" +msgstr "Restricción RQL de Unicidad" + +msgid "RQLVocabularyConstraint" +msgstr "Restricción RQL de Vocabulario" + +msgid "RegexpConstraint" +msgstr "restricción expresión regular" + +msgid "Registry's content" +msgstr "Contenido del registro" + +msgid "Relations" +msgstr "Relaciones" + +msgid "Repository" +msgstr "Repositorio" + +#, python-format +msgid "Schema %s" +msgstr "Esquema %s" + +msgid "Schema's permissions definitions" +msgstr "Definiciones de permisos del esquema" + +msgid "Search for" +msgstr "Buscar" + +msgid "Site information" +msgstr "Información del Sitio" + +msgid "SizeConstraint" +msgstr "Restricción de tamaño" + +msgid "" +"Source's configuration for a particular host. One key=value per line, " +"authorized keys depending on the source's type, overriding values defined on " +"the source." +msgstr "" +"Configuración de la fuente por un \"host\" específico. Una clave=valor por " +"línea, las claves permitidas dependen del tipo de fuente. Estos valores son " +"prioritarios a los valores definidos en la fuente." + +msgid "Startup views" +msgstr "Vistas de inicio" + +msgid "State" +msgstr "Estado" + +msgid "State_plural" +msgstr "Estados" + +msgid "StaticVocabularyConstraint" +msgstr "Restricción de vocabulario" + +msgid "String" +msgstr "Cadena de caracteres" + +msgid "String_plural" +msgstr "Cadenas de caracteres" + +msgid "Sub-classes:" +msgstr "Clases hijas:" + +msgid "SubWorkflowExitPoint" +msgstr "Salida de sub-workflow" + +msgid "SubWorkflowExitPoint_plural" +msgstr "Salidas de sub-workflow" + +msgid "Submit bug report" +msgstr "Enviar un reporte de error (bug)" + +msgid "Submit bug report by mail" +msgstr "Enviar este reporte por email" + +msgid "TZDatetime" +msgstr "Fecha y hora internacional" + +msgid "TZDatetime_plural" +msgstr "Fechas y horas internacionales" + +msgid "TZTime" +msgstr "Hora internacional" + +msgid "TZTime_plural" +msgstr "Horas internacionales" + +#, python-format +msgid "The view %s can not be applied to this query" +msgstr "La vista %s no puede ser aplicada a esta búsqueda" + +#, python-format +msgid "The view %s could not be found" +msgstr "La vista %s no ha podido ser encontrada" + +msgid "There is no default workflow" +msgstr "Esta entidad no posee workflow por defecto" + +msgid "This BaseTransition:" +msgstr "Esta transición abstracta:" + +msgid "This Bookmark:" +msgstr "Este favorito:" + +msgid "This CWAttribute:" +msgstr "Esta definición de relación final:" + +msgid "This CWCache:" +msgstr "Este Caché:" + +msgid "This CWComputedRType:" +msgstr "" + +msgid "This CWConstraint:" +msgstr "Esta Restricción:" + +msgid "This CWConstraintType:" +msgstr "Este tipo de Restricción:" + +msgid "This CWDataImport:" +msgstr "Esta importación de datos:" + +msgid "This CWEType:" +msgstr "Este tipo de Entidad:" + +msgid "This CWGroup:" +msgstr "Este grupo:" + +msgid "This CWProperty:" +msgstr "Esta propiedad:" + +msgid "This CWRType:" +msgstr "Este tipo de relación:" + +msgid "This CWRelation:" +msgstr "Esta definición de relación no final:" + +msgid "This CWSource:" +msgstr "Esta fuente:" + +msgid "This CWSourceHostConfig:" +msgstr "Esta configuración de fuente:" + +msgid "This CWSourceSchemaConfig:" +msgstr "Esta parte de mapeo de fuente:" + +msgid "This CWUniqueTogetherConstraint:" +msgstr "Esta restricción de singularidad:" + +msgid "This CWUser:" +msgstr "Este usuario:" + +msgid "This EmailAddress:" +msgstr "Esta 
dirección electrónica:" + +msgid "This ExternalUri:" +msgstr "Este Uri externo:" + +msgid "This RQLExpression:" +msgstr "Esta expresión RQL:" + +msgid "This State:" +msgstr "Este estado:" + +msgid "This SubWorkflowExitPoint:" +msgstr "Esta Salida de sub-workflow:" + +msgid "This TrInfo:" +msgstr "Esta información de transición:" + +msgid "This Transition:" +msgstr "Esta transición:" + +msgid "This Workflow:" +msgstr "Este Workflow:" + +msgid "This WorkflowTransition:" +msgstr "Esta transición de Workflow:" + +msgid "" +"This action is forbidden. If you think it should be allowed, please contact " +"the site administrator." +msgstr "" +"Esta acción le es prohibida. Si cree que Ud. debería de tener autorización, " +"favor de contactar al administrador del sitio. " + +msgid "This entity type permissions:" +msgstr "Permisos para este tipo de entidad:" + +msgid "Time" +msgstr "Hora" + +msgid "Time_plural" +msgstr "Horas" + +msgid "TrInfo" +msgstr "Información Transición" + +msgid "TrInfo_plural" +msgstr "Información Transiciones" + +msgid "Transition" +msgstr "Transición" + +msgid "Transition_plural" +msgstr "Transiciones" + +msgid "URLs from which content will be imported. You can put one url per line" +msgstr "" +"URLs desde el cual el contenido sera importado. Usted puede incluir un URL " +"por línea." + +msgid "Undoable actions" +msgstr "Acciones irreversibles" + +msgid "Undoing" +msgstr "Deshaciendo" + +msgid "UniqueConstraint" +msgstr "Restricción de Unicidad" + +msgid "Unknown source type" +msgstr "tipo de fuente desconocida" + +msgid "Unreachable objects" +msgstr "Objetos inaccesibles" + +#, python-format +msgid "Updated %(etype)s : %(entity)s" +msgstr "Se actualizó %(etype)s : %(entity)s" + +msgid "Used by:" +msgstr "Utilizado por :" + +msgid "Users and groups management" +msgstr "Usuarios y grupos de administradores" + +msgid "WARNING" +msgstr "ADVERTENCIA" + +msgid "Web server" +msgstr "Servidor web" + +msgid "Workflow" +msgstr "Workflow" + +msgid "Workflow history" +msgstr "Histórico del Workflow" + +msgid "WorkflowTransition" +msgstr "Transición de Workflow" + +msgid "WorkflowTransition_plural" +msgstr "Transiciones de Workflow" + +msgid "Workflow_plural" +msgstr "work flows" + +msgid "" +"You can either submit a new file using the browse button above, or choose to " +"remove already uploaded file by checking the \"detach attached file\" check-" +"box, or edit file content online with the widget below." +msgstr "" +"Usted puede proponer un nuevo archivo utilizando el botón\n" +"\"buscar\" aquí arriba, o eliminar el archivo ya elegido al\n" +"seleccionar el cuadro \"soltar archivo adjunto\", o editar el contenido\n" +"del archivo en línea con el componente inferior." + +msgid "" +"You can either submit a new file using the browse button above, or edit file " +"content online with the widget below." +msgstr "" +"Puede proponer un nuevo archivo utilizando el botón buscar \n" +"\"buscar\" en la parte superior, o editar el contenido del archivo en línea\n" +"en el campo siguiente." + +msgid "You can't change this relation" +msgstr "Usted no puede modificar esta relación" + +msgid "You cannot remove the system source" +msgstr "Usted no puede eliminar la fuente sistema" + +msgid "You cannot rename the system source" +msgstr "Usted no puede Renombrar la fuente sistema" + +msgid "" +"You have no access to this view or it can not be used to display the current " +"data." +msgstr "" +"No tiene permisos para accesar esta vista o No puede utilizarse para " +"desplegar los datos seleccionados." 
+ +msgid "" +"You're not authorized to access this page. If you think you should, please " +"contact the site administrator." +msgstr "" +"Usted no esta autorizado a acceder a esta página. Si Usted cree que \n" +"hay un error, favor de contactar al administrador del Sistema." + +#, python-format +msgid "[%s supervision] changes summary" +msgstr "[%s supervision] descripción de cambios" + +msgid "" +"a RQL expression which should return some results, else the transition won't " +"be available. This query may use X and U variables that will respectivly " +"represents the current entity and the current user." +msgstr "" +"una expresión RQL que debe haber enviado resultados, para que la transición " +"pueda ser realizada. Esta expresión puede utilizar las variables X y U que " +"representan respectivamente la entidad en transición y el usuario actual. " + +msgid "a URI representing an object in external data store" +msgstr "una URI designando un objeto en un repositorio de datos externo" + +msgid "a float is expected" +msgstr "un número flotante es requerido" + +msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" +msgstr "se espera un número (en segundos) ó 20s, 10min, 24h ó 4d " + +msgid "" +"a simple cache entity characterized by a name and a validity date. The " +"target application is responsible for updating timestamp when necessary to " +"invalidate the cache (typically in hooks). Also, checkout the AppObject." +"get_cache() method." +msgstr "" +"un caché simple caracterizado por un nombre y una fecha de validez. Es\n" +"el código de la instancia quién es responsable de actualizar la fecha de\n" +"validez mientras el caché debe ser invalidado (en general en un hook).\n" +"Para recuperar un caché, hace falta utilizar el método\n" +"get_cache(cachename)." 
+ +msgid "abstract base class for transitions" +msgstr "Clase de base abstracta para la transiciones" + +msgid "action menu" +msgstr "acciones" + +msgid "action(s) on this selection" +msgstr "Acción(es) en esta selección" + +msgid "actions" +msgstr "Acciones" + +msgid "activate" +msgstr "Activar" + +msgid "activated" +msgstr "Activado" + +msgid "add" +msgstr "Agregar" + +msgid "add Bookmark bookmarked_by CWUser object" +msgstr "Agregar a los favoritos " + +msgid "add CWAttribute add_permission RQLExpression subject" +msgstr "Expresión RQL de agregación" + +msgid "add CWAttribute constrained_by CWConstraint subject" +msgstr "Restricción" + +msgid "add CWAttribute read_permission RQLExpression subject" +msgstr "Expresión RQL de lectura" + +msgid "add CWAttribute relation_type CWRType object" +msgstr "Definición de atributo" + +msgid "add CWAttribute update_permission RQLExpression subject" +msgstr "Permiso de actualización" + +msgid "add CWEType add_permission RQLExpression subject" +msgstr "Expresión RQL de agregación" + +msgid "add CWEType delete_permission RQLExpression subject" +msgstr "Expresión RQL de eliminación" + +msgid "add CWEType read_permission RQLExpression subject" +msgstr "Expresión RQL de lectura" + +msgid "add CWEType update_permission RQLExpression subject" +msgstr "Definir una expresión RQL de actualización" + +msgid "add CWProperty for_user CWUser object" +msgstr "Propiedad" + +msgid "add CWRelation add_permission RQLExpression subject" +msgstr "Expresión RQL de agregar" + +msgid "add CWRelation constrained_by CWConstraint subject" +msgstr "Restricción" + +msgid "add CWRelation delete_permission RQLExpression subject" +msgstr "Expresión RQL de supresión" + +msgid "add CWRelation read_permission RQLExpression subject" +msgstr "Expresión RQL de lectura" + +msgid "add CWRelation relation_type CWRType object" +msgstr "Definición de relación" + +msgid "add CWSourceHostConfig cw_host_config_of CWSource object" +msgstr "configuración del host" + +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "restricción de singularidad" + +msgid "add CWUser in_group CWGroup object" +msgstr "Usuario" + +msgid "add CWUser use_email EmailAddress subject" +msgstr "Email" + +msgid "add State allowed_transition Transition object" +msgstr "Estado en entrada" + +msgid "add State allowed_transition Transition subject" +msgstr "Transición en salida" + +msgid "add State allowed_transition WorkflowTransition subject" +msgstr "Transición workflow en salida" + +msgid "add State state_of Workflow object" +msgstr "Estado" + +msgid "add Transition condition RQLExpression subject" +msgstr "Restricción" + +msgid "add Transition destination_state State object" +msgstr "Transición de entrada" + +msgid "add Transition destination_state State subject" +msgstr "Estado de salida" + +msgid "add Transition transition_of Workflow object" +msgstr "Transición" + +msgid "add WorkflowTransition condition RQLExpression subject" +msgstr "Condición" + +msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" +msgstr "Salida de sub-workflow" + +msgid "add WorkflowTransition transition_of Workflow object" +msgstr "Transición Workflow" + +msgid "add a BaseTransition" +msgstr "" + +msgid "add a Bookmark" +msgstr "" + +msgid "add a CWAttribute" +msgstr "" + +msgid "add a CWCache" +msgstr "" + +msgid "add a CWComputedRType" +msgstr "" + +msgid "add a CWConstraint" +msgstr "" + +msgid "add a CWConstraintType" +msgstr "" + +msgid "add a CWDataImport" +msgstr "" + +msgid "add a CWEType" 
+msgstr "" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "add a CWEType" +msgstr "Agregar un tipo de entidad" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "add a CWEType" +msgstr "Agregar un tipo de entidad" + +msgid "add a CWGroup" +msgstr "" + +msgid "add a CWProperty" +msgstr "" + +msgid "add a CWRType" +msgstr "" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "add a CWRType" +msgstr "Agregar un tipo de relación" + +msgid "add a CWRelation" +msgstr "" + +msgid "add a CWSource" +msgstr "" + +msgid "add a CWSourceHostConfig" +msgstr "" + +msgid "add a CWSourceSchemaConfig" +msgstr "" + +msgid "add a CWUniqueTogetherConstraint" +msgstr "" + +msgid "add a CWUser" +msgstr "" + +msgid "add a EmailAddress" +msgstr "" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "add a EmailAddress" +msgstr "Agregar correo electrónico" + +msgid "add a ExternalUri" +msgstr "" + +msgid "add a RQLExpression" +msgstr "" + +msgid "add a State" +msgstr "" + +msgid "add a SubWorkflowExitPoint" +msgstr "" + +msgid "add a TrInfo" +msgstr "" + +msgid "add a Transition" +msgstr "" + +msgid "add a Workflow" +msgstr "" + +msgid "add a WorkflowTransition" +msgstr "" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgid "add_permission" +msgstr "Autorización para agregar" + +msgctxt "CWAttribute" +msgid "add_permission" +msgstr "Permiso de agregar" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgctxt "CWEType" +msgid "add_permission" +msgstr "Permiso de agregar" + +msgctxt "CWRelation" +msgid "add_permission" +msgstr "Permiso de agregar" + +msgid "add_permission_object" +msgstr "tiene permiso de agregar" + +msgctxt "CWGroup" +msgid "add_permission_object" +msgstr "tiene permiso de agregar" + +msgctxt "RQLExpression" +msgid "add_permission_object" +msgstr "tiene permiso de agregar" + +msgid "add_relation" +msgstr "agregar" + +#, python-format +msgid "added %(etype)s #%(eid)s (%(title)s)" +msgstr "se agregó %(etype)s #%(eid)s (%(title)s)" + +#, python-format +msgid "" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" +"la relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s " +"ha sido agregada" + +msgid "additional type specific properties" +msgstr "propiedades adicionales específicas al tipo" + +msgid "addrelated" +msgstr "Agregar" + +msgid "address" +msgstr "correo electrónico" + +msgctxt "EmailAddress" +msgid "address" +msgstr "correo electrónico" + +msgid "alias" +msgstr "alias" + +msgctxt "EmailAddress" +msgid "alias" +msgstr "alias" + +msgid "allow to set a specific workflow for an entity" +msgstr "permite definir un Workflow específico para una entidad" + +msgid "allowed options depends on the source type" +msgstr "las opciones permitidas dependen del tipo de fuente" + +msgid "allowed transitions from this state" +msgstr "transiciones autorizadas desde este estado" + +#, python-format +msgid "allowed values for \"action\" are %s" +msgstr "los valores permitidos por \"acción\" son %s" + +msgid "allowed_transition" +msgstr "transiciones autorizadas" + +msgctxt "State" +msgid "allowed_transition" +msgstr "transiciones autorizadas" + +msgid "allowed_transition_object" +msgstr "Estados de entrada" + +msgctxt "BaseTransition" +msgid "allowed_transition_object" +msgstr "transición autorizada de" + +msgctxt "Transition" +msgid "allowed_transition_object" +msgstr "transición autorizada de" + +msgctxt 
"WorkflowTransition" +msgid "allowed_transition_object" +msgstr "transición autorizada de" + +msgid "an electronic mail address associated to a short alias" +msgstr "una dirección electrónica asociada a este alias" + +msgid "an error occurred" +msgstr "Ha ocurrido un error" + +msgid "an error occurred while processing your request" +msgstr "un error ocurrió al procesar su demanda" + +msgid "an error occurred, the request cannot be fulfilled" +msgstr "un error ha ocurrido, la búsqueda no ha podido ser realizada" + +msgid "an integer is expected" +msgstr "un número entero es esperado" + +msgid "and linked" +msgstr "y relacionada" + +msgid "and/or between different values" +msgstr "y/o entre los diferentes valores" + +msgid "anyrsetview" +msgstr "vistas rset" + +msgid "april" +msgstr "Abril" + +#, python-format +msgid "archive for %(author)s" +msgstr "archivo de %(author)s" + +#, python-format +msgid "archive for %(month)s/%(year)s" +msgstr "archivo del %(month)s/%(year)s" + +#, python-format +msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" +msgstr "" +"La entidad #%(eid)s de tipo %(etype)s debe necesariamente tener almenos una " +"relación de tipo %(rtype)s" + +msgid "attribute" +msgstr "Atributo" + +msgid "august" +msgstr "Agosto" + +msgid "authentication failure" +msgstr "Usuario o contraseña incorrecta" + +msgid "auto" +msgstr "Automático" + +msgid "autocomputed attribute used to ensure transition coherency" +msgstr "" +"Atributo automatizado usado para asegurar la coherencia en la transición" + +msgid "automatic" +msgstr "Automático" + +#, python-format +msgid "back to pagination (%s results)" +msgstr "regresar a paginación (%s resultados)" + +msgid "bad value" +msgstr "Valor erróneo" + +msgid "badly formatted url" +msgstr "url mal formateado" + +msgid "base url" +msgstr "Url de base" + +msgid "bookmark has been removed" +msgstr "Ha sido eliminado de sus favoritos" + +msgid "bookmark this page" +msgstr "Agregar esta página a los favoritos" + +msgid "bookmark this search" +msgstr "Guardar esta búsqueda" + +msgid "bookmarked_by" +msgstr "está en los Favoritos de" + +msgctxt "Bookmark" +msgid "bookmarked_by" +msgstr "está en los Favoritos de" + +msgid "bookmarked_by_object" +msgstr "tiene como Favoritos" + +msgctxt "CWUser" +msgid "bookmarked_by_object" +msgstr "tiene como Favoritos" + +msgid "bookmarks" +msgstr "Favoritos" + +msgid "bookmarks are used to have user's specific internal links" +msgstr "los Favoritos son ligas directas a espacios guardados por el usuario" + +msgid "boxes" +msgstr "Cajas" + +msgid "bug report sent" +msgstr "Reporte de error enviado" + +msgid "button_apply" +msgstr "Aplicar" + +msgid "button_cancel" +msgstr "Cancelar" + +msgid "button_delete" +msgstr "Eliminar" + +msgid "button_ok" +msgstr "Validar" + +msgid "by" +msgstr "por" + +msgid "by relation" +msgstr "por la relación" + +msgid "by_transition" +msgstr "transición" + +msgctxt "TrInfo" +msgid "by_transition" +msgstr "transición" + +msgid "by_transition_object" +msgstr "cambio de estados" + +msgctxt "BaseTransition" +msgid "by_transition_object" +msgstr "tiene como información" + +msgctxt "Transition" +msgid "by_transition_object" +msgstr "tiene como información" + +msgctxt "WorkflowTransition" +msgid "by_transition_object" +msgstr "tiene como información" + +msgid "calendar" +msgstr "mostrar un calendario" + +msgid "can not resolve entity types:" +msgstr "Imposible de interpretar los tipos de entidades:" + +msgid "can only have one url" +msgstr "solo puede tener un URL" + +msgid 
"can't be changed" +msgstr "No puede ser modificado" + +msgid "can't be deleted" +msgstr "No puede ser eliminado" + +msgid "can't change this attribute" +msgstr "no puede modificar este atributo" + +#, python-format +msgid "can't display data, unexpected error: %s" +msgstr "imposible de mostrar los datos, a causa del siguiente error: %s" + +msgid "can't have multiple exits on the same state" +msgstr "no puede tener varias salidas en el mismo estado" + +#, python-format +msgid "can't parse %(value)r (expected %(format)s)" +msgstr "no puede analizar %(value)r (formato requerido : %(format)s)" + +#, python-format +msgid "" +"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " +"%(value)s) does not exist any longer" +msgstr "" +"no se pudo restaurar la entidad %(eid)s del tipo %(eschema)s, objetivo de " +"%(rtype)s (eid %(value)s) pues ésta ya no existe" + +#, python-format +msgid "" +"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " +"exist in the schema anymore." +msgstr "" +"no se pudo restaurar la relación %(rtype)s de la entidad %(eid)s, esta " +"relación ya no existe en el esquema. " + +#, python-format +msgid "can't restore state of entity %s, it has been deleted inbetween" +msgstr "" +"no se puede restaurar el estado de la entidad %s, se ha borrado desde " +"entonces" + +#, python-format +msgid "" +"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" +"%(card)s" +msgstr "" +"no puede poner 'inlined' = True, %(stype)s %(rtype)s %(otype)s tiene " +"cardinalidad %(card)s" + +msgid "cancel" +msgstr "anular" + +msgid "cancel select" +msgstr "Cancelar la selección" + +msgid "cancel this insert" +msgstr "Cancelar esta inserción" + +msgid "cardinality" +msgstr "cardinalidad" + +msgctxt "CWAttribute" +msgid "cardinality" +msgstr "cardinalidad" + +msgctxt "CWRelation" +msgid "cardinality" +msgstr "cardinalidad" + +msgid "category" +msgstr "categoría" + +#, python-format +msgid "changed state of %(etype)s #%(eid)s (%(title)s)" +msgstr "Cambiar del estado de %(etype)s #%(eid)s (%(title)s)" + +msgid "changes applied" +msgstr "Cambios realizados" + +msgid "click here to see created entity" +msgstr "Ver la entidad creada" + +msgid "click here to see edited entity" +msgstr "seleccione aquí para ver la entidad modificada" + +msgid "click on the box to cancel the deletion" +msgstr "Seleccione la zona de edición para cancelar la eliminación" + +msgid "click to add a value" +msgstr "seleccione para agregar un valor" + +msgid "click to delete this value" +msgstr "seleccione para eliminar este valor" + +msgid "click to edit this field" +msgstr "seleccione para editar este campo" + +msgid "close all" +msgstr "cerrar todos" + +msgid "comment" +msgstr "Comentario" + +msgctxt "TrInfo" +msgid "comment" +msgstr "Comentario" + +msgid "comment_format" +msgstr "Formato" + +msgctxt "TrInfo" +msgid "comment_format" +msgstr "Formato" + +msgid "components" +msgstr "Componentes" + +msgid "components_navigation" +msgstr "Navigación por página" + +msgid "components_navigation_description" +msgstr "" +"Componente que permite presentar en varias páginas los resultados de " +"búsqueda cuando son mayores a un número predeterminado " + +msgid "components_rqlinput" +msgstr "Barra RQL" + +msgid "components_rqlinput_description" +msgstr "La barra para realizar consultas en RQL, en el encabezado de página" + +msgid "composite" +msgstr "composite" + +msgctxt "CWRelation" +msgid "composite" +msgstr "composite" + +msgid "condition" +msgstr "condición" + +msgctxt 
"BaseTransition" +msgid "condition" +msgstr "condición" + +msgctxt "Transition" +msgid "condition" +msgstr "condición" + +msgctxt "WorkflowTransition" +msgid "condition" +msgstr "condición" + +msgid "condition_object" +msgstr "condición de" + +msgctxt "RQLExpression" +msgid "condition_object" +msgstr "condición de" + +msgid "conditions" +msgstr "condiciones" + +msgid "config" +msgstr "configuración" + +msgctxt "CWSource" +msgid "config" +msgstr "configuración" + +msgctxt "CWSourceHostConfig" +msgid "config" +msgstr "configuración" + +msgid "config mode" +msgstr "Modo de configuración" + +msgid "config type" +msgstr "Tipo de configuración" + +msgid "confirm password" +msgstr "Confirmar contraseña" + +msgid "constrained_by" +msgstr "Restricción impuesta por" + +msgctxt "CWAttribute" +msgid "constrained_by" +msgstr "Restricción impuesta por" + +msgctxt "CWRelation" +msgid "constrained_by" +msgstr "Restricción impuesta por" + +msgid "constrained_by_object" +msgstr "Restricción de" + +msgctxt "CWConstraint" +msgid "constrained_by_object" +msgstr "Restricción de" + +msgid "constraint factory" +msgstr "Fábrica de restricciones" + +msgid "constraint_of" +msgstr "restricción de" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "restricción de" + +msgid "constraint_of_object" +msgstr "restringida por" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "restringida por" + +msgid "constraints" +msgstr "Restricciones" + +msgid "constraints applying on this relation" +msgstr "Restricciones que se aplican a esta relación" + +msgid "content type" +msgstr "tipo MIME" + +msgid "context" +msgstr "Contexto" + +msgid "context where this box should be displayed" +msgstr "Contexto en el cual la caja debe aparecer en el sistema" + +msgid "context where this component should be displayed" +msgstr "Contexto en el cual el componente debe aparecer en el sistema" + +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" +"Contexto en el cual esta faceta debe ser mostrada, dejar vacia para ambos" + +msgid "control subject entity's relations order" +msgstr "Controla el orden de relaciones de la entidad sujeto" + +msgid "copy" +msgstr "Copiar" + +msgid "core relation indicating a user's groups" +msgstr "" +"Relación sistema que indica los grupos a los cuales pertenece un usuario" + +msgid "" +"core relation indicating owners of an entity. This relation implicitly put " +"the owner into the owners group for the entity" +msgstr "" +"Relación sistema que indica el(los) propietario(s) de una entidad. Esta " +"relación pone de manera implícita al propietario en el grupo de propietarios " +"de una entidad." + +msgid "core relation indicating the original creator of an entity" +msgstr "Relación sistema que indica el creador de una entidad." + +msgid "core relation indicating the type of an entity" +msgstr "Relación sistema que indica el tipo de entidad." 
+ +msgid "" +"core relation indicating the types (including specialized types) of an entity" +msgstr "" +"Relación sistema indicando los tipos (incluídos los tipos padres) de una " +"entidad" + +msgid "could not connect to the SMTP server" +msgstr "Imposible de conectarse al servidor SMTP" + +msgid "create an index for quick search on this attribute" +msgstr "Crear un índice para accelerar las búsquedas sobre este atributo" + +msgid "created on" +msgstr "creado el" + +msgid "created_by" +msgstr "creado por" + +msgid "created_by_object" +msgstr "ha creado" + +msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" +msgstr "Creando Favorito" + +msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" +msgstr "Creación del atributo %(linkto)s" + +msgid "" +"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" +msgstr "Creación condicionada por el atributo %(linkto)s" + +msgid "" +"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" +msgstr "Creación condicionada por la relación %(linkto)s" + +msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" +msgstr "Creación de una propiedad por el usuario %(linkto)s" + +msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" +msgstr "Creación de la relación %(linkto)s" + +msgid "" +"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " +"%(linkto)s)" +msgstr "creación de una configuración host para la fuente %(linkto)s" + +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "creación de una restricción de singularidad en %(linkto)s" + +msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" +msgstr "Creación de un usuario para agregar al grupo %(linkto)s" + +msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" +msgstr "Creación de una dirección electrónica para el usuario %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" +msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" +msgstr "creación de una expresión RQL por el derecho de lectura de %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s update_permission " +"RQLExpression)" +msgstr "" +"creación de una expresión RQL por el derecho de actualización de %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" +msgstr "" +"Creación de una expresión RQL para la autorización de agregar %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" +msgstr "" +"Creación de una expresión RQL para la autorización de eliminar %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" +msgstr "Creación de una expresión RQL para permitir leer %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" +msgstr "Creación de una expresión RQL para permitir actualizar %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" +msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s delete_permission " +"RQLExpression)" +msgstr "Creación de una expresión RQL para permitir eliminar %(linkto)s" + +msgid "" +"creating 
RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" +msgstr "Creación de una expresión RQL para permitir leer %(linkto)s" + +msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" +msgstr "Creación de una expresión RQL para la transición %(linkto)s" + +msgid "" +"creating RQLExpression (WorkflowTransition %(linkto)s condition " +"RQLExpression)" +msgstr "Creación de una expresión RQL para la transición Workflow %(linkto)s" + +msgid "creating State (State allowed_transition Transition %(linkto)s)" +msgstr "Creación de un estado que puede ir hacia la transición %(linkto)s" + +msgid "creating State (State state_of Workflow %(linkto)s)" +msgstr "Creando un Estado del Workflow" + +msgid "creating State (Transition %(linkto)s destination_state State)" +msgstr "Creación de un Estado Destinación de la Transición %(linkto)s" + +msgid "" +"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " +"subworkflow_exit SubWorkflowExitPoint)" +msgstr "creación de un punto de Salida de la Transición Workflow %(linkto)s" + +msgid "creating Transition (State %(linkto)s allowed_transition Transition)" +msgstr "Creación de una transición autorizada desde el Estado %(linkto)s" + +msgid "creating Transition (Transition destination_state State %(linkto)s)" +msgstr "Creación de un transición hacia el Estado %(linkto)s" + +msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" +msgstr "Creación de una Transición Workflow %(linkto)s" + +msgid "" +"creating WorkflowTransition (State %(linkto)s allowed_transition " +"WorkflowTransition)" +msgstr "" +"Creación de una Transición Workflow permitida desde el estado %(linkto)s" + +msgid "" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" +msgstr "Creación de una Transición Workflow del Workflow %(linkto)s" + +msgid "creation" +msgstr "Creación" + +msgid "creation date" +msgstr "Fecha de Creación" + +msgid "creation time of an entity" +msgstr "Fecha de creación de una entidad" + +msgid "creation_date" +msgstr "Fecha de Creación" + +msgid "cstrtype" +msgstr "Tipo de restricción" + +msgctxt "CWConstraint" +msgid "cstrtype" +msgstr "Tipo" + +msgid "cstrtype_object" +msgstr "utilizado por" + +msgctxt "CWConstraintType" +msgid "cstrtype_object" +msgstr "Tipo de restricciones" + +msgid "csv export" +msgstr "Exportar en CSV" + +msgid "csv export (entities)" +msgstr "Exportar a CSV (entidades)" + +msgid "ctxcomponents" +msgstr "Componentes contextuales" + +msgid "ctxcomponents_anonuserlink" +msgstr "Liga usuario" + +msgid "ctxcomponents_anonuserlink_description" +msgstr "" +"Muestra un enlace hacia el formulario de conexión para los usuarios " +"anónimos, o una caja que contiene los enlaces del usuario conectado. 
" + +msgid "ctxcomponents_appliname" +msgstr "Nombre de la aplicación" + +msgid "ctxcomponents_appliname_description" +msgstr "Muestra el nombre de la aplicación en el encabezado de la página" + +msgid "ctxcomponents_bookmarks_box" +msgstr "Caja de Favoritos" + +msgid "ctxcomponents_bookmarks_box_description" +msgstr "Muestra y permite administrar los favoritos del usuario" + +msgid "ctxcomponents_breadcrumbs" +msgstr "Ruta de Navegación" + +msgid "ctxcomponents_breadcrumbs_description" +msgstr "Muestra la ruta que permite localizar la página actual en el Sistema" + +msgid "ctxcomponents_download_box" +msgstr "Configuración de caja de descargas" + +msgid "ctxcomponents_download_box_description" +msgstr "Caja que contiene los elementos descargados" + +msgid "ctxcomponents_edit_box" +msgstr "Caja de Acciones" + +msgid "ctxcomponents_edit_box_description" +msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados" + +msgid "ctxcomponents_facet.filterbox" +msgstr "Filtros" + +msgid "ctxcomponents_facet.filterbox_description" +msgstr "Muestra los filtros aplicables a una búsqueda realizada" + +msgid "ctxcomponents_logo" +msgstr "logo" + +msgid "ctxcomponents_logo_description" +msgstr "El logo de la aplicación, en el encabezado de página" + +msgid "ctxcomponents_metadata" +msgstr "Metadatos de la Entidad" + +msgid "ctxcomponents_metadata_description" +msgstr "espacio que incluye los metadatos de la entidad actual" + +msgid "ctxcomponents_possible_views_box" +msgstr "Caja de Vistas Posibles" + +msgid "ctxcomponents_possible_views_box_description" +msgstr "Muestra las vistas posibles a aplicar a los datos seleccionados" + +msgid "ctxcomponents_prevnext" +msgstr "Elemento anterior / siguiente" + +msgid "ctxcomponents_prevnext_description" +msgstr "" +"Muestra las ligas que permiten pasar de una entidad a otra en las entidades " +"que implementan la interface \"anterior/siguiente\"." + +msgid "ctxcomponents_rss" +msgstr "Ícono RSS" + +msgid "ctxcomponents_rss_description" +msgstr "Muestra el ícono RSS para vistas RSS" + +msgid "ctxcomponents_search_box" +msgstr "Caja de búsqueda" + +msgid "ctxcomponents_search_box_description" +msgstr "" +"Permite realizar una búsqueda simple para cualquier tipo de dato en la " +"aplicación" + +msgid "ctxcomponents_startup_views_box" +msgstr "Caja Vistas de inicio" + +msgid "ctxcomponents_startup_views_box_description" +msgstr "Muestra las vistas de inicio de la aplicación" + +msgid "ctxcomponents_userstatus" +msgstr "estado del usuario" + +msgid "ctxcomponents_userstatus_description" +msgstr "establece el estado del usuario" + +msgid "ctxcomponents_wfhistory" +msgstr "Histórico del workflow." + +msgid "ctxcomponents_wfhistory_description" +msgstr "" +"Sección que muestra el reporte histórico de las transiciones del workflow. " +"Aplica solo en entidades con workflow." 
+ +msgid "ctxtoolbar" +msgstr "Barra de herramientas" + +msgid "custom_workflow" +msgstr "Workflow específico" + +msgid "custom_workflow_object" +msgstr "Workflow de" + +msgid "cw.groups-management" +msgstr "grupos" + +msgid "cw.users-management" +msgstr "usuarios" + +msgid "cw_for_source" +msgstr "fuente" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_for_source" +msgstr "fuente" + +msgid "cw_for_source_object" +msgstr "elemento de mapeo" + +msgctxt "CWSource" +msgid "cw_for_source_object" +msgstr "elemento de mapeo" + +msgid "cw_host_config_of" +msgstr "configuración del host de" + +msgctxt "CWSourceHostConfig" +msgid "cw_host_config_of" +msgstr "configuración del host de" + +msgid "cw_host_config_of_object" +msgstr "tiene la configuración del host" + +msgctxt "CWSource" +msgid "cw_host_config_of_object" +msgstr "tiene la configuración del host" + +msgid "cw_import_of" +msgstr "fuente" + +msgctxt "CWDataImport" +msgid "cw_import_of" +msgstr "fuente" + +msgid "cw_import_of_object" +msgstr "importación" + +msgctxt "CWSource" +msgid "cw_import_of_object" +msgstr "importación" + +msgid "cw_schema" +msgstr "esquema" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_schema" +msgstr "esquema" + +msgid "cw_schema_object" +msgstr "mapeado por" + +msgctxt "CWEType" +msgid "cw_schema_object" +msgstr "mapeado por" + +msgctxt "CWRType" +msgid "cw_schema_object" +msgstr "mapeado por" + +msgctxt "CWRelation" +msgid "cw_schema_object" +msgstr "mapeado por" + +msgid "cw_source" +msgstr "desde la fuente de datos" + +msgid "cw_source_object" +msgstr "entidades" + +msgid "cwetype-box" +msgstr "Vista \"caja\"" + +msgid "cwetype-description" +msgstr "Descripción" + +msgid "cwetype-permissions" +msgstr "Permisos" + +msgid "cwetype-views" +msgstr "Vistas" + +msgid "cwetype-workflow" +msgstr "Workflow" + +msgid "cwgroup-main" +msgstr "Descripción" + +msgid "cwgroup-permissions" +msgstr "Permisos" + +msgid "cwrtype-description" +msgstr "Descripción" + +msgid "cwrtype-permissions" +msgstr "Permisos" + +msgid "cwsource-imports" +msgstr "importación" + +msgid "cwsource-main" +msgstr "descripción" + +msgid "cwsource-mapping" +msgstr "mapeo" + +msgid "cwuri" +msgstr "Uri Interna" + +msgid "data directory url" +msgstr "Url del repertorio de datos" + +msgid "data model schema" +msgstr "Esquema del Sistema" + +msgid "data sources" +msgstr "fuente de datos" + +msgid "data sources management" +msgstr "administración de fuentes de datos" + +msgid "date" +msgstr "Fecha" + +msgid "deactivate" +msgstr "Desactivar" + +msgid "deactivated" +msgstr "Desactivado" + +msgid "december" +msgstr "Diciembre" + +msgid "default" +msgstr "Valor por defecto" + +msgid "default text format for rich text fields." 
+msgstr "" +"Formato de texto que se utilizará por defecto para los campos de tipo texto" + +msgid "default user workflow" +msgstr "Workflow por defecto de los usuarios" + +msgid "default value" +msgstr "Valor por defecto" + +msgid "default value as gziped pickled python object" +msgstr "valor por defecto, en la forma de objeto python, al usar pickle y gzip" + +msgid "default workflow for an entity type" +msgstr "Workflow por defecto para un tipo de entidad" + +msgid "default_workflow" +msgstr "Workflow por defecto" + +msgctxt "CWEType" +msgid "default_workflow" +msgstr "Workflow por defecto" + +msgid "default_workflow_object" +msgstr "Workflow por defecto de" + +msgctxt "Workflow" +msgid "default_workflow_object" +msgstr "Workflow por defecto de" + +msgid "defaultval" +msgstr "Valor por defecto" + +msgctxt "CWAttribute" +msgid "defaultval" +msgstr "Valor por defecto" + +msgid "define a CubicWeb user" +msgstr "Define un usuario CubicWeb" + +msgid "define a CubicWeb users group" +msgstr "Define un grupo de usuarios CubicWeb" + +msgid "" +"define a final relation: link a final relation type from a non final entity " +"to a final entity type. used to build the instance schema" +msgstr "" +"Define una relación final: liga un tipo de relación final desde una entidad " +"NO final hacia un tipo de entidad final. Se usa para crear el esquema de la " +"instancia." + +msgid "" +"define a non final relation: link a non final relation type from a non final " +"entity to a non final entity type. used to build the instance schema" +msgstr "" +"Define una relación NO final: liga un tipo de relación NO final desde una " +"entidad NO final hacia un tipo de entidad NO final. Se usa para crear el " +"esquema de la instancia." + +msgid "define a relation type, used to build the instance schema" +msgstr "" +"Define un tipo de relación, usado para construir el esquema de la instancia." + +msgid "define a rql expression used to define permissions" +msgstr "Expresión RQL utilizada para definir los derechos de acceso" + +msgid "define a schema constraint" +msgstr "Define una condición de esquema" + +msgid "define a schema constraint type" +msgstr "Define un tipo de condición de esquema" + +msgid "define a virtual relation type, used to build the instance schema" +msgstr "" + +msgid "define an entity type, used to build the instance schema" +msgstr "" +"Define un tipo de entidad, usado para construir el esquema de la instancia." + +msgid "define how we get out from a sub-workflow" +msgstr "Define como salir de un sub-Workflow" + +msgid "defines a sql-level multicolumn unique index" +msgstr "define un índice SQL único a través de varias columnas" + +msgid "" +"defines what's the property is applied for. You must select this first to be " +"able to set value" +msgstr "" +"Define a que se aplica la propiedad . 
Debe de seleccionar esto antes de " +"establecer un valor" + +msgid "delete" +msgstr "Eliminar" + +msgid "delete this bookmark" +msgstr "Eliminar este favorito" + +msgid "delete this relation" +msgstr "Eliminar esta relación" + +msgid "delete_permission" +msgstr "Permiso de eliminar" + +msgctxt "CWEType" +msgid "delete_permission" +msgstr "Permiso de eliminar" + +msgctxt "CWRelation" +msgid "delete_permission" +msgstr "Permiso de eliminar" + +msgid "delete_permission_object" +msgstr "posee permiso para eliminar" + +msgctxt "CWGroup" +msgid "delete_permission_object" +msgstr "puede eliminar" + +msgctxt "RQLExpression" +msgid "delete_permission_object" +msgstr "puede eliminar" + +#, python-format +msgid "deleted %(etype)s #%(eid)s (%(title)s)" +msgstr "Eliminación de la entidad %(etype)s #%(eid)s (%(title)s)" + +#, python-format +msgid "" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" +"La relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s " +"ha sido suprimida." + +msgid "depends on the constraint type" +msgstr "Depende del tipo de restricción" + +msgid "description" +msgstr "Descripción" + +msgctxt "BaseTransition" +msgid "description" +msgstr "Descripción" + +msgctxt "CWAttribute" +msgid "description" +msgstr "Descripción" + +msgctxt "CWComputedRType" +msgid "description" +msgstr "" + +msgctxt "CWEType" +msgid "description" +msgstr "Descripción" + +msgctxt "CWRType" +msgid "description" +msgstr "Descripción" + +msgctxt "CWRelation" +msgid "description" +msgstr "Descripción" + +msgctxt "State" +msgid "description" +msgstr "Descripción" + +msgctxt "Transition" +msgid "description" +msgstr "Descripción" + +msgctxt "Workflow" +msgid "description" +msgstr "Descripción" + +msgctxt "WorkflowTransition" +msgid "description" +msgstr "Descripción" + +msgid "description_format" +msgstr "Formato" + +msgctxt "BaseTransition" +msgid "description_format" +msgstr "Formato" + +msgctxt "CWAttribute" +msgid "description_format" +msgstr "Formato" + +msgctxt "CWComputedRType" +msgid "description_format" +msgstr "" + +msgctxt "CWEType" +msgid "description_format" +msgstr "Formato" + +msgctxt "CWRType" +msgid "description_format" +msgstr "Formato" + +msgctxt "CWRelation" +msgid "description_format" +msgstr "Formato" + +msgctxt "State" +msgid "description_format" +msgstr "Formato" + +msgctxt "Transition" +msgid "description_format" +msgstr "Formato" + +msgctxt "Workflow" +msgid "description_format" +msgstr "Formato" + +msgctxt "WorkflowTransition" +msgid "description_format" +msgstr "Formato" + +msgid "destination state for this transition" +msgstr "Estados accesibles para esta transición" + +msgid "destination state must be in the same workflow as our parent transition" +msgstr "" +"El estado de destino debe pertenecer al mismo Workflow que la transición " +"padre." + +msgid "destination state of a transition" +msgstr "Estado destino de una transición" + +msgid "" +"destination state. No destination state means that transition should go back " +"to the state from which we've entered the subworkflow." +msgstr "" +"Estado destino de la transición. Si el Estado destino no ha sido " +"especificado, la transición regresará hacia el estado que tenía la entidad " +"al entrar en el Sub-Workflow." 
+ +msgid "destination_state" +msgstr "Estado destino" + +msgctxt "SubWorkflowExitPoint" +msgid "destination_state" +msgstr "Estado destino" + +msgctxt "Transition" +msgid "destination_state" +msgstr "Estado destino" + +msgid "destination_state_object" +msgstr "Destino de" + +msgctxt "State" +msgid "destination_state_object" +msgstr "Estado final de" + +msgid "detach attached file" +msgstr "soltar el archivo existente" + +msgid "display order of the box" +msgstr "Orden de aparición de la caja" + +msgid "display order of the component" +msgstr "Orden de aparición del componente" + +msgid "display order of the facet" +msgstr "Orden de aparición de la faceta" + +msgid "display the box or not" +msgstr "Mostrar o no la caja" + +msgid "display the component or not" +msgstr "Mostrar o no el componente" + +msgid "display the facet or not" +msgstr "Mostrar o no la faceta" + +msgid "download" +msgstr "Descargar" + +#, python-format +msgid "download %s" +msgstr "Descargar %s" + +msgid "download icon" +msgstr "ícono de descarga" + +msgid "download schema as owl" +msgstr "Descargar esquema en formato OWL" + +msgid "edit bookmarks" +msgstr "Editar favoritos" + +msgid "edit canceled" +msgstr "Edición cancelada" + +msgid "editable-table" +msgstr "Tabla modificable" + +msgid "eid" +msgstr "eid" + +msgid "embedded html" +msgstr "Html incrustado" + +msgid "end_timestamp" +msgstr "horario final" + +msgctxt "CWDataImport" +msgid "end_timestamp" +msgstr "horario final" + +msgid "entities deleted" +msgstr "Entidades eliminadas" + +msgid "entity and relation types can't be mapped, only attributes or relations" +msgstr "" +"los tipos de entidad y relación no pueden ser mapeados, solo los atributos y " +"las relaciones" + +msgid "entity copied" +msgstr "Entidad copiada" + +msgid "entity created" +msgstr "Entidad creada" + +msgid "entity creation" +msgstr "Creación de entidad" + +msgid "entity deleted" +msgstr "Entidad eliminada" + +msgid "entity deletion" +msgstr "Eliminación de entidad" + +msgid "entity edited" +msgstr "Entidad modificada" + +msgid "entity has no workflow set" +msgstr "La entidad no tiene Workflow" + +msgid "entity linked" +msgstr "Entidad asociada" + +msgid "entity type" +msgstr "Tipo de entidad" + +msgid "entity types which may use this workflow" +msgstr "Tipos de entidades que pueden utilizar este Workflow" + +msgid "entity update" +msgstr "Actualización de la Entidad" + +msgid "entityview" +msgstr "vistas de entidades" + +msgid "error" +msgstr "error" + +msgid "error while publishing ReST text" +msgstr "" +"Se ha producido un error durante la interpretación del texto en formato ReST" + +msgid "exit state must be a subworkflow state" +msgstr "El estado de salida debe de ser un estado del Sub-Workflow" + +msgid "exit_point" +msgstr "Estado de Salida" + +msgid "exit_point_object" +msgstr "Estado de Salida de" + +#, python-format +msgid "exiting from subworkflow %s" +msgstr "Salida del subworkflow %s" + +msgid "expression" +msgstr "Expresión" + +msgctxt "RQLExpression" +msgid "expression" +msgstr "RQL de la expresión" + +msgid "exprtype" +msgstr "Tipo de la expresión" + +msgctxt "RQLExpression" +msgid "exprtype" +msgstr "Tipo" + +msgid "extra_props" +msgstr "propiedades adicionales" + +msgctxt "CWAttribute" +msgid "extra_props" +msgstr "propiedades adicionales" + +msgid "facet-loading-msg" +msgstr "procesando, espere por favor" + +msgid "facet.filters" +msgstr "Filtros" + +msgid "facetbox" +msgstr "Caja de facetas" + +msgid "facets_created_by-facet" +msgstr "Faceta \"creada por\"" + +msgid 
"facets_created_by-facet_description" +msgstr "Faceta creada por" + +msgid "facets_cw_source-facet" +msgstr "faceta \"fuente de datos\"" + +msgid "facets_cw_source-facet_description" +msgstr "fuente de datos" + +msgid "facets_cwfinal-facet" +msgstr "Faceta \"final\"" + +msgid "facets_cwfinal-facet_description" +msgstr "Faceta para las entidades \"finales\"" + +msgid "facets_datafeed.dataimport.status" +msgstr "estado de la importación" + +msgid "facets_datafeed.dataimport.status_description" +msgstr "Estado de la importación de datos" + +msgid "facets_etype-facet" +msgstr "Faceta \"es de tipo\"" + +msgid "facets_etype-facet_description" +msgstr "Faceta es de tipo" + +msgid "facets_has_text-facet" +msgstr "Faceta \"contiene el texto\"" + +msgid "facets_has_text-facet_description" +msgstr "Faceta contiene el texto" + +msgid "facets_in_group-facet" +msgstr "Faceta \"forma parte del grupo\"" + +msgid "facets_in_group-facet_description" +msgstr "Faceta en grupo" + +msgid "facets_in_state-facet" +msgstr "Faceta \"en el estado\"" + +msgid "facets_in_state-facet_description" +msgstr "Faceta en el estado" + +msgid "failed" +msgstr "fallido" + +#, python-format +msgid "failed to uniquify path (%s, %s)" +msgstr "No se pudo obtener un dato único (%s, %s)" + +msgid "february" +msgstr "Febrero" + +msgid "file tree view" +msgstr "Arborescencia (archivos)" + +msgid "final" +msgstr "Final" + +msgctxt "CWEType" +msgid "final" +msgstr "Final" + +msgctxt "CWRType" +msgid "final" +msgstr "Final" + +msgid "first name" +msgstr "Nombre" + +msgid "firstname" +msgstr "Nombre" + +msgctxt "CWUser" +msgid "firstname" +msgstr "Nombre" + +msgid "foaf" +msgstr "Amigo de un Amigo, FOAF" + +msgid "focus on this selection" +msgstr "muestre esta selección" + +msgid "follow" +msgstr "Seguir la liga" + +#, python-format +msgid "follow this link for more information on this %s" +msgstr "Seleccione esta liga para obtener mayor información sobre %s" + +msgid "for_user" +msgstr "Para el usuario" + +msgctxt "CWProperty" +msgid "for_user" +msgstr "Propiedad del Usuario" + +msgid "for_user_object" +msgstr "Utiliza las propiedades" + +msgctxt "CWUser" +msgid "for_user_object" +msgstr "Tiene como preferencia" + +msgid "formula" +msgstr "" + +msgctxt "CWAttribute" +msgid "formula" +msgstr "" + +msgid "friday" +msgstr "Viernes" + +msgid "from" +msgstr "De" + +#, python-format +msgid "from %(date)s" +msgstr "de %(date)s" + +msgid "from_entity" +msgstr "De la entidad" + +msgctxt "CWAttribute" +msgid "from_entity" +msgstr "Atributo de la entidad" + +msgctxt "CWRelation" +msgid "from_entity" +msgstr "Relación de la entidad" + +msgid "from_entity_object" +msgstr "Relación sujeto" + +msgctxt "CWEType" +msgid "from_entity_object" +msgstr "Entidad de" + +msgid "from_interval_start" +msgstr "De" + +msgid "from_state" +msgstr "Del Estado" + +msgctxt "TrInfo" +msgid "from_state" +msgstr "Estado de Inicio" + +msgid "from_state_object" +msgstr "Transiciones desde este estado" + +msgctxt "State" +msgid "from_state_object" +msgstr "Estado de Inicio de" + +msgid "full text or RQL query" +msgstr "Texto de búsqueda o demanda RQL" + +msgid "fulltext_container" +msgstr "Contenedor de texto indexado" + +msgctxt "CWRType" +msgid "fulltext_container" +msgstr "Objeto a indexar" + +msgid "fulltextindexed" +msgstr "Indexación de texto" + +msgctxt "CWAttribute" +msgid "fulltextindexed" +msgstr "Texto indexado" + +msgid "gc" +msgstr "fuga de memoria" + +msgid "generic plot" +msgstr "Gráfica Genérica" + +msgid "generic relation to link one entity to another" +msgstr 
"Relación genérica para ligar entidades" + +msgid "" +"generic relation to specify that an external entity represent the same " +"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" +msgstr "" +"Relación genérica que indicar que una entidad es idéntica a otro recurso web " +"(ver http://www.w3.org/TR/owl-ref/#sameAs-def)." + +msgid "granted to groups" +msgstr "Otorgado a los grupos" + +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "Representación gráfica del modelo de datos de %(appid)s" + +#, python-format +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" +"Representación gráfica del modelo de datos para el tipo de entidad %(etype)s " +"de %(appid)s" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" +"Representación gráfica del modelo de datos para el tipo de relación " +"%(rtype)s de %(appid)s" + +msgid "group in which a user should be to be allowed to pass this transition" +msgstr "Grupo en el cual el usuario debe estar lograr la transición" + +msgid "groups" +msgstr "Grupos" + +msgid "groups allowed to add entities/relations of this type" +msgstr "grupos autorizados a agregar entidades/relaciones de este tipo" + +msgid "groups allowed to delete entities/relations of this type" +msgstr "grupos autorizados a eliminar entidades/relaciones de este tipo" + +msgid "groups allowed to read entities/relations of this type" +msgstr "grupos autorizados a leer entidades/relaciones de este tipo" + +msgid "groups allowed to update entities/relations of this type" +msgstr "grupos autorizados a actualizar entidades/relaciones de este tipo" + +msgid "groups grant permissions to the user" +msgstr "Los grupos otorgan los permisos al usuario" + +msgid "guests" +msgstr "Invitados" + +msgid "hCalendar" +msgstr "hCalendar" + +msgid "has_text" +msgstr "Contiene el texto" + +msgid "header-center" +msgstr "header - centro" + +msgid "header-left" +msgstr "encabezado (izquierdo)" + +msgid "header-right" +msgstr "encabezado (derecho)" + +msgid "hide filter form" +msgstr "Esconder el filtro" + +msgid "" +"how to format date and time in the ui (see this page for format " +"description)" +msgstr "" +"Formato de fecha y hora que se utilizará por defecto en la interfaz (mayor información del formato)" + +msgid "" +"how to format date in the ui (see this page for format " +"description)" +msgstr "" +"Formato de fecha que se utilizará por defecto en la interfaz (mayor información del formato)" + +msgid "how to format float numbers in the ui" +msgstr "" +"Formato de números flotantes que se utilizará por defecto en la interfaz" + +msgid "" +"how to format time in the ui (see this page for format " +"description)" +msgstr "" +"Formato de hora que se utilizará por defecto en la interfaz (mayor información del formato)" + +msgid "i18n_bookmark_url_fqs" +msgstr "Parámetros" + +msgid "i18n_bookmark_url_path" +msgstr "Ruta" + +msgid "i18n_login_popup" +msgstr "Identificarse" + +msgid "i18ncard_*" +msgstr "0..n" + +msgid "i18ncard_+" +msgstr "1..n" + +msgid "i18ncard_1" +msgstr "1" + +msgid "i18ncard_?" 
+msgstr "0..1" + +msgid "i18nprevnext_next" +msgstr "Siguiente" + +msgid "i18nprevnext_previous" +msgstr "Anterior" + +msgid "i18nprevnext_up" +msgstr "Padre" + +msgid "iCalendar" +msgstr "iCalendar" + +msgid "id of main template used to render pages" +msgstr "ID del template principal" + +msgid "identical to" +msgstr "Idéntico a" + +msgid "identical_to" +msgstr "idéntico a" + +msgid "identity" +msgstr "es idéntico a" + +msgid "identity_object" +msgstr "es idéntico a" + +msgid "" +"if full text content of subject/object entity should be added to other side " +"entity (the container)." +msgstr "" +"Si el texto indexado de la entidad sujeto/objeto debe ser agregado a la " +"entidad al otro extremo de la relación (el contenedor)." + +msgid "image" +msgstr "Imagen" + +msgid "in progress" +msgstr "en progreso" + +msgid "in_group" +msgstr "En el grupo" + +msgctxt "CWUser" +msgid "in_group" +msgstr "Forma parte del grupo" + +msgid "in_group_object" +msgstr "Miembros" + +msgctxt "CWGroup" +msgid "in_group_object" +msgstr "Contiene los usuarios" + +msgid "in_state" +msgstr "Estado" + +msgid "in_state_object" +msgstr "Estado de" + +msgid "in_synchronization" +msgstr "sincronizado" + +msgctxt "CWSource" +msgid "in_synchronization" +msgstr "sincronizado" + +msgid "incontext" +msgstr "En el contexto" + +msgid "incorrect captcha value" +msgstr "Valor del Captcha incorrecto" + +#, python-format +msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" +msgstr "el valor (%(KEY-value)r) es incorrecto para el tipo \"%(KEY-type)s\"" + +msgid "index this attribute's value in the plain text index" +msgstr "Indexar el valor de este atributo en el índice de texto simple" + +msgid "indexed" +msgstr "Indexado" + +msgctxt "CWAttribute" +msgid "indexed" +msgstr "Indexado" + +msgid "indicate the current state of an entity" +msgstr "Indica el estado actual de una entidad" + +msgid "" +"indicate which state should be used by default when an entity using states " +"is created" +msgstr "" +"Indica cual estado deberá ser utilizado por defecto al crear una entidad" + +msgid "indifferent" +msgstr "indifferente" + +msgid "info" +msgstr "Información del Sistema" + +msgid "initial state for this workflow" +msgstr "Estado inicial para este Workflow" + +msgid "initial_state" +msgstr "Estado inicial" + +msgctxt "Workflow" +msgid "initial_state" +msgstr "Estado inicial" + +msgid "initial_state_object" +msgstr "Estado inicial de" + +msgctxt "State" +msgid "initial_state_object" +msgstr "Estado inicial de" + +msgid "inlined" +msgstr "Inlined" + +msgctxt "CWRType" +msgid "inlined" +msgstr "Inlined" + +msgid "instance home" +msgstr "Repertorio de la Instancia" + +msgid "internal entity uri" +msgstr "Uri Interna" + +msgid "internationalizable" +msgstr "Internacionalizable" + +msgctxt "CWAttribute" +msgid "internationalizable" +msgstr "Internacionalizable" + +#, python-format +msgid "invalid action %r" +msgstr "Acción %r invalida" + +#, python-format +msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" +msgstr "Valor %(KEY-value)s es incorrecto, seleccione entre %(KEY-choices)s" + +msgid "is" +msgstr "es" + +msgid "is object of:" +msgstr "es objeto de" + +msgid "is subject of:" +msgstr "es sujeto de" + +msgid "" +"is the subject/object entity of the relation composed of the other ? This " +"implies that when the composite is deleted, composants are also deleted." +msgstr "" +"Es la entidad sujeto/objeto de la relación une agregación de el otro ? 
De " +"ser así, el destruir el composite destruirá de igual manera sus componentes " + +msgid "is this attribute's value translatable" +msgstr "Es el valor de este atributo traducible ?" + +msgid "is this relation equivalent in both direction ?" +msgstr "Es esta relación equivalente en los ambos sentidos ?" + +msgid "" +"is this relation physically inlined? you should know what you're doing if " +"you are changing this!" +msgstr "" +"Es esta relación estilo INLINED en la base de datos ? Usted debe saber lo " +"que hace si cambia esto !" + +msgid "is_instance_of" +msgstr "es una instancia de" + +msgid "is_instance_of_object" +msgstr "tiene como instancias" + +msgid "is_object" +msgstr "tiene por instancia" + +msgid "january" +msgstr "Enero" + +msgid "json-entities-export-view" +msgstr "Exportación JSON (de entidades)" + +msgid "json-export-view" +msgstr "Exportación JSON" + +msgid "july" +msgstr "Julio" + +msgid "june" +msgstr "Junio" + +msgid "language of the user interface" +msgstr "Idioma que se utilizará por defecto en la interfaz usuario" + +msgid "last connection date" +msgstr "Ultima conexión" + +msgid "last login time" +msgstr "Ultima conexión" + +msgid "last name" +msgstr "Apellido" + +msgid "last usage" +msgstr "Ultimo uso" + +msgid "last_login_time" +msgstr "Ultima fecha de conexión" + +msgctxt "CWUser" +msgid "last_login_time" +msgstr "Ultima conexión" + +msgid "latest import" +msgstr "importaciones recientes" + +msgid "latest modification time of an entity" +msgstr "Fecha de la última modificación de una entidad " + +msgid "latest synchronization time" +msgstr "fecha de la última sincronización" + +msgid "latest update on" +msgstr "Actualizado el" + +msgid "latest_retrieval" +msgstr "última sincronización" + +msgctxt "CWSource" +msgid "latest_retrieval" +msgstr "fecha de la última sincronización de la fuente" + +msgid "left" +msgstr "izquierda" + +msgid "line" +msgstr "línea" + +msgid "" +"link a property to the user which want this property customization. Unless " +"you're a site manager, this relation will be handled automatically." +msgstr "" +"Liga una propiedad al usuario que desea esta personalización. Salvo que " +"usted sea un administrador del sistema, esta relación será administrada de " +"forma automática." 
+ +msgid "link a relation definition to its object entity type" +msgstr "Liga una definición de relación a su tipo de entidad objeto" + +msgid "link a relation definition to its relation type" +msgstr "Liga una definición de relación a su tipo de relación" + +msgid "link a relation definition to its subject entity type" +msgstr "Liga una definición de relación a su tipo de entidad" + +msgid "link a state to one or more workflow" +msgstr "Liga un estado a uno o más Workflow" + +msgid "link a transition information to its object" +msgstr "Liga una transición de informacion hacia los objetos asociados" + +msgid "link a transition to one or more workflow" +msgstr "Liga una transición a uno o más Workflow" + +msgid "link a workflow to one or more entity type" +msgstr "Liga un Workflow a uno a más tipos de entidad" + +msgid "list" +msgstr "Lista" + +msgid "log" +msgstr "log" + +msgctxt "CWDataImport" +msgid "log" +msgstr "log" + +msgid "log in" +msgstr "Acceder" + +msgid "login" +msgstr "Usuario" + +msgctxt "CWUser" +msgid "login" +msgstr "Usuario" + +msgid "login / password" +msgstr "usuario / contraseña" + +msgid "login or email" +msgstr "Usuario o dirección de correo" + +msgid "login_action" +msgstr "Ingresa tus datos" + +msgid "logout" +msgstr "Desconectarse" + +#, python-format +msgid "loop in %(rel)s relation (%(eid)s)" +msgstr "loop detectado en %(rel)s de la entidad #%(eid)s" + +msgid "main informations" +msgstr "Informaciones Generales" + +msgid "main_tab" +msgstr "descripción" + +msgid "mainvars" +msgstr "Variables principales" + +msgctxt "RQLExpression" +msgid "mainvars" +msgstr "Variables principales" + +msgid "manage" +msgstr "Administración Sistema" + +msgid "manage bookmarks" +msgstr "Gestión de favoritos" + +msgid "manage permissions" +msgstr "Gestión de permisos" + +msgid "managers" +msgstr "Administradores" + +msgid "mandatory relation" +msgstr "Relación obligatoria" + +msgid "march" +msgstr "Marzo" + +msgid "match_host" +msgstr "para el host" + +msgctxt "CWSourceHostConfig" +msgid "match_host" +msgstr "para el host" + +msgid "maximum number of characters in short description" +msgstr "Máximo de caracteres en las descripciones cortas" + +msgid "maximum number of entities to display in related combo box" +msgstr "Máximo de entidades a mostrar en las listas dinámicas" + +msgid "maximum number of objects displayed by page of results" +msgstr "Máximo de elementos mostrados por página de resultados" + +msgid "maximum number of related entities to display in the primary view" +msgstr "Máximo de entidades relacionadas a mostrar en la vista primaria" + +msgid "may" +msgstr "Mayo" + +msgid "memory leak debugging" +msgstr "depuración (debugging) de fuga de memoria" + +msgid "message" +msgstr "mensaje" + +#, python-format +msgid "missing parameters for entity %s" +msgstr "Parámetros faltantes a la entidad %s" + +msgid "modification" +msgstr "modificación" + +msgid "modification_date" +msgstr "Fecha de modificación" + +msgid "modify" +msgstr "Modificar" + +msgid "monday" +msgstr "Lunes" + +msgid "more actions" +msgstr "Más acciones" + +msgid "more info about this workflow" +msgstr "Más información acerca de este workflow" + +msgid "multiple edit" +msgstr "Edición multiple" + +msgid "my custom search" +msgstr "Mi búsqueda personalizada" + +msgid "name" +msgstr "Nombre" + +msgctxt "BaseTransition" +msgid "name" +msgstr "Nombre" + +msgctxt "CWCache" +msgid "name" +msgstr "Nombre" + +msgctxt "CWComputedRType" +msgid "name" +msgstr "" + +msgctxt "CWConstraintType" +msgid "name" +msgstr "Nombre" 
+ +msgctxt "CWEType" +msgid "name" +msgstr "Nombre" + +msgctxt "CWGroup" +msgid "name" +msgstr "Nombre" + +msgctxt "CWRType" +msgid "name" +msgstr "Nombre" + +msgctxt "CWSource" +msgid "name" +msgstr "nombre" + +msgctxt "CWUniqueTogetherConstraint" +msgid "name" +msgstr "nombre" + +msgctxt "State" +msgid "name" +msgstr "nombre" + +msgctxt "Transition" +msgid "name" +msgstr "Nombre" + +msgctxt "Workflow" +msgid "name" +msgstr "Nombre" + +msgctxt "WorkflowTransition" +msgid "name" +msgstr "Nombre" + +msgid "name of the cache" +msgstr "Nombre del Caché" + +msgid "" +"name of the main variables which should be used in the selection if " +"necessary (comma separated)" +msgstr "" +"Nombre de las variables principales que deberían ser utilizadas en la " +"selección de ser necesario (separarlas con comas)" + +msgid "name of the source" +msgstr "nombre de la fuente" + +msgid "navbottom" +msgstr "Pie de página" + +msgid "navcontentbottom" +msgstr "Pie de página del contenido principal" + +msgid "navcontenttop" +msgstr "Encabezado" + +msgid "navigation" +msgstr "Navegación" + +msgid "navigation.combobox-limit" +msgstr "ComboBox" + +msgid "navigation.page-size" +msgstr "Paginación" + +msgid "navigation.related-limit" +msgstr "Entidades relacionadas" + +msgid "navigation.short-line-size" +msgstr "Descripción corta" + +msgid "navtop" +msgstr "Encabezado del contenido principal" + +msgid "new" +msgstr "Nuevo" + +msgid "next page" +msgstr "página siguiente" + +msgid "next_results" +msgstr "Siguientes resultados" + +msgid "no" +msgstr "No" + +msgid "no content next link" +msgstr "no hay liga siguiente" + +msgid "no content prev link" +msgstr "no existe liga previa" + +msgid "no edited fields specified" +msgstr "ningún campo por editar especificado" + +msgid "no log to display" +msgstr "no arrojó elementos para mostrar" + +msgid "no related entity" +msgstr "No posee entidad asociada" + +msgid "no repository sessions found" +msgstr "Ninguna sesión encontrada" + +msgid "no selected entities" +msgstr "No hay entidades seleccionadas" + +#, python-format +msgid "no such entity type %s" +msgstr "El tipo de entidad '%s' no existe" + +msgid "no version information" +msgstr "No existe la información de version" + +msgid "no web sessions found" +msgstr "Ninguna sesión web encontrada" + +msgid "normal" +msgstr "Normal" + +msgid "not authorized" +msgstr "No autorizado" + +msgid "not selected" +msgstr "No seleccionado" + +msgid "november" +msgstr "Noviembre" + +msgid "num. 
users" +msgstr "Número de Usuarios" + +msgid "object" +msgstr "Objeto" + +msgid "object type" +msgstr "Tipo de Objeto" + +msgid "october" +msgstr "Octubre" + +msgid "one month" +msgstr "Un mes" + +msgid "one week" +msgstr "Una semana" + +msgid "oneline" +msgstr "En una línea" + +msgid "only select queries are authorized" +msgstr "Solo están permitidas consultas de lectura" + +msgid "open all" +msgstr "Abrir todos" + +msgid "opened sessions" +msgstr "Sesiones abiertas" + +msgid "opened web sessions" +msgstr "Sesiones Web abiertas" + +msgid "options" +msgstr "Opciones" + +msgctxt "CWSourceSchemaConfig" +msgid "options" +msgstr "opciones" + +msgid "order" +msgstr "Orden" + +msgid "ordernum" +msgstr "Orden" + +msgctxt "CWAttribute" +msgid "ordernum" +msgstr "Número de Orden" + +msgctxt "CWRelation" +msgid "ordernum" +msgstr "Número de Orden" + +msgid "owl" +msgstr "OWL" + +msgid "owlabox" +msgstr "OWLabox" + +msgid "owned_by" +msgstr "Pertenece a" + +msgid "owned_by_object" +msgstr "Pertenece al objeto" + +msgid "owners" +msgstr "Proprietarios" + +msgid "ownerships have been changed" +msgstr "Derechos de propiedad modificados" + +msgid "pageid-not-found" +msgstr "Página no encontrada." + +msgid "parser" +msgstr "analizador (parser)" + +msgctxt "CWSource" +msgid "parser" +msgstr "analizador (parser)" + +msgid "parser to use to extract entities from content retrieved at given URLs." +msgstr "" +"analizador (parser) que sirve para extraer entidades y relaciones del " +"contenido recuperado de las URLs." + +msgid "password" +msgstr "Contraseña" + +msgid "password and confirmation don't match" +msgstr "Su contraseña y confirmación no concuerdan" + +msgid "path" +msgstr "Ruta" + +msgctxt "Bookmark" +msgid "path" +msgstr "Ruta" + +msgid "permalink to this message" +msgstr "liga permanente a este mensaje" + +msgid "permission" +msgstr "Permiso" + +msgid "permissions" +msgstr "Permisos" + +msgid "pick existing bookmarks" +msgstr "Seleccionar favoritos existentes" + +msgid "pkey" +msgstr "Clave" + +msgctxt "CWProperty" +msgid "pkey" +msgstr "Código de la Propiedad" + +msgid "please correct errors below" +msgstr "Por favor corregir los errores señalados en la parte inferior" + +msgid "please correct the following errors:" +msgstr "Por favor corregir los siguientes errores:" + +msgid "possible views" +msgstr "Vistas posibles" + +msgid "prefered_form" +msgstr "Forma preferida" + +msgctxt "EmailAddress" +msgid "prefered_form" +msgstr "Email principal" + +msgid "prefered_form_object" +msgstr "Formato preferido sobre" + +msgctxt "EmailAddress" +msgid "prefered_form_object" +msgstr "Email principal de" + +msgid "preferences" +msgstr "Preferencias" + +msgid "previous page" +msgstr "página anterior" + +msgid "previous_results" +msgstr "Resultados Anteriores" + +msgid "primary" +msgstr "Primaria" + +msgid "primary_email" +msgstr "Dirección principal de correo electrónico" + +msgctxt "CWUser" +msgid "primary_email" +msgstr "Dirección principal de correo electrónico" + +msgid "primary_email_object" +msgstr "Dirección de email principal (objeto)" + +msgctxt "EmailAddress" +msgid "primary_email_object" +msgstr "Dirección principal de correo electrónico de" + +msgid "profile" +msgstr "perfil" + +msgid "rdef-description" +msgstr "Descripción" + +msgid "rdef-permissions" +msgstr "Permisos" + +msgid "rdf export" +msgstr "Exportación RDF" + +msgid "read" +msgstr "Lectura" + +msgid "read_permission" +msgstr "Permiso de lectura" + +msgctxt "CWAttribute" +msgid "read_permission" +msgstr "Permiso de Lectura" + +msgctxt 
"CWEType" +msgid "read_permission" +msgstr "Permiso de Lectura" + +msgctxt "CWRelation" +msgid "read_permission" +msgstr "Permiso de Lectura" + +msgid "read_permission_object" +msgstr "Tiene acceso de lectura a" + +msgctxt "CWGroup" +msgid "read_permission_object" +msgstr "Puede leer" + +msgctxt "RQLExpression" +msgid "read_permission_object" +msgstr "Puede leer" + +msgid "regexp matching host(s) to which this config applies" +msgstr "" +"expresión regular de los nombres de hosts a los cuales esta configuración " +"aplica" + +msgid "registry" +msgstr "Registro" + +msgid "related entity has no state" +msgstr "La entidad relacionada no posee Estado" + +msgid "related entity has no workflow set" +msgstr "La entidad relacionada no posee Workflow definido" + +msgid "relation" +msgstr "relación" + +#, python-format +msgid "relation %(relname)s of %(ent)s" +msgstr "relación %(relname)s de %(ent)s" + +#, python-format +msgid "" +"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " +"type supported" +msgstr "" +"la relación %(rtype)s con %(etype)s como %(role)s es aceptada pero ningún " +"tipo target es aceptado" + +#, python-format +msgid "" +"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " +"mandatory but not supported" +msgstr "" +"la relación %(type)s con %(etype)s como %(role)s y tipo objetivo %(target)s " +"es obligatoria pero no mantenida" + +#, python-format +msgid "" +"relation %s is supported but none of its definitions matches supported " +"entities" +msgstr "" +"la relación %s es aceptada pero ninguna de sus definiciones corresponden a " +"los tipos de entidades aceptadas" + +msgid "relation add" +msgstr "Agregar Relación" + +msgid "relation removal" +msgstr "Eliminar Relación" + +msgid "relation_type" +msgstr "Tipo de Relación" + +msgctxt "CWAttribute" +msgid "relation_type" +msgstr "Tipo de Relación" + +msgctxt "CWRelation" +msgid "relation_type" +msgstr "Tipo de Relación" + +msgid "relation_type_object" +msgstr "Definición de Relaciones" + +msgctxt "CWRType" +msgid "relation_type_object" +msgstr "Definición de Relaciones" + +msgid "relations" +msgstr "relaciones" + +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "relaciones" + +msgid "relations deleted" +msgstr "Relaciones Eliminadas" + +msgid "relations_object" +msgstr "relaciones de" + +msgctxt "CWRType" +msgid "relations_object" +msgstr "relaciones de" + +msgid "relative url of the bookmarked page" +msgstr "Url relativa de la página" + +msgid "remove-inlined-entity-form" +msgstr "Eliminar" + +msgid "require_group" +msgstr "Requiere el grupo" + +msgctxt "BaseTransition" +msgid "require_group" +msgstr "Restringida al Grupo" + +msgctxt "Transition" +msgid "require_group" +msgstr "Restringida al Grupo" + +msgctxt "WorkflowTransition" +msgid "require_group" +msgstr "Restringida al Grupo" + +msgid "require_group_object" +msgstr "Posee derechos sobre" + +msgctxt "CWGroup" +msgid "require_group_object" +msgstr "Posee derechos sobre" + +msgid "required" +msgstr "Requerido" + +msgid "required attribute" +msgstr "Atributo requerido" + +msgid "required field" +msgstr "Campo requerido" + +msgid "resources usage" +msgstr "Recursos utilizados" + +msgid "" +"restriction part of a rql query. For entity rql expression, X and U are " +"predefined respectivly to the current object and to the request user. For " +"relation rql expression, S, O and U are predefined respectivly to the " +"current relation'subject, object and to the request user. 
" +msgstr "" +"Parte restrictiva de una consulta RQL. En una expresión ligada a una " +"entidad, X y U son respectivamente asignadas a la Entidad y el Usuario en " +"curso.En una expresión ligada a una relación, S, O y U son respectivamente " +"asignados al Sujeto/Objeto de la relación y al Usuario actual." + +msgid "revert changes" +msgstr "Anular modificación" + +msgid "right" +msgstr "Derecha" + +msgid "rql expression allowing to add entities/relations of this type" +msgstr "Expresión RQL que permite AGREGAR entidades/relaciones de este tipo" + +msgid "rql expression allowing to delete entities/relations of this type" +msgstr "Expresión RQL que permite ELIMINAR entidades/relaciones de este tipo" + +msgid "rql expression allowing to read entities/relations of this type" +msgstr "Expresión RQL que permite LEER entidades/relaciones de este tipo" + +msgid "rql expression allowing to update entities/relations of this type" +msgstr "Expresión RQL que permite ACTUALIZAR entidades/relaciones de este tipo" + +msgid "rql expressions" +msgstr "Expresiones RQL" + +msgid "rss export" +msgstr "Exportación RSS" + +msgid "rule" +msgstr "" + +msgctxt "CWComputedRType" +msgid "rule" +msgstr "" + +msgid "same_as" +msgstr "Idéntico a" + +msgid "sample format" +msgstr "Ejemplo" + +msgid "saturday" +msgstr "Sábado" + +msgid "schema-diagram" +msgstr "Gráfica" + +msgid "schema-entity-types" +msgstr "Entidades" + +msgid "schema-relation-types" +msgstr "Relaciones" + +msgid "search" +msgstr "Buscar" + +msgid "search for association" +msgstr "Búsqueda por asociación" + +msgid "searching for" +msgstr "Buscando" + +msgid "security" +msgstr "Seguridad" + +msgid "see more" +msgstr "ver más" + +msgid "see them all" +msgstr "Ver todos" + +msgid "see_also" +msgstr "Ver además" + +msgid "select" +msgstr "Seleccionar" + +msgid "select a" +msgstr "Seleccione un" + +msgid "select a key first" +msgstr "Seleccione una clave" + +msgid "select a relation" +msgstr "Seleccione una relación" + +msgid "select this entity" +msgstr "Seleccionar esta entidad" + +msgid "selected" +msgstr "Seleccionado" + +msgid "semantic description of this attribute" +msgstr "Descripción semántica de este atributo" + +msgid "semantic description of this entity type" +msgstr "Descripción semántica de este tipo de entidad" + +msgid "semantic description of this relation" +msgstr "Descripción semántica de esta relación" + +msgid "semantic description of this relation type" +msgstr "Descripción semántica de este tipo de relación" + +msgid "semantic description of this state" +msgstr "Descripción semántica de este estado" + +msgid "semantic description of this transition" +msgstr "Descripcion semántica de esta transición" + +msgid "semantic description of this workflow" +msgstr "Descripcion semántica de este Workflow" + +msgid "september" +msgstr "Septiembre" + +msgid "server information" +msgstr "Información del servidor" + +msgid "severity" +msgstr "severidad" + +msgid "" +"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " +"You should also select text/html as default text format to actually get " +"fckeditor." +msgstr "" +"Indica si los campos de tipo texto deberán ser editados usando fckeditor " +"(un\n" +"editor HTML WYSIWYG). Deberá también elegir text/html\n" +"como formato de texto por defecto para poder utilizar fckeditor." 
+ +#, python-format +msgid "show %s results" +msgstr "Mostrar %s resultados" + +msgid "show advanced fields" +msgstr "Mostrar campos avanzados" + +msgid "show filter form" +msgstr "Mostrar el Filtro" + +msgid "site configuration" +msgstr "Configuración Sistema" + +msgid "site documentation" +msgstr "Documentación Sistema" + +msgid "site title" +msgstr "Nombre del Sistema" + +msgid "site-wide property can't be set for user" +msgstr "Una propiedad específica al Sistema no puede ser propia al usuario" + +msgid "some later transaction(s) touch entity, undo them first" +msgstr "" +"Las transacciones más recientes modificaron esta entidad, anúlelas primero" + +msgid "some relations violate a unicity constraint" +msgstr "algunas relaciones no respetan la restricción de unicidad" + +msgid "sorry, the server is unable to handle this query" +msgstr "Lo sentimos, el servidor no puede manejar esta consulta" + +msgid "" +"source's configuration. One key=value per line, authorized keys depending on " +"the source's type" +msgstr "" +"configuración de fuentes. Una clave=valor por línea, las claves permitidas " +"dependen del tipo de la fuente." + +msgid "sparql xml" +msgstr "XML Sparql" + +msgid "special transition allowing to go through a sub-workflow" +msgstr "Transición especial que permite ir en un Sub-Workflow" + +msgid "specializes" +msgstr "Deriva de" + +msgctxt "CWEType" +msgid "specializes" +msgstr "Especializa" + +msgid "specializes_object" +msgstr "Especializado por" + +msgctxt "CWEType" +msgid "specializes_object" +msgstr "Especializado por" + +#, python-format +msgid "specifying %s is mandatory" +msgstr "especificar %s es obligatorio" + +msgid "" +"start timestamp of the currently in synchronization, or NULL when no " +"synchronization in progress." +msgstr "" +"horario de inicio de la sincronización en curso, o NULL cuando no existe " +"sincronización en curso" + +msgid "start_timestamp" +msgstr "horario inicio" + +msgctxt "CWDataImport" +msgid "start_timestamp" +msgstr "horario inicio" + +msgid "startup views" +msgstr "Vistas de inicio" + +msgid "startupview" +msgstr "Vistas de Inicio" + +msgid "state" +msgstr "Estado" + +msgid "state and transition don't belong the the same workflow" +msgstr "El Estado y la Transición no pertenecen al mismo Workflow" + +msgid "state doesn't apply to this entity's type" +msgstr "Este Estado no aplica a este tipo de Entidad" + +msgid "state doesn't belong to entity's current workflow" +msgstr "El Estado no pertenece al Workflow actual de la Entidad" + +msgid "state doesn't belong to entity's workflow" +msgstr "El Estado no pertenece al Workflow de la Entidad" + +msgid "" +"state doesn't belong to entity's workflow. You may want to set a custom " +"workflow for this entity first." +msgstr "" +"El Estado no pertenece al Workflow Actual de la Entidad. 
Usted deseaquizás " +"especificar que esta entidad debe utilizar este Workflow" + +msgid "state doesn't belong to this workflow" +msgstr "El Estado no pertenece a este Workflow" + +msgid "state_of" +msgstr "Estado de" + +msgctxt "State" +msgid "state_of" +msgstr "Estado de" + +msgid "state_of_object" +msgstr "Tiene por Estado" + +msgctxt "Workflow" +msgid "state_of_object" +msgstr "Tiene por Estado" + +msgid "status" +msgstr "estado" + +msgctxt "CWDataImport" +msgid "status" +msgstr "estado" + +msgid "status change" +msgstr "Cambio de Estatus" + +msgid "status changed" +msgstr "Estatus cambiado" + +#, python-format +msgid "status will change from %(st1)s to %(st2)s" +msgstr "El estatus cambiará de %(st1)s a %(st2)s" + +msgid "subject" +msgstr "Sujeto" + +msgid "subject type" +msgstr "Tipo del sujeto" + +msgid "subject/object cardinality" +msgstr "Cardinalidad Sujeto/Objeto" + +msgid "subworkflow" +msgstr "Sub-Workflow" + +msgctxt "WorkflowTransition" +msgid "subworkflow" +msgstr "Sub-Workflow" + +msgid "" +"subworkflow isn't a workflow for the same types as the transition's workflow" +msgstr "" +"Le Sub-Workflow no se aplica a los mismos tipos que el Workflow de esta " +"transición" + +msgid "subworkflow state" +msgstr "Estado de Sub-Workflow" + +msgid "subworkflow_exit" +msgstr "Salida del Sub-Workflow" + +msgctxt "WorkflowTransition" +msgid "subworkflow_exit" +msgstr "Salida del Sub-Workflow" + +msgid "subworkflow_exit_object" +msgstr "Salida Sub-Workflow de" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_exit_object" +msgstr "Salida Sub-Workflow de" + +msgid "subworkflow_object" +msgstr "Sub-Workflow de" + +msgctxt "Workflow" +msgid "subworkflow_object" +msgstr "Sub-Workflow de" + +msgid "subworkflow_state" +msgstr "Estado de Sub-Workflow" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_state" +msgstr "Estado de Sub-Workflow" + +msgid "subworkflow_state_object" +msgstr "Estado de Salida de" + +msgctxt "State" +msgid "subworkflow_state_object" +msgstr "Estado de Salida de" + +msgid "success" +msgstr "éxito" + +msgid "sunday" +msgstr "Domingo" + +msgid "surname" +msgstr "Apellido" + +msgctxt "CWUser" +msgid "surname" +msgstr "Apellido" + +msgid "symmetric" +msgstr "Simétrico" + +msgctxt "CWRType" +msgid "symmetric" +msgstr "Simétrico" + +msgid "synchronization-interval must be greater than 1 minute" +msgstr "synchronization-interval debe ser mayor a 1 minuto" + +msgid "table" +msgstr "Tabla" + +msgid "tablefilter" +msgstr "Tablero de Filtrado" + +msgid "text" +msgstr "Texto" + +msgid "text/cubicweb-page-template" +msgstr "Usar Page Templates" + +msgid "text/html" +msgstr "Usar HTML" + +msgid "text/markdown" +msgstr "" + +msgid "text/plain" +msgstr "Usar Texto simple" + +msgid "text/rest" +msgstr "Texto en REST" + +msgid "the URI of the object" +msgstr "El URI del Objeto" + +msgid "the prefered email" +msgstr "Dirección principal de email" + +msgid "the system source has its configuration stored on the file-system" +msgstr "" +"el sistema fuente tiene su configuración almacenada en el sistema de archivos" + +#, python-format +msgid "the value \"%s\" is already used, use another one" +msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro" + +msgid "there is no next page" +msgstr "no existe página siguiente" + +msgid "there is no previous page" +msgstr "no existe página anterior" + +#, python-format +msgid "there is no transaction #%s" +msgstr "no existe la transacción #%s" + +msgid "this action is not reversible!" +msgstr "Esta acción es irreversible!." 
+ +msgid "this entity is currently owned by" +msgstr "Esta Entidad es propiedad de" + +msgid "this parser doesn't use a mapping" +msgstr "este analizador (parser) no utiliza mapeo" + +msgid "this resource does not exist" +msgstr "Este recurso no existe" + +msgid "this source doesn't use a mapping" +msgstr "esta fuente no utiliza mapeo" + +msgid "thursday" +msgstr "Jueves" + +msgid "timestamp" +msgstr "Fecha" + +msgctxt "CWCache" +msgid "timestamp" +msgstr "Válido desde" + +msgid "timetable" +msgstr "Tablero de tiempos" + +msgid "title" +msgstr "Nombre" + +msgctxt "Bookmark" +msgid "title" +msgstr "Nombre" + +msgid "to" +msgstr "a" + +#, python-format +msgid "to %(date)s" +msgstr "a %(date)s" + +msgid "to associate with" +msgstr "Para asociar con" + +msgid "to_entity" +msgstr "Hacia la entidad" + +msgctxt "CWAttribute" +msgid "to_entity" +msgstr "Por la entidad" + +msgctxt "CWRelation" +msgid "to_entity" +msgstr "Por la entidad" + +msgid "to_entity_object" +msgstr "Objeto de la Relación" + +msgctxt "CWEType" +msgid "to_entity_object" +msgstr "Objeto de la Relación" + +msgid "to_interval_end" +msgstr "a" + +msgid "to_state" +msgstr "Hacia el Estado" + +msgctxt "TrInfo" +msgid "to_state" +msgstr "Hacia el Estado" + +msgid "to_state_object" +msgstr "Transición hacia este Estado" + +msgctxt "State" +msgid "to_state_object" +msgstr "Transición hacia este Estado" + +msgid "toggle check boxes" +msgstr "Cambiar valor" + +msgid "tr_count" +msgstr "n° de transición" + +msgctxt "TrInfo" +msgid "tr_count" +msgstr "n° de transición" + +msgid "transaction undone" +msgstr "transacción anulada" + +#, python-format +msgid "transition %(tr)s isn't allowed from %(st)s" +msgstr "La transición %(tr)s no esta permitida desde el Estado %(st)s" + +msgid "transition doesn't belong to entity's workflow" +msgstr "La transición no pertenece al Workflow de la Entidad" + +msgid "transition isn't allowed" +msgstr "La transición no esta permitida" + +msgid "transition may not be fired" +msgstr "La transición no puede ser lanzada" + +msgid "transition_of" +msgstr "Transición de" + +msgctxt "BaseTransition" +msgid "transition_of" +msgstr "Transición de" + +msgctxt "Transition" +msgid "transition_of" +msgstr "Transición de" + +msgctxt "WorkflowTransition" +msgid "transition_of" +msgstr "Transición de" + +msgid "transition_of_object" +msgstr "Utiliza las transiciones" + +msgctxt "Workflow" +msgid "transition_of_object" +msgstr "Utiliza las transiciones" + +msgid "tree view" +msgstr "Vista Jerárquica" + +msgid "tuesday" +msgstr "Martes" + +msgid "type" +msgstr "Tipo" + +msgctxt "BaseTransition" +msgid "type" +msgstr "Tipo" + +msgctxt "CWSource" +msgid "type" +msgstr "tipo" + +msgctxt "Transition" +msgid "type" +msgstr "Tipo" + +msgctxt "WorkflowTransition" +msgid "type" +msgstr "Tipo" + +msgid "type here a sparql query" +msgstr "Escriba aquí su consulta en Sparql" + +msgid "type of the source" +msgstr "tipo de la fuente" + +msgid "ui" +msgstr "Interfaz Genérica" + +msgid "ui.date-format" +msgstr "Formato de Fecha" + +msgid "ui.datetime-format" +msgstr "Formato de Fecha y Hora" + +msgid "ui.default-text-format" +msgstr "Formato de texto" + +msgid "ui.encoding" +msgstr "Codificación" + +msgid "ui.fckeditor" +msgstr "Editor de texto FCK" + +msgid "ui.float-format" +msgstr "Números flotantes" + +msgid "ui.language" +msgstr "Lenguaje" + +msgid "ui.main-template" +msgstr "Plantilla Principal" + +msgid "ui.site-title" +msgstr "Nombre del Sistema" + +msgid "ui.time-format" +msgstr "Formato de hora" + +msgid "unable to check captcha, 
please try again" +msgstr "Imposible de verificar el Captcha, inténtelo otra vez" + +msgid "unaccessible" +msgstr "Inaccesible" + +msgid "unauthorized value" +msgstr "Valor no permitido" + +msgid "undefined user" +msgstr "usuario indefinido" + +msgid "undo" +msgstr "Anular" + +msgid "unique identifier used to connect to the application" +msgstr "Identificador único utilizado para conectarse al Sistema" + +msgid "unknown external entity" +msgstr "Entidad externa desconocida" + +#, python-format +msgid "unknown options %s" +msgstr "opciones desconocidas: %s" + +#, python-format +msgid "unknown property key %s" +msgstr "Clave de Propiedad desconocida: %s" + +msgid "unknown vocabulary:" +msgstr "Vocabulario desconocido: " + +msgid "unsupported protocol" +msgstr "protocolo no soportado" + +msgid "upassword" +msgstr "Contraseña" + +msgctxt "CWUser" +msgid "upassword" +msgstr "Contraseña" + +msgid "update" +msgstr "Modificación" + +msgid "update_permission" +msgstr "Puede ser modificado por" + +msgctxt "CWAttribute" +msgid "update_permission" +msgstr "Puede ser modificado por" + +msgctxt "CWEType" +msgid "update_permission" +msgstr "Puede ser modificado por" + +msgid "update_permission_object" +msgstr "Tiene permiso de modificar" + +msgctxt "CWGroup" +msgid "update_permission_object" +msgstr "Puede modificar" + +msgctxt "RQLExpression" +msgid "update_permission_object" +msgstr "Puede modificar" + +msgid "update_relation" +msgstr "Modificar" + +msgid "updated" +msgstr "Actualizado" + +#, python-format +msgid "updated %(etype)s #%(eid)s (%(title)s)" +msgstr "actualización de la entidad %(etype)s #%(eid)s (%(title)s)" + +msgid "uri" +msgstr "URI" + +msgctxt "ExternalUri" +msgid "uri" +msgstr "URI" + +msgid "url" +msgstr "url" + +msgctxt "CWSource" +msgid "url" +msgstr "url" + +msgid "" +"use to define a transition from one or multiple states to a destination " +"states in workflow's definitions. Transition without destination state will " +"go back to the state from which we arrived to the current state." +msgstr "" +"Se utiliza en una definición de procesos para agregar una transición desde " +"uno o varios estados hacia un estado destino. Una transición sin Estado " +"destino regresará al Estado anterior del Estado actual" + +msgid "use_email" +msgstr "Correo electrónico" + +msgctxt "CWUser" +msgid "use_email" +msgstr "Usa el Correo Electrónico" + +msgid "use_email_object" +msgstr "Email utilizado por" + +msgctxt "EmailAddress" +msgid "use_email_object" +msgstr "Utilizado por" + +msgid "" +"used for cubicweb configuration. Once a property has been created you can't " +"change the key." +msgstr "" +"Se utiliza para la configuración de CubicWeb. 
Una vez que la propiedad ha " +"sido creada no puede cambiar la clave" + +msgid "" +"used to associate simple states to an entity type and/or to define workflows" +msgstr "" +"Se utiliza para asociar estados simples a un tipo de entidad y/o para " +"definir Workflows" + +msgid "user" +msgstr "Usuario" + +#, python-format +msgid "" +"user %s has made the following change(s):\n" +"\n" +msgstr "" +"El usuario %s ha efectuado los siguientes cambios:\n" +"\n" + +msgid "user interface encoding" +msgstr "Encoding de la interfaz de usuario" + +msgid "user preferences" +msgstr "Preferencias" + +msgid "user's email account" +msgstr "email del usuario" + +msgid "users" +msgstr "Usuarios" + +msgid "users and groups" +msgstr "usuarios y grupos" + +msgid "users using this bookmark" +msgstr "Usuarios utilizando este Favorito" + +msgid "validate modifications on selected items" +msgstr "Valida modificaciones sobre elementos seleccionados" + +msgid "validating..." +msgstr "Validando ..." + +msgid "value" +msgstr "Valor" + +msgctxt "CWConstraint" +msgid "value" +msgstr "Valor" + +msgctxt "CWProperty" +msgid "value" +msgstr "Valor" + +#, python-format +msgid "value %(KEY-value)s must be < %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" +msgstr "el valor %(KEY-value)s debe ser <= %(KEY-boundary)s" + +#, python-format +msgid "value %(KEY-value)s must be > %(KEY-boundary)s" +msgstr "" + +#, python-format +msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" +msgstr "el valor %(KEY-value)s debe ser >= %(KEY-boundary)s" + +msgid "value associated to this key is not editable manually" +msgstr "El valor asociado a este elemento no es editable manualmente" + +#, python-format +msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" +msgstr "el valor máximo es %(KEY-max)s y encontramos %(KEY-size)s" + +#, python-format +msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" +msgstr "el valor mínimo debe ser %(KEY-min)s y encontramos %(KEY-size)s" + +msgid "vcard" +msgstr "vcard" + +msgid "versions configuration" +msgstr "Configuración de Versión" + +msgid "view" +msgstr "Ver" + +msgid "view all" +msgstr "Ver todos" + +msgid "view detail for this entity" +msgstr "Ver a detalle esta entidad" + +msgid "view history" +msgstr "Ver histórico" + +msgid "view identifier" +msgstr "Identificador" + +msgid "view title" +msgstr "Nombre" + +msgid "view workflow" +msgstr "Ver Workflow" + +msgid "view_index" +msgstr "Inicio" + +msgid "visible" +msgstr "Visible" + +msgid "warning" +msgstr "atención" + +msgid "we are not yet ready to handle this query" +msgstr "Aún no podemos manejar este tipo de consulta Sparql" + +msgid "wednesday" +msgstr "Miércoles" + +#, python-format +msgid "welcome %s!" +msgstr "Bienvenido %s." + +msgid "wf_info_for" +msgstr "Histórico de" + +msgid "wf_info_for_object" +msgstr "Histórico de transiciones" + +msgid "wf_tab_info" +msgstr "Descripción" + +msgid "wfgraph" +msgstr "Gráfica del Workflow" + +msgid "" +"when multiple addresses are equivalent (such as python-projects@logilab.org " +"and python-projects@lists.logilab.org), set this to indicate which is the " +"preferred form." +msgstr "" +"Cuando varias direcciones email son equivalentes (como python-" +"projects@logilab.org y python-projects@lists.logilab.org), aquí se indica " +"cual es la forma preferida."
+ +msgid "workflow" +msgstr "Workflow" + +#, python-format +msgid "workflow changed to \"%s\"" +msgstr "Workflow cambiado a \"%s\"" + +msgid "workflow has no initial state" +msgstr "El Workflow no posee Estado Inicial" + +msgid "workflow history item" +msgstr "Elemento histórico del Workflow" + +msgid "workflow isn't a workflow for this type" +msgstr "El Workflow no se aplica a este Tipo de Entidad" + +msgid "workflow to which this state belongs" +msgstr "Workflow al cual pertenece este estado" + +msgid "workflow to which this transition belongs" +msgstr "Workflow al cual pertenece esta transición" + +msgid "workflow_of" +msgstr "Workflow de" + +msgctxt "Workflow" +msgid "workflow_of" +msgstr "Workflow de" + +msgid "workflow_of_object" +msgstr "Utiliza el Workflow" + +msgctxt "CWEType" +msgid "workflow_of_object" +msgstr "Utiliza el Workflow" + +#, python-format +msgid "wrong query parameter line %s" +msgstr "Parámetro erróneo de consulta línea %s" + +msgid "xbel export" +msgstr "Exportación XBEL" + +msgid "xml export" +msgstr "Exportar XML" + +msgid "xml export (entities)" +msgstr "Exportación XML (entidades)" + +msgid "yes" +msgstr "Sí" + +msgid "you have been logged out" +msgstr "Ha terminado la sesión" + +msgid "you should probably delete that property" +msgstr "probablamente debería suprimir esta propriedad" + +#~ msgid "%s relation should not be in mapped" +#~ msgstr "la relación %s no debería estar mapeada" + +#~ msgid "Any" +#~ msgstr "Cualquiera" + +#~ msgid "Browse by category" +#~ msgstr "Busca por categoría" + +#~ msgid "No account? Try public access at %s" +#~ msgstr "No esta registrado? Use el acceso público en %s" + +#~ msgid "anonymous" +#~ msgstr "anónimo" + +#~ msgid "attribute/relation can't be mapped, only entity and relation types" +#~ msgstr "" +#~ "los atributos y las relaciones no pueden ser mapeados, solamente los " +#~ "tipos de entidad y de relación" + +#~ msgid "can't connect to source %s, some data may be missing" +#~ msgstr "no se puede conectar a la fuente %s, algunos datos pueden faltar" + +#~ msgid "can't mix dontcross and maycross options" +#~ msgstr "no puede mezclar las opciones dontcross y maycross" + +#~ msgid "can't mix dontcross and write options" +#~ msgstr "no puede mezclar las opciones dontcross y write" + +#~ msgid "components_etypenavigation" +#~ msgstr "Filtar por tipo" + +#~ msgid "components_etypenavigation_description" +#~ msgstr "Permite filtrar por tipo de entidad los resultados de una búsqueda" + +#~ msgid "error while querying source %s, some data may be missing" +#~ msgstr "" +#~ "Un error ha ocurrido al interrogar %s, es posible que los \n" +#~ "datos visibles se encuentren incompletos" + +#~ msgid "inlined relation %(rtype)s of %(etype)s should be supported" +#~ msgstr "" +#~ "la relación %(rtype)s del tipo de entidad %(etype)s debe ser aceptada " +#~ "('inlined')" + +#~ msgid "no edited fields specified for entity %s" +#~ msgstr "Ningún campo editable especificado para la entidad %s" + +#~ msgid "timeline" +#~ msgstr "Escala de Tiempo" + +#~ msgid "unknown option(s): %s" +#~ msgstr "opcion(es) desconocida(s): %s" + +#~ msgid "value %(KEY-value)s must be %(KEY-op)s %(KEY-boundary)s" +#~ msgstr "El valor %(KEY-value)s debe ser %(KEY-op)s %(KEY-boundary)s" + +#~ msgid "web sessions without CNX" +#~ msgstr "sesiones web sin conexión asociada" + +#~ msgid "workflow already has a state of that name" +#~ msgstr "el workflow posee ya un estado con ese nombre" + +#~ msgid "workflow already has a transition of that name" +#~ msgstr "El 
Workflow posee ya una transición con ese nombre" + +#~ msgid "you may want to specify something for %s" +#~ msgstr "usted desea quizás especificar algo para la relación %s" + +#~ msgid "" +#~ "you should un-inline relation %s which is supported and may be crossed " +#~ msgstr "" +#~ "usted debe quitar la puesta en línea de la relación %s que es aceptada y " +#~ "puede ser cruzada" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/i18n/fr.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/i18n/fr.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4725 @@ +# cubicweb i18n catalog +# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# Logilab +msgid "" +msgstr "" +"Project-Id-Version: cubicweb 2.46.0\n" +"PO-Revision-Date: 2014-06-24 13:29+0200\n" +"Last-Translator: Logilab Team \n" +"Language-Team: fr \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#, python-format +msgid "" +"\n" +"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " +"entity\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" +msgstr "" +"\n" +"%(user)s a changé l'état de <%(previous_state)s> vers <%(current_state)s> " +"pour l'entité\n" +"'%(title)s'\n" +"\n" +"%(comment)s\n" +"\n" +"url: %(url)s\n" + +#, python-format +msgid " from state %(fromstate)s to state %(tostate)s\n" +msgstr " de l'état %(fromstate)s vers l'état %(tostate)s\n" + +msgid " :" +msgstr " :" + +#, python-format +msgid "\"action\" must be specified in options; allowed values are %s" +msgstr "" +"\"action\" doit être specifié dans les options; les valeurs autorisées " +"sont : %s" + +msgid "\"role=subject\" or \"role=object\" must be specified in options" +msgstr "" +"\"role=subject\" ou \"role=object\" doit être specifié dans les options" + +#, python-format +msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" +msgstr "la valeur %(KEY-value)r ne satisfait pas la contrainte %(KEY-cstr)s" + +#, python-format +msgid "%(KEY-rtype)s is part of violated unicity constraint" +msgstr "%(KEY-rtype)s appartient à une contrainte d'unicité transgressée" + +#, python-format +msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" +msgstr "" +"%(KEY-value)r ne correspond pas à l'expression régulière %(KEY-regexp)r" + +#, python-format +msgid "%(attr)s set to %(newvalue)s" +msgstr "%(attr)s modifié à %(newvalue)s" + +#, python-format +msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" +msgstr "%(attr)s modifié de %(oldvalue)s à %(newvalue)s" + +#, python-format +msgid "%(etype)s by %(author)s" +msgstr "%(etype)s par %(author)s" + +#, python-format +msgid "%(firstname)s %(surname)s" +msgstr "%(firstname)s %(surname)s" + +#, python-format +msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" +msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" + +#, python-format +msgid "%d days" +msgstr "%d jours" + +#, python-format +msgid "%d hours" +msgstr "%d heures" + +#, python-format +msgid "%d minutes" +msgstr "%d minutes" + +#, python-format +msgid "%d months" +msgstr "%d mois" + +#, python-format +msgid "%d seconds" +msgstr "%d secondes" + +#, python-format +msgid "%d weeks" +msgstr "%d semaines" + +#, python-format +msgid "%d years" +msgstr "%d années" + +#, python-format +msgid "%s could be supported" +msgstr "%s pourrait être supporté" + +#, python-format +msgid "%s error report" +msgstr "%s rapport d'erreur" + +#, python-format +msgid "%s software version of the database" +msgstr "version 
logicielle de la base pour %s" + +#, python-format +msgid "%s updated" +msgstr "%s mis à jour" + +#, python-format +msgid "'%s' action doesn't take any options" +msgstr "l'action '%s' ne prend pas d'option" + +#, python-format +msgid "" +"'%s' action for in_state relation should at least have 'linkattr=name' option" +msgstr "" +"l'action '%s' pour la relation in_state doit au moins avoir l'option " +"'linkattr=name'" + +#, python-format +msgid "'%s' action requires 'linkattr' option" +msgstr "l'action '%s' nécessite une option 'linkattr'" + +msgid "(UNEXISTANT EID)" +msgstr "(EID INTROUVABLE)" + +#, python-format +msgid "(suppressed) entity #%d" +msgstr "entité #%d (supprimée)" + +msgid "**" +msgstr "0..n 0..n" + +msgid "*+" +msgstr "0..n 1..n" + +msgid "*1" +msgstr "0..n 1" + +msgid "*?" +msgstr "0..n 0..1" + +msgid "+*" +msgstr "1..n 0..n" + +msgid "++" +msgstr "1..n 1..n" + +msgid "+1" +msgstr "1..n 1" + +msgid "+?" +msgstr "1..n 0..1" + +msgid "1*" +msgstr "1 0..n" + +msgid "1+" +msgstr "1 1..n" + +msgid "11" +msgstr "1 1" + +msgid "1?" +msgstr "1 0..1" + +#, python-format +msgid "<%s not specified>" +msgstr "<%s non spécifié>" + +#, python-format +msgid "" +"
<div>This schema of the data model excludes the meta-data, but you "
+"can also display a <a href=\"%s\">complete schema with meta-data</a>.</div>"
+msgstr ""
+"<div>Ce schéma du modèle de données exclut les méta-données, mais "
+"vous pouvez afficher un schéma <a href=\"%s\">complet</a>.</div>
" + +msgid "" +msgstr "" + +msgid "" +msgstr "" + +msgid "?*" +msgstr "0..1 0..n" + +msgid "?+" +msgstr "0..1 1..n" + +msgid "?1" +msgstr "0..1 1" + +msgid "??" +msgstr "0..1 0..1" + +msgid "AND" +msgstr "ET" + +msgid "About this site" +msgstr "À propos de ce site" + +#, python-format +msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "Relation ajoutée : %(entity_from)s %(rtype)s %(entity_to)s" + +msgid "Attributes permissions:" +msgstr "Permissions des attributs" + +# schema pot file, generated on 2009-09-16 16:46:55 +# +# singular and plural forms for each entity type +msgid "BaseTransition" +msgstr "Transition (abstraite)" + +msgid "BaseTransition_plural" +msgstr "Transitions (abstraites)" + +msgid "BigInt" +msgstr "Entier long" + +msgid "BigInt_plural" +msgstr "Entiers longs" + +msgid "Bookmark" +msgstr "Signet" + +msgid "Bookmark_plural" +msgstr "Signets" + +msgid "Boolean" +msgstr "Booléen" + +msgid "Boolean_plural" +msgstr "Booléen" + +msgid "BoundConstraint" +msgstr "contrainte de bornes" + +msgid "BoundaryConstraint" +msgstr "contrainte de bornes" + +msgid "Browse by entity type" +msgstr "Naviguer par type d'entité" + +#, python-format +msgid "By %(user)s on %(dt)s [%(undo_link)s]" +msgstr "Par %(user)s le %(dt)s [%(undo_link)s] " + +msgid "Bytes" +msgstr "Donnée binaires" + +msgid "Bytes_plural" +msgstr "Données binaires" + +msgid "CWAttribute" +msgstr "Attribut" + +msgid "CWAttribute_plural" +msgstr "Attributs" + +msgid "CWCache" +msgstr "Cache applicatif" + +msgid "CWCache_plural" +msgstr "Caches applicatifs" + +msgid "CWComputedRType" +msgstr "Relation virtuelle" + +msgid "CWComputedRType_plural" +msgstr "Relations virtuelles" + +msgid "CWConstraint" +msgstr "Contrainte" + +msgid "CWConstraintType" +msgstr "Type de contrainte" + +msgid "CWConstraintType_plural" +msgstr "Types de contrainte" + +msgid "CWConstraint_plural" +msgstr "Contraintes" + +msgid "CWDataImport" +msgstr "Import de données" + +msgid "CWDataImport_plural" +msgstr "Imports de données" + +msgid "CWEType" +msgstr "Type d'entité" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "CWEType" +msgstr "Type d'entité" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "CWEType" +msgstr "Type d'entité" + +msgid "CWEType_plural" +msgstr "Types d'entité" + +msgid "CWGroup" +msgstr "Groupe" + +msgid "CWGroup_plural" +msgstr "Groupes" + +msgid "CWProperty" +msgstr "Propriété" + +msgid "CWProperty_plural" +msgstr "Propriétés" + +msgid "CWRType" +msgstr "Type de relation" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "CWRType" +msgstr "Type de relation" + +msgid "CWRType_plural" +msgstr "Types de relation" + +msgid "CWRelation" +msgstr "Relation" + +msgid "CWRelation_plural" +msgstr "Relations" + +msgid "CWSource" +msgstr "Source de données" + +msgid "CWSourceHostConfig" +msgstr "Configuration de source" + +msgid "CWSourceHostConfig_plural" +msgstr "Configurations de source" + +msgid "CWSourceSchemaConfig" +msgstr "Configuration de schéma de source" + +msgid "CWSourceSchemaConfig_plural" +msgstr "Configurations de schéma de source" + +msgid "CWSource_plural" +msgstr "Source de données" + +msgid "CWUniqueTogetherConstraint" +msgstr "Contrainte d'unicité" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "Contraintes d'unicité" + +msgid "CWUser" +msgstr "Utilisateur" + +msgid "CWUser_plural" +msgstr "Utilisateurs" + +#, python-format +msgid "" +"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " +"linked using this relation." 
+msgstr "" +"Ne peut restaurer la relation %(role)s %(rtype)s vers l'entité %(eid)s qui " +"est déja lié à une autre entité par cette relation." + +#, python-format +msgid "" +"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " +"does not exists anymore in the schema." +msgstr "" +"Ne peut restaurer la relation %(rtype)s entre %(subj)s et %(obj)s, cette " +"relation n'existe plus dans le schéma." + +#, python-format +msgid "" +"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " +"anymore." +msgstr "" +"Ne peut restaurer la relation %(rtype)s, l'entité %(role)s %(eid)s n'existe " +"plus." + +#, python-format +msgid "" +"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " +"exist anymore" +msgstr "" +"Ne peut annuler l'ajout de relation %(rtype)s de %(subj)s vers %(obj)s, " +"cette relation n'existe plus" + +#, python-format +msgid "" +"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " +"supported" +msgstr "" +"Ne peut annuler la création de l'entité %(eid)s de type %(etype)s, ce type " +"n'existe plus" + +msgid "Click to sort on this column" +msgstr "Cliquer pour trier sur cette colonne" + +msgid "" +"Configuration of the system source goes to the 'sources' file, not in the " +"database" +msgstr "" +"La configuration de la source système va dans le fichier 'sources' et non " +"dans la base de données" + +#, python-format +msgid "Created %(etype)s : %(entity)s" +msgstr "Entité %(etype)s crée : %(entity)s" + +msgid "DEBUG" +msgstr "DEBUG" + +msgid "Date" +msgstr "Date" + +msgid "Date_plural" +msgstr "Dates" + +msgid "Datetime" +msgstr "Date et heure" + +msgid "Datetime_plural" +msgstr "Dates et heures" + +msgid "Decimal" +msgstr "Nombre décimal" + +msgid "Decimal_plural" +msgstr "Nombres décimaux" + +#, python-format +msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" +msgstr "Relation supprimée : %(entity_from)s %(rtype)s %(entity_to)s" + +#, python-format +msgid "Deleted %(etype)s : %(entity)s" +msgstr "Entité %(etype)s supprimée : %(entity)s" + +msgid "Detected problems" +msgstr "Problèmes détectés" + +msgid "Do you want to delete the following element(s)?" +msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?" + +msgid "Download schema as OWL" +msgstr "Télécharger le schéma au format OWL" + +msgid "ERROR" +msgstr "ERREUR" + +msgid "EmailAddress" +msgstr "Adresse électronique" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "EmailAddress" +msgstr "Adresse électronique" + +msgid "EmailAddress_plural" +msgstr "Adresses électroniques" + +msgid "Entities" +msgstr "entités" + +#, python-format +msgid "" +"Entity %(eid)s has changed since you started to edit it. Reload the page and " +"reapply your changes." +msgstr "" +"L'entité %(eid)s a été modifiée depuis votre demande d'édition. Veuillez " +"recharger cette page et réappliquer vos changements." 
+ +msgid "Entity and relation supported by this source" +msgstr "Entités et relations supportés par cette source" + +msgid "ExternalUri" +msgstr "Uri externe" + +msgid "ExternalUri_plural" +msgstr "Uri externes" + +msgid "FATAL" +msgstr "FATAL" + +msgid "Float" +msgstr "Nombre flottant" + +msgid "Float_plural" +msgstr "Nombres flottants" + +# schema pot file, generated on 2009-12-03 09:22:35 +# +# singular and plural forms for each entity type +msgid "FormatConstraint" +msgstr "contrainte de format" + +msgid "Garbage collection information" +msgstr "Information sur le ramasse-miette" + +msgid "Help" +msgstr "Aide" + +msgid "INFO" +msgstr "INFO" + +msgid "Instance" +msgstr "Instance" + +msgid "Int" +msgstr "Nombre entier" + +msgid "Int_plural" +msgstr "Nombres entiers" + +msgid "Interval" +msgstr "Durée" + +msgid "IntervalBoundConstraint" +msgstr "contrainte d'interval" + +msgid "Interval_plural" +msgstr "Durées" + +msgid "Link:" +msgstr "Lien :" + +msgid "Looked up classes" +msgstr "Classes recherchées" + +msgid "Manage" +msgstr "Administration" + +msgid "Manage security" +msgstr "Gestion de la sécurité" + +msgid "Message threshold" +msgstr "Niveau du message" + +msgid "Most referenced classes" +msgstr "Classes les plus référencées" + +msgid "New BaseTransition" +msgstr "XXX" + +msgid "New Bookmark" +msgstr "Nouveau signet" + +msgid "New CWAttribute" +msgstr "Nouvelle définition de relation finale" + +msgid "New CWCache" +msgstr "Nouveau cache applicatif" + +msgid "New CWComputedRType" +msgstr "Nouvelle relation virtuelle" + +msgid "New CWConstraint" +msgstr "Nouvelle contrainte" + +msgid "New CWConstraintType" +msgstr "Nouveau type de contrainte" + +msgid "New CWDataImport" +msgstr "Nouvel import de données" + +msgid "New CWEType" +msgstr "Nouveau type d'entité" + +msgid "New CWGroup" +msgstr "Nouveau groupe" + +msgid "New CWProperty" +msgstr "Nouvelle propriété" + +msgid "New CWRType" +msgstr "Nouveau type de relation" + +msgid "New CWRelation" +msgstr "Nouvelle définition de relation non finale" + +msgid "New CWSource" +msgstr "Nouvelle source" + +msgid "New CWSourceHostConfig" +msgstr "Nouvelle configuration de source" + +msgid "New CWSourceSchemaConfig" +msgstr "Nouvelle partie de mapping de source" + +msgid "New CWUniqueTogetherConstraint" +msgstr "Nouvelle contrainte unique_together" + +msgid "New CWUser" +msgstr "Nouvel utilisateur" + +msgid "New EmailAddress" +msgstr "Nouvelle adresse électronique" + +msgid "New ExternalUri" +msgstr "Nouvelle Uri externe" + +msgid "New RQLExpression" +msgstr "Nouvelle expression rql" + +msgid "New State" +msgstr "Nouvel état" + +msgid "New SubWorkflowExitPoint" +msgstr "Nouvelle sortie de sous-workflow" + +msgid "New TrInfo" +msgstr "Nouvelle information de transition" + +msgid "New Transition" +msgstr "Nouvelle transition" + +msgid "New Workflow" +msgstr "Nouveau workflow" + +msgid "New WorkflowTransition" +msgstr "Nouvelle transition workflow" + +msgid "No result matching query" +msgstr "Aucun résultat ne correspond à la requête" + +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "Liste non exhaustive des vues s'appliquant à ce type d'entité" + +msgid "OR" +msgstr "OU" + +msgid "Ownership" +msgstr "Propriété" + +msgid "Parent class:" +msgstr "Classe parente" + +msgid "Password" +msgstr "Mot de passe" + +msgid "Password_plural" +msgstr "Mots de passe" + +msgid "Please note that this is only a shallow copy" +msgstr "Attention, cela n'effectue qu'une copie de surface" + +msgid "Powered by CubicWeb" +msgstr 
"Construit avec CubicWeb" + +msgid "RQLConstraint" +msgstr "contrainte rql" + +msgid "RQLExpression" +msgstr "Expression RQL" + +msgid "RQLExpression_plural" +msgstr "Expressions RQL" + +msgid "RQLUniqueConstraint" +msgstr "contrainte rql d'unicité" + +msgid "RQLVocabularyConstraint" +msgstr "contrainte rql de vocabulaire" + +msgid "RegexpConstraint" +msgstr "contrainte expression régulière" + +msgid "Registry's content" +msgstr "Contenu du registre" + +msgid "Relations" +msgstr "Relations" + +msgid "Repository" +msgstr "Entrepôt de données" + +#, python-format +msgid "Schema %s" +msgstr "Schéma %s" + +msgid "Schema's permissions definitions" +msgstr "Permissions définies dans le schéma" + +msgid "Search for" +msgstr "Rechercher" + +msgid "Site information" +msgstr "Information du site" + +msgid "SizeConstraint" +msgstr "contrainte de taille" + +msgid "" +"Source's configuration for a particular host. One key=value per line, " +"authorized keys depending on the source's type, overriding values defined on " +"the source." +msgstr "" +"Configuration de la source pour un hôte spécifique. Une clé=valeur par " +"ligne, les clés autorisées dépendantes du type de source. Les valeurs " +"surchargent celles définies sur la source." + +msgid "Startup views" +msgstr "Vues de départ" + +msgid "State" +msgstr "État" + +msgid "State_plural" +msgstr "États" + +msgid "StaticVocabularyConstraint" +msgstr "contrainte de vocabulaire" + +msgid "String" +msgstr "Chaîne de caractères" + +msgid "String_plural" +msgstr "Chaînes de caractères" + +msgid "Sub-classes:" +msgstr "Classes filles :" + +msgid "SubWorkflowExitPoint" +msgstr "Sortie de sous-workflow" + +msgid "SubWorkflowExitPoint_plural" +msgstr "Sorties de sous-workflow" + +msgid "Submit bug report" +msgstr "Soumettre un rapport de bug" + +msgid "Submit bug report by mail" +msgstr "Soumettre ce rapport par email" + +msgid "TZDatetime" +msgstr "Date et heure internationale" + +msgid "TZDatetime_plural" +msgstr "Dates et heures internationales" + +msgid "TZTime" +msgstr "Heure internationale" + +msgid "TZTime_plural" +msgstr "Heures internationales" + +#, python-format +msgid "The view %s can not be applied to this query" +msgstr "La vue %s ne peut être appliquée à cette requête" + +#, python-format +msgid "The view %s could not be found" +msgstr "La vue %s est introuvable" + +msgid "There is no default workflow" +msgstr "Ce type d'entité n'a pas de workflow par défault" + +msgid "This BaseTransition:" +msgstr "Cette transition abstraite :" + +msgid "This Bookmark:" +msgstr "Ce signet :" + +msgid "This CWAttribute:" +msgstr "Cette définition de relation finale :" + +msgid "This CWCache:" +msgstr "Ce cache applicatif :" + +msgid "This CWComputedRType:" +msgstr "Cette relation virtuelle :" + +msgid "This CWConstraint:" +msgstr "Cette contrainte :" + +msgid "This CWConstraintType:" +msgstr "Ce type de contrainte :" + +msgid "This CWDataImport:" +msgstr "Cet import de données :" + +msgid "This CWEType:" +msgstr "Ce type d'entité :" + +msgid "This CWGroup:" +msgstr "Ce groupe :" + +msgid "This CWProperty:" +msgstr "Cette propriété :" + +msgid "This CWRType:" +msgstr "Ce type de relation :" + +msgid "This CWRelation:" +msgstr "Cette définition de relation :" + +msgid "This CWSource:" +msgstr "Cette source :" + +msgid "This CWSourceHostConfig:" +msgstr "Cette configuration de source :" + +msgid "This CWSourceSchemaConfig:" +msgstr "Cette partie de mapping de source :" + +msgid "This CWUniqueTogetherConstraint:" +msgstr "Cette contrainte unique_together :" + 
+msgid "This CWUser:" +msgstr "Cet utilisateur :" + +msgid "This EmailAddress:" +msgstr "Cette adresse électronique :" + +msgid "This ExternalUri:" +msgstr "Cette Uri externe :" + +msgid "This RQLExpression:" +msgstr "Cette expression RQL :" + +msgid "This State:" +msgstr "Cet état :" + +msgid "This SubWorkflowExitPoint:" +msgstr "Cette sortie de sous-workflow :" + +msgid "This TrInfo:" +msgstr "Cette information de transition :" + +msgid "This Transition:" +msgstr "Cette transition :" + +msgid "This Workflow:" +msgstr "Ce workflow :" + +msgid "This WorkflowTransition:" +msgstr "Cette transition workflow :" + +msgid "" +"This action is forbidden. If you think it should be allowed, please contact " +"the site administrator." +msgstr "" +"Cette action est interdite. Si toutefois vous pensez qu'elle devrait être " +"autorisée, veuillez contacter l'administrateur du site." + +msgid "This entity type permissions:" +msgstr "Permissions pour ce type d'entité" + +msgid "Time" +msgstr "Heure" + +msgid "Time_plural" +msgstr "Heures" + +msgid "TrInfo" +msgstr "Information transition" + +msgid "TrInfo_plural" +msgstr "Information transitions" + +msgid "Transition" +msgstr "Transition" + +msgid "Transition_plural" +msgstr "Transitions" + +msgid "URLs from which content will be imported. You can put one url per line" +msgstr "" +"URLs depuis lesquelles le contenu sera importé. Vous pouvez mettre une URL " +"par ligne." + +msgid "Undoable actions" +msgstr "Action annulables" + +msgid "Undoing" +msgstr "Annuler" + +msgid "UniqueConstraint" +msgstr "contrainte d'unicité" + +msgid "Unknown source type" +msgstr "Type de source inconnue" + +msgid "Unreachable objects" +msgstr "Objets inaccessibles" + +#, python-format +msgid "Updated %(etype)s : %(entity)s" +msgstr "Entité %(etype)s mise à jour : %(entity)s" + +msgid "Used by:" +msgstr "Utilisé par :" + +msgid "Users and groups management" +msgstr "Gestion des utilisateurs et groupes" + +msgid "WARNING" +msgstr "AVERTISSEMENT" + +msgid "Web server" +msgstr "Serveur web" + +msgid "Workflow" +msgstr "Workflow" + +msgid "Workflow history" +msgstr "Historique des changements d'état" + +msgid "WorkflowTransition" +msgstr "Transition workflow" + +msgid "WorkflowTransition_plural" +msgstr "Transitions workflow" + +msgid "Workflow_plural" +msgstr "Workflows" + +msgid "" +"You can either submit a new file using the browse button above, or choose to " +"remove already uploaded file by checking the \"detach attached file\" check-" +"box, or edit file content online with the widget below." +msgstr "" +"Vous pouvez soit soumettre un nouveau fichier en utilisant le bouton\n" +"\"parcourir\" ci-dessus, soit supprimer le fichier déjà présent en\n" +"cochant la case \"détacher fichier attaché\", soit éditer le contenu\n" +"du fichier en ligne avec le champ ci-dessous." + +msgid "" +"You can either submit a new file using the browse button above, or edit file " +"content online with the widget below." +msgstr "" +"Vous pouvez soit soumettre un nouveau fichier en utilisant le bouton\n" +"\"parcourir\" ci-dessus, soit éditer le contenu du fichier en ligne\n" +"avec le champ ci-dessous." 
+ +msgid "You can't change this relation" +msgstr "Vous ne pouvez pas modifier cette relation" + +msgid "You cannot remove the system source" +msgstr "Vous ne pouvez pas supprimer la source système" + +msgid "You cannot rename the system source" +msgstr "Vous ne pouvez pas renommer la source système" + +msgid "" +"You have no access to this view or it can not be used to display the current " +"data." +msgstr "" +"Vous n'avez pas accès à cette vue ou elle ne peut pas afficher ces données." + +msgid "" +"You're not authorized to access this page. If you think you should, please " +"contact the site administrator." +msgstr "" +"Vous n'êtes pas autorisé à accéder à cette page. Si toutefois vous pensez\n" +"que c'est une erreur, veuillez contacter l'administrateur du site." + +#, python-format +msgid "[%s supervision] changes summary" +msgstr "[%s supervision] description des changements" + +msgid "" +"a RQL expression which should return some results, else the transition won't " +"be available. This query may use X and U variables that will respectivly " +"represents the current entity and the current user." +msgstr "" +"une expression RQL devant retourner des résultats pour que la transition " +"puisse être passée. Cette expression peut utiliser les variables X et U qui " +"représentent respectivement l'entité à laquelle on veut appliquer la " +"transition et l'utilisateur courant." + +msgid "a URI representing an object in external data store" +msgstr "une Uri désignant un objet dans un entrepôt de données externe" + +msgid "a float is expected" +msgstr "un nombre flottant est attendu" + +msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" +msgstr "un nombre (en seconde) ou 20s, 10min, 24h ou 4d sont attendus" + +msgid "" +"a simple cache entity characterized by a name and a validity date. The " +"target application is responsible for updating timestamp when necessary to " +"invalidate the cache (typically in hooks). Also, checkout the AppObject." +"get_cache() method." +msgstr "" +"un cache simple caractérisé par un nom et une date de validité. C'est\n" +"le code de l'instance qui est responsable de mettre à jour la date de\n" +"validité lorsque le cache doit être invalidé (en général dans un hook).\n" +"Pour récupérer un cache, il faut utiliser utiliser la méthode\n" +"get_cache(cachename)." 
+ +msgid "abstract base class for transitions" +msgstr "classe de base abstraite pour les transitions" + +msgid "action menu" +msgstr "actions" + +msgid "action(s) on this selection" +msgstr "action(s) sur cette sélection" + +msgid "actions" +msgstr "actions" + +msgid "activate" +msgstr "activer" + +msgid "activated" +msgstr "activé" + +msgid "add" +msgstr "ajouter" + +msgid "add Bookmark bookmarked_by CWUser object" +msgstr "signet" + +msgid "add CWAttribute add_permission RQLExpression subject" +msgstr "définir une expression RQL d'ajout" + +msgid "add CWAttribute constrained_by CWConstraint subject" +msgstr "contrainte" + +msgid "add CWAttribute read_permission RQLExpression subject" +msgstr "expression rql de lecture" + +msgid "add CWAttribute relation_type CWRType object" +msgstr "définition d'attribut" + +msgid "add CWAttribute update_permission RQLExpression subject" +msgstr "permission de mise à jour" + +msgid "add CWEType add_permission RQLExpression subject" +msgstr "définir une expression RQL d'ajout" + +msgid "add CWEType delete_permission RQLExpression subject" +msgstr "définir une expression RQL de suppression" + +msgid "add CWEType read_permission RQLExpression subject" +msgstr "définir une expression RQL de lecture" + +msgid "add CWEType update_permission RQLExpression subject" +msgstr "définir une expression RQL de mise à jour" + +msgid "add CWProperty for_user CWUser object" +msgstr "propriété" + +msgid "add CWRelation add_permission RQLExpression subject" +msgstr "expression rql d'ajout" + +msgid "add CWRelation constrained_by CWConstraint subject" +msgstr "contrainte" + +msgid "add CWRelation delete_permission RQLExpression subject" +msgstr "expression rql de suppression" + +msgid "add CWRelation read_permission RQLExpression subject" +msgstr "expression rql de lecture" + +msgid "add CWRelation relation_type CWRType object" +msgstr "définition de relation" + +msgid "add CWSourceHostConfig cw_host_config_of CWSource object" +msgstr "configuration d'hôte" + +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "contrainte unique_together" + +msgid "add CWUser in_group CWGroup object" +msgstr "utilisateur" + +msgid "add CWUser use_email EmailAddress subject" +msgstr "adresse email" + +msgid "add State allowed_transition Transition object" +msgstr "état en entrée" + +msgid "add State allowed_transition Transition subject" +msgstr "transition en sortie" + +msgid "add State allowed_transition WorkflowTransition subject" +msgstr "transition workflow en sortie" + +msgid "add State state_of Workflow object" +msgstr "état" + +msgid "add Transition condition RQLExpression subject" +msgstr "condition" + +msgid "add Transition destination_state State object" +msgstr "transition en entrée" + +msgid "add Transition destination_state State subject" +msgstr "état de sortie" + +msgid "add Transition transition_of Workflow object" +msgstr "transition" + +msgid "add WorkflowTransition condition RQLExpression subject" +msgstr "condition" + +msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" +msgstr "sortie de sous-workflow" + +msgid "add WorkflowTransition transition_of Workflow object" +msgstr "transition workflow" + +msgid "add a BaseTransition" +msgstr "" + +msgid "add a Bookmark" +msgstr "" + +msgid "add a CWAttribute" +msgstr "" + +msgid "add a CWCache" +msgstr "" + +msgid "add a CWComputedRType" +msgstr "" + +msgid "add a CWConstraint" +msgstr "" + +msgid "add a CWConstraintType" +msgstr "" + +msgid "add a CWDataImport" +msgstr "" + 
+msgid "add a CWEType" +msgstr "" + +msgctxt "inlined:CWRelation.from_entity.subject" +msgid "add a CWEType" +msgstr "ajouter un type d'entité sujet" + +msgctxt "inlined:CWRelation.to_entity.subject" +msgid "add a CWEType" +msgstr "ajouter un type d'entité objet" + +msgid "add a CWGroup" +msgstr "" + +msgid "add a CWProperty" +msgstr "" + +msgid "add a CWRType" +msgstr "" + +msgctxt "inlined:CWRelation.relation_type.subject" +msgid "add a CWRType" +msgstr "ajouter un type de relation" + +msgid "add a CWRelation" +msgstr "" + +msgid "add a CWSource" +msgstr "" + +msgid "add a CWSourceHostConfig" +msgstr "" + +msgid "add a CWSourceSchemaConfig" +msgstr "" + +msgid "add a CWUniqueTogetherConstraint" +msgstr "" + +msgid "add a CWUser" +msgstr "" + +msgid "add a EmailAddress" +msgstr "" + +msgctxt "inlined:CWUser.use_email.subject" +msgid "add a EmailAddress" +msgstr "ajouter une adresse électronique" + +msgid "add a ExternalUri" +msgstr "" + +msgid "add a RQLExpression" +msgstr "" + +msgid "add a State" +msgstr "" + +msgid "add a SubWorkflowExitPoint" +msgstr "" + +msgid "add a TrInfo" +msgstr "" + +msgid "add a Transition" +msgstr "" + +msgid "add a Workflow" +msgstr "" + +msgid "add a WorkflowTransition" +msgstr "" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgid "add_permission" +msgstr "peut ajouter" + +msgctxt "CWAttribute" +msgid "add_permission" +msgstr "permission d'ajout" + +# subject and object forms for each relation type +# (no object form for final relation types) +msgctxt "CWEType" +msgid "add_permission" +msgstr "permission d'ajout" + +msgctxt "CWRelation" +msgid "add_permission" +msgstr "permission d'ajout" + +msgid "add_permission_object" +msgstr "a la permission d'ajouter" + +msgctxt "CWGroup" +msgid "add_permission_object" +msgstr "a la permission d'ajouter" + +msgctxt "RQLExpression" +msgid "add_permission_object" +msgstr "a la permission d'ajouter" + +msgid "add_relation" +msgstr "ajouter" + +#, python-format +msgid "added %(etype)s #%(eid)s (%(title)s)" +msgstr "ajout de l'entité %(etype)s #%(eid)s (%(title)s)" + +#, python-format +msgid "" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" +"la relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #" +"%(eidto)s a été ajoutée" + +msgid "additional type specific properties" +msgstr "propriétés supplémentaires spécifiques au type" + +msgid "addrelated" +msgstr "ajouter" + +msgid "address" +msgstr "adresse électronique" + +msgctxt "EmailAddress" +msgid "address" +msgstr "adresse électronique" + +msgid "alias" +msgstr "alias" + +msgctxt "EmailAddress" +msgid "alias" +msgstr "alias" + +msgid "allow to set a specific workflow for an entity" +msgstr "permet de spécifier un workflow donné pour une entité" + +msgid "allowed options depends on the source type" +msgstr "les options autorisées dépendent du type de la source" + +msgid "allowed transitions from this state" +msgstr "transitions autorisées depuis cet état" + +#, python-format +msgid "allowed values for \"action\" are %s" +msgstr "les valeurs autorisées pour \"action\" sont %s" + +msgid "allowed_transition" +msgstr "transitions autorisées" + +msgctxt "State" +msgid "allowed_transition" +msgstr "transitions autorisées" + +msgid "allowed_transition_object" +msgstr "états en entrée" + +msgctxt "BaseTransition" +msgid "allowed_transition_object" +msgstr "transition autorisée de" + +msgctxt "Transition" +msgid "allowed_transition_object" +msgstr "transition 
autorisée de" + +msgctxt "WorkflowTransition" +msgid "allowed_transition_object" +msgstr "transition autorisée de" + +msgid "an electronic mail address associated to a short alias" +msgstr "une adresse électronique associée à un alias" + +msgid "an error occurred" +msgstr "une erreur est survenue" + +msgid "an error occurred while processing your request" +msgstr "une erreur est survenue pendant le traitement de votre requête" + +msgid "an error occurred, the request cannot be fulfilled" +msgstr "une erreur est survenue, la requête ne peut être complétée" + +msgid "an integer is expected" +msgstr "un nombre entier est attendu" + +msgid "and linked" +msgstr "et liée" + +msgid "and/or between different values" +msgstr "et/ou entre les différentes valeurs" + +msgid "anyrsetview" +msgstr "vues pour tout rset" + +msgid "april" +msgstr "avril" + +#, python-format +msgid "archive for %(author)s" +msgstr "archive pour l'auteur %(author)s" + +#, python-format +msgid "archive for %(month)s/%(year)s" +msgstr "archive pour le mois %(month)s/%(year)s" + +#, python-format +msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" +msgstr "" +"l'entité #%(eid)s de type %(etype)s doit nécessairement être reliée à une\n" +"autre via la relation %(rtype)s" + +msgid "attribute" +msgstr "attribut" + +msgid "august" +msgstr "août" + +msgid "authentication failure" +msgstr "Identifiant ou mot de passe incorrect" + +msgid "auto" +msgstr "automatique" + +msgid "autocomputed attribute used to ensure transition coherency" +msgstr "" +"attribut calculé automatiquement pour assurer la cohérence de la transition" + +msgid "automatic" +msgstr "automatique" + +#, python-format +msgid "back to pagination (%s results)" +msgstr "retour à la vue paginée (%s résultats)" + +msgid "bad value" +msgstr "mauvaise valeur" + +msgid "badly formatted url" +msgstr "URL mal formattée" + +msgid "base url" +msgstr "url de base" + +msgid "bookmark has been removed" +msgstr "le signet a été retiré" + +msgid "bookmark this page" +msgstr "poser un signet ici" + +msgid "bookmark this search" +msgstr "mémoriser cette recherche" + +msgid "bookmarked_by" +msgstr "utilisé par" + +msgctxt "Bookmark" +msgid "bookmarked_by" +msgstr "utilisé par" + +msgid "bookmarked_by_object" +msgstr "utilise le(s) signet(s)" + +msgctxt "CWUser" +msgid "bookmarked_by_object" +msgstr "utilise le(s) signet(s)" + +msgid "bookmarks" +msgstr "signets" + +msgid "bookmarks are used to have user's specific internal links" +msgstr "" +"les signets sont utilisés pour gérer des liens internes par utilisateur" + +msgid "boxes" +msgstr "boîtes" + +msgid "bug report sent" +msgstr "rapport d'erreur envoyé" + +msgid "button_apply" +msgstr "appliquer" + +msgid "button_cancel" +msgstr "annuler" + +msgid "button_delete" +msgstr "supprimer" + +msgid "button_ok" +msgstr "valider" + +msgid "by" +msgstr "par" + +msgid "by relation" +msgstr "via la relation" + +msgid "by_transition" +msgstr "transition" + +msgctxt "TrInfo" +msgid "by_transition" +msgstr "transition" + +msgid "by_transition_object" +msgstr "changement d'états" + +msgctxt "BaseTransition" +msgid "by_transition_object" +msgstr "a pour information" + +msgctxt "Transition" +msgid "by_transition_object" +msgstr "a pour information" + +msgctxt "WorkflowTransition" +msgid "by_transition_object" +msgstr "a pour information" + +msgid "calendar" +msgstr "afficher un calendrier" + +msgid "can not resolve entity types:" +msgstr "impossible d'interpréter les types d'entités :" + +msgid "can only have one url" +msgstr 
"ne supporte qu'une seule URL" + +msgid "can't be changed" +msgstr "ne peut-être modifié" + +msgid "can't be deleted" +msgstr "ne peut-être supprimé" + +msgid "can't change this attribute" +msgstr "cet attribut ne peut pas être modifié" + +#, python-format +msgid "can't display data, unexpected error: %s" +msgstr "impossible d'afficher les données à cause de l'erreur suivante: %s" + +msgid "can't have multiple exits on the same state" +msgstr "ne peut avoir plusieurs sorties sur le même état" + +#, python-format +msgid "can't parse %(value)r (expected %(format)s)" +msgstr "ne peut analyser %(value)r (format attendu : %(format)s)" + +#, python-format +msgid "" +"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " +"%(value)s) does not exist any longer" +msgstr "" +"impossible de rétablir l'entité %(eid)s de type %(eschema)s, cible de la " +"relation %(rtype)s (eid %(value)s) n'existe plus" + +#, python-format +msgid "" +"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " +"exist in the schema anymore." +msgstr "" +"impossible de rétablir la relation %(rtype)s sur l'entité %(eid)s, cette " +"relation n'existe plus dans le schéma." + +#, python-format +msgid "can't restore state of entity %s, it has been deleted inbetween" +msgstr "" +"impossible de rétablir l'état de l'entité %s, elle a été supprimée entre-" +"temps" + +#, python-format +msgid "" +"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" +"%(card)s" +msgstr "" +"ne peut mettre 'inlined'=Vrai, %(stype)s %(rtype)s %(otype)s a pour " +"cardinalité %(card)s" + +msgid "cancel" +msgstr "annuler" + +msgid "cancel select" +msgstr "annuler la sélection" + +msgid "cancel this insert" +msgstr "annuler cette insertion" + +msgid "cardinality" +msgstr "cardinalité" + +msgctxt "CWAttribute" +msgid "cardinality" +msgstr "cardinalité" + +msgctxt "CWRelation" +msgid "cardinality" +msgstr "cardinalité" + +msgid "category" +msgstr "categorie" + +#, python-format +msgid "changed state of %(etype)s #%(eid)s (%(title)s)" +msgstr "changement de l'état de %(etype)s #%(eid)s (%(title)s)" + +msgid "changes applied" +msgstr "changements appliqués" + +msgid "click here to see created entity" +msgstr "cliquez ici pour voir l'entité créée" + +msgid "click here to see edited entity" +msgstr "cliquez ici pour voir l'entité modifiée" + +msgid "click on the box to cancel the deletion" +msgstr "cliquez dans la zone d'édition pour annuler la suppression" + +msgid "click to add a value" +msgstr "cliquer pour ajouter une valeur" + +msgid "click to delete this value" +msgstr "cliquer pour supprimer cette valeur" + +msgid "click to edit this field" +msgstr "cliquez pour éditer ce champ" + +msgid "close all" +msgstr "tout fermer" + +msgid "comment" +msgstr "commentaire" + +msgctxt "TrInfo" +msgid "comment" +msgstr "commentaire" + +msgid "comment_format" +msgstr "format" + +msgctxt "TrInfo" +msgid "comment_format" +msgstr "format" + +msgid "components" +msgstr "composants" + +msgid "components_navigation" +msgstr "navigation par page" + +msgid "components_navigation_description" +msgstr "" +"composant permettant de présenter sur plusieurs pages les requêtes renvoyant " +"plus d'un certain nombre de résultat" + +msgid "components_rqlinput" +msgstr "barre rql" + +msgid "components_rqlinput_description" +msgstr "la barre de requête rql, dans l'en-tête de page" + +msgid "composite" +msgstr "composite" + +msgctxt "CWRelation" +msgid "composite" +msgstr "composite" + +msgid "condition" +msgstr "condition" + 
+msgctxt "BaseTransition" +msgid "condition" +msgstr "condition" + +msgctxt "Transition" +msgid "condition" +msgstr "condition" + +msgctxt "WorkflowTransition" +msgid "condition" +msgstr "condition" + +msgid "condition_object" +msgstr "condition de" + +msgctxt "RQLExpression" +msgid "condition_object" +msgstr "condition de" + +msgid "conditions" +msgstr "conditions" + +msgid "config" +msgstr "configuration" + +msgctxt "CWSource" +msgid "config" +msgstr "configuration" + +msgctxt "CWSourceHostConfig" +msgid "config" +msgstr "configuration" + +msgid "config mode" +msgstr "mode de configuration" + +msgid "config type" +msgstr "type de configuration" + +msgid "confirm password" +msgstr "confirmer le mot de passe" + +msgid "constrained_by" +msgstr "contraint par" + +msgctxt "CWAttribute" +msgid "constrained_by" +msgstr "contraint par" + +msgctxt "CWRelation" +msgid "constrained_by" +msgstr "contraint par" + +msgid "constrained_by_object" +msgstr "contrainte de" + +msgctxt "CWConstraint" +msgid "constrained_by_object" +msgstr "contrainte de" + +msgid "constraint factory" +msgstr "fabrique de contraintes" + +msgid "constraint_of" +msgstr "contrainte de" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "contrainte de" + +msgid "constraint_of_object" +msgstr "contraint par" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "contraint par" + +msgid "constraints" +msgstr "contraintes" + +msgid "constraints applying on this relation" +msgstr "contraintes s'appliquant à cette relation" + +msgid "content type" +msgstr "type MIME" + +msgid "context" +msgstr "contexte" + +msgid "context where this box should be displayed" +msgstr "contexte dans lequel la boite devrait être affichée" + +msgid "context where this component should be displayed" +msgstr "contexte où ce composant doit être affiché" + +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" +"contexte où cette facette doit être affichée. Laissez ce champ vide pour " +"l'avoir dans les deux." + +msgid "control subject entity's relations order" +msgstr "contrôle l'ordre des relations de l'entité sujet" + +msgid "copy" +msgstr "copier" + +msgid "core relation indicating a user's groups" +msgstr "" +"relation système indiquant les groupes auxquels appartient l'utilisateur" + +msgid "" +"core relation indicating owners of an entity. This relation implicitly put " +"the owner into the owners group for the entity" +msgstr "" +"relation système indiquant le(s) propriétaire(s) d'une entité. Cette " +"relation place implicitement les utilisateurs liés dans le groupe des " +"propriétaires pour cette entité" + +msgid "core relation indicating the original creator of an entity" +msgstr "relation système indiquant le créateur d'une entité." 
+ +msgid "core relation indicating the type of an entity" +msgstr "relation système indiquant le type de l'entité" + +msgid "" +"core relation indicating the types (including specialized types) of an entity" +msgstr "" +"relation système indiquant les types (y compris les types parents) d'une " +"entité" + +msgid "could not connect to the SMTP server" +msgstr "impossible de se connecter au serveur SMTP" + +msgid "create an index for quick search on this attribute" +msgstr "créer un index pour accélérer les recherches sur cet attribut" + +msgid "created on" +msgstr "créé le" + +msgid "created_by" +msgstr "créé par" + +msgid "created_by_object" +msgstr "a créé" + +msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" +msgstr "création d'un signet pour %(linkto)s" + +msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" +msgstr "création d'un attribut %(linkto)s" + +msgid "" +"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" +msgstr "création d'une contrainte pour l'attribut %(linkto)s" + +msgid "" +"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" +msgstr "création d'une contrainte pour la relation %(linkto)s" + +msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" +msgstr "création d'une propriété pour l'utilisateur %(linkto)s" + +msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" +msgstr "création relation %(linkto)s" + +msgid "" +"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " +"%(linkto)s)" +msgstr "création d'une configuration d'hôte pour la source %(linkto)s" + +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "création d'une contrainte unique_together sur %(linkto)s" + +msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" +msgstr "création d'un utilisateur à rajouter au groupe %(linkto)s" + +msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" +msgstr "création d'une adresse électronique pour l'utilisateur %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" +msgstr "création d'une expression rql pour le droit d'ajout de %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" +msgstr "création d'une expression rql pour le droit de lecture de %(linkto)s" + +msgid "" +"creating RQLExpression (CWAttribute %(linkto)s update_permission " +"RQLExpression)" +msgstr "" +"création d'une expression rql pour le droit de mise à jour de %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" +msgstr "création d'une expression RQL pour la permission d'ajout de %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" +msgstr "" +"création d'une expression RQL pour la permission de suppression de %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" +msgstr "création d'une expression RQL pour la permission de lire %(linkto)s" + +msgid "" +"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" +msgstr "" +"création d'une expression RQL pour la permission de mise à jour de %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" +msgstr "création d'une expression rql pour le droit d'ajout de %(linkto)s" + +msgid "" +"creating RQLExpression 
(CWRelation %(linkto)s delete_permission " +"RQLExpression)" +msgstr "" +"création d'une expression rql pour le droit de suppression de %(linkto)s" + +msgid "" +"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" +msgstr "création d'une expression rql pour le droit de lecture de %(linkto)s" + +msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" +msgstr "création d'une expression RQL pour la transition %(linkto)s" + +msgid "" +"creating RQLExpression (WorkflowTransition %(linkto)s condition " +"RQLExpression)" +msgstr "création d'une expression RQL pour la transition workflow %(linkto)s" + +msgid "creating State (State allowed_transition Transition %(linkto)s)" +msgstr "création d'un état pouvant aller vers la transition %(linkto)s" + +msgid "creating State (State state_of Workflow %(linkto)s)" +msgstr "création d'un état du workflow %(linkto)s" + +msgid "creating State (Transition %(linkto)s destination_state State)" +msgstr "création d'un état destination de la transition %(linkto)s" + +msgid "" +"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " +"subworkflow_exit SubWorkflowExitPoint)" +msgstr "création d'un point de sortie de la transition workflow %(linkto)s" + +msgid "creating Transition (State %(linkto)s allowed_transition Transition)" +msgstr "création d'une transition autorisée depuis l'état %(linkto)s" + +msgid "creating Transition (Transition destination_state State %(linkto)s)" +msgstr "création d'une transition vers l'état %(linkto)s" + +msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" +msgstr "création d'une transition du workflow %(linkto)s" + +msgid "" +"creating WorkflowTransition (State %(linkto)s allowed_transition " +"WorkflowTransition)" +msgstr "création d'une transition workflow autorisée depuis l'état %(linkto)s" + +msgid "" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" +msgstr "création d'une transition workflow du workflow %(linkto)s" + +msgid "creation" +msgstr "création" + +msgid "creation date" +msgstr "date de création" + +msgid "creation time of an entity" +msgstr "date de création d'une entité" + +msgid "creation_date" +msgstr "date de création" + +msgid "cstrtype" +msgstr "type de contrainte" + +msgctxt "CWConstraint" +msgid "cstrtype" +msgstr "type" + +msgid "cstrtype_object" +msgstr "utilisé par" + +msgctxt "CWConstraintType" +msgid "cstrtype_object" +msgstr "type des contraintes" + +msgid "csv export" +msgstr "export CSV" + +msgid "csv export (entities)" +msgstr "export CSV (entités)" + +msgid "ctxcomponents" +msgstr "composants contextuels" + +msgid "ctxcomponents_anonuserlink" +msgstr "lien utilisateur" + +msgid "ctxcomponents_anonuserlink_description" +msgstr "" +"affiche un lien vers le formulaire d'authentification pour les utilisateurs " +"anonymes, sinon une boite contenant notamment des liens propres à " +"l'utilisateur connectés" + +msgid "ctxcomponents_appliname" +msgstr "titre de l'application" + +msgid "ctxcomponents_appliname_description" +msgstr "affiche le titre de l'application dans l'en-tête de page" + +msgid "ctxcomponents_bookmarks_box" +msgstr "boîte signets" + +msgid "ctxcomponents_bookmarks_box_description" +msgstr "boîte contenant les signets de l'utilisateur" + +msgid "ctxcomponents_breadcrumbs" +msgstr "fil d'ariane" + +msgid "ctxcomponents_breadcrumbs_description" +msgstr "" +"affiche un chemin permettant de localiser la page courante dans le site" + +msgid "ctxcomponents_download_box" +msgstr 
"boîte de téléchargement" + +msgid "ctxcomponents_download_box_description" +msgstr "boîte contenant un lien permettant de télécharger la ressource" + +msgid "ctxcomponents_edit_box" +msgstr "boîte d'actions" + +msgid "ctxcomponents_edit_box_description" +msgstr "" +"boîte affichant les différentes actions possibles sur les données affichées" + +msgid "ctxcomponents_facet.filterbox" +msgstr "boîte à facettes" + +msgid "ctxcomponents_facet.filterbox_description" +msgstr "" +"boîte permettant de filtrer parmi les résultats d'une recherche à l'aide de " +"facettes" + +msgid "ctxcomponents_logo" +msgstr "logo" + +msgid "ctxcomponents_logo_description" +msgstr "le logo de l'application, dans l'en-tête de page" + +msgid "ctxcomponents_metadata" +msgstr "méta-données de l'entité" + +msgid "ctxcomponents_metadata_description" +msgstr "" + +msgid "ctxcomponents_possible_views_box" +msgstr "boîte des vues possibles" + +msgid "ctxcomponents_possible_views_box_description" +msgstr "boîte affichant les vues possibles pour les données courantes" + +msgid "ctxcomponents_prevnext" +msgstr "élément précedent / suivant" + +msgid "ctxcomponents_prevnext_description" +msgstr "" +"affiche des liens permettant de passer d'une entité à une autre sur les " +"entités implémentant l'interface \"précédent/suivant\"." + +msgid "ctxcomponents_rss" +msgstr "icône RSS" + +msgid "ctxcomponents_rss_description" +msgstr "l'icône RSS permettant de récupérer la vue RSS des données affichées" + +msgid "ctxcomponents_search_box" +msgstr "boîte de recherche" + +msgid "ctxcomponents_search_box_description" +msgstr "boîte avec un champ de recherche simple" + +msgid "ctxcomponents_startup_views_box" +msgstr "boîte des vues de départs" + +msgid "ctxcomponents_startup_views_box_description" +msgstr "boîte affichant les vues de départs de l'application" + +msgid "ctxcomponents_userstatus" +msgstr "état de l'utilisateur" + +msgid "ctxcomponents_userstatus_description" +msgstr "" + +msgid "ctxcomponents_wfhistory" +msgstr "historique du workflow." + +msgid "ctxcomponents_wfhistory_description" +msgstr "" +"section affichant l'historique du workflow pour les entités ayant un " +"workflow." 
+ +msgid "ctxtoolbar" +msgstr "barre d'outils" + +msgid "custom_workflow" +msgstr "workflow spécifique" + +msgid "custom_workflow_object" +msgstr "workflow de" + +msgid "cw.groups-management" +msgstr "groupes" + +msgid "cw.users-management" +msgstr "utilisateurs" + +msgid "cw_for_source" +msgstr "source" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_for_source" +msgstr "source" + +msgid "cw_for_source_object" +msgstr "élément de mapping" + +msgctxt "CWSource" +msgid "cw_for_source_object" +msgstr "élément de mapping" + +msgid "cw_host_config_of" +msgstr "host configuration of" + +msgctxt "CWSourceHostConfig" +msgid "cw_host_config_of" +msgstr "host configuration of" + +msgid "cw_host_config_of_object" +msgstr "has host configuration" + +msgctxt "CWSource" +msgid "cw_host_config_of_object" +msgstr "has host configuration" + +msgid "cw_import_of" +msgstr "source" + +msgctxt "CWDataImport" +msgid "cw_import_of" +msgstr "source" + +msgid "cw_import_of_object" +msgstr "imports" + +msgctxt "CWSource" +msgid "cw_import_of_object" +msgstr "imports" + +msgid "cw_schema" +msgstr "schéma" + +msgctxt "CWSourceSchemaConfig" +msgid "cw_schema" +msgstr "schéma" + +msgid "cw_schema_object" +msgstr "mappé par" + +msgctxt "CWEType" +msgid "cw_schema_object" +msgstr "mappé par" + +msgctxt "CWRType" +msgid "cw_schema_object" +msgstr "mappé par" + +msgctxt "CWRelation" +msgid "cw_schema_object" +msgstr "mappé par" + +msgid "cw_source" +msgstr "source" + +msgid "cw_source_object" +msgstr "entités" + +msgid "cwetype-box" +msgstr "vue \"boîte\"" + +msgid "cwetype-description" +msgstr "description" + +msgid "cwetype-permissions" +msgstr "permissions" + +msgid "cwetype-views" +msgstr "vues" + +msgid "cwetype-workflow" +msgstr "workflow" + +msgid "cwgroup-main" +msgstr "description" + +msgid "cwgroup-permissions" +msgstr "permissions" + +msgid "cwrtype-description" +msgstr "description" + +msgid "cwrtype-permissions" +msgstr "permissions" + +msgid "cwsource-imports" +msgstr "imports" + +msgid "cwsource-main" +msgstr "description" + +msgid "cwsource-mapping" +msgstr "mapping" + +msgid "cwuri" +msgstr "uri interne" + +msgid "data directory url" +msgstr "url du répertoire de données" + +msgid "data model schema" +msgstr "schéma du modèle de données" + +msgid "data sources" +msgstr "sources de données" + +msgid "data sources management" +msgstr "gestion des sources de données" + +msgid "date" +msgstr "date" + +msgid "deactivate" +msgstr "désactiver" + +msgid "deactivated" +msgstr "désactivé" + +msgid "december" +msgstr "décembre" + +msgid "default" +msgstr "valeur par défaut" + +msgid "default text format for rich text fields." 
+msgstr "format de texte par défaut pour les champs textes" + +msgid "default user workflow" +msgstr "workflow par défaut des utilisateurs" + +msgid "default value" +msgstr "valeur par défaut" + +msgid "default value as gziped pickled python object" +msgstr "valeur par défaut, sous forme d'objet python picklé zippé" + +msgid "default workflow for an entity type" +msgstr "workflow par défaut pour un type d'entité" + +msgid "default_workflow" +msgstr "workflow par défaut" + +msgctxt "CWEType" +msgid "default_workflow" +msgstr "workflow par défaut" + +msgid "default_workflow_object" +msgstr "workflow par défaut de" + +msgctxt "Workflow" +msgid "default_workflow_object" +msgstr "workflow par défaut de" + +msgid "defaultval" +msgstr "valeur par défaut" + +msgctxt "CWAttribute" +msgid "defaultval" +msgstr "valeur par défaut" + +msgid "define a CubicWeb user" +msgstr "défini un utilisateur CubicWeb" + +msgid "define a CubicWeb users group" +msgstr "défini un groupe d'utilisateur CubicWeb" + +msgid "" +"define a final relation: link a final relation type from a non final entity " +"to a final entity type. used to build the instance schema" +msgstr "" +"définit une relation non finale: lie un type de relation non finale depuis " +"une entité vers un type d'entité non final. Utilisé pour construire le " +"schéma de l'instance" + +msgid "" +"define a non final relation: link a non final relation type from a non final " +"entity to a non final entity type. used to build the instance schema" +msgstr "" +"définit une relation 'attribut', utilisé pour construire le schéma de " +"l'instance" + +msgid "define a relation type, used to build the instance schema" +msgstr "définit un type de relation" + +msgid "define a rql expression used to define permissions" +msgstr "définit une expression rql donnant une permission" + +msgid "define a schema constraint" +msgstr "définit une contrainte de schema" + +msgid "define a schema constraint type" +msgstr "définit un type de contrainte de schema" + +msgid "define a virtual relation type, used to build the instance schema" +msgstr "définit une relation virtuelle" + +msgid "define an entity type, used to build the instance schema" +msgstr "définit un type d'entité" + +msgid "define how we get out from a sub-workflow" +msgstr "définit comment sortir d'un sous-workflow" + +msgid "defines a sql-level multicolumn unique index" +msgstr "définit un index SQL unique sur plusieurs colonnes" + +msgid "" +"defines what's the property is applied for. You must select this first to be " +"able to set value" +msgstr "" +"définit à quoi la propriété est appliquée. 
Vous devez sélectionner cela " +"avant de pouvoir fixer une valeur" + +msgid "delete" +msgstr "supprimer" + +msgid "delete this bookmark" +msgstr "supprimer ce signet" + +msgid "delete this relation" +msgstr "supprimer cette relation" + +msgid "delete_permission" +msgstr "permission de supprimer" + +msgctxt "CWEType" +msgid "delete_permission" +msgstr "permission de supprimer" + +msgctxt "CWRelation" +msgid "delete_permission" +msgstr "permission de supprimer" + +msgid "delete_permission_object" +msgstr "a la permission de supprimer" + +msgctxt "CWGroup" +msgid "delete_permission_object" +msgstr "peut supprimer" + +msgctxt "RQLExpression" +msgid "delete_permission_object" +msgstr "peut supprimer" + +#, python-format +msgid "deleted %(etype)s #%(eid)s (%(title)s)" +msgstr "suppression de l'entité %(etype)s #%(eid)s (%(title)s)" + +#, python-format +msgid "" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" +msgstr "" +"relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #%(eidto)s " +"supprimée" + +msgid "depends on the constraint type" +msgstr "dépend du type de contrainte" + +msgid "description" +msgstr "description" + +msgctxt "BaseTransition" +msgid "description" +msgstr "description" + +msgctxt "CWAttribute" +msgid "description" +msgstr "description" + +msgctxt "CWComputedRType" +msgid "description" +msgstr "description" + +msgctxt "CWEType" +msgid "description" +msgstr "description" + +msgctxt "CWRType" +msgid "description" +msgstr "description" + +msgctxt "CWRelation" +msgid "description" +msgstr "description" + +msgctxt "State" +msgid "description" +msgstr "description" + +msgctxt "Transition" +msgid "description" +msgstr "description" + +msgctxt "Workflow" +msgid "description" +msgstr "description" + +msgctxt "WorkflowTransition" +msgid "description" +msgstr "description" + +msgid "description_format" +msgstr "format" + +msgctxt "BaseTransition" +msgid "description_format" +msgstr "format" + +msgctxt "CWAttribute" +msgid "description_format" +msgstr "format" + +msgctxt "CWComputedRType" +msgid "description_format" +msgstr "format" + +msgctxt "CWEType" +msgid "description_format" +msgstr "format" + +msgctxt "CWRType" +msgid "description_format" +msgstr "format" + +msgctxt "CWRelation" +msgid "description_format" +msgstr "format" + +msgctxt "State" +msgid "description_format" +msgstr "format" + +msgctxt "Transition" +msgid "description_format" +msgstr "format" + +msgctxt "Workflow" +msgid "description_format" +msgstr "format" + +msgctxt "WorkflowTransition" +msgid "description_format" +msgstr "format" + +msgid "destination state for this transition" +msgstr "états accessibles par cette transition" + +msgid "destination state must be in the same workflow as our parent transition" +msgstr "" +"l'état de destination doit être dans le même workflow que la transition " +"parente" + +msgid "destination state of a transition" +msgstr "état d'arrivée d'une transition" + +msgid "" +"destination state. No destination state means that transition should go back " +"to the state from which we've entered the subworkflow." +msgstr "" +"état de destination de la transition. Si aucun état de destination n'est " +"spécifié, la transition ira vers l'état depuis lequel l'entité est entrée " +"dans le sous-workflow." 
+ +msgid "destination_state" +msgstr "état de destination" + +msgctxt "SubWorkflowExitPoint" +msgid "destination_state" +msgstr "état de destination" + +msgctxt "Transition" +msgid "destination_state" +msgstr "état de destination" + +msgid "destination_state_object" +msgstr "destination de" + +msgctxt "State" +msgid "destination_state_object" +msgstr "état final de" + +msgid "detach attached file" +msgstr "détacher le fichier existant" + +msgid "display order of the box" +msgstr "ordre d'affichage de la boîte" + +msgid "display order of the component" +msgstr "ordre d'affichage du composant" + +msgid "display order of the facet" +msgstr "ordre d'affichage de la facette" + +msgid "display the box or not" +msgstr "afficher la boîte ou non" + +msgid "display the component or not" +msgstr "afficher le composant ou non" + +msgid "display the facet or not" +msgstr "afficher la facette ou non" + +msgid "download" +msgstr "télécharger" + +#, python-format +msgid "download %s" +msgstr "télécharger %s" + +msgid "download icon" +msgstr "icône de téléchargement" + +msgid "download schema as owl" +msgstr "télécharger le schéma OWL" + +msgid "edit bookmarks" +msgstr "éditer les signets" + +msgid "edit canceled" +msgstr "édition annulée" + +msgid "editable-table" +msgstr "table éditable" + +msgid "eid" +msgstr "eid" + +msgid "embedded html" +msgstr "HTML contenu" + +msgid "end_timestamp" +msgstr "horodate de fin" + +msgctxt "CWDataImport" +msgid "end_timestamp" +msgstr "horodate de fin" + +msgid "entities deleted" +msgstr "entités supprimées" + +msgid "entity and relation types can't be mapped, only attributes or relations" +msgstr "" +"les types d'entités et de relations ne peuvent être mappés, uniquement les " +"relations" + +msgid "entity copied" +msgstr "entité copiée" + +msgid "entity created" +msgstr "entité créée" + +msgid "entity creation" +msgstr "création d'entité" + +msgid "entity deleted" +msgstr "entité supprimée" + +msgid "entity deletion" +msgstr "suppression d'entité" + +msgid "entity edited" +msgstr "entité éditée" + +msgid "entity has no workflow set" +msgstr "l'entité n'a pas de workflow" + +msgid "entity linked" +msgstr "entité liée" + +msgid "entity type" +msgstr "type d'entité" + +msgid "entity types which may use this workflow" +msgstr "types d'entité pouvant utiliser ce workflow" + +msgid "entity update" +msgstr "mise à jour d'entité" + +msgid "entityview" +msgstr "vues d'entité" + +msgid "error" +msgstr "erreur" + +msgid "error while publishing ReST text" +msgstr "" +"une erreur s'est produite lors de l'interprétation du texte au format ReST" + +msgid "exit state must be a subworkflow state" +msgstr "l'état de sortie doit être un état du sous-workflow" + +msgid "exit_point" +msgstr "état de sortie" + +msgid "exit_point_object" +msgstr "état de sortie de" + +#, python-format +msgid "exiting from subworkflow %s" +msgstr "sortie du sous-workflow %s" + +msgid "expression" +msgstr "expression" + +msgctxt "RQLExpression" +msgid "expression" +msgstr "rql de l'expression" + +msgid "exprtype" +msgstr "type de l'expression" + +msgctxt "RQLExpression" +msgid "exprtype" +msgstr "type" + +msgid "extra_props" +msgstr "" + +msgctxt "CWAttribute" +msgid "extra_props" +msgstr "propriétés additionnelles" + +msgid "facet-loading-msg" +msgstr "en cours de traitement, merci de patienter" + +msgid "facet.filters" +msgstr "facettes" + +msgid "facetbox" +msgstr "boîte à facettes" + +msgid "facets_created_by-facet" +msgstr "facette \"créé par\"" + +msgid "facets_created_by-facet_description" +msgstr "" + 
+msgid "facets_cw_source-facet" +msgstr "facette \"source de données\"" + +msgid "facets_cw_source-facet_description" +msgstr "" + +msgid "facets_cwfinal-facet" +msgstr "facette \"type d'entité ou de relation final\"" + +msgid "facets_cwfinal-facet_description" +msgstr "" + +msgid "facets_datafeed.dataimport.status" +msgstr "état de l'iport" + +msgid "facets_datafeed.dataimport.status_description" +msgstr "" + +msgid "facets_etype-facet" +msgstr "facette \"est de type\"" + +msgid "facets_etype-facet_description" +msgstr "" + +msgid "facets_has_text-facet" +msgstr "facette \"contient le texte\"" + +msgid "facets_has_text-facet_description" +msgstr "" + +msgid "facets_in_group-facet" +msgstr "facette \"fait partie du groupe\"" + +msgid "facets_in_group-facet_description" +msgstr "" + +msgid "facets_in_state-facet" +msgstr "facette \"dans l'état\"" + +msgid "facets_in_state-facet_description" +msgstr "" + +msgid "failed" +msgstr "échec" + +#, python-format +msgid "failed to uniquify path (%s, %s)" +msgstr "ne peut obtenir un nom de fichier unique (%s, %s)" + +msgid "february" +msgstr "février" + +msgid "file tree view" +msgstr "arborescence (fichiers)" + +msgid "final" +msgstr "final" + +msgctxt "CWEType" +msgid "final" +msgstr "final" + +msgctxt "CWRType" +msgid "final" +msgstr "final" + +msgid "first name" +msgstr "prénom" + +msgid "firstname" +msgstr "prénom" + +msgctxt "CWUser" +msgid "firstname" +msgstr "prénom" + +msgid "foaf" +msgstr "foaf" + +msgid "focus on this selection" +msgstr "afficher cette sélection" + +msgid "follow" +msgstr "suivre le lien" + +#, python-format +msgid "follow this link for more information on this %s" +msgstr "suivez ce lien pour plus d'information sur ce %s" + +msgid "for_user" +msgstr "pour l'utilisateur" + +msgctxt "CWProperty" +msgid "for_user" +msgstr "propriété de l'utilisateur" + +msgid "for_user_object" +msgstr "utilise les propriétés" + +msgctxt "CWUser" +msgid "for_user_object" +msgstr "a pour préférence" + +msgid "formula" +msgstr "formule" + +msgctxt "CWAttribute" +msgid "formula" +msgstr "formule" + +msgid "friday" +msgstr "vendredi" + +msgid "from" +msgstr "de" + +#, python-format +msgid "from %(date)s" +msgstr "du %(date)s" + +msgid "from_entity" +msgstr "de l'entité" + +msgctxt "CWAttribute" +msgid "from_entity" +msgstr "attribut de l'entité" + +msgctxt "CWRelation" +msgid "from_entity" +msgstr "relation de l'entité" + +msgid "from_entity_object" +msgstr "relation sujet" + +msgctxt "CWEType" +msgid "from_entity_object" +msgstr "entité de" + +msgid "from_interval_start" +msgstr "De" + +msgid "from_state" +msgstr "de l'état" + +msgctxt "TrInfo" +msgid "from_state" +msgstr "état de départ" + +msgid "from_state_object" +msgstr "transitions depuis cet état" + +msgctxt "State" +msgid "from_state_object" +msgstr "état de départ de" + +msgid "full text or RQL query" +msgstr "texte à rechercher ou requête RQL" + +msgid "fulltext_container" +msgstr "conteneur du texte indexé" + +msgctxt "CWRType" +msgid "fulltext_container" +msgstr "objet à indexer" + +msgid "fulltextindexed" +msgstr "indexation du texte" + +msgctxt "CWAttribute" +msgid "fulltextindexed" +msgstr "texte indexé" + +msgid "gc" +msgstr "fuite mémoire" + +msgid "generic plot" +msgstr "tracé de courbes standard" + +msgid "generic relation to link one entity to another" +msgstr "relation générique pour lier une entité à une autre" + +msgid "" +"generic relation to specify that an external entity represent the same " +"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" +msgstr "" 
+"relation générique permettant d'indiquer qu'une entité est identique à une " +"autre ressource web (voir http://www.w3.org/TR/owl-ref/#sameAs-def)." + +msgid "granted to groups" +msgstr "accordée aux groupes" + +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "réprésentation graphique du modèle de données de %(appid)s" + +#, python-format +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" +"réprésentation graphique du modèle de données pour le type d'entité " +"%(etype)s de %(appid)s" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" +"réprésentation graphique du modèle de données pour le type de relation " +"%(rtype)s de %(appid)s" + +msgid "group in which a user should be to be allowed to pass this transition" +msgstr "" +"groupe dans lequel l'utilisateur doit être pour pouvoir passer la transition" + +msgid "groups" +msgstr "groupes" + +msgid "groups allowed to add entities/relations of this type" +msgstr "groupes autorisés à ajouter des entités/relations de ce type" + +msgid "groups allowed to delete entities/relations of this type" +msgstr "groupes autorisés à supprimer des entités/relations de ce type" + +msgid "groups allowed to read entities/relations of this type" +msgstr "groupes autorisés à lire des entités/relations de ce type" + +msgid "groups allowed to update entities/relations of this type" +msgstr "groupes autorisés à mettre à jour des entités/relations de ce type" + +msgid "groups grant permissions to the user" +msgstr "les groupes donnent des permissions à l'utilisateur" + +msgid "guests" +msgstr "invités" + +msgid "hCalendar" +msgstr "hCalendar" + +msgid "has_text" +msgstr "contient le texte" + +msgid "header-center" +msgstr "en-tête (centre)" + +msgid "header-left" +msgstr "en-tête (gauche)" + +msgid "header-right" +msgstr "en-tête (droite)" + +msgid "hide filter form" +msgstr "cacher le filtre" + +msgid "" +"how to format date and time in the ui (see this page for format " +"description)" +msgstr "" +"comment formater l'horodate dans l'interface (description du " +"format)" + +msgid "" +"how to format date in the ui (see this page for format " +"description)" +msgstr "" +"comment formater la date dans l'interface (description du format)" + +msgid "how to format float numbers in the ui" +msgstr "comment formater les nombres flottants dans l'interface" + +msgid "" +"how to format time in the ui (see this page for format " +"description)" +msgstr "" +"comment formater l'heure dans l'interface (description du format)" + +msgid "i18n_bookmark_url_fqs" +msgstr "paramètres" + +msgid "i18n_bookmark_url_path" +msgstr "chemin" + +msgid "i18n_login_popup" +msgstr "s'identifier" + +msgid "i18ncard_*" +msgstr "0..n" + +msgid "i18ncard_+" +msgstr "1..n" + +msgid "i18ncard_1" +msgstr "1" + +msgid "i18ncard_?" +msgstr "0..1" + +msgid "i18nprevnext_next" +msgstr "suivant" + +msgid "i18nprevnext_previous" +msgstr "précédent" + +msgid "i18nprevnext_up" +msgstr "parent" + +msgid "iCalendar" +msgstr "iCalendar" + +msgid "id of main template used to render pages" +msgstr "id du template principal" + +msgid "identical to" +msgstr "identique à" + +msgid "identical_to" +msgstr "identique à" + +msgid "identity" +msgstr "est identique à" + +msgid "identity_object" +msgstr "est identique à" + +msgid "" +"if full text content of subject/object entity should be added to other side " +"entity (the container)." 
+msgstr "" +"si le text indexé de l'entité sujet/objet doit être ajouté à l'entité à " +"l'autre extrémité de la relation (le conteneur)." + +msgid "image" +msgstr "image" + +msgid "in progress" +msgstr "en cours" + +msgid "in_group" +msgstr "dans le groupe" + +msgctxt "CWUser" +msgid "in_group" +msgstr "fait partie du groupe" + +msgid "in_group_object" +msgstr "membres" + +msgctxt "CWGroup" +msgid "in_group_object" +msgstr "contient les utilisateurs" + +msgid "in_state" +msgstr "état" + +msgid "in_state_object" +msgstr "état de" + +msgid "in_synchronization" +msgstr "en cours de synchronisation" + +msgctxt "CWSource" +msgid "in_synchronization" +msgstr "en cours de synchronisation" + +msgid "incontext" +msgstr "dans le contexte" + +msgid "incorrect captcha value" +msgstr "valeur de captcha incorrecte" + +#, python-format +msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" +msgstr "la valeur %(KEY-value)s est incorrecte pour le type \"%(KEY-type)s\"" + +msgid "index this attribute's value in the plain text index" +msgstr "indexer la valeur de cet attribut dans l'index plein texte" + +msgid "indexed" +msgstr "index" + +msgctxt "CWAttribute" +msgid "indexed" +msgstr "indexé" + +msgid "indicate the current state of an entity" +msgstr "indique l'état courant d'une entité" + +msgid "" +"indicate which state should be used by default when an entity using states " +"is created" +msgstr "" +"indique quel état devrait être utilisé par défaut lorsqu'une entité est créée" + +msgid "indifferent" +msgstr "indifférent" + +msgid "info" +msgstr "information" + +msgid "initial state for this workflow" +msgstr "état initial pour ce workflow" + +msgid "initial_state" +msgstr "état initial" + +msgctxt "Workflow" +msgid "initial_state" +msgstr "état initial" + +msgid "initial_state_object" +msgstr "état initial de" + +msgctxt "State" +msgid "initial_state_object" +msgstr "état initial de" + +msgid "inlined" +msgstr "mise en ligne" + +msgctxt "CWRType" +msgid "inlined" +msgstr "mise en ligne" + +msgid "instance home" +msgstr "répertoire de l'instance" + +msgid "internal entity uri" +msgstr "uri interne" + +msgid "internationalizable" +msgstr "internationalisable" + +msgctxt "CWAttribute" +msgid "internationalizable" +msgstr "internationalisable" + +#, python-format +msgid "invalid action %r" +msgstr "action %r invalide" + +#, python-format +msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" +msgstr "" +"la valeur %(KEY-value)s est incorrecte, elle doit être parmi %(KEY-choices)s" + +msgid "is" +msgstr "de type" + +msgid "is object of:" +msgstr "est object de" + +msgid "is subject of:" +msgstr "est sujet de" + +msgid "" +"is the subject/object entity of the relation composed of the other ? This " +"implies that when the composite is deleted, composants are also deleted." +msgstr "" +"Est-ce que l'entité sujet/objet de la relation est une agrégation de " +"l'autre ?Si c'est le cas, détruire le composite détruira ses composants " +"également" + +msgid "is this attribute's value translatable" +msgstr "est-ce que la valeur de cet attribut est traduisible ?" + +msgid "is this relation equivalent in both direction ?" +msgstr "est que cette relation est équivalent dans les deux sens ?" + +msgid "" +"is this relation physically inlined? you should know what you're doing if " +"you are changing this!" +msgstr "" +"est ce que cette relation est mise en ligne dans la base de données ?vous " +"devez savoir ce que vous faites si vous changez cela !" 
+ +msgid "is_instance_of" +msgstr "est une instance de" + +msgid "is_instance_of_object" +msgstr "type de" + +msgid "is_object" +msgstr "a pour instance" + +msgid "january" +msgstr "janvier" + +msgid "json-entities-export-view" +msgstr "export JSON (entités)" + +msgid "json-export-view" +msgstr "export JSON" + +msgid "july" +msgstr "juillet" + +msgid "june" +msgstr "juin" + +msgid "language of the user interface" +msgstr "langue pour l'interface utilisateur" + +msgid "last connection date" +msgstr "dernière date de connexion" + +msgid "last login time" +msgstr "dernière date de connexion" + +msgid "last name" +msgstr "nom" + +msgid "last usage" +msgstr "dernier usage" + +msgid "last_login_time" +msgstr "dernière date de connexion" + +msgctxt "CWUser" +msgid "last_login_time" +msgstr "dernière date de connexion" + +msgid "latest import" +msgstr "dernier import" + +msgid "latest modification time of an entity" +msgstr "date de dernière modification d'une entité" + +msgid "latest synchronization time" +msgstr "date de la dernière synchronisation" + +msgid "latest update on" +msgstr "dernière mise à jour" + +msgid "latest_retrieval" +msgstr "dernière synchronisation" + +msgctxt "CWSource" +msgid "latest_retrieval" +msgstr "date de la dernière synchronisation de la source." + +msgid "left" +msgstr "gauche" + +msgid "line" +msgstr "ligne" + +msgid "" +"link a property to the user which want this property customization. Unless " +"you're a site manager, this relation will be handled automatically." +msgstr "" +"lie une propriété à l'utilisateur désirant cette personnalisation. A moins " +"que vous ne soyez gestionnaire du site, cette relation est gérée " +"automatiquement." + +msgid "link a relation definition to its object entity type" +msgstr "lie une définition de relation à son type d'entité objet" + +msgid "link a relation definition to its relation type" +msgstr "lie une définition de relation à son type d'entité" + +msgid "link a relation definition to its subject entity type" +msgstr "lie une définition de relation à son type d'entité sujet" + +msgid "link a state to one or more workflow" +msgstr "lie un état à un ou plusieurs workflow" + +msgid "link a transition information to its object" +msgstr "lié une enregistrement de transition vers l'objet associé" + +msgid "link a transition to one or more workflow" +msgstr "lie une transition à un ou plusieurs workflow" + +msgid "link a workflow to one or more entity type" +msgstr "lie un workflow à un ou plusieurs types d'entité" + +msgid "list" +msgstr "liste" + +msgid "log" +msgstr "journal" + +msgctxt "CWDataImport" +msgid "log" +msgstr "journal" + +msgid "log in" +msgstr "s'identifier" + +msgid "login" +msgstr "identifiant" + +msgctxt "CWUser" +msgid "login" +msgstr "identifiant" + +msgid "login / password" +msgstr "identifiant / mot de passe" + +msgid "login or email" +msgstr "identifiant ou email" + +msgid "login_action" +msgstr "identifiez vous" + +msgid "logout" +msgstr "se déconnecter" + +#, python-format +msgid "loop in %(rel)s relation (%(eid)s)" +msgstr "boucle détectée en parcourant la relation %(rel)s de l'entité #%(eid)s" + +msgid "main informations" +msgstr "Informations générales" + +msgid "main_tab" +msgstr "description" + +msgid "mainvars" +msgstr "variables principales" + +msgctxt "RQLExpression" +msgid "mainvars" +msgstr "variables principales" + +msgid "manage" +msgstr "gestion du site" + +msgid "manage bookmarks" +msgstr "gérer les signets" + +msgid "manage permissions" +msgstr "gestion des permissions" + +msgid 
"managers" +msgstr "administrateurs" + +msgid "mandatory relation" +msgstr "relation obligatoire" + +msgid "march" +msgstr "mars" + +msgid "match_host" +msgstr "pour l'hôte" + +msgctxt "CWSourceHostConfig" +msgid "match_host" +msgstr "pour l'hôte" + +msgid "maximum number of characters in short description" +msgstr "nombre maximum de caractères dans les descriptions courtes" + +msgid "maximum number of entities to display in related combo box" +msgstr "nombre maximum d'entités à afficher dans les listes déroulantes" + +msgid "maximum number of objects displayed by page of results" +msgstr "nombre maximum d'entités affichées par pages" + +msgid "maximum number of related entities to display in the primary view" +msgstr "nombre maximum d'entités liées à afficher dans la vue primaire" + +msgid "may" +msgstr "mai" + +msgid "memory leak debugging" +msgstr "Déboguage des fuites de mémoire" + +msgid "message" +msgstr "message" + +#, python-format +msgid "missing parameters for entity %s" +msgstr "paramètres manquants pour l'entité %s" + +msgid "modification" +msgstr "modification" + +msgid "modification_date" +msgstr "date de modification" + +msgid "modify" +msgstr "modifier" + +msgid "monday" +msgstr "lundi" + +msgid "more actions" +msgstr "plus d'actions" + +msgid "more info about this workflow" +msgstr "plus d'information sur ce workflow" + +msgid "multiple edit" +msgstr "édition multiple" + +msgid "my custom search" +msgstr "ma recherche personnalisée" + +msgid "name" +msgstr "nom" + +msgctxt "BaseTransition" +msgid "name" +msgstr "nom" + +msgctxt "CWCache" +msgid "name" +msgstr "nom" + +msgctxt "CWComputedRType" +msgid "name" +msgstr "nom" + +msgctxt "CWConstraintType" +msgid "name" +msgstr "nom" + +msgctxt "CWEType" +msgid "name" +msgstr "nom" + +msgctxt "CWGroup" +msgid "name" +msgstr "nom" + +msgctxt "CWRType" +msgid "name" +msgstr "nom" + +msgctxt "CWSource" +msgid "name" +msgstr "nom" + +msgctxt "CWUniqueTogetherConstraint" +msgid "name" +msgstr "nom" + +msgctxt "State" +msgid "name" +msgstr "nom" + +msgctxt "Transition" +msgid "name" +msgstr "nom" + +msgctxt "Workflow" +msgid "name" +msgstr "nom" + +msgctxt "WorkflowTransition" +msgid "name" +msgstr "nom" + +msgid "name of the cache" +msgstr "nom du cache applicatif" + +msgid "" +"name of the main variables which should be used in the selection if " +"necessary (comma separated)" +msgstr "" +"nom des variables principales qui devrait être utilisées dans la sélection " +"si nécessaire (les séparer par des virgules)" + +msgid "name of the source" +msgstr "nom de la source" + +msgid "navbottom" +msgstr "bas de page" + +msgid "navcontentbottom" +msgstr "bas de page du contenu principal" + +msgid "navcontenttop" +msgstr "haut de page" + +msgid "navigation" +msgstr "navigation" + +msgid "navigation.combobox-limit" +msgstr "nombre d'entités dans les listes déroulantes" + +msgid "navigation.page-size" +msgstr "nombre de résultats" + +msgid "navigation.related-limit" +msgstr "nombre d'entités dans la vue primaire" + +msgid "navigation.short-line-size" +msgstr "taille des descriptions courtes" + +msgid "navtop" +msgstr "haut de page du contenu principal" + +msgid "new" +msgstr "nouveau" + +msgid "next page" +msgstr "page suivante" + +msgid "next_results" +msgstr "résultats suivants" + +msgid "no" +msgstr "non" + +msgid "no content next link" +msgstr "pas de lien 'suivant'" + +msgid "no content prev link" +msgstr "pas de lien 'précédent'" + +msgid "no edited fields specified" +msgstr "aucun champ à éditer spécifié" + +msgid "no log to display" 
+msgstr "rien à afficher" + +msgid "no related entity" +msgstr "pas d'entité liée" + +msgid "no repository sessions found" +msgstr "aucune session trouvée" + +msgid "no selected entities" +msgstr "pas d'entité sélectionnée" + +#, python-format +msgid "no such entity type %s" +msgstr "le type d'entité '%s' n'existe pas" + +msgid "no version information" +msgstr "pas d'information de version" + +msgid "no web sessions found" +msgstr "aucune session trouvée" + +msgid "normal" +msgstr "normal" + +msgid "not authorized" +msgstr "non autorisé" + +msgid "not selected" +msgstr "non sélectionné" + +msgid "november" +msgstr "novembre" + +msgid "num. users" +msgstr "nombre d'utilisateurs" + +msgid "object" +msgstr "objet" + +msgid "object type" +msgstr "type de l'objet" + +msgid "october" +msgstr "octobre" + +msgid "one month" +msgstr "un mois" + +msgid "one week" +msgstr "une semaine" + +msgid "oneline" +msgstr "une ligne" + +msgid "only select queries are authorized" +msgstr "seules les requêtes de sélections sont autorisées" + +msgid "open all" +msgstr "tout ouvrir" + +msgid "opened sessions" +msgstr "sessions ouvertes" + +msgid "opened web sessions" +msgstr "sessions web ouvertes" + +msgid "options" +msgstr "options" + +msgctxt "CWSourceSchemaConfig" +msgid "options" +msgstr "options" + +msgid "order" +msgstr "ordre" + +msgid "ordernum" +msgstr "ordre" + +msgctxt "CWAttribute" +msgid "ordernum" +msgstr "numéro d'ordre" + +msgctxt "CWRelation" +msgid "ordernum" +msgstr "numéro d'ordre" + +msgid "owl" +msgstr "owl" + +msgid "owlabox" +msgstr "owl ABox" + +msgid "owned_by" +msgstr "appartient à" + +msgid "owned_by_object" +msgstr "possède" + +msgid "owners" +msgstr "propriétaires" + +msgid "ownerships have been changed" +msgstr "les droits de propriété ont été modifiés" + +msgid "pageid-not-found" +msgstr "" +"des données nécessaires semblent expirées, veuillez recharger la page et " +"recommencer." + +msgid "parser" +msgstr "parseur" + +msgctxt "CWSource" +msgid "parser" +msgstr "parseur" + +msgid "parser to use to extract entities from content retrieved at given URLs." 
+msgstr "" +"parseur à utiliser pour extraire entités et relations du contenu récupéré " +"aux URLs données" + +msgid "password" +msgstr "mot de passe" + +msgid "password and confirmation don't match" +msgstr "le mot de passe et la confirmation sont différents" + +msgid "path" +msgstr "chemin" + +msgctxt "Bookmark" +msgid "path" +msgstr "chemin" + +msgid "permalink to this message" +msgstr "lien permanent vers ce message" + +msgid "permission" +msgstr "permission" + +msgid "permissions" +msgstr "permissions" + +msgid "pick existing bookmarks" +msgstr "récupérer des signets existants" + +msgid "pkey" +msgstr "clé" + +msgctxt "CWProperty" +msgid "pkey" +msgstr "code de la propriété" + +msgid "please correct errors below" +msgstr "veuillez corriger les erreurs ci-dessous" + +msgid "please correct the following errors:" +msgstr "veuillez corriger les erreurs suivantes :" + +msgid "possible views" +msgstr "vues possibles" + +msgid "prefered_form" +msgstr "forme préférée" + +msgctxt "EmailAddress" +msgid "prefered_form" +msgstr "forme préférée" + +msgid "prefered_form_object" +msgstr "forme préférée à" + +msgctxt "EmailAddress" +msgid "prefered_form_object" +msgstr "forme préférée de" + +msgid "preferences" +msgstr "préférences" + +msgid "previous page" +msgstr "page précédente" + +msgid "previous_results" +msgstr "résultats précédents" + +msgid "primary" +msgstr "primaire" + +msgid "primary_email" +msgstr "adresse email principale" + +msgctxt "CWUser" +msgid "primary_email" +msgstr "email principal" + +msgid "primary_email_object" +msgstr "adresse email principale (object)" + +msgctxt "EmailAddress" +msgid "primary_email_object" +msgstr "adresse principale de" + +msgid "profile" +msgstr "profil" + +msgid "rdef-description" +msgstr "description" + +msgid "rdef-permissions" +msgstr "permissions" + +msgid "rdf export" +msgstr "export RDF" + +msgid "read" +msgstr "lecture" + +msgid "read_permission" +msgstr "permission de lire" + +msgctxt "CWAttribute" +msgid "read_permission" +msgstr "permission de lire" + +msgctxt "CWEType" +msgid "read_permission" +msgstr "permission de lire" + +msgctxt "CWRelation" +msgid "read_permission" +msgstr "permission de lire" + +msgid "read_permission_object" +msgstr "a la permission de lire" + +msgctxt "CWGroup" +msgid "read_permission_object" +msgstr "peut lire" + +msgctxt "RQLExpression" +msgid "read_permission_object" +msgstr "peut lire" + +msgid "regexp matching host(s) to which this config applies" +msgstr "" +"expression régulière des noms d'hôtes auxquels cette configuration s'applique" + +msgid "registry" +msgstr "registre" + +msgid "related entity has no state" +msgstr "l'entité lié n'a pas d'état" + +msgid "related entity has no workflow set" +msgstr "l'entité lié n'a pas de workflow" + +msgid "relation" +msgstr "relation" + +#, python-format +msgid "relation %(relname)s of %(ent)s" +msgstr "relation %(relname)s de %(ent)s" + +#, python-format +msgid "" +"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " +"type supported" +msgstr "" +"la relation %(rtype)s avec %(etype)s comme %(role)s est supportée mais aucun " +"type cible n'est supporté" + +#, python-format +msgid "" +"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " +"mandatory but not supported" +msgstr "" +"la relation %(rtype)s avec %(etype)s comme %(role)s est obligatoire mais non " +"supportée" + +#, python-format +msgid "" +"relation %s is supported but none of its definitions matches supported " +"entities" +msgstr "" +"la relation %s est 
supportée mais aucune de ses définitions ne correspondent " +"aux types d'entités supportés" + +msgid "relation add" +msgstr "ajout de relation" + +msgid "relation removal" +msgstr "suppression de relation" + +msgid "relation_type" +msgstr "type de relation" + +msgctxt "CWAttribute" +msgid "relation_type" +msgstr "type de relation" + +msgctxt "CWRelation" +msgid "relation_type" +msgstr "type de relation" + +msgid "relation_type_object" +msgstr "définition" + +msgctxt "CWRType" +msgid "relation_type_object" +msgstr "définition" + +msgid "relations" +msgstr "relations" + +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "relations" + +msgid "relations deleted" +msgstr "relations supprimées" + +msgid "relations_object" +msgstr "relations de" + +msgctxt "CWRType" +msgid "relations_object" +msgstr "relations de" + +msgid "relative url of the bookmarked page" +msgstr "url relative de la page" + +msgid "remove-inlined-entity-form" +msgstr "supprimer" + +msgid "require_group" +msgstr "nécessite le groupe" + +msgctxt "BaseTransition" +msgid "require_group" +msgstr "restreinte au groupe" + +msgctxt "Transition" +msgid "require_group" +msgstr "restreinte au groupe" + +msgctxt "WorkflowTransition" +msgid "require_group" +msgstr "restreinte au groupe" + +msgid "require_group_object" +msgstr "a les droits" + +msgctxt "CWGroup" +msgid "require_group_object" +msgstr "a les droits" + +msgid "required" +msgstr "requis" + +msgid "required attribute" +msgstr "attribut requis" + +msgid "required field" +msgstr "champ requis" + +msgid "resources usage" +msgstr "resources utilisées" + +msgid "" +"restriction part of a rql query. For entity rql expression, X and U are " +"predefined respectivly to the current object and to the request user. For " +"relation rql expression, S, O and U are predefined respectivly to the " +"current relation'subject, object and to the request user. " +msgstr "" +"partie restriction de la requête rql. Pour une expression s'appliquant à une " +"entité, X et U sont respectivement prédéfinis à l'entité et à l'utilisateur " +"courant. Pour une expression s'appliquant à une relation, S, O et U sont " +"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur " +"courant." 
+ +msgid "revert changes" +msgstr "annuler les changements" + +msgid "right" +msgstr "droite" + +msgid "rql expression allowing to add entities/relations of this type" +msgstr "expression rql autorisant à ajouter des entités/relations de ce type" + +msgid "rql expression allowing to delete entities/relations of this type" +msgstr "expression rql autorisant à supprimer des entités/relations de ce type" + +msgid "rql expression allowing to read entities/relations of this type" +msgstr "expression rql autorisant à lire des entités/relations de ce type" + +msgid "rql expression allowing to update entities/relations of this type" +msgstr "" +"expression rql autorisant à mettre à jour des entités/relations de ce type" + +msgid "rql expressions" +msgstr "conditions rql" + +msgid "rss export" +msgstr "export RSS" + +msgid "rule" +msgstr "règle" + +msgctxt "CWComputedRType" +msgid "rule" +msgstr "règle" + +msgid "same_as" +msgstr "identique à" + +msgid "sample format" +msgstr "exemple" + +msgid "saturday" +msgstr "samedi" + +msgid "schema-diagram" +msgstr "diagramme" + +msgid "schema-entity-types" +msgstr "types d'entités" + +msgid "schema-relation-types" +msgstr "types de relations" + +msgid "search" +msgstr "rechercher" + +msgid "search for association" +msgstr "rechercher pour associer" + +msgid "searching for" +msgstr "Recherche de" + +msgid "security" +msgstr "sécurité" + +msgid "see more" +msgstr "voir plus" + +msgid "see them all" +msgstr "les voir toutes" + +msgid "see_also" +msgstr "voir aussi" + +msgid "select" +msgstr "sélectionner" + +msgid "select a" +msgstr "sélectionner un" + +msgid "select a key first" +msgstr "sélectionnez d'abord une clé" + +msgid "select a relation" +msgstr "sélectionner une relation" + +msgid "select this entity" +msgstr "sélectionner cette entité" + +msgid "selected" +msgstr "sélectionné" + +msgid "semantic description of this attribute" +msgstr "description sémantique de cet attribut" + +msgid "semantic description of this entity type" +msgstr "description sémantique de ce type d'entité" + +msgid "semantic description of this relation" +msgstr "description sémantique de cette relation" + +msgid "semantic description of this relation type" +msgstr "description sémantique de ce type de relation" + +msgid "semantic description of this state" +msgstr "description sémantique de cet état" + +msgid "semantic description of this transition" +msgstr "description sémantique de cette transition" + +msgid "semantic description of this workflow" +msgstr "description sémantique de ce workflow" + +msgid "september" +msgstr "septembre" + +msgid "server information" +msgstr "informations serveur" + +msgid "severity" +msgstr "sévérité" + +msgid "" +"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " +"You should also select text/html as default text format to actually get " +"fckeditor." +msgstr "" +"indique si les champs HTML doivent être édités avec fckeditor (un\n" +"éditeur HTML WYSIWYG). Il est également conseillé de choisir text/html\n" +"comme format de texte par défaut pour pouvoir utiliser fckeditor." 
+ +#, python-format +msgid "show %s results" +msgstr "montrer %s résultats" + +msgid "show advanced fields" +msgstr "montrer les champs avancés" + +msgid "show filter form" +msgstr "afficher le filtre" + +msgid "site configuration" +msgstr "configuration du site" + +msgid "site documentation" +msgstr "documentation du site" + +msgid "site title" +msgstr "titre du site" + +msgid "site-wide property can't be set for user" +msgstr "une propriété spécifique au site ne peut être propre à un utilisateur" + +msgid "some later transaction(s) touch entity, undo them first" +msgstr "" +"des transactions plus récentes modifient cette entité, annulez les d'abord" + +msgid "some relations violate a unicity constraint" +msgstr "certaines relations transgressent une contrainte d'unicité" + +msgid "sorry, the server is unable to handle this query" +msgstr "désolé, le serveur ne peut traiter cette requête" + +msgid "" +"source's configuration. One key=value per line, authorized keys depending on " +"the source's type" +msgstr "" +"Configuration de la source. Une clé=valeur par ligne, les clés autorisées " +"dépendantes du type de source. Les valeur surchargent celles définies sur la " +"source." + +msgid "sparql xml" +msgstr "XML Sparql" + +msgid "special transition allowing to go through a sub-workflow" +msgstr "transition spécial permettant d'aller dans un sous-workfow" + +msgid "specializes" +msgstr "dérive de" + +msgctxt "CWEType" +msgid "specializes" +msgstr "spécialise" + +msgid "specializes_object" +msgstr "parent de" + +msgctxt "CWEType" +msgid "specializes_object" +msgstr "parent de" + +#, python-format +msgid "specifying %s is mandatory" +msgstr "spécifier %s est obligatoire" + +msgid "" +"start timestamp of the currently in synchronization, or NULL when no " +"synchronization in progress." +msgstr "" +"horodate de départ de la synchronisation en cours, ou NULL s'il n'y en a pas." + +msgid "start_timestamp" +msgstr "horodate de début" + +msgctxt "CWDataImport" +msgid "start_timestamp" +msgstr "horodate de début" + +msgid "startup views" +msgstr "vues de départ" + +msgid "startupview" +msgstr "vues de départ" + +msgid "state" +msgstr "état" + +msgid "state and transition don't belong the the same workflow" +msgstr "l'état et la transition n'appartiennent pas au même workflow" + +msgid "state doesn't apply to this entity's type" +msgstr "cet état ne s'applique pas à ce type d'entité" + +msgid "state doesn't belong to entity's current workflow" +msgstr "l'état n'appartient pas au workflow courant de l'entité" + +msgid "state doesn't belong to entity's workflow" +msgstr "l'état n'appartient pas au workflow de l'entité" + +msgid "" +"state doesn't belong to entity's workflow. You may want to set a custom " +"workflow for this entity first." +msgstr "" +"l'état n'appartient pas au workflow courant de l'entité. Vous désirez peut-" +"être spécifier que cette entité doit utiliser ce workflow." 
+ +msgid "state doesn't belong to this workflow" +msgstr "l'état n'appartient pas à ce workflow" + +msgid "state_of" +msgstr "état de" + +msgctxt "State" +msgid "state_of" +msgstr "état de" + +msgid "state_of_object" +msgstr "a pour état" + +msgctxt "Workflow" +msgid "state_of_object" +msgstr "contient les états" + +msgid "status" +msgstr "état" + +msgctxt "CWDataImport" +msgid "status" +msgstr "état" + +msgid "status change" +msgstr "changer l'état" + +msgid "status changed" +msgstr "changement d'état" + +#, python-format +msgid "status will change from %(st1)s to %(st2)s" +msgstr "l'entité passera de l'état %(st1)s à l'état %(st2)s" + +msgid "subject" +msgstr "sujet" + +msgid "subject type" +msgstr "type du sujet" + +msgid "subject/object cardinality" +msgstr "cardinalité sujet/objet" + +msgid "subworkflow" +msgstr "sous-workflow" + +msgctxt "WorkflowTransition" +msgid "subworkflow" +msgstr "sous-workflow" + +msgid "" +"subworkflow isn't a workflow for the same types as the transition's workflow" +msgstr "" +"le sous-workflow ne s'applique pas aux mêmes types que le workflow de cette " +"transition" + +msgid "subworkflow state" +msgstr "état de sous-workflow" + +msgid "subworkflow_exit" +msgstr "sortie de sous-workflow" + +msgctxt "WorkflowTransition" +msgid "subworkflow_exit" +msgstr "sortie du sous-workflow" + +msgid "subworkflow_exit_object" +msgstr "états de sortie" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_exit_object" +msgstr "états de sortie" + +msgid "subworkflow_object" +msgstr "utilisé par la transition" + +msgctxt "Workflow" +msgid "subworkflow_object" +msgstr "sous workflow de" + +msgid "subworkflow_state" +msgstr "état du sous-workflow" + +msgctxt "SubWorkflowExitPoint" +msgid "subworkflow_state" +msgstr "état" + +msgid "subworkflow_state_object" +msgstr "état de sortie de" + +msgctxt "State" +msgid "subworkflow_state_object" +msgstr "état de sortie de" + +msgid "success" +msgstr "succès" + +msgid "sunday" +msgstr "dimanche" + +msgid "surname" +msgstr "nom" + +msgctxt "CWUser" +msgid "surname" +msgstr "nom de famille" + +msgid "symmetric" +msgstr "symétrique" + +msgctxt "CWRType" +msgid "symmetric" +msgstr "symétrique" + +msgid "synchronization-interval must be greater than 1 minute" +msgstr "synchronization-interval doit être supérieur à 1 minute" + +msgid "table" +msgstr "table" + +msgid "tablefilter" +msgstr "filtre de tableau" + +msgid "text" +msgstr "text" + +msgid "text/cubicweb-page-template" +msgstr "contenu dynamique" + +msgid "text/html" +msgstr "html" + +msgid "text/markdown" +msgstr "texte au format markdown" + +msgid "text/plain" +msgstr "texte pur" + +msgid "text/rest" +msgstr "ReST" + +msgid "the URI of the object" +msgstr "l'Uri de l'objet" + +msgid "the prefered email" +msgstr "l'adresse électronique principale" + +msgid "the system source has its configuration stored on the file-system" +msgstr "la source système a sa configuration stockée sur le système de fichier" + +#, python-format +msgid "the value \"%s\" is already used, use another one" +msgstr "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur" + +msgid "there is no next page" +msgstr "Il n'y a pas de page suivante" + +msgid "there is no previous page" +msgstr "Il n'y a pas de page précédente" + +#, python-format +msgid "there is no transaction #%s" +msgstr "Il n'y a pas de transaction #%s" + +msgid "this action is not reversible!" +msgstr "" +"Attention ! Cette opération va détruire les données de façon irréversible." 
+ +msgid "this entity is currently owned by" +msgstr "cette entité appartient à" + +msgid "this parser doesn't use a mapping" +msgstr "ce parseur n'utilise pas de mapping" + +msgid "this resource does not exist" +msgstr "cette ressource est introuvable" + +msgid "this source doesn't use a mapping" +msgstr "cette source n'utilise pas de mapping" + +msgid "thursday" +msgstr "jeudi" + +msgid "timestamp" +msgstr "date" + +msgctxt "CWCache" +msgid "timestamp" +msgstr "valide depuis" + +msgid "timetable" +msgstr "emploi du temps" + +msgid "title" +msgstr "titre" + +msgctxt "Bookmark" +msgid "title" +msgstr "libellé" + +msgid "to" +msgstr "à" + +#, python-format +msgid "to %(date)s" +msgstr "au %(date)s" + +msgid "to associate with" +msgstr "pour associer à" + +msgid "to_entity" +msgstr "vers l'entité" + +msgctxt "CWAttribute" +msgid "to_entity" +msgstr "pour l'entité" + +msgctxt "CWRelation" +msgid "to_entity" +msgstr "pour l'entité" + +msgid "to_entity_object" +msgstr "objet de la relation" + +msgctxt "CWEType" +msgid "to_entity_object" +msgstr "objet de la relation" + +msgid "to_interval_end" +msgstr "à" + +msgid "to_state" +msgstr "vers l'état" + +msgctxt "TrInfo" +msgid "to_state" +msgstr "état de destination" + +msgid "to_state_object" +msgstr "transitions vers cet état" + +msgctxt "State" +msgid "to_state_object" +msgstr "transition vers cet état" + +msgid "toggle check boxes" +msgstr "afficher/masquer les cases à cocher" + +msgid "tr_count" +msgstr "n° de transition" + +msgctxt "TrInfo" +msgid "tr_count" +msgstr "n° de transition" + +msgid "transaction undone" +msgstr "transaction annulée" + +#, python-format +msgid "transition %(tr)s isn't allowed from %(st)s" +msgstr "la transition %(tr)s n'est pas autorisée depuis l'état %(st)s" + +msgid "transition doesn't belong to entity's workflow" +msgstr "la transition n'appartient pas au workflow de l'entité" + +msgid "transition isn't allowed" +msgstr "la transition n'est pas autorisée" + +msgid "transition may not be fired" +msgstr "la transition ne peut-être déclenchée" + +msgid "transition_of" +msgstr "transition de" + +msgctxt "BaseTransition" +msgid "transition_of" +msgstr "transition de" + +msgctxt "Transition" +msgid "transition_of" +msgstr "transition de" + +msgctxt "WorkflowTransition" +msgid "transition_of" +msgstr "transition de" + +msgid "transition_of_object" +msgstr "a pour transition" + +msgctxt "Workflow" +msgid "transition_of_object" +msgstr "a pour transition" + +msgid "tree view" +msgstr "arborescence" + +msgid "tuesday" +msgstr "mardi" + +msgid "type" +msgstr "type" + +msgctxt "BaseTransition" +msgid "type" +msgstr "type" + +msgctxt "CWSource" +msgid "type" +msgstr "type" + +msgctxt "Transition" +msgid "type" +msgstr "type" + +msgctxt "WorkflowTransition" +msgid "type" +msgstr "type" + +msgid "type here a sparql query" +msgstr "Tapez une requête sparql" + +msgid "type of the source" +msgstr "type de la source" + +msgid "ui" +msgstr "propriétés génériques de l'interface" + +msgid "ui.date-format" +msgstr "format de date" + +msgid "ui.datetime-format" +msgstr "format de date et de l'heure" + +msgid "ui.default-text-format" +msgstr "format de texte" + +msgid "ui.encoding" +msgstr "encodage" + +msgid "ui.fckeditor" +msgstr "éditeur du contenu" + +msgid "ui.float-format" +msgstr "format des flottants" + +msgid "ui.language" +msgstr "langue" + +msgid "ui.main-template" +msgstr "gabarit principal" + +msgid "ui.site-title" +msgstr "titre du site" + +msgid "ui.time-format" +msgstr "format de l'heure" + +msgid "unable to check 
captcha, please try again" +msgstr "impossible de vérifier le captcha, veuillez réessayer" + +msgid "unaccessible" +msgstr "inaccessible" + +msgid "unauthorized value" +msgstr "valeur non autorisée" + +msgid "undefined user" +msgstr "utilisateur inconnu" + +msgid "undo" +msgstr "annuler" + +msgid "unique identifier used to connect to the application" +msgstr "identifiant unique utilisé pour se connecter à l'application" + +msgid "unknown external entity" +msgstr "entité (externe) introuvable" + +#, python-format +msgid "unknown options %s" +msgstr "options inconnues : %s" + +#, python-format +msgid "unknown property key %s" +msgstr "clé de propriété inconnue : %s" + +msgid "unknown vocabulary:" +msgstr "vocabulaire inconnu : " + +msgid "unsupported protocol" +msgstr "protocole non supporté" + +msgid "upassword" +msgstr "mot de passe" + +msgctxt "CWUser" +msgid "upassword" +msgstr "mot de passe" + +msgid "update" +msgstr "modification" + +msgid "update_permission" +msgstr "permission de modification" + +msgctxt "CWAttribute" +msgid "update_permission" +msgstr "permission de modifier" + +msgctxt "CWEType" +msgid "update_permission" +msgstr "permission de modifier" + +msgid "update_permission_object" +msgstr "a la permission de modifier" + +msgctxt "CWGroup" +msgid "update_permission_object" +msgstr "peut modifier" + +msgctxt "RQLExpression" +msgid "update_permission_object" +msgstr "peut modifier" + +msgid "update_relation" +msgstr "modifier" + +msgid "updated" +msgstr "mis à jour" + +#, python-format +msgid "updated %(etype)s #%(eid)s (%(title)s)" +msgstr "modification de l'entité %(etype)s #%(eid)s (%(title)s)" + +msgid "uri" +msgstr "uri" + +msgctxt "ExternalUri" +msgid "uri" +msgstr "uri" + +msgid "url" +msgstr "url" + +msgctxt "CWSource" +msgid "url" +msgstr "url" + +msgid "" +"use to define a transition from one or multiple states to a destination " +"states in workflow's definitions. Transition without destination state will " +"go back to the state from which we arrived to the current state." +msgstr "" +"utilisé dans une définition de processus pour ajouter une transition depuis " +"un ou plusieurs états vers un état de destination. Une transition sans état " +"de destination retournera à l'état précédent l'état courant." + +msgid "use_email" +msgstr "adresse électronique" + +msgctxt "CWUser" +msgid "use_email" +msgstr "utilise l'adresse électronique" + +msgid "use_email_object" +msgstr "adresse utilisée par" + +msgctxt "EmailAddress" +msgid "use_email_object" +msgstr "utilisée par" + +msgid "" +"used for cubicweb configuration. Once a property has been created you can't " +"change the key." +msgstr "" +"utilisé pour la configuration de l'application. 
Une fois qu'une propriété a " +"été créée, vous ne pouvez plus changez la clé associée" + +msgid "" +"used to associate simple states to an entity type and/or to define workflows" +msgstr "associe les états à un type d'entité pour définir un workflow" + +msgid "user" +msgstr "utilisateur" + +#, python-format +msgid "" +"user %s has made the following change(s):\n" +"\n" +msgstr "" +"l'utilisateur %s a effectué le(s) changement(s) suivant(s):\n" +"\n" + +msgid "user interface encoding" +msgstr "encodage utilisé dans l'interface utilisateur" + +msgid "user preferences" +msgstr "préférences utilisateur" + +msgid "user's email account" +msgstr "email de l'utilisateur" + +msgid "users" +msgstr "utilisateurs" + +msgid "users and groups" +msgstr "utilisateurs et groupes" + +msgid "users using this bookmark" +msgstr "utilisateurs utilisant ce signet" + +msgid "validate modifications on selected items" +msgstr "valider les modifications apportées aux éléments sélectionnés" + +msgid "validating..." +msgstr "chargement en cours ..." + +msgid "value" +msgstr "valeur" + +msgctxt "CWConstraint" +msgid "value" +msgstr "contrainte" + +msgctxt "CWProperty" +msgid "value" +msgstr "valeur" + +#, python-format +msgid "value %(KEY-value)s must be < %(KEY-boundary)s" +msgstr "la valeur %(KEY-value)s doit être strictement inférieure à %(KEY-boundary)s" + +#, python-format +msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" +msgstr "" +"la valeur %(KEY-value)s doit être inférieure ou égale à %(KEY-boundary)s" + +#, python-format +msgid "value %(KEY-value)s must be > %(KEY-boundary)s" +msgstr "la valeur %(KEY-value)s doit être strictement supérieure à %(KEY-boundary)s" + +#, python-format +msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" +msgstr "" +"la valeur %(KEY-value)s doit être supérieure ou égale à %(KEY-boundary)s" + +msgid "value associated to this key is not editable manually" +msgstr "la valeur associée à cette clé n'est pas éditable manuellement" + +#, python-format +msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" +msgstr "" +"la taille maximum est %(KEY-max)s mais cette valeur est de taille " +"%(KEY-size)s" + +#, python-format +msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" +msgstr "" +"la taille minimum est %(KEY-min)s mais cette valeur est de taille " +"%(KEY-size)s" + +msgid "vcard" +msgstr "vcard" + +msgid "versions configuration" +msgstr "configuration de version" + +msgid "view" +msgstr "voir" + +msgid "view all" +msgstr "voir tous" + +msgid "view detail for this entity" +msgstr "voir les détails de cette entité" + +msgid "view history" +msgstr "voir l'historique" + +msgid "view identifier" +msgstr "identifiant" + +msgid "view title" +msgstr "titre" + +msgid "view workflow" +msgstr "voir les états possibles" + +msgid "view_index" +msgstr "accueil" + +msgid "visible" +msgstr "visible" + +msgid "warning" +msgstr "attention" + +msgid "we are not yet ready to handle this query" +msgstr "" +"nous ne sommes pas capable de gérer ce type de requête sparql pour le moment" + +msgid "wednesday" +msgstr "mercredi" + +#, python-format +msgid "welcome %s!" +msgstr "bienvenue %s !" 
+ +msgid "wf_info_for" +msgstr "historique de" + +msgid "wf_info_for_object" +msgstr "historique des transitions" + +msgid "wf_tab_info" +msgstr "description" + +msgid "wfgraph" +msgstr "image du workflow" + +msgid "" +"when multiple addresses are equivalent (such as python-projects@logilab.org " +"and python-projects@lists.logilab.org), set this to indicate which is the " +"preferred form." +msgstr "" +"quand plusieurs addresses sont équivalentes (comme python-projects@logilab." +"org et python-projects@lists.logilab.org), indique laquelle est la forme " +"préférentielle." + +msgid "workflow" +msgstr "workflow" + +#, python-format +msgid "workflow changed to \"%s\"" +msgstr "workflow changé à \"%s\"" + +msgid "workflow has no initial state" +msgstr "le workflow n'a pas d'état initial" + +msgid "workflow history item" +msgstr "entrée de l'historique de workflow" + +msgid "workflow isn't a workflow for this type" +msgstr "le workflow ne s'applique pas à ce type d'entité" + +msgid "workflow to which this state belongs" +msgstr "workflow auquel cet état appartient" + +msgid "workflow to which this transition belongs" +msgstr "workflow auquel cette transition appartient" + +msgid "workflow_of" +msgstr "workflow de" + +msgctxt "Workflow" +msgid "workflow_of" +msgstr "workflow de" + +msgid "workflow_of_object" +msgstr "a pour workflow" + +msgctxt "CWEType" +msgid "workflow_of_object" +msgstr "a pour workflow" + +#, python-format +msgid "wrong query parameter line %s" +msgstr "mauvais paramètre de requête ligne %s" + +msgid "xbel export" +msgstr "export XBEL" + +msgid "xml export" +msgstr "export XML" + +msgid "xml export (entities)" +msgstr "export XML (entités)" + +msgid "yes" +msgstr "oui" + +msgid "you have been logged out" +msgstr "vous avez été déconnecté" + +msgid "you should probably delete that property" +msgstr "vous devriez probablement supprimer cette propriété" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/mail.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/mail.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,154 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""Common utilies to format / send emails.""" + +__docformat__ = "restructuredtext en" + +from base64 import b64encode, b64decode +from time import time +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.mime.image import MIMEImage +from email.header import Header +from email.utils import formatdate +from socket import gethostname + +from six import PY2, PY3, text_type + + +def header(ustring): + if PY3: + return Header(ustring, 'utf-8') + return Header(ustring.encode('UTF-8'), 'UTF-8') + +def addrheader(uaddr, uname=None): + # even if an email address should be ascii, encode it using utf8 since + # automatic tests may generate non ascii email address + if PY2: + addr = uaddr.encode('UTF-8') + else: + addr = uaddr + if uname: + val = '%s <%s>' % (header(uname).encode(), addr) + else: + val = addr + assert isinstance(val, str) # bytes in py2, ascii-encoded unicode in py3 + return val + + +def construct_message_id(appid, eid, withtimestamp=True): + if withtimestamp: + addrpart = 'eid=%s×tamp=%.10f' % (eid, time()) + else: + addrpart = 'eid=%s' % eid + # we don't want any equal sign nor trailing newlines + leftpart = b64encode(addrpart.encode('ascii'), b'.-').decode('ascii').rstrip().rstrip('=') + return '<%s@%s.%s>' % (leftpart, appid, gethostname()) + + +def parse_message_id(msgid, appid): + if msgid[0] == '<': + msgid = msgid[1:] + if msgid[-1] == '>': + msgid = msgid[:-1] + try: + values, qualif = msgid.split('@') + padding = len(values) % 4 + values = b64decode(str(values + '='*padding), '.-').decode('ascii') + values = dict(v.split('=') for v in values.split('&')) + fromappid, host = qualif.split('.', 1) + except Exception: + return None + if appid != fromappid or host != gethostname(): + return None + return values + + +def format_mail(uinfo, to_addrs, content, subject="", + cc_addrs=(), msgid=None, references=(), config=None): + """Sends an Email to 'e_addr' with content 'content', and subject 'subject' + + to_addrs and cc_addrs are expected to be a list of email address without + name + """ + assert isinstance(content, text_type), repr(content) + msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8') + # safety: keep only the first newline + try: + subject = subject.splitlines()[0] + msg['Subject'] = header(subject) + except IndexError: + pass # no subject + if uinfo.get('email'): + email = uinfo['email'] + elif config and config['sender-addr']: + email = text_type(config['sender-addr']) + else: + email = u'' + if uinfo.get('name'): + name = uinfo['name'] + elif config and config['sender-name']: + name = text_type(config['sender-name']) + else: + name = u'' + msg['From'] = addrheader(email, name) + if config and config['sender-addr'] and config['sender-addr'] != email: + appaddr = addrheader(config['sender-addr'], config['sender-name']) + msg['Reply-to'] = '%s, %s' % (msg['From'], appaddr) + elif email: + msg['Reply-to'] = msg['From'] + if config is not None: + msg['X-CW'] = config.appid + unique_addrs = lambda addrs: sorted(set(addr for addr in addrs if addr is not None)) + msg['To'] = ', '.join(addrheader(addr) for addr in unique_addrs(to_addrs)) + if cc_addrs: + msg['Cc'] = ', '.join(addrheader(addr) for addr in unique_addrs(cc_addrs)) + if msgid: + msg['Message-id'] = msgid + if references: + msg['References'] = ', '.join(references) + msg['Date'] = formatdate() + return msg + + +class HtmlEmail(MIMEMultipart): + + def __init__(self, subject, textcontent, htmlcontent, + sendermail=None, sendername=None, recipients=None, 
ccrecipients=None): + MIMEMultipart.__init__(self, 'related') + self['Subject'] = header(subject) + self.preamble = 'This is a multi-part message in MIME format.' + # Attach alternative text message + alternative = MIMEMultipart('alternative') + self.attach(alternative) + msgtext = MIMEText(textcontent.encode('UTF-8'), 'plain', 'UTF-8') + alternative.attach(msgtext) + # Attach html message + msghtml = MIMEText(htmlcontent.encode('UTF-8'), 'html', 'UTF-8') + alternative.attach(msghtml) + if sendermail or sendername: + self['From'] = addrheader(sendermail, sendername) + if recipients: + self['To'] = ', '.join(addrheader(addr) for addr in recipients if addr is not None) + if ccrecipients: + self['Cc'] = ', '.join(addrheader(addr) for addr in ccrecipients if addr is not None) + + def attach_image(self, data, htmlId): + image = MIMEImage(data) + image.add_header('Content-ID', '<%s>' % htmlId) + self.attach(image) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/md5crypt.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/md5crypt.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,121 @@ +# md5crypt.py +# +# 0423.2000 by michal wallace http://www.sabren.com/ +# based on perl's Crypt::PasswdMD5 by Luis Munoz (lem@cantv.net) +# based on /usr/src/libcrypt/crypt.c from FreeBSD 2.2.5-RELEASE +# +# MANY THANKS TO +# +# Carey Evans - http://home.clear.net.nz/pages/c.evans/ +# Dennis Marti - http://users.starpower.net/marti1/ +# +# For the patches that got this thing working! +# +# modification by logilab: +# * remove usage of the string module +# * don't include the magic string in the output string +# for true crypt.crypt compatibility +# * use hashlib module instead of md5 +######################################################### +"""md5crypt.py - Provides interoperable MD5-based crypt() function + +SYNOPSIS + + import md5crypt.py + + cryptedpassword = md5crypt.md5crypt(password, salt); + +DESCRIPTION + +unix_md5_crypt() provides a crypt()-compatible interface to the +rather new MD5-based crypt() function found in modern operating systems. +It's based on the implementation found on FreeBSD 2.2.[56]-RELEASE and +contains the following license in it: + + "THE BEER-WARE LICENSE" (Revision 42): + wrote this file. As long as you retain this notice you + can do whatever you want with this stuff. If we meet some day, and you think + this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp +""" + +MAGIC = b'$1$' # Magic string +ITOA64 = b"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + +from hashlib import md5 # pylint: disable=E0611 + +from six import text_type, indexbytes +from six.moves import range + + +def to64 (v, n): + ret = bytearray() + while (n - 1 >= 0): + n = n - 1 + ret.append(ITOA64[v & 0x3f]) + v = v >> 6 + return ret + +def crypt(pw, salt): + if isinstance(pw, text_type): + pw = pw.encode('utf-8') + if isinstance(salt, text_type): + salt = salt.encode('ascii') + # Take care of the magic string if present + if salt.startswith(MAGIC): + salt = salt[len(MAGIC):] + # salt can have up to 8 characters: + salt = salt.split(b'$', 1)[0] + salt = salt[:8] + ctx = pw + MAGIC + salt + final = md5(pw + salt + pw).digest() + for pl in range(len(pw), 0, -16): + if pl > 16: + ctx = ctx + final[:16] + else: + ctx = ctx + final[:pl] + # Now the 'weird' xform (??) 
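+ # i.e. walk the bits of len(pw) from least to most significant, appending a NUL
+ # byte for each set bit and the first password byte for each clear bit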
+ i = len(pw) + while i: + if i & 1: + ctx = ctx + b'\0' #if ($i & 1) { $ctx->add(pack("C", 0)); } + else: + ctx = ctx + pw[0] + i = i >> 1 + final = md5(ctx).digest() + # The following is supposed to make + # things run slower. + # my question: WTF??? + for i in range(1000): + ctx1 = b'' + if i & 1: + ctx1 = ctx1 + pw + else: + ctx1 = ctx1 + final[:16] + if i % 3: + ctx1 = ctx1 + salt + if i % 7: + ctx1 = ctx1 + pw + if i & 1: + ctx1 = ctx1 + final[:16] + else: + ctx1 = ctx1 + pw + final = md5(ctx1).digest() + # Final xform + passwd = b'' + passwd += to64((indexbytes(final, 0) << 16) + |(indexbytes(final, 6) << 8) + |(indexbytes(final, 12)),4) + passwd += to64((indexbytes(final, 1) << 16) + |(indexbytes(final, 7) << 8) + |(indexbytes(final, 13)), 4) + passwd += to64((indexbytes(final, 2) << 16) + |(indexbytes(final, 8) << 8) + |(indexbytes(final, 14)), 4) + passwd += to64((indexbytes(final, 3) << 16) + |(indexbytes(final, 9) << 8) + |(indexbytes(final, 15)), 4) + passwd += to64((indexbytes(final, 4) << 16) + |(indexbytes(final, 10) << 8) + |(indexbytes(final, 5)), 4) + passwd += to64((indexbytes(final, 11)), 2) + return passwd diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/migration.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/migration.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,553 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
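+#
+# Usage sketch for filter_scripts() below (the directory path and version tuples
+# are only examples): migration files are expected to be named
+# "<version>_<mode>.<ext>", e.g. "3.22.0_Any.py"; scripts whose version satisfies
+# fromversion < version <= toversion and whose mode is accepted by the
+# configuration are returned as a sorted list of (Version, path) pairs:
+#
+#   filter_scripts(config, '/path/to/cube/migration', (3, 21, 0), (3, 22, 0))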
+"""utilities for instances migration""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import os +import logging +import tempfile +from os.path import exists, join, basename, splitext +from itertools import chain +from warnings import warn + +from six import string_types + +from logilab.common import IGNORED_EXTENSIONS +from logilab.common.decorators import cached +from logilab.common.configuration import REQUIRED, read_old_config +from logilab.common.shellutils import ASK +from logilab.common.changelog import Version +from logilab.common.deprecation import deprecated + +from cubicweb import ConfigurationError, ExecutionError +from cubicweb.cwconfig import CubicWebConfiguration as cwcfg +from cubicweb.toolsutils import show_diffs + +def filter_scripts(config, directory, fromversion, toversion, quiet=True): + """return a list of paths of migration files to consider to upgrade + from a version to a greater one + """ + from logilab.common.changelog import Version # doesn't work with appengine + assert fromversion + assert toversion + assert isinstance(fromversion, tuple), fromversion.__class__ + assert isinstance(toversion, tuple), toversion.__class__ + assert fromversion <= toversion, (fromversion, toversion) + if not exists(directory): + if not quiet: + print(directory, "doesn't exists, no migration path") + return [] + if fromversion == toversion: + return [] + result = [] + for fname in os.listdir(directory): + if fname.endswith(IGNORED_EXTENSIONS): + continue + fpath = join(directory, fname) + try: + tver, mode = fname.split('_', 1) + except ValueError: + continue + mode = mode.split('.', 1)[0] + if not config.accept_mode(mode): + continue + try: + tver = Version(tver) + except ValueError: + continue + if tver <= fromversion: + continue + if tver > toversion: + continue + result.append((tver, fpath)) + # be sure scripts are executed in order + return sorted(result) + + +def execscript_confirm(scriptpath): + """asks for confirmation before executing a script and provides the + ability to show the script's content + """ + while True: + answer = ASK.ask('Execute %r ?' 
% scriptpath, + ('Y','n','show','abort'), 'Y') + if answer == 'abort': + raise SystemExit(1) + elif answer == 'n': + return False + elif answer == 'show': + stream = open(scriptpath) + scriptcontent = stream.read() + stream.close() + print() + print(scriptcontent) + print() + else: + return True + +def yes(*args, **kwargs): + return True + + +class MigrationHelper(object): + """class holding CubicWeb Migration Actions used by migration scripts""" + + def __init__(self, config, interactive=True, verbosity=1): + self.config = config + if config: + # no config on shell to a remote instance + self.config.init_log(logthreshold=logging.ERROR) + # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything + self.verbosity = verbosity + self.need_wrap = True + if not interactive or not verbosity: + self.confirm = yes + self.execscript_confirm = yes + else: + self.execscript_confirm = execscript_confirm + self._option_changes = [] + self.__context = {'confirm': self.confirm, + 'config': self.config, + 'interactive_mode': interactive, + } + self._context_stack = [] + + def __getattribute__(self, name): + try: + return object.__getattribute__(self, name) + except AttributeError: + cmd = 'cmd_%s' % name + # search self.__class__ to avoid infinite recursion + if hasattr(self.__class__, cmd): + meth = getattr(self, cmd) + return lambda *args, **kwargs: self.interact(args, kwargs, + meth=meth) + raise + raise AttributeError(name) + + def migrate(self, vcconf, toupgrade, options): + """upgrade the given set of cubes + + `cubes` is an ordered list of 3-uple: + (cube, fromversion, toversion) + """ + if options.fs_only: + # monkey path configuration.accept_mode so database mode (e.g. Any) + # won't be accepted + orig_accept_mode = self.config.accept_mode + def accept_mode(mode): + if mode == 'Any': + return False + return orig_accept_mode(mode) + self.config.accept_mode = accept_mode + # may be an iterator + toupgrade = tuple(toupgrade) + vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade) + ctx = self.__context + ctx['versions_map'] = vmap + if self.config.accept_mode('Any') and 'cubicweb' in vmap: + migrdir = self.config.migration_scripts_dir() + self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py')) + for cube, fromversion, toversion in toupgrade: + if cube == 'cubicweb': + migrdir = self.config.migration_scripts_dir() + else: + migrdir = self.config.cube_migration_scripts_dir(cube) + scripts = filter_scripts(self.config, migrdir, fromversion, toversion) + if scripts: + prevversion = None + for version, script in scripts: + # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call + # cube_upgraded once all script of X.Y.Z have been executed + if prevversion is not None and version != prevversion: + self.cube_upgraded(cube, prevversion) + prevversion = version + self.cmd_process_script(script) + self.cube_upgraded(cube, toversion) + else: + self.cube_upgraded(cube, toversion) + + def cube_upgraded(self, cube, version): + pass + + def shutdown(self): + pass + + def interact(self, args, kwargs, meth): + """execute the given method according to user's confirmation""" + msg = 'Execute command: %s(%s) ?' 
% ( + meth.__name__[4:], + ', '.join([repr(arg) for arg in args] + + ['%s=%r' % (n,v) for n,v in kwargs.items()])) + if 'ask_confirm' in kwargs: + ask_confirm = kwargs.pop('ask_confirm') + else: + ask_confirm = True + if not ask_confirm or self.confirm(msg): + return meth(*args, **kwargs) + + def confirm(self, question, # pylint: disable=E0202 + shell=True, abort=True, retry=False, pdb=False, default='y'): + """ask for confirmation and return true on positive answer + + if `retry` is true the r[etry] answer may return 2 + """ + possibleanswers = ['y', 'n'] + if abort: + possibleanswers.append('abort') + if pdb: + possibleanswers.append('pdb') + if shell: + possibleanswers.append('shell') + if retry: + possibleanswers.append('retry') + try: + answer = ASK.ask(question, possibleanswers, default) + except (EOFError, KeyboardInterrupt): + answer = 'abort' + if answer == 'n': + return False + if answer == 'retry': + return 2 + if answer == 'abort': + raise SystemExit(1) + if answer == 'shell': + self.interactive_shell() + return self.confirm(question, shell, abort, retry, pdb, default) + if answer == 'pdb': + import pdb + pdb.set_trace() + return self.confirm(question, shell, abort, retry, pdb, default) + return True + + def interactive_shell(self): + self.confirm = yes + self.need_wrap = False + # avoid '_' to be added to builtins by sys.display_hook + def do_not_add___to_builtins(obj): + if obj is not None: + print(repr(obj)) + sys.displayhook = do_not_add___to_builtins + local_ctx = self._create_context() + try: + import readline + from cubicweb.toolsutils import CWShellCompleter + except ImportError: + # readline not available + pass + else: + rql_completer = CWShellCompleter(local_ctx) + readline.set_completer(rql_completer.complete) + readline.parse_and_bind('tab: complete') + home_key = 'HOME' + if sys.platform == 'win32': + home_key = 'USERPROFILE' + histfile = os.path.join(os.environ[home_key], ".cwshell_history") + try: + readline.read_history_file(histfile) + except IOError: + pass + from code import interact + banner = """entering the migration python shell +just type migration commands or arbitrary python code and type ENTER to execute it +type "exit" or Ctrl-D to quit the shell and resume operation""" + # give custom readfunc to avoid http://bugs.python.org/issue1288615 + def unicode_raw_input(prompt): + return unicode(raw_input(prompt), sys.stdin.encoding) + interact(banner, readfunc=unicode_raw_input, local=local_ctx) + try: + readline.write_history_file(histfile) + except IOError: + pass + # delete instance's confirm attribute to avoid questions + del self.confirm + self.need_wrap = True + + @cached + def _create_context(self): + """return a dictionary to use as migration script execution context""" + context = self.__context + for attr in dir(self): + if attr.startswith('cmd_'): + if self.need_wrap: + context[attr[4:]] = getattr(self, attr[4:]) + else: + context[attr[4:]] = getattr(self, attr) + return context + + def update_context(self, key, value): + for context in self._context_stack: + context[key] = value + self.__context[key] = value + + def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): + """execute a migration script in interactive mode + + Display the migration script path, ask for confirmation and execute it + if confirmed + + Allowed input file formats for migration scripts: + - `python` (.py) + - `sql` (.sql) + - `doctest` (.txt or .rst) + + .. 
warning:: sql migration scripts are not available in web-only instance + + You can pass script parameters with using double dash (--) in the + command line + + Context environment can have these variables defined: + - __name__ : will be determine by funcname parameter + - __file__ : is the name of the script if it exists + - __args__ : script arguments coming from command-line + + :param migrscript: name of the script + :param funcname: defines __name__ inside the shell (or use __main__) + :params args: optional arguments for funcname + :keyword scriptargs: optional arguments of the script + """ + ftypes = {'python': ('.py',), + 'doctest': ('.txt', '.rst'), + 'sql': ('.sql',)} + # sql migration scripts are not available in web-only instance + if not hasattr(self, "session"): + ftypes.pop('sql') + migrscript = os.path.normpath(migrscript) + for (script_mode, ftype) in ftypes.items(): + if migrscript.endswith(ftype): + break + else: + ftypes = ', '.join(chain(*ftypes.values())) + msg = 'ignoring %s, not a valid script extension (%s)' + raise ExecutionError(msg % (migrscript, ftypes)) + if not self.execscript_confirm(migrscript): + return + scriptlocals = self._create_context().copy() + scriptlocals.update({'__file__': migrscript, + '__args__': kwargs.pop("scriptargs", [])}) + self._context_stack.append(scriptlocals) + if script_mode == 'python': + if funcname is None: + pyname = '__main__' + else: + pyname = splitext(basename(migrscript))[0] + scriptlocals['__name__'] = pyname + with open(migrscript, 'rb') as fobj: + fcontent = fobj.read() + try: + code = compile(fcontent, migrscript, 'exec') + except SyntaxError: + # try without print_function + code = compile(fcontent, migrscript, 'exec', 0, True) + warn('[3.22] script %r should be updated to work with print_function' + % migrscript, DeprecationWarning) + exec(code, scriptlocals) + if funcname is not None: + try: + func = scriptlocals[funcname] + self.info('found %s in locals', funcname) + assert callable(func), '%s (%s) is not callable' % (func, funcname) + except KeyError: + self.critical('no %s in script %s', funcname, migrscript) + return None + return func(*args, **kwargs) + elif script_mode == 'sql': + from cubicweb.server.sqlutils import sqlexec + sqlexec(open(migrscript).read(), self.session.system_sql) + self.commit() + else: # script_mode == 'doctest' + import doctest + return doctest.testfile(migrscript, module_relative=False, + optionflags=doctest.ELLIPSIS, + # verbose mode when user input is expected + verbose=self.verbosity==2, + report=True, + encoding='utf-8', + globs=scriptlocals) + self._context_stack.pop() + + def cmd_option_renamed(self, oldname, newname): + """a configuration option has been renamed""" + self._option_changes.append(('renamed', oldname, newname)) + + def cmd_option_group_changed(self, option, oldgroup, newgroup): + """a configuration option has been moved in another group""" + self._option_changes.append(('moved', option, oldgroup, newgroup)) + + def cmd_option_added(self, optname): + """a configuration option has been added""" + self._option_changes.append(('added', optname)) + + def cmd_option_removed(self, optname): + """a configuration option has been removed""" + # can safely be ignored + #self._option_changes.append(('removed', optname)) + + def cmd_option_type_changed(self, optname, oldtype, newvalue): + """a configuration option's type has changed""" + self._option_changes.append(('typechanged', optname, oldtype, newvalue)) + + def cmd_add_cubes(self, cubes): + """modify the list of used cubes 
in the in-memory config + returns newly inserted cubes, including dependencies + """ + if isinstance(cubes, string_types): + cubes = (cubes,) + origcubes = self.config.cubes() + newcubes = [p for p in self.config.expand_cubes(cubes) + if not p in origcubes] + if newcubes: + self.config.add_cubes(newcubes) + return newcubes + + @deprecated('[3.20] use drop_cube() instead of remove_cube()') + def cmd_remove_cube(self, cube, removedeps=False): + return self.cmd_drop_cube(cube, removedeps) + + def cmd_drop_cube(self, cube, removedeps=False): + if removedeps: + toremove = self.config.expand_cubes([cube]) + else: + toremove = (cube,) + origcubes = self.config._cubes + basecubes = [c for c in origcubes if not c in toremove] + # don't fake-add any new ones, or we won't be able to really-add them later + self.config._cubes = tuple(cube for cube in self.config.expand_cubes(basecubes) + if cube in origcubes) + removed = [p for p in origcubes if not p in self.config._cubes] + if not cube in removed and cube in origcubes: + raise ConfigurationError("can't remove cube %s, " + "used as a dependency" % cube) + return removed + + def rewrite_configuration(self): + configfile = self.config.main_config_file() + if self._option_changes: + read_old_config(self.config, self._option_changes, configfile) + fd, newconfig = tempfile.mkstemp() + for optdescr in self._option_changes: + if optdescr[0] == 'added': + optdict = self.config.get_option_def(optdescr[1]) + if optdict.get('default') is REQUIRED: + self.config.input_option(optdescr[1], optdict) + self.config.generate_config(open(newconfig, 'w')) + show_diffs(configfile, newconfig, askconfirm=self.confirm is not yes) + os.close(fd) + if exists(newconfig): + os.unlink(newconfig) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(MigrationHelper, getLogger('cubicweb.migration')) + + +def version_strictly_lower(a, b): + if a is None: + return True + if b is None: + return False + if a: + a = Version(a) + if b: + b = Version(b) + return a < b + +def max_version(a, b): + return str(max(Version(a), Version(b))) + +class ConfigurationProblem(object): + """Each cube has its own list of dependencies on other cubes/versions. + + The ConfigurationProblem is used to record the loaded cubes, then to detect + inconsistencies in their dependencies. + + See configuration management on wikipedia for litterature. 
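    A minimal usage sketch, assuming two hypothetical cubes 'blog' and
    'comment' registered with made-up version numbers::

        pb = ConfigurationProblem(config)
        pb.add_cube('blog', '1.10.0')
        pb.add_cube('comment', '1.8.1')
        pb.solve()
        # pb.errors  : list of ('add' | 'update', cube, version, source) tuples
        # pb.warnings: messages about badly formatted dependency constraints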
+ """ + + def __init__(self, config): + self.config = config + self.cubes = {'cubicweb': cwcfg.cubicweb_version()} + + def add_cube(self, name, version): + self.cubes[name] = version + + def solve(self): + self.warnings = [] + self.errors = [] + self.dependencies = {} + self.reverse_dependencies = {} + self.constraints = {} + # read dependencies + for cube in self.cubes: + if cube == 'cubicweb': continue + self.dependencies[cube] = dict(self.config.cube_dependencies(cube)) + self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube) + # compute reverse dependencies + for cube, dependencies in self.dependencies.items(): + for name, constraint in dependencies.items(): + self.reverse_dependencies.setdefault(name,set()) + if constraint: + try: + oper, version = constraint.split() + self.reverse_dependencies[name].add( (oper, version, cube) ) + except Exception: + self.warnings.append( + 'cube %s depends on %s but constraint badly ' + 'formatted: %s' % (cube, name, constraint)) + else: + self.reverse_dependencies[name].add( (None, None, cube) ) + # check consistency + for cube, versions in sorted(self.reverse_dependencies.items()): + oper, version, source = None, None, None + # simplify constraints + if versions: + for constraint in versions: + op, ver, src = constraint + if oper is None: + oper = op + version = ver + source = src + elif op == '>=' and oper == '>=': + if version_strictly_lower(version, ver): + version = ver + source = src + elif op == None: + continue + else: + print('unable to handle %s in %s, set to `%s %s` ' + 'but currently up to `%s %s`' % + (cube, source, oper, version, op, ver)) + # "solve" constraint satisfaction problem + if cube not in self.cubes: + self.errors.append( ('add', cube, version, source) ) + elif versions: + lower_strict = version_strictly_lower(self.cubes[cube], version) + if oper in ('>=','=','=='): + if lower_strict: + self.errors.append( ('update', cube, version, source) ) + elif oper is None: + pass # no constraint on version + else: + print('unknown operator', oper) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cmp_schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cmp_schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,24 @@ +"""This module compare the Schema on the file system to the one in the database""" + +from cStringIO import StringIO +from cubicweb.web.schemaviewer import SchemaViewer +from logilab.common.ureports import TextWriter +import difflib + +viewer = SchemaViewer() +layout_db = viewer.visit_schema(schema, display_relations=True) +layout_fs = viewer.visit_schema(fsschema, display_relations=True) +writer = TextWriter() +stream_db = StringIO() +stream_fs = StringIO() +writer.format(layout_db, stream=stream_db) +writer.format(layout_fs, stream=stream_fs) + +stream_db.seek(0) +stream_fs.seek(0) +db = stream_db.getvalue().splitlines() +fs = stream_fs.getvalue().splitlines() +open('db_schema.txt', 'w').write(stream_db.getvalue()) +open('fs_schema.txt', 'w').write(stream_fs.getvalue()) +#for diff in difflib.ndiff(fs, db): +# print diff diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwdesklets/gfx/bg.png Binary file cubicweb/misc/cwdesklets/gfx/bg.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwdesklets/gfx/border-left.png Binary file cubicweb/misc/cwdesklets/gfx/border-left.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwdesklets/gfx/logo_cw.png Binary file cubicweb/misc/cwdesklets/gfx/logo_cw.png has changed diff -r 058bb3dc685f -r 
0b59724cb3f2 cubicweb/misc/cwdesklets/gfx/rss.png Binary file cubicweb/misc/cwdesklets/gfx/rss.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwdesklets/rql_query.display --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwdesklets/rql_query.display Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwdesklets/rqlsensor/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwdesklets/rqlsensor/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,118 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +import webbrowser +reload(webbrowser) + +from sensor.Sensor import Sensor +from utils import datatypes, i18n + +from cubicweb.dbapi import connect + +_ = str + +class RQLSensor(Sensor): + + def __init__(self, *args): + global _; _ = i18n.Translator("rql-desklet") + Sensor.__init__(self) + # define configuration + self._set_config_type("appid", datatypes.TYPE_STRING, "") + self._set_config_type("user", datatypes.TYPE_STRING, "") + self._set_config_type("passwd", datatypes.TYPE_SECRET_STRING, "") + self._set_config_type("rql", datatypes.TYPE_STRING, "") + self._set_config_type("url", datatypes.TYPE_STRING, "") + self._set_config_type("delay", datatypes.TYPE_STRING, "600") + # default timer + self._add_timer(20, self.__update) + + def get_configurator(self): + configurator = self._new_configurator() + configurator.set_name(_("RQL")) + configurator.add_title(_("CubicWeb source settings")) + configurator.add_entry(_("ID",), "appid", _("The application id of this source")) + configurator.add_entry(_("User",), "user", _("The user to connect to this source")) + configurator.add_entry(_("Password",), "passwd", _("The user's password to connect to this source")) + configurator.add_entry(_("URL",), "url", _("The url of the web interface for this source")) + configurator.add_entry(_("RQL",), "rql", _("The rql query")) + configurator.add_entry(_("Update interval",), "delay", _("Delay in seconds between updates")) + return configurator + + + def call_action(self, action, path, args=[]): + index = path[-1] + output = self._new_output() + if action=="enter-line": + # change background + output.set('resultbg[%s]' % index, 'yellow') + elif action=="leave-line": + # change background + output.set('resultbg[%s]' % index, 'black') + elif action=="click-line": + # open url + output.set('resultbg[%s]' % index, 'black') + webbrowser.open(self._urls[index]) + self._send_output(output) + + def __get_connection(self): + try: + return self._v_cnx + except AttributeError: + appid, user, passwd = self._get_config("appid"), self._get_config("user"), self._get_config("passwd") + cnx = connect(database=appid, login=user, password=passwd) + 
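            # cache the freshly opened DBAPI connection on the instance: later
            # calls to __get_connection() reuse it, and __run_query() deletes
            # _v_cnx on error so the next update opens a new connection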
self._v_cnx = cnx + return cnx + + def __run_query(self, output): + base = self._get_config('url') + rql = self._get_config('rql') + cnx = self.__get_connection() + cursor = cnx.cursor() + try: + rset = cursor.execute(rql) + except Exception: + del self._v_cnx + raise + self._urls = [] + output.set('layout', 'vertical, 14') + output.set('length', rset.rowcount) + i = 0 + for line in rset: + output.set('result[%s]' % i, ', '.join([str(v) for v in line[1:]])) + output.set('resultbg[%s]' % i, 'black') + try: + self._urls.append(base % 'Any X WHERE X eid %s' % line[0]) + except Exception: + self._urls.append('') + i += 1 + + def __update(self): + output = self._new_output() + try: + self.__run_query(output) + except Exception as ex: + import traceback + traceback.print_exc() + output.set('layout', 'vertical, 10') + output.set('length', 1) + output.set('result[0]', str(ex)) + self._send_output(output) + self._add_timer(int(self._get_config('delay'))*1000, self.__update) + + +def new_sensor(args): + return RQLSensor(*args) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwdesklets/web_query.display --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwdesklets/web_query.display Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwfs/A_FAIRE --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwfs/A_FAIRE Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,14 @@ +TACHES +====== + +-- crire objet stocke/manipule les donnes + +-- extraire tests de chane de caractre + +* utiliser sqlite + +* crire fonction prend chemin en argument et renvoie contenu + +* extraire tests (chane de caractre) de spec + +* utiliser yams pour schma \ No newline at end of file diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwfs/cwfs-spec.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwfs/cwfs-spec.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,180 @@ +======================= + Specification cubicwebfs +======================= + +Remarque: cubicwebfs c'est le siamois de yamsfs +en fait c'est un yamsfs avec une interrogation +de base RQL + +Modle +------- + +Description du modle; +:: + societe + nom + ville + + affaire + ref + + document + annee + mois + jour + type {RAP,CLI,OFR,FCT} + fichier + +document concerne affaire +affaire concerne societe + +Contenu de la base exemple +--------------------------- + +societe | nom | ville | + | CETIAD | Dijon | + | EDF R&D | Clamart | + | Logilab | Paris | + +affaire | ref | concerne | + | CTIA01 | CETIAD | + | EDFR01 | EDF R&D | + | EDFR02 | EDF R&D | + +document | annee | mois | jour | type | concerne | fichier | + | 2004 | 09 | 06 | PRE | CTIA01 | depodoc/2004/09/CTIA01-040906-PRE-1-01.pdf | + | 2005 | 02 | 01 | CLI | EDFR01 | depodoc/2005/02/EDFR01-050201-CLI-1-01.pdf | + | 2005 | 03 | 22 | OFR | EDFR01 | depodoc/2005/02/EDFR01-050322-OFR-1-01.pdf | + + +Exemples de chemins/recherches +------------------------------- + +Cherche documents de mars 2005; +:: + /document/annee/2005/mois/03/ + + +Dont le contenu successif serait; + +Test:: + + $ ls /document + annee/ mois/ jour/ type/ + affaire/ concerne/ CTIA01-040906-PRE-1-01.pdf + EDFR01-050201-CLI-1-01.pdf EDFR01-050322-OFR-1-01.pdf + + $ ls /document/annee/ + 2004/ 2005/ + + $ ls /document/annee/2005/ + mois/ jour/ type/ affaire/ + concerne/ EDFR01-050201-CLI-1-01.pdf EDFR01-050322-OFR-1-01.pdf + + $ ls /document/annee/2005/mois/ + 02/ 03/ + + $ ls /document/annee/2005/mois/03/ 
+ jour/ type/ affaire/ concerne/ + EDFR01-050322-OFR-1-01.pdf + + +Question: est-ce que fichier/ ne va pas nous manquer ? + + +Cherche documents relatifs CTIA01; +:: + /affaire/ref/CTIA01/document/ + +Dont le contenu des rpertoires successifs serait: + +Test:: + + $ ls /affaire/ + ref/ societe/ concerne/ document/ + concerne_par/ CTIA01 EDFR01 EDFR02 + + $ ls /affaire/ref/ + CTIA01/ EDFR01/ EDFR02/ + + $ ls /affaire/ref/CTIA01/ + societe/ concerne/ document/ concerne_par/ + + $ ls /affaire/ref/CTIA01/document/ + annee/ mois/ jour/ type/ + CTIA01-040906-PRE-1-01.pdf + + +Cherche documents des affaires qui concernent CETIAD; +:: + /societe/nom/CETIAD/affaire/document/ + +Dont le contenu des rpertoires successifs serait; + +Test:: + + $ ls /societe/ + nom/ ville/ affaire/ concerne_par/ + CETIAD EDF R&D Logilab + + $ ls /societe/nom/ + CETIAD EDF R&D Logilab + + $ ls /societe/nom/CETIAD/ + ville/ affaire/ concerne_par/ CETIAD Logilab + + $ ls /societe/nom/CETIAD/affaire/ + ref/ societe/ concerne/ document/ + concerne_par/ CTIA01 + + $ ls /societe/nom/CETIAD/affaire/document/ + annee/ mois/ jour/ type/ + affaire/ concerne/ CTIA01-040906-PRE-1-01.pdf + + +En particulier, pour la recherche ci-dessus on ne peut pas crire; +:: + /document/affaire/concerne/societe/CETIAD/ + +La logique est que si on est dans un rpertoire document, il faut +qu'il contienne des documents. + +Cherche documents de 2002 qui concernent des affaires +qui concernent CETIAD; +:: + /societe/CETIAD/affaire/document/annee/2002/ + +Question: est-ce que les relations doivent tre des composants +du chemin ? +Question : si les relations ne font pas partie du chemin, il faudrait +pouvoir faire des recherches en utilisant des relations anonymes (ce +qui est impossible en RQL par exemple); +:: + /document/affaire/... s'il existe plusieurs relations entre + les entits document et affaire, on ne peut pas s'en sortir + +Question: que va-t-il se passer pour des chemins du type; +:: + /affaire/CTIA*/document/ + +Nicolas: mon avis on a rien faire, car c'est le shell qui +s'en occupe. De la mme faon, le systme de fichier n'a pas + se proccuper de ~/ et les programmes reoivent pas le "qqch*" +en argument, mais directement la liste. + +Attention: si jamais l'arborescence est sans fond, les +commandes rcursives vont prendre du temps... + +Attention: dans un premier temps, un systme de fichiers en +lecture seule est satisfaisant. on verra ensuite pour l'dition. +pour l'dition, on peut s'inspirer du external editor de zope +et avoir un format d'change XML entre le serveur et l'diteur. + +Le cas suivant est dbile, faut-il l'interdire ? +:: + /document/affaire/societe/concerne_par/affaire/concerne_par/document + + +NB: manque dtail d'un cas comme /document/annee/2005/concerne/affaire/ + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwfs/cwfs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwfs/cwfs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,175 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +class Schema : + + def __init__(self, schema) : + self._schema = schema + + def get_attrs(self, entity) : + return self._schema[entity][0] + + def get_relations(self, entity) : + return self._schema[entity][1] + + def get_attr_index(self, entity, attr) : + return list(self._schema[entity][0]).index(attr) + +SCHEMA = Schema({'societe': ( ('nom','ville'), + [('concerne_par','affaire'), + ] ), + 'affaire': ( ('ref',), + [('concerne','societe'), + ('concerne_par', 'document') + ] ), + 'document':( ('fichier', 'annee','mois','jour','type'), + [('concerne','affaire'), + ] ), + }) + + + +DATA = { 'societe': [ ('CETIAD', 'Dijon'), + ('EDF_R&D', 'Clamart'), + ('Logilab', 'Paris'), + ], + 'affaire': [ ('CTIA01', 'CETIAD'), + ('EDFR01', 'EDF_R&D'), + ('EDFR02', 'EDF_R&D'), + ], + 'document':[ ('CTIA01-040906-PRE-1-01.pdf','2004','09','06','PRE','CTIA01'), + ('EDFR01-050201-CLI-1-01.pdf','2005','02','01','CLI','EDFR01'), + ('EDFR01-050322-OFR-1-01.pdf','2005','03','22','OFR','EDFR01'), + ], + } + +def get_data(entity, where=[]) : + for value in DATA[entity] : + for index, val in where : + if value[index] != val : + break + else : + yield value + +class PathParser : + + def __init__(self, schema, path) : + self.schema = schema + self.path = path + self._components = iter([comp for comp in self.path.split('/') if comp]) + self._entity = None + self._attr = None + self._rel = None + self._restrictions = [] + + def parse(self) : + self._entity = next(self._components) + try: + self.process_entity() + except StopIteration : + pass + + def process_entity(self) : + _next = next(self._components) + if _next in self.schema.get_attrs(self._entity) : + self._attr = _next + _next = next(self._components) + self._restrictions.append( (self._entity, self._attr, _next) ) + self._attr = None + self._rel = None + self.process_entity() + + def get_list(self) : + if self._rel : + return + elif self._attr : + where = [] + for e,a,v in self._restrictions : + i = self.schema.get_attr_index(e, a) + where.append( (i,v) ) + i = self.schema.get_attr_index(self._entity, self._attr) + for values in get_data(self._entity,where) : + yield values[i]+'/' + else : + attr_restrict = [a for e,a,v in self._restrictions] + for attr in self.schema.get_attrs(self._entity) : + if attr not in attr_restrict : + yield attr+'/' + for data in DATA[self._entity]: + yield data[0] + for nom, entity in self.schema.get_relations(self._entity) : + yield nom+'/' + yield entity+'/' + +def ls(path) : + p = PathParser(SCHEMA,path) + p.parse() + return list(p.get_list()) + + +class SytPathParser : + + def __init__(self, schema, path) : + self.schema = schema + self.path = path + self._components = iter([comp for comp in self.path.split('/') if comp]) + self._e_type = None + self._restrictions = [] + self._alphabet = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') + + def parse(self): + self._var = self._alphabet.pop(0) + self._e_type = next(self._components) + e_type = self._e_type.capitalize() + self._restrictions.append('%s is %s' % (self._var, e_type)) + try: + self.process_entity() + except StopIteration : + pass + return 'Any %s WHERE %s' % (self._var, ', '.join(self._restrictions)) + + def 
process_entity(self) : + _next = next(self._components) + if _next in self.schema.get_attrs(self._e_type) : + attr = _next + try: + _next = next(self._components) + self._restrictions.append('%s %s %s' % (self._var, attr, _next)) + except StopIteration: + a_var = self._alphabet.pop(0) + self._restrictions.append('%s %s %s' % (self._var, attr, a_var) ) + self._var = a_var + raise + elif _next in [r for r,e in self.schema.get_relations(self._e_type)]: + rel = _next + r_var = self._alphabet.pop(0) + self._restrictions.append('%s %s %s' % (self._var, rel, r_var)) + self._var = r_var + try: + _next = next(self._components) + self._restrictions.append('%s is %s' % (r_var, _next.capitalize())) + except StopIteration: + raise + self.process_entity() + + +def to_rql(path) : + p = SytPathParser(SCHEMA,path) + return p.parse() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwfs/cwfs_test.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwfs/cwfs_test.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,66 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from logilab.common.testlib import TestCase, unittest_main + +import cubicwebfs +import sre + +def spec_parser(filename) : + """ + extract tests from specification + """ + sections = [] + buffer = "" + in_section = False + for line in open(filename) : + if line.startswith('Test::'): + in_section = True + buffer = "" + elif in_section : + if line.startswith(" ") or not line.strip() : + buffer += line.lstrip() + else : + sections.append(buffer) + in_section = False + tests = [] + for section in sections : + subsections = [t for t in section.strip().split('$ ls') if t] + for subsection in subsections : + path, results = subsection.splitlines()[0], subsection.splitlines()[1:] + path = path.strip() + items = set([i for i in sre.split('[\t\n]', '\n'.join(results)) if i]) + tests.append((path, items)) + return tests + +tests = spec_parser("cubicwebfs-spec.txt") + +class monTC(TestCase) : + pass + +for index, (path, results) in enumerate(tests) : + def f(self, p=path, r=results) : + res = set(cubicwebfs.ls(p)) + self.assertEqual(r, res) #, 'en trop %s\nmanque %s' % (r-results,results-r)) + f.__doc__ = "%s %s"%(index,path) + setattr(monTC,'test_%s'%index,f) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/cwzope/cwzope.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/cwzope/cwzope.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,50 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from AccessControl import getSecurityManager + +from cubicweb.dbapi import connect, Connection, Cursor +from cubicweb.common.utils import ResultSet, ResultSetIterator, ResultSetRow, Entity + +Connection.__allow_access_to_unprotected_subobjects__ = 1 +Cursor.__allow_access_to_unprotected_subobjects__ = 1 +ResultSet.__allow_access_to_unprotected_subobjects__ = 1 +ResultSetIterator.__allow_access_to_unprotected_subobjects__ = 1 +ResultSetRow.__allow_access_to_unprotected_subobjects__ = 1 +Entity.__allow_access_to_unprotected_subobjects__ = 1 + +CNX_CACHE = {} + +def get_connection(context, user=None, password=None, + host=None, database=None, group='cubicweb'): + """get a connection on an cubicweb server""" + request = context.REQUEST + zope_user = getSecurityManager().getUser() + if user is None: + user = zope_user.getId() + key = (user, host, database) + try: + return CNX_CACHE[key] + except KeyError: + if password is None: + password = zope_user._getPassword() + cnx = connect(user, password, host, database, group) + CNX_CACHE[key] = cnx + return cnx diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,37 @@ +from six import text_type + +from cubicweb.server.session import hooks_control + +for uri, cfg in config.read_sources_file().items(): + if uri in ('system', 'admin'): + continue + repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg.copy()) + +add_entity_type('CWSource') +add_relation_definition('CWSource', 'cw_source', 'CWSource') +add_entity_type('CWSourceHostConfig') + +with hooks_control(session, session.HOOKS_ALLOW_ALL, 'cw.sources'): + create_entity('CWSource', type=u'native', name=u'system') +commit() + +sql('INSERT INTO cw_source_relation(eid_from,eid_to) ' + 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s ' + 'WHERE s.cw_name=e.type') +commit() + +for uri, cfg in config.read_sources_file().items(): + if uri in ('system', 'admin'): + continue + repo.sources_by_uri.pop(uri) + config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items() + if key != 'adapter' and value is not None) + create_entity('CWSource', name=text_type(uri), type=text_type(cfg['adapter']), + config=config) +commit() + +# rename cwprops for boxes/contentnavigation +for x in rql('Any X,XK WHERE X pkey XK, ' + 'X pkey ~= "boxes.%" OR ' + 'X pkey ~= "contentnavigation.%"').entities(): + x.cw_set(pkey=u'ctxcomponents.' 
+ x.pkey.split('.', 1)[1]) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.0_common.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.0_common.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +option_group_changed('cleanup-session-time', 'web', 'main') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.4_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.4_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,8 @@ +for eschema in schema.entities(): + if not (eschema.final or 'cw_source' in eschema.subjrels): + add_relation_definition(eschema.type, 'cw_source', 'CWSource', ask_confirm=False) + +sql('INSERT INTO cw_source_relation(eid_from, eid_to) ' + 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s ' + 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)') +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.5_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.5_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +sync_schema_props_perms('CWSourceHostConfig', syncperms=False) + +sql('INSERT INTO cw_source_relation(eid_from, eid_to) ' + 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s ' + 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)') +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.7_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.7_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +add_attribute('TrInfo', 'tr_count') +sync_schema_props_perms('TrInfo') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.8_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.8_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +sync_schema_props_perms('CWSource', syncprops=False) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.10.9_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.10.9_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,32 @@ +import sys + +if confirm('fix some corrupted entities noticed on several instances?'): + rql('DELETE CWConstraint X WHERE NOT E constrained_by X') + rql('SET X is_instance_of Y WHERE X is Y, NOT X is_instance_of Y') + commit() + +if confirm('fix existing cwuri?'): + from logilab.common.shellutils import progress + from cubicweb.server.session import hooks_control + rset = rql('Any X, XC WHERE X cwuri XC, X cwuri ~= "%/eid/%"') + title = "%i entities to fix" % len(rset) + nbops = rset.rowcount + enabled = interactive_mode + with progress(title=title, nbops=nbops, size=30, enabled=enabled) as pb: + for i, row in enumerate(rset): + with hooks_control(session, session.HOOKS_DENY_ALL, 'integrity'): + data = {'eid': row[0], 'cwuri': row[1].replace(u'/eid', u'')} + rql('SET X cwuri %(cwuri)s WHERE X eid %(eid)s', data) + if not i % 100: # commit every 100 entities to limit memory consumption + pb.text = "%i committed" % i + commit(ask_confirm=False) + pb.update() + commit(ask_confirm=False) + +try: + from cubicweb import devtools + option_group_changed('anonymous-user', 'main', 'web') + option_group_changed('anonymous-password', 'main', 'web') +except ImportError: + # cubicweb-dev unavailable, nothing needed + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.11.0_Any.py --- /dev/null 
Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.11.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,11 @@ +from datetime import datetime + +for rtype in ('cw_support', 'cw_dont_cross', 'cw_may_cross'): + drop_relation_type(rtype) + +add_entity_type('CWSourceSchemaConfig') + +if not 'url' in schema['CWSource'].subjrels: + add_attribute('CWSource', 'url') + add_attribute('CWSource', 'parser') + add_attribute('CWSource', 'latest_retrieval') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.12.9_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.12.9_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +sync_schema_props_perms('cw_source') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.13.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.13.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,3 @@ +sync_schema_props_perms('cw_source', syncprops=False) +if schema['BigInt'].eid is None: + add_entity_type('BigInt') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.13.3_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.13.3_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +drop_relation_definition('CWSourceSchemaConfig', 'cw_schema', 'CWAttribute') +sync_schema_props_perms('cw_schema') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.13.6_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.13.6_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +sync_schema_props_perms('CWSourceSchemaConfig') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.13.8_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.13.8_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,5 @@ +change_attribute_type('CWCache', 'timestamp', 'TZDatetime') +change_attribute_type('CWUser', 'last_login_time', 'TZDatetime') +change_attribute_type('CWSource', 'latest_retrieval', 'TZDatetime') +drop_attribute('CWSource', 'synchronizing') +add_attribute('CWSource', 'in_synchronization') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.14.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.14.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,15 @@ +from __future__ import print_function + +config['rql-cache-size'] = config['rql-cache-size'] * 10 + +add_entity_type('CWDataImport') + +from cubicweb.schema import CONSTRAINTS, guess_rrqlexpr_mainvars +for rqlcstr in rql('Any X,XT,XV WHERE X is CWConstraint, X cstrtype XT, X value XV,' + 'X cstrtype XT, XT name IN ("RQLUniqueConstraint","RQLConstraint","RQLVocabularyConstraint"),' + 'NOT X value ~= ";%"').entities(): + expression = rqlcstr.value + mainvars = guess_rrqlexpr_mainvars(expression) + yamscstr = CONSTRAINTS[rqlcstr.type](expression, mainvars) + rqlcstr.cw_set(value=yamscstr.serialize()) + print('updated', rqlcstr.type, rqlcstr.value.strip()) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.14.7_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.14.7_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +# migrate default format for TriInfo `comment_format` attribute +sync_schema_props_perms('TrInfo') + +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.15.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.15.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,10 @@ 
+sync_schema_props_perms('EmailAddress') + +for source in rql('CWSource X WHERE X type "ldapuser"').entities(): + config = source.dictconfig + host = config.pop('host', u'ldap') + protocol = config.pop('protocol', u'ldap') + source.cw_set(url=u'%s://%s' % (protocol, host)) + source.update_config(skip_unknown=True, **config) + +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.15.0_common.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.15.0_common.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,7 @@ +import ConfigParser +try: + undo_actions = config.cfgfile_parser.get('MAIN', 'undo-support', False) +except ConfigParser.NoOptionError: + pass # this conf. file was probably already migrated +else: + config.global_set_option('undo-enabled', bool(undo_actions)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.15.4_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.15.4_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,13 @@ +from __future__ import print_function + +from logilab.common.shellutils import generate_password +from cubicweb.server.utils import crypt_password + +for user in rql('CWUser U WHERE U cw_source S, S name "system", U upassword P, U login L').entities(): + salt = user.upassword.getvalue() + if crypt_password('', salt) == salt: + passwd = generate_password() + print('setting random password for user %s' % user.login) + user.set_attributes(upassword=passwd) + +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.15.9_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.15.9_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +sync_schema_props_perms(('State', 'state_of', 'Workflow'), commit=False) +sync_schema_props_perms(('State', 'name', 'String')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.16.1_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.16.1_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +sync_schema_props_perms(('State', 'state_of', 'Workflow'), commit=False) +sync_schema_props_perms(('State', 'name', 'String')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.17.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.17.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +add_attribute('CWAttribute', 'extra_props') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.17.11_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.17.11_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,7 @@ +for table, column in [ + ('transactions', 'tx_time'), + ('tx_entity_actions', 'tx_uuid'), + ('tx_relation_actions', 'tx_uuid')]: + repo.system_source.create_index(session, table, column) + +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.18.2_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.18.2_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +install_custom_sql_scripts() +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.18.4_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.18.4_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +sync_schema_props_perms('CWSource') +sync_schema_props_perms('CWSourceHostConfig') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.19.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ 
b/cubicweb/misc/migration/3.19.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,3 @@ +sql('DROP TABLE "deleted_entities"') + +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.20.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.20.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +sync_schema_props_perms('state_of') +sync_schema_props_perms('transition_of') +sync_schema_props_perms('State') +sync_schema_props_perms('BaseTransition') +sync_schema_props_perms('Transition') +sync_schema_props_perms('WorkflowTransition') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.20.7_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.20.7_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +if repo.system_source.dbdriver == 'postgres': + install_custom_sql_scripts() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.20.8_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.20.8_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +sync_schema_props_perms('cwuri') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.21.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.21.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,174 @@ +from __future__ import print_function + +from cubicweb.schema import PURE_VIRTUAL_RTYPES +from cubicweb.server.schema2sql import rschema_has_table + + +def add_foreign_keys(): + source = repo.system_source + if not source.dbhelper.alter_column_support: + return + for rschema in schema.relations(): + if rschema.inlined: + add_foreign_keys_inlined(rschema) + elif rschema_has_table(rschema, skip_relations=PURE_VIRTUAL_RTYPES): + add_foreign_keys_relation(rschema) + for eschema in schema.entities(): + if eschema.final: + continue + add_foreign_key_etype(eschema) + + +def add_foreign_keys_relation(rschema): + args = {'r': rschema.type} + count = sql('SELECT COUNT(*) FROM (' + ' SELECT eid_from FROM %(r)s_relation' + ' UNION' + ' SELECT eid_to FROM %(r)s_relation' + ' EXCEPT' + ' SELECT eid FROM entities) AS eids' % args, + ask_confirm=False)[0][0] + if count: + print('%s references %d unknown entities, deleting' % (rschema, count)) + sql('DELETE FROM %(r)s_relation ' + 'WHERE eid_from IN (SELECT eid_from FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args) + sql('DELETE FROM %(r)s_relation ' + 'WHERE eid_to IN (SELECT eid_to FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args) + + args['from_fk'] = '%(r)s_relation_eid_from_fkey' % args + args['to_fk'] = '%(r)s_relation_eid_to_fkey' % args + args['table'] = '%(r)s_relation' % args + if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(from_fk)s' % args, + ask_confirm=False) + sql('ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(to_fk)s' % args, + ask_confirm=False) + elif repo.system_source.dbdriver.startswith('sqlserver'): + sql("IF OBJECT_ID('%(from_fk)s', 'F') IS NOT NULL " + "ALTER TABLE %(table)s DROP CONSTRAINT %(from_fk)s" % args, + ask_confirm=False) + sql("IF OBJECT_ID('%(to_fk)s', 'F') IS NOT NULL " + "ALTER TABLE %(table)s DROP CONSTRAINT %(to_fk)s" % args, + ask_confirm=False) + sql('ALTER TABLE %(table)s ADD CONSTRAINT %(from_fk)s ' + 'FOREIGN KEY (eid_from) REFERENCES entities (eid)' % args, + ask_confirm=False) + sql('ALTER TABLE %(table)s ADD CONSTRAINT %(to_fk)s ' + 'FOREIGN KEY (eid_to) REFERENCES entities (eid)' % args, + ask_confirm=False) + + 
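# Inlined relations are stored as a cw_<rtype> column on the subject entity's
# table. The helper below first cleans up broken values (deleting the subject
# row when the relation is required, otherwise resetting the column to NULL),
# then drops any pre-existing constraint and re-adds a foreign key towards
# entities(eid).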
+def add_foreign_keys_inlined(rschema): + for eschema in rschema.subjects(): + args = {'e': eschema.type, 'r': rschema.type} + args['c'] = 'cw_%(e)s_cw_%(r)s_fkey' % args + + if eschema.rdef(rschema).cardinality[0] == '1': + broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IS NULL' % args, + ask_confirm=False) + if broken_eids: + print('Required relation %(e)s.%(r)s missing' % args) + args['eids'] = ', '.join(str(eid) for eid, in broken_eids) + rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args) + broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IN (SELECT cw_%(r)s FROM cw_%(e)s ' + 'EXCEPT SELECT eid FROM entities)' % args, + ask_confirm=False) + if broken_eids: + print('Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args) + args['eids'] = ', '.join(str(eid) for eid, in broken_eids) + rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args) + else: + if sql('SELECT COUNT(*) FROM (' + ' SELECT cw_%(r)s FROM cw_%(e)s WHERE cw_%(r)s IS NOT NULL' + ' EXCEPT' + ' SELECT eid FROM entities) AS eids' % args, + ask_confirm=False)[0][0]: + print('%(e)s.%(r)s references unknown entities, deleting relation' % args) + sql('UPDATE cw_%(e)s SET cw_%(r)s = NULL WHERE cw_%(r)s IS NOT NULL AND cw_%(r)s IN ' + '(SELECT cw_%(r)s FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args) + + if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, + ask_confirm=False) + elif repo.system_source.dbdriver.startswith('sqlserver'): + sql("IF OBJECT_ID('%(c)s', 'F') IS NOT NULL " + "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args, + ask_confirm=False) + sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s ' + 'FOREIGN KEY (cw_%(r)s) references entities(eid)' % args, + ask_confirm=False) + + +def add_foreign_key_etype(eschema): + args = {'e': eschema.type} + if sql('SELECT COUNT(*) FROM (' + ' SELECT cw_eid FROM cw_%(e)s' + ' EXCEPT' + ' SELECT eid FROM entities) AS eids' % args, + ask_confirm=False)[0][0]: + print('%(e)s has nonexistent entities, deleting' % args) + sql('DELETE FROM cw_%(e)s WHERE cw_eid IN ' + '(SELECT cw_eid FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args) + args['c'] = 'cw_%(e)s_cw_eid_fkey' % args + if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, + ask_confirm=False) + elif repo.system_source.dbdriver.startswith('sqlserver'): + sql("IF OBJECT_ID('%(c)s', 'F') IS NOT NULL " + "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args, + ask_confirm=False) + sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s ' + 'FOREIGN KEY (cw_eid) REFERENCES entities (eid)' % args, + ask_confirm=False) + + +add_foreign_keys() + +cu = session.cnxset.cu +helper = repo.system_source.dbhelper + +helper.drop_index(cu, 'entities', 'extid', False) +# don't use create_index because it doesn't work for columns that may be NULL +# on sqlserver +for query in helper.sqls_create_multicol_unique_index('entities', ['extid']): + cu.execute(query) + +if 'moved_entities' not in helper.list_tables(cu): + sql(''' + CREATE TABLE moved_entities ( + eid INTEGER PRIMARY KEY NOT NULL, + extid VARCHAR(256) UNIQUE + ) + ''') + +moved_entities = sql('SELECT -eid, extid FROM entities WHERE eid < 0', + ask_confirm=False) +if moved_entities: + cu.executemany('INSERT INTO moved_entities (eid, extid) VALUES (%s, %s)', + moved_entities) + sql('DELETE FROM entities WHERE eid < 0') + +commit() + +sync_schema_props_perms('CWEType') + +sync_schema_props_perms('cwuri') 
+
+from cubicweb.server.schema2sql import check_constraint
+
+for cwconstraint in rql('Any C WHERE R constrained_by C').entities():
+    cwrdef = cwconstraint.reverse_constrained_by[0]
+    rdef = cwrdef.yams_schema()
+    cstr = rdef.constraint_by_eid(cwconstraint.eid)
+    if cstr.type() not in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'):
+        continue
+    cstrname, check = check_constraint(rdef.subject, rdef.object, rdef.rtype.type,
+                                       cstr, helper, prefix='cw_')
+    args = {'e': rdef.subject.type, 'c': cstrname, 'v': check}
+    if repo.system_source.dbdriver == 'postgres':
+        sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args)
+    elif repo.system_source.dbdriver.startswith('sqlserver'):
+        sql("IF OBJECT_ID('%(c)s', 'C') IS NOT NULL "
+            "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args)
+    sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args)
+commit()
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.21.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.21.1_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,4 @@
+# re-read ComputedRelation permissions from schema.py now that we're
+# able to serialize them
+for computedrtype in schema.iter_computed_relations():
+    sync_schema_props_perms(computedrtype.type)
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.21.2_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.21.2_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,7 @@
+sync_schema_props_perms('cwuri')
+
+helper = repo.system_source.dbhelper
+cu = session.cnxset.cu
+helper.set_null_allowed(cu, 'moved_entities', 'extid', 'VARCHAR(256)', False)
+
+commit()
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.22.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.22.0_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,21 @@
+if confirm('use Europe/Paris as timezone?'):
+    timezone = 'Europe/Paris'
+else:
+    import pytz
+    while True:
+        timezone = raw_input('enter your timezone')
+        if timezone in pytz.common_timezones:
+            break
+
+dbdriver = repo.system_source.dbdriver
+if dbdriver == 'postgres':
+    sql("SET TIME ZONE '%s'" % timezone)
+
+for entity in schema.entities():
+    if entity.final:
+        continue
+    change_attribute_type(entity.type, 'creation_date', 'TZDatetime', ask_confirm=False)
+    change_attribute_type(entity.type, 'modification_date', 'TZDatetime', ask_confirm=False)
+
+if dbdriver == 'postgres':
+    sql("SET TIME ZONE UTC")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.3.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.3.5_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.4.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.4.0_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.4.0_common.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.4.0_common.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.4.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.4.3_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.5.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.5.0_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.5.10_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.5.10_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.5.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.5.3_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.6.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.6.1_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate CubicWeb version (try 3.16.x)")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.7.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.7.0_Any.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,40 @@
+typemap = repo.system_source.dbhelper.TYPE_MAPPING
+sqls = """
+CREATE TABLE transactions (
+  tx_uuid CHAR(32) PRIMARY KEY NOT NULL,
+  tx_user INTEGER NOT NULL,
+  tx_time %s NOT NULL
+);;
+CREATE INDEX transactions_tx_user_idx ON transactions(tx_user);;
+
+CREATE TABLE tx_entity_actions (
+  tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE,
+  txa_action CHAR(1) NOT NULL,
+  txa_public %s NOT NULL,
+  txa_order INTEGER,
+  eid INTEGER NOT NULL,
+  etype VARCHAR(64) NOT NULL,
+  changes %s
+);;
+CREATE INDEX tx_entity_actions_txa_action_idx ON tx_entity_actions(txa_action);;
+CREATE INDEX tx_entity_actions_txa_public_idx ON tx_entity_actions(txa_public);;
+CREATE INDEX tx_entity_actions_eid_idx ON tx_entity_actions(eid);;
+CREATE INDEX tx_entity_actions_etype_idx ON tx_entity_actions(etype);;
+
+CREATE TABLE tx_relation_actions (
+  tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE,
+  txa_action CHAR(1) NOT NULL,
+  txa_public %s NOT NULL,
+  txa_order INTEGER,
+  eid_from INTEGER NOT NULL,
+  eid_to INTEGER NOT NULL,
+  rtype VARCHAR(256) NOT NULL
+);;
+CREATE INDEX tx_relation_actions_txa_action_idx ON tx_relation_actions(txa_action);;
+CREATE INDEX tx_relation_actions_txa_public_idx ON tx_relation_actions(txa_public);;
+CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);;
+CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to)
+""" % (typemap['Datetime'],
+       typemap['Boolean'], typemap['Bytes'], typemap['Boolean'])
+for statement in sqls.split(';;'):
+    sql(statement)
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.7.2_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ 
b/cubicweb/misc/migration/3.7.2_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +sql('DROP FUNCTION IF EXISTS _fsopen(bytea)') +sql('DROP FUNCTION IF EXISTS fspath(bigint, text, text)') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.7.4_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.7.4_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +sync_schema_props_perms('TrInfo', syncprops=False) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.7.5_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.7.5_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +if versions_map['cubicweb'][0] == (3, 7, 4): + config['http-session-time'] *= 60 + config['cleanup-session-time'] *= 60 + config['cleanup-anonymous-session-time'] *= 60 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.8.1_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.8.1_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +rql('SET X name "BoundaryConstraint" ' + 'WHERE X is CWConstraintType, X name "BoundConstraint"') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.8.3_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.8.3_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,3 @@ +if 'same_as' in schema: + sync_schema_props_perms('same_as', syncperms=False) +sync_schema_props_perms('Bookmark', syncperms=False) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.8.3_common.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.8.3_common.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +option_group_changed('port', 'main', 'web') +option_group_changed('query-log-file', 'main', 'web') +option_group_changed('profile', 'main', 'web') +option_group_changed('max-post-length', 'main', 'web') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.8.5_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.8.5_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,61 @@ +from __future__ import print_function + +def migrate_varchar_to_nvarchar(): + dbdriver = config.system_source_config['db-driver'] + if dbdriver != "sqlserver2005": + return + + introspection_sql = """\ +SELECT table_schema, table_name, column_name, is_nullable, character_maximum_length +FROM information_schema.columns +WHERE data_type = 'VARCHAR' and table_name <> 'SYSDIAGRAMS' +""" + has_index_sql = """\ +SELECT i.name AS index_name, + i.type_desc, + i.is_unique, + i.is_unique_constraint +FROM sys.indexes AS i, sys.index_columns as j, sys.columns as k +WHERE is_hypothetical = 0 AND i.index_id <> 0 +AND i.object_id = j.object_id +AND i.index_id = j.index_id +AND i.object_id = OBJECT_ID('%(table)s') +AND k.name = '%(col)s' +AND k.object_id=i.object_id +AND j.column_id = k.column_id;""" + + generated_statements = [] + for schema, table, column, is_nullable, length in sql(introspection_sql, ask_confirm=False): + qualified_table = '[%s].[%s]' % (schema, table) + rset = sql(has_index_sql % {'table': qualified_table, 'col':column}, + ask_confirm = False) + drops = [] + creates = [] + for idx_name, idx_type, idx_unique, is_unique_constraint in rset: + if is_unique_constraint: + drops.append('ALTER TABLE %s DROP CONSTRAINT %s' % (qualified_table, idx_name)) + creates.append('ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)' % (qualified_table, idx_name, column)) + else: + drops.append('DROP INDEX %s ON %s' % (idx_name, 
qualified_table)) + if idx_unique: + unique = 'UNIQUE' + else: + unique = '' + creates.append('CREATE %s %s INDEX %s ON %s(%s)' % (unique, idx_type, idx_name, qualified_table, column)) + + if length == -1: + length = 'max' + if is_nullable == 'YES': + not_null = 'NULL' + else: + not_null = 'NOT NULL' + alter_sql = 'ALTER TABLE %s ALTER COLUMN %s NVARCHAR(%s) %s' % (qualified_table, column, length, not_null) + generated_statements+= drops + [alter_sql] + creates + + + for statement in generated_statements: + print(statement) + sql(statement, ask_confirm=False) + commit() + +migrate_varchar_to_nvarchar() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.9.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.9.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,3 @@ +if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE appears ADD COLUMN weight float') + sql('UPDATE appears SET weight=1.0 ') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/3.9.5_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/3.9.5_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +if not rql('CWConstraintType X WHERE X name "RQLUniqueConstraint"', + ask_confirm=False): + rql('INSERT CWConstraintType X: X name "RQLUniqueConstraint"', + ask_confirm=False) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/bootstrapmigration_repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/bootstrapmigration_repository.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,459 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
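# Editorial sketch, not part of the changeset: every block in the bootstrap
# migration below is gated on the version pair the migration shell exposes as
# versions_map (instance version, code version), so a step runs only when an
# upgrade actually crosses the corresponding release.  Minimal illustration of
# that idiom, with hypothetical version numbers:
versions_map = {'cubicweb': ((3, 18, 3), (3, 22, 0))}  # (instance version, code version)
applcubicwebversion, cubicwebversion = versions_map['cubicweb']
if applcubicwebversion < (3, 19, 0) and cubicwebversion >= (3, 19, 0):
    pass  # 3.19-specific schema changes would go here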
+"""allways executed before all others in server migration + +it should only include low level schema changes +""" +from __future__ import print_function + +from six import text_type + +from cubicweb import ConfigurationError +from cubicweb.server.session import hooks_control +from cubicweb.server import schemaserial as ss + +applcubicwebversion, cubicwebversion = versions_map['cubicweb'] + +def _add_relation_definition_no_perms(subjtype, rtype, objtype): + rschema = fsschema.rschema(rtype) + rdef = rschema.rdefs[(subjtype, objtype)] + rdef.rtype = schema.rschema(rtype) + rdef.subject = schema.eschema(subjtype) + rdef.object = schema.eschema(objtype) + ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None)) + commit(ask_confirm=False) + +def replace_eid_sequence_with_eid_numrange(session): + dbh = session.repo.system_source.dbhelper + cursor = session.cnxset.cu + try: + cursor.execute(dbh.sql_sequence_current_state('entities_id_seq')) + lasteid = cursor.fetchone()[0] + except: # programming error, already migrated + return + + cursor.execute(dbh.sql_drop_sequence('entities_id_seq')) + cursor.execute(dbh.sql_create_numrange('entities_id_seq')) + cursor.execute(dbh.sql_restart_numrange('entities_id_seq', initial_value=lasteid)) + session.commit() + +if applcubicwebversion <= (3, 13, 0) and cubicwebversion >= (3, 13, 1): + sql('ALTER TABLE entities ADD asource VARCHAR(64)') + sql('UPDATE entities SET asource=cw_name ' + 'FROM cw_CWSource, cw_source_relation ' + 'WHERE entities.eid=cw_source_relation.eid_from AND cw_source_relation.eid_to=cw_CWSource.cw_eid') + commit() + +if applcubicwebversion <= (3, 14, 4) and cubicwebversion >= (3, 14, 4): + from cubicweb.server import schema2sql as y2sql + dbhelper = repo.system_source.dbhelper + rdefdef = schema['CWSource'].rdef('name') + attrtype = y2sql.type_from_constraints(dbhelper, rdefdef.object, rdefdef.constraints).split()[0] + cursor = session.cnxset.cu + sql('UPDATE entities SET asource = source WHERE asource is NULL') + dbhelper.change_col_type(cursor, 'entities', 'asource', attrtype, False) + dbhelper.change_col_type(cursor, 'entities', 'source', attrtype, False) + + # we now have a functional asource column, start using the normal eid_type_source method + if repo.system_source.eid_type_source == repo.system_source.eid_type_source_pre_131: + del repo.system_source.eid_type_source + +if applcubicwebversion < (3, 19, 0) and cubicwebversion >= (3, 19, 0): + try: + # need explicit drop of the indexes on some database systems (sqlserver) + sql(repo.system_source.dbhelper.sql_drop_index('entities', 'mtime')) + sql('ALTER TABLE "entities" DROP COLUMN "mtime"') + sql('ALTER TABLE "entities" DROP COLUMN "source"') + except: # programming error, already migrated + print("Failed to drop mtime or source database columns") + print("'entities' table of the database has probably been already updated") + + commit() + + replace_eid_sequence_with_eid_numrange(session) + +if applcubicwebversion < (3, 20, 0) and cubicwebversion >= (3, 20, 0): + ss._IGNORED_PROPS.append('formula') + add_attribute('CWAttribute', 'formula', commit=False) + ss._IGNORED_PROPS.remove('formula') + commit() + add_entity_type('CWComputedRType') + commit() + +if schema['TZDatetime'].eid is None: + add_entity_type('TZDatetime', auto=False) +if schema['TZTime'].eid is None: + add_entity_type('TZTime', auto=False) + +if applcubicwebversion < (3, 18, 0) and cubicwebversion >= (3, 18, 0): + driver = config.system_source_config['db-driver'] + if not (driver == 'postgres' or 
driver.startswith('sqlserver')): + import sys + print('This migration is not supported for backends other than sqlserver or postgres (yet).', file=sys.stderr) + sys.exit(1) + + add_relation_definition('CWAttribute', 'add_permission', 'CWGroup') + add_relation_definition('CWAttribute', 'add_permission', 'RQLExpression') + + # a bad defaultval in 3.13.8 schema was fixed in 3.13.9, but the migration was missed + rql('SET ATTR defaultval NULL WHERE ATTR from_entity E, E name "CWSource", ATTR relation_type T, T name "in_synchronization"') + + # the migration gets confused when we change rdefs out from under it. So + # explicitly remove this size constraint so it doesn't stick around and break + # things later. + rdefeid = schema['defaultval'].rdefs.values()[0].eid + rql('DELETE CWConstraint C WHERE C cstrtype T, T name "SizeConstraint", R constrained_by C, R eid %(eid)s', {'eid': rdefeid}) + + sync_schema_props_perms('defaultval') + + def convert_defaultval(cwattr, default): + from decimal import Decimal + import yams + from cubicweb import Binary + if default is None: + return + if isinstance(default, Binary): + # partially migrated instance, try to be idempotent + return default + atype = cwattr.to_entity[0].name + if atype == 'Boolean': + # boolean attributes with default=False were stored as '' + assert default in ('True', 'False', ''), repr(default) + default = default == 'True' + elif atype in ('Int', 'BigInt'): + default = int(default) + elif atype == 'Float': + default = float(default) + elif atype == 'Decimal': + default = Decimal(default) + elif atype in ('Date', 'Datetime', 'TZDatetime', 'Time'): + try: + # handle NOW and TODAY, keep them stored as strings + yams.KEYWORD_MAP[atype][default.upper()] + default = default.upper() + except KeyError: + # otherwise get an actual date or datetime + default = yams.DATE_FACTORY_MAP[atype](default) + else: + assert atype == 'String', atype + default = text_type(default) + return Binary.zpickle(default) + + dbh = repo.system_source.dbhelper + + + sql('ALTER TABLE cw_cwattribute ADD new_defaultval %s' % dbh.TYPE_MAPPING['Bytes']) + + for cwattr in rql('CWAttribute X').entities(): + olddefault = cwattr.defaultval + if olddefault is not None: + req = "UPDATE cw_cwattribute SET new_defaultval = %(val)s WHERE cw_eid = %(eid)s" + args = {'val': dbh.binary_value(convert_defaultval(cwattr, olddefault).getvalue()), 'eid': cwattr.eid} + sql(req, args, ask_confirm=False) + + sql('ALTER TABLE cw_cwattribute DROP COLUMN cw_defaultval') + if driver == 'postgres': + sql('ALTER TABLE cw_cwattribute RENAME COLUMN new_defaultval TO cw_defaultval') + else: # sqlserver + sql("sp_rename 'cw_cwattribute.new_defaultval', 'cw_defaultval', 'COLUMN'") + + + # Set object type to "Bytes" for CWAttribute's "defaultval" attribute + rql('SET X to_entity B WHERE X is CWAttribute, X from_entity Y, Y name "CWAttribute", ' + 'X relation_type Z, Z name "defaultval", B name "Bytes", NOT X to_entity B') + + oldrdef = schema['CWAttribute'].rdef('defaultval') + import yams.buildobjs as ybo + newrdef = ybo.RelationDefinition('CWAttribute', 'defaultval', 'Bytes') + newrdef.eid = oldrdef.eid + schema.add_relation_def(newrdef) + schema.del_relation_def('CWAttribute', 'defaultval', 'String') + + commit() + + sync_schema_props_perms('defaultval') + + for rschema in schema.relations(): + if rschema.symmetric: + subjects = set(repr(e.type) for e in rschema.subjects()) + objects = set(repr(e.type) for e in rschema.objects()) + assert subjects == objects + martians = set(str(eid) for eid, in 
sql('SELECT eid_to FROM %s_relation, entities WHERE eid_to = eid AND type NOT IN (%s)' % + (rschema.type, ','.join(subjects)))) + martians |= set(str(eid) for eid, in sql('SELECT eid_from FROM %s_relation, entities WHERE eid_from = eid AND type NOT IN (%s)' % + (rschema.type, ','.join(subjects)))) + if martians: + martians = ','.join(martians) + print('deleting broken relations %s for eids %s' % (rschema.type, martians)) + sql('DELETE FROM %s_relation WHERE eid_from IN (%s) OR eid_to IN (%s)' % (rschema.type, martians, martians)) + with session.deny_all_hooks_but(): + rql('SET X %(r)s Y WHERE Y %(r)s X, NOT X %(r)s Y' % {'r': rschema.type}) + commit() + + + # multi columns unique constraints regeneration + from cubicweb.server import schemaserial + + # syncschema hooks would try to remove indices but + # 1) we already do that below + # 2) the hook expects the CWUniqueTogetherConstraint.name attribute that hasn't + # yet been added + with session.allow_all_hooks_but('syncschema'): + rql('DELETE CWUniqueTogetherConstraint C') + commit() + add_attribute('CWUniqueTogetherConstraint', 'name') + + # low-level wipe code for postgres & sqlserver, plain sql ... + if driver == 'postgres': + for indexname, in sql('select indexname from pg_indexes'): + if indexname.startswith('unique_'): + print('dropping index', indexname) + sql('DROP INDEX %s' % indexname) + commit() + elif driver.startswith('sqlserver'): + for viewname, in sql('select name from sys.views'): + if viewname.startswith('utv_'): + print('dropping view (index should be cascade-deleted)', viewname) + sql('DROP VIEW %s' % viewname) + commit() + + # recreate the constraints, hook will lead to low-level recreation + for eschema in sorted(schema.entities()): + if eschema._unique_together: + print('recreate unique indexes for', eschema) + rql_args = schemaserial.uniquetogether2rqls(eschema) + for rql, args in rql_args: + args['x'] = eschema.eid + session.execute(rql, args) + commit() + + # all attributes perms have to be refreshed ... + for rschema in sorted(schema.relations()): + if rschema.final: + if rschema.type in fsschema: + print('sync perms for', rschema.type) + sync_schema_props_perms(rschema.type, syncprops=False, ask_confirm=False, commit=False) + else: + print('WARNING: attribute %s missing from fs schema' % rschema.type) + commit() + +if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0): + try: + add_cube('sioc', update_database=False) + except ConfigurationError: + if not confirm('In cubicweb 3.17 sioc views have been moved to the sioc ' + 'cube, which is not installed. Continue anyway?'): + raise + try: + add_cube('embed', update_database=False) + except ConfigurationError: + if not confirm('In cubicweb 3.17 embedding views have been moved to the embed ' + 'cube, which is not installed. Continue anyway?'): + raise + try: + add_cube('geocoding', update_database=False) + except ConfigurationError: + if not confirm('In cubicweb 3.17 geocoding views have been moved to the geocoding ' + 'cube, which is not installed. Continue anyway?'): + raise + + +if applcubicwebversion <= (3, 14, 0) and cubicwebversion >= (3, 14, 0): + if 'require_permission' in schema and not 'localperms'in repo.config.cubes(): + from cubicweb import ExecutionError + try: + add_cube('localperms', update_database=False) + except ConfigurationError: + raise ExecutionError('In cubicweb 3.14, CWPermission and related stuff ' + 'has been moved to cube localperms. 
Install it first.') + + +if applcubicwebversion == (3, 6, 0) and cubicwebversion >= (3, 6, 0): + CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T', + ask_confirm=False)) + _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'CWGroup') + _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'RQLExpression') + rql('SET X update_permission Y WHERE X is CWAttribute, X add_permission Y') + drop_relation_definition('CWAttribute', 'delete_permission', 'CWGroup') + drop_relation_definition('CWAttribute', 'delete_permission', 'RQLExpression') + +elif applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0): + CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T', + ask_confirm=False)) + session.set_cnxset() + permsdict = ss.deserialize_ertype_permissions(session) + + with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'): + for rschema in repo.schema.relations(): + rpermsdict = permsdict.get(rschema.eid, {}) + for rdef in rschema.rdefs.values(): + for action in rdef.ACTIONS: + actperms = [] + for something in rpermsdict.get(action == 'update' and 'add' or action, ()): + if isinstance(something, tuple): + actperms.append(rdef.rql_expression(*something)) + else: # group name + actperms.append(something) + rdef.set_action_permissions(action, actperms) + for action in ('read', 'add', 'delete'): + _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'CWGroup') + _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'RQLExpression') + for action in ('read', 'update'): + _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'CWGroup') + _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'RQLExpression') + for action in ('read', 'add', 'delete'): + rql('SET X %s_permission Y WHERE X is CWRelation, ' + 'RT %s_permission Y, X relation_type RT, Y is CWGroup' % (action, action)) + rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' + 'X %s_permission Y WHERE X is CWRelation, ' + 'X relation_type RT, RT %s_permission Y2, Y2 exprtype YET, ' + 'Y2 mainvars YMV, Y2 expression YEX' % (action, action)) + rql('SET X read_permission Y WHERE X is CWAttribute, ' + 'RT read_permission Y, X relation_type RT, Y is CWGroup') + rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' + 'X read_permission Y WHERE X is CWAttribute, ' + 'X relation_type RT, RT read_permission Y2, Y2 exprtype YET, ' + 'Y2 mainvars YMV, Y2 expression YEX') + rql('SET X update_permission Y WHERE X is CWAttribute, ' + 'RT add_permission Y, X relation_type RT, Y is CWGroup') + rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' + 'X update_permission Y WHERE X is CWAttribute, ' + 'X relation_type RT, RT add_permission Y2, Y2 exprtype YET, ' + 'Y2 mainvars YMV, Y2 expression YEX') + for action in ('read', 'add', 'delete'): + drop_relation_definition('CWRType', '%s_permission' % action, 'CWGroup', commit=False) + drop_relation_definition('CWRType', '%s_permission' % action, 'RQLExpression') + sync_schema_props_perms('read_permission', syncperms=False) # fix read_permission cardinality + +if applcubicwebversion < (3, 9, 6) and cubicwebversion >= (3, 9, 6) and not 'CWUniqueTogetherConstraint' in schema: + add_entity_type('CWUniqueTogetherConstraint') + +if not ('CWUniqueTogetherConstraint', 'CWRType') in schema['relations'].rdefs: + add_relation_definition('CWUniqueTogetherConstraint', 'relations', 
'CWRType') + rql('SET C relations RT WHERE C relations RDEF, RDEF relation_type RT') + commit() + drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWAttribute') + drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWRelation') + + +if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0): + + with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'): + session.set_shared_data('do-not-insert-cwuri', True) + add_relation_type('cwuri') + base_url = session.base_url() + for eid, in rql('Any X', ask_confirm=False): + type, source, extid = session.describe(eid) + if source == 'system': + rql('SET X cwuri %(u)s WHERE X eid %(x)s', + {'x': eid, 'u': u'%s%s' % (base_url, eid)}) + isession.commit() + session.set_shared_data('do-not-insert-cwuri', False) + +if applcubicwebversion < (3, 5, 0) and cubicwebversion >= (3, 5, 0): + # check that migration is not doomed + rset = rql('Any X,Y WHERE X transition_of E, Y transition_of E, ' + 'X name N, Y name N, NOT X identity Y', + ask_confirm=False) + if rset: + from logilab.common.shellutils import ASK + if not ASK.confirm('Migration will fail because of transitions with the same name. ' + 'Continue anyway ?'): + import sys + sys.exit(1) + # proceed with migration + add_entity_type('Workflow') + add_entity_type('BaseTransition') + add_entity_type('WorkflowTransition') + add_entity_type('SubWorkflowExitPoint') + # drop explicit 'State allowed_transition Transition' since it should be + # infered due to yams inheritance. However we've to disable the schema + # sync hook first to avoid to destroy existing data... + try: + from cubicweb.hooks import syncschema + repo.vreg.unregister(syncschema.AfterDelRelationTypeHook) + try: + drop_relation_definition('State', 'allowed_transition', 'Transition') + finally: + repo.vreg.register(syncschema.AfterDelRelationTypeHook) + except ImportError: # syncschema is in CW >= 3.6 only + from cubicweb.server.schemahooks import after_del_relation_type + repo.hm.unregister_hook(after_del_relation_type, + 'after_delete_relation', 'relation_type') + try: + drop_relation_definition('State', 'allowed_transition', 'Transition') + finally: + repo.hm.register_hook(after_del_relation_type, + 'after_delete_relation', 'relation_type') + schema.rebuild_infered_relations() # need to be explicitly called once everything is in place + + for et in rql('DISTINCT Any ET,ETN WHERE S state_of ET, ET name ETN', + ask_confirm=False).entities(): + wf = add_workflow(u'default %s workflow' % et.name, et.name, + ask_confirm=False) + rql('SET S state_of WF WHERE S state_of ET, ET eid %(et)s, WF eid %(wf)s', + {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False) + rql('SET T transition_of WF WHERE T transition_of ET, ET eid %(et)s, WF eid %(wf)s', + {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False) + rql('SET WF initial_state S WHERE ET initial_state S, ET eid %(et)s, WF eid %(wf)s', + {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False) + + + rql('DELETE TrInfo TI WHERE NOT TI from_state S') + rql('SET TI by_transition T WHERE TI from_state FS, TI to_state TS, ' + 'FS allowed_transition T, T destination_state TS') + commit() + + drop_relation_definition('State', 'state_of', 'CWEType') + drop_relation_definition('Transition', 'transition_of', 'CWEType') + drop_relation_definition('CWEType', 'initial_state', 'State') + + sync_schema_props_perms() + +if applcubicwebversion < (3, 2, 2) and cubicwebversion >= (3, 2, 1): + from base64 import b64encode + for eid, extid in sql('SELECT eid, extid 
FROM entities ' + 'WHERE extid is NOT NULL', + ask_confirm=False): + sql('UPDATE entities SET extid=%(extid)s WHERE eid=%(eid)s', + {'extid': b64encode(extid), 'eid': eid}, ask_confirm=False) + commit() + +if applcubicwebversion < (3, 2, 0) and cubicwebversion >= (3, 2, 0): + add_cube('card', update_database=False) + + +if applcubicwebversion < (3, 21, 1) and cubicwebversion >= (3, 21, 1): + add_relation_definition('CWComputedRType', 'read_permission', 'CWGroup') + add_relation_definition('CWComputedRType', 'read_permission', 'RQLExpression') + + +def sync_constraint_types(): + """Make sure the repository knows about all constraint types defined in the code""" + from cubicweb.schema import CONSTRAINTS + repo_constraints = set(row[0] for row in rql('Any N WHERE X is CWConstraintType, X name N')) + + for cstrtype in set(CONSTRAINTS) - repo_constraints: + if cstrtype == 'BoundConstraint': + # was renamed to BoundaryConstraint, we don't need the old name + continue + rql('INSERT CWConstraintType X: X name %(name)s', {'name': cstrtype}) + + commit() + +sync_constraint_types() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/migration/postcreate.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/migration/postcreate.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,77 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""cubicweb post creation script, set user's workflow""" +from __future__ import print_function + +from six import text_type + +from cubicweb import _ + + +# insert versions +create_entity('CWProperty', pkey=u'system.version.cubicweb', + value=text_type(config.cubicweb_version())) +for cube in config.cubes(): + create_entity('CWProperty', pkey=u'system.version.%s' % cube.lower(), + value=text_type(config.cube_version(cube))) + +# some entities have been added before schema entities, fix the 'is' and +# 'is_instance_of' relations +for rtype in ('is', 'is_instance_of'): + sql('INSERT INTO %s_relation ' + 'SELECT X.eid, ET.cw_eid FROM entities as X, cw_CWEType as ET ' + 'WHERE X.type=ET.cw_name AND NOT EXISTS(' + ' SELECT 1 from %s_relation ' + ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % (rtype, rtype)) + +# user workflow +userwf = add_workflow(_('default user workflow'), 'CWUser') +activated = userwf.add_state(_('activated'), initial=True) +deactivated = userwf.add_state(_('deactivated')) +userwf.add_transition(_('deactivate'), (activated,), deactivated, + requiredgroups=(u'managers',)) +userwf.add_transition(_('activate'), (deactivated,), activated, + requiredgroups=(u'managers',)) + +# create anonymous user if all-in-one config and anonymous user has been specified +if hasattr(config, 'anonymous_user'): + anonlogin, anonpwd = config.anonymous_user() + if anonlogin == session.user.login: + print('you are using a manager account as anonymous user.') + print('Hopefully this is not a production instance...') + elif anonlogin: + from cubicweb.server import create_user + create_user(session, text_type(anonlogin), anonpwd, u'guests') + +# need this since we already have at least one user in the database (the default admin) +for user in rql('Any X WHERE X is CWUser').entities(): + rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', + {'x': user.eid, 's': activated.eid}) + +# on interactive mode, ask for level 0 persistent options +if interactive_mode: + cfg = config.persistent_options_configuration() + cfg.input_config(inputlevel=0) + for section, options in cfg.options_by_section(): + for optname, optdict, value in options: + key = u'%s.%s' % (section, optname) + default = cfg.option_default(optname, optdict) + # only record values differing from default + if value != default: + rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', + {'k': key, 'v': value}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/chpasswd.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/scripts/chpasswd.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,48 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+
+import sys
+import getpass
+
+from cubicweb import Binary
+from cubicweb.server.utils import crypt_password
+
+
+if __args__:
+    login = __args__.pop()
+else:
+    login = raw_input("login? ")
+
+rset = rql('Any U WHERE U is CWUser, U login %(login)s', {'login': login})
+
+if len(rset) != 1:
+    sys.exit("user '%s' does not exist!" % login)
+
+pass1 = getpass.getpass(prompt='Enter new password? ')
+pass2 = getpass.getpass(prompt='Confirm? ')
+
+if pass1 != pass2:
+    sys.exit("passwords don't match!")
+
+crypted = crypt_password(pass1)
+
+cwuser = rset.get_entity(0,0)
+cwuser.cw_set(upassword=Binary(crypted))
+commit()
+
+print("password updated.")
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/cwuser_ldap2system.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/cwuser_ldap2system.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,42 @@
+from __future__ import print_function
+
+import base64
+from cubicweb.server.utils import crypt_password
+
+dbdriver = config.system_source_config['db-driver']
+from logilab.database import get_db_helper
+dbhelper = get_db_helper(dbdriver)
+
+insert = ('INSERT INTO cw_cwuser (cw_creation_date,'
+          ' cw_eid,'
+          ' cw_modification_date,'
+          ' cw_login,'
+          ' cw_firstname,'
+          ' cw_surname,'
+          ' cw_last_login_time,'
+          ' cw_upassword,'
+          ' cw_cwuri) '
+          "VALUES (%(mtime)s, %(eid)s, %(mtime)s, %(login)s, "
+          " %(firstname)s, %(surname)s, %(mtime)s, %(pwd)s, 'foo');")
+update = "UPDATE entities SET source='system' WHERE eid=%(eid)s;"
+rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False)
+for eid, type, source, extid, mtime in rset:
+    if type != 'CWUser':
+        print("don't know what to do with entity type", type)
+        continue
+    if not source.lower().startswith('ldap'):
+        print("don't know what to do with source type", source)
+        continue
+    extid = base64.decodestring(extid)
+    ldapinfos = [x.strip().split('=') for x in extid.split(',')]
+    login = ldapinfos[0][1]
+    firstname = login.capitalize()
+    surname = login.capitalize()
+    args = dict(eid=eid, type=type, source=source, login=login,
+                firstname=firstname, surname=surname, mtime=mtime,
+                pwd=dbhelper.binary_value(crypt_password('toto')))
+    print(args)
+    sql(insert, args)
+    sql(update, args)
+
+commit()
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/detect_cycle.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/detect_cycle.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,16 @@
+from __future__ import print_function
+
+try:
+    rtype, = __args__
+except ValueError:
+    print('USAGE: cubicweb-ctl shell <instance> detect_cycle.py -- <relation type>')
+    print()
+
+graph = {}
+for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype):
+    graph.setdefault(fromeid, []).append(toeid)
+
+from logilab.common.graph import get_cycles
+
+for cycle in get_cycles(graph):
+    print('cycle', '->'.join(str(n) for n in cycle))
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/ldap_change_base_dn.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/ldap_change_base_dn.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,26 @@
+from __future__ import print_function
+
+from base64 import b64decode, b64encode
+try:
+    uri, newdn = __args__
+except ValueError:
+    print('USAGE: cubicweb-ctl shell <instance> ldap_change_base_dn.py -- <ldap source uri> <new dn>')
+    print()
+    print('you should not have updated your sources file yet')
+
+olddn = repo.sources_by_uri[uri].config['user-base-dn']
+
+assert olddn != newdn
+
+raw_input("Ensure you've stopped the instance, type enter when done.")
+ +for eid, extid in sql("SELECT eid, extid FROM entities WHERE source='%s'" % uri): + olduserdn = b64decode(extid) + newuserdn = olduserdn.replace(olddn, newdn) + if newuserdn != olduserdn: + print(olduserdn, '->', newuserdn) + sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid)) + +commit() + +print('you can now update the sources file to the new dn and restart the instance') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/ldapuser2ldapfeed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/scripts/ldapuser2ldapfeed.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,98 @@ +"""turn a pyro source into a datafeed source + +Once this script is run, execute c-c db-check to cleanup relation tables. +""" +from __future__ import print_function + +import sys +from collections import defaultdict +from logilab.common.shellutils import generate_password + +try: + source_name, = __args__ + source = repo.sources_by_uri[source_name] +except ValueError: + print('you should specify the source name as script argument (i.e. after --' + ' on the command line)') + sys.exit(1) +except KeyError: + print('%s is not an active source' % source_name) + sys.exit(1) + +# check source is reachable before doing anything +if not source.get_connection().cnx: + print('%s is not reachable. Fix this before running this script' % source_name) + sys.exit(1) + +raw_input('Ensure you have shutdown all instances of this application before continuing.' + ' Type enter when ready.') + +system_source = repo.system_source + +from datetime import datetime +from cubicweb.server.edition import EditedEntity + + +print('******************** backport entity content ***************************') + +todelete = defaultdict(list) +extids = set() +duplicates = [] +for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities(): + etype = entity.cw_etype + if not source.support_entity(etype): + print("source doesn't support %s, delete %s" % (etype, entity.eid)) + todelete[etype].append(entity) + continue + try: + entity.complete() + except Exception: + print('%s %s much probably deleted, delete it (extid %s)' % ( + etype, entity.eid, entity.cw_metainformation()['extid'])) + todelete[etype].append(entity) + continue + print('get back', etype, entity.eid) + entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) + if not entity.creation_date: + entity.cw_edited['creation_date'] = datetime.utcnow() + if not entity.modification_date: + entity.cw_edited['modification_date'] = datetime.utcnow() + if not entity.upassword: + entity.cw_edited['upassword'] = generate_password() + extid = entity.cw_metainformation()['extid'] + if not entity.cwuri: + entity.cw_edited['cwuri'] = '%s/?dn=%s' % ( + source.urls[0], extid.decode('utf-8', 'ignore')) + print(entity.cw_edited) + if extid in extids: + duplicates.append(extid) + continue + extids.add(extid) + system_source.add_entity(session, entity) + sql("UPDATE entities SET source='system' " + "WHERE eid=%(eid)s", {'eid': entity.eid}) + +# only cleanup entities table, remaining stuff should be cleaned by a c-c +# db-check to be run after this script +if duplicates: + print('found %s duplicate entries' % len(duplicates)) + from pprint import pprint + pprint(duplicates) + +print(len(todelete), 'entities will be deleted') +for etype, entities in todelete.items(): + print('deleting', etype, [e.login for e in entities]) + system_source.delete_info_multi(session, entities, source_name) + + + +source_ent = rql('CWSource S WHERE S eid 
%(s)s', {'s': source.eid}).get_entity(0, 0) +source_ent.cw_set(type=u"ldapfeed", parser=u"ldapfeed") + + +if raw_input('Commit?') in 'yY': + print('committing') + commit() +else: + rollback() + print('rolled back') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/pyroforge2datafeed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/scripts/pyroforge2datafeed.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,134 @@ +"""turn a pyro source into a datafeed source + +Once this script is run, execute c-c db-check to cleanup relation tables. +""" +from __future__ import print_function + +import sys + +try: + source_name, = __args__ + source = repo.sources_by_uri[source_name] +except ValueError: + print('you should specify the source name as script argument (i.e. after --' + ' on the command line)') + sys.exit(1) +except KeyError: + print('%s is not an active source' % source_name) + sys.exit(1) + +# check source is reachable before doing anything +try: + source.get_connection()._repo +except AttributeError: + print('%s is not reachable. Fix this before running this script' % source_name) + sys.exit(1) + +raw_input('Ensure you have shutdown all instances of this application before continuing.' + ' Type enter when ready.') + +system_source = repo.system_source + +from base64 import b64encode +from cubicweb.server.edition import EditedEntity + +DONT_GET_BACK_ETYPES = set(( # XXX edit as desired + 'State', + 'RecipeStep', 'RecipeStepInput', 'RecipeStepOutput', + 'RecipeTransition', 'RecipeTransitionCondition', + 'NarvalConditionExpression', 'Recipe', + # XXX TestConfig + )) + + +print('******************** backport entity content ***************************') + +from cubicweb.server import debugged +todelete = {} +host = source.config['base-url'].split('://')[1] +for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities(): + etype = entity.cw_etype + if not source.support_entity(etype): + print("source doesn't support %s, delete %s" % (etype, entity.eid)) + elif etype in DONT_GET_BACK_ETYPES: + print('ignore %s, delete %s' % (etype, entity.eid)) + else: + try: + entity.complete() + if not host in entity.cwuri: + print('SKIP foreign entity', entity.cwuri, source.config['base-url']) + continue + except Exception: + print('%s %s much probably deleted, delete it (extid %s)' % ( + etype, entity.eid, entity.cw_metainformation()['extid'])) + else: + print('get back', etype, entity.eid) + entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) + system_source.add_entity(session, entity) + sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s " + "WHERE eid=%(eid)s", {'asource': source_name, + 'extid': b64encode(entity.cwuri), + 'eid': entity.eid}) + continue + todelete.setdefault(etype, []).append(entity) + +# only cleanup entities table, remaining stuff should be cleaned by a c-c +# db-check to be run after this script +for entities in todelete.values(): + system_source.delete_info_multi(session, entities, source_name) + + +print('******************** backport mapping **********************************') +session.disable_hook_categories('cw.sources') +mapping = [] +for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s', + {'s': source.eid}).entities(): + schemaent = mappart.cw_schema[0] + if schemaent.cw_etype != 'CWEType': + assert schemaent.cw_etype == 'CWRType' + sch = schema._eid_index[schemaent.eid] + for rdef in sch.rdefs.values(): + if not source.support_entity(rdef.subject) \ + or not 
source.support_entity(rdef.object): + continue + if rdef.subject in DONT_GET_BACK_ETYPES \ + and rdef.object in DONT_GET_BACK_ETYPES: + print('dont map', rdef) + continue + if rdef.subject in DONT_GET_BACK_ETYPES: + options = u'action=link\nlinkattr=name' + roles = 'object', + elif rdef.object in DONT_GET_BACK_ETYPES: + options = u'action=link\nlinkattr=name' + roles = 'subject', + else: + options = u'action=copy' + if rdef.rtype in ('use_environment',): + roles = 'object', + else: + roles = 'subject', + print('map', rdef, options, roles) + for role in roles: + mapping.append( ( + (str(rdef.subject), str(rdef.rtype), str(rdef.object)), + options + '\nrole=%s' % role) ) + mappart.cw_delete() + +source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0) +source_ent.init_mapping(mapping) + +# change source properties +config = u'''synchronize=yes +synchronization-interval=10min +delete-entities=no +''' +rql('SET X type "datafeed", X parser "cw.entityxml", X url %(url)s, X config %(config)s ' + 'WHERE X eid %(x)s', + {'x': source.eid, 'config': config, + 'url': source.config['base-url']+'/project'}) + + +commit() + +from cubes.apycot import recipes +recipes.create_quick_recipe(session) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/repair_file_1-9_migration.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/scripts/repair_file_1-9_migration.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,52 @@ +"""execute this script if you've migration to file >= 1.9.0 with cubicweb <= 3.9.2 + +FYI, this migration occurred : +* on our intranet on July 07 2010 +* on our extranet on July 16 2010 +""" +from __future__ import print_function + +try: + backupinstance, = __args__ +except ValueError: + print('USAGE: cubicweb-ctl shell repair_file_1-9_migration.py -- ') + print() + print('you should restored the backup on a new instance, accessible through pyro') + +from cubicweb import cwconfig, dbapi +from cubicweb.server.session import hooks_control + +defaultadmin = repo.config.default_admin_config +backupcfg = cwconfig.instance_configuration(backupinstance) +backupcfg.repairing = True +backuprepo, backupcnx = dbapi.in_memory_repo_cnx(backupcfg, defaultadmin['login'], + password=defaultadmin['password'], + host='localhost') +backupcu = backupcnx.cursor() + +with hooks_control(session, session.HOOKS_DENY_ALL): + rql('SET X is Y WHERE X is File, Y name "File", NOT X is Y') + rql('SET X is_instance_of Y WHERE X is File, Y name "File", NOT X is_instance_of Y') + for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' + 'X from_entity Y, Y name "Image", X is CWRelation, ' + 'EXISTS(XX is CWRelation, XX relation_type RT, ' + 'XX from_entity YY, YY name "File")'): + if rtype in ('is', 'is_instance_of'): + continue + print(rtype) + for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype): + print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ') + print(rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype), + {'f': feid, 'x': xeid})) + + for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' + 'X to_entity Y, Y name "Image", X is CWRelation, ' + 'EXISTS(XX is CWRelation, XX relation_type RT, ' + 'XX to_entity YY, YY name "File")'): + print(rtype) + for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype): + print('restoring relation %s between %s and file %s' % (rtype, xeid, feid), end=' 
') + print(rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype), + {'f': feid, 'x': xeid})) + +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/misc/scripts/repair_splitbrain_ldapuser_source.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/misc/scripts/repair_splitbrain_ldapuser_source.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,109 @@ +""" +CAUTION: READ THIS CAREFULLY + +Sometimes it happens that ldap (specifically ldapuser type) source +yield "ghost" users. The reasons may vary (server upgrade while some +instances are still running & syncing with the ldap source, unmanaged +updates to the upstream ldap, etc.). + +This script was written and refined enough times that we are confident +in that it does something reasonnable (at least it did for the +target application). + +However you should really REALLY understand what it does before +deciding to apply it for you. And then ADAPT it tou your needs. + +""" +from __future__ import print_function + +import base64 +from collections import defaultdict + +from cubicweb.server.session import hooks_control + +try: + source_name, = __args__ + source = repo.sources_by_uri[source_name] +except ValueError: + print('you should specify the source name as script argument (i.e. after --' + ' on the command line)') + sys.exit(1) +except KeyError: + print('%s is not an active source' % source_name) + sys.exit(1) + +# check source is reachable before doing anything +if not source.get_connection().cnx: + print('%s is not reachable. Fix this before running this script' % source_name) + sys.exit(1) + +def find_dupes(): + # XXX this retrieves entities from a source name "ldap" + # you will want to adjust + rset = sql("SELECT eid, extid FROM entities WHERE source='%s'" % source_name) + extid2eids = defaultdict(list) + for eid, extid in rset: + extid2eids[extid].append(eid) + return dict((base64.b64decode(extid).lower(), eids) + for extid, eids in extid2eids.items() + if len(eids) > 1) + +def merge_dupes(dupes, docommit=False): + gone_eids = [] + CWUser = schema['CWUser'] + for extid, eids in dupes.items(): + newest = eids.pop() # we merge everything on the newest + print('merging ghosts of', extid, 'into', newest) + # now we merge pairwise into the newest + for old in eids: + subst = {'old': old, 'new': newest} + print(' merging', old) + gone_eids.append(old) + for rschema in CWUser.subject_relations(): + if rschema.final or rschema == 'identity': + continue + if CWUser.rdef(rschema, 'subject').composite == 'subject': + # old 'composite' property is wiped ... + # think about email addresses, excel preferences + for eschema in rschema.objects(): + rql('DELETE %s X WHERE U %s X, U eid %%(old)s' % (eschema, rschema), subst) + else: + # relink the new user to its old relations + rql('SET NU %s X WHERE NU eid %%(new)s, NOT NU %s X, OU %s X, OU eid %%(old)s' % + (rschema, rschema, rschema), subst) + # delete the old relations + rql('DELETE U %s X WHERE U eid %%(old)s' % rschema, subst) + # same thing ... 
+ for rschema in CWUser.object_relations(): + if rschema.final or rschema == 'identity': + continue + rql('SET X %s NU WHERE NU eid %%(new)s, NOT X %s NU, X %s OU, OU eid %%(old)s' % + (rschema, rschema, rschema), subst) + rql('DELETE X %s U WHERE U eid %%(old)s' % rschema, subst) + if not docommit: + rollback() + return + commit() # XXX flushing operations is wanted rather than really committing + print('clean up entities table') + sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids))) + commit() + +def main(): + dupes = find_dupes() + if not dupes: + print('No duplicate user') + return + + print('Found %s duplicate user instances' % len(dupes)) + + while True: + print('Fix or dry-run? (f/d) ... or Ctrl-C to break out') + answer = raw_input('> ') + if answer.lower() not in 'fd': + continue + print('Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.') + raw_input('') + with hooks_control(session, session.HOOKS_DENY_ALL): + merge_dupes(dupes, docommit=answer=='f') + +main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/mttransforms.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/mttransforms.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,121 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
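# Editorial sketch, not part of the changeset: the mttransforms module below
# declares mime type conversions by subclassing logilab.mtconverter's Transform
# and registering an instance on the shared ENGINE.  A minimal extra transform
# following the same pattern ('text/x-foo' and foo_publish are hypothetical):
#
#   class foo_to_html(Transform):
#       inputs = ('text/x-foo',)
#       output = 'text/html'
#       def _convert(self, trdata):
#           return foo_publish(trdata.appobject, trdata.decode())
#
#   ENGINE.add_transform(foo_to_html())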
+"""mime type transformation engine for cubicweb, based on mtconverter""" + +__docformat__ = "restructuredtext en" + +from logilab import mtconverter + +from logilab.mtconverter.engine import TransformEngine +from logilab.mtconverter.transform import Transform +from logilab.mtconverter import (register_base_transforms, + register_pil_transforms, + register_pygments_transforms) + +from cubicweb.utils import UStringIO +from cubicweb.uilib import rest_publish, markdown_publish, html_publish + +HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml') + +# CubicWeb specific transformations + +class rest_to_html(Transform): + inputs = ('text/rest', 'text/x-rst') + output = 'text/html' + def _convert(self, trdata): + return rest_publish(trdata.appobject, trdata.decode()) + +class markdown_to_html(Transform): + inputs = ('text/markdown', 'text/x-markdown') + output = 'text/html' + def _convert(self, trdata): + return markdown_publish(trdata.appobject, trdata.decode()) + +class html_to_html(Transform): + inputs = HTML_MIMETYPES + output = 'text/html' + def _convert(self, trdata): + return html_publish(trdata.appobject, trdata.data) + + +# Instantiate and configure the transformation engine + +mtconverter.UNICODE_POLICY = 'replace' + +ENGINE = TransformEngine() +ENGINE.add_transform(rest_to_html()) +ENGINE.add_transform(markdown_to_html()) +ENGINE.add_transform(html_to_html()) + +try: + from cubicweb.ext.tal import CubicWebContext, compile_template +except ImportError: + HAS_TAL = False + from cubicweb import schema + schema.NEED_PERM_FORMATS.remove('text/cubicweb-page-template') + +else: + HAS_TAL = True + + class ept_to_html(Transform): + inputs = ('text/cubicweb-page-template',) + output = 'text/html' + output_encoding = 'utf-8' + def _convert(self, trdata): + context = CubicWebContext() + appobject = trdata.appobject + context.update({'self': appobject, 'rset': appobject.cw_rset, + 'req': appobject._cw, + '_' : appobject._cw._, + 'user': appobject._cw.user}) + output = UStringIO() + template = compile_template(trdata.encode(self.output_encoding)) + template.expand(context, output) + return output.getvalue() + + ENGINE.add_transform(ept_to_html()) + +if register_pil_transforms(ENGINE, verb=False): + HAS_PIL_TRANSFORMS = True +else: + HAS_PIL_TRANSFORMS = False + +try: + from logilab.mtconverter.transforms import pygmentstransforms + for mt in ('text/plain',) + HTML_MIMETYPES: + try: + pygmentstransforms.mimetypes.remove(mt) + except ValueError: + continue + register_pygments_transforms(ENGINE, verb=False) + + def patch_convert(cls): + def _convert(self, trdata, origconvert=cls._convert): + add_css = getattr(trdata.appobject._cw, 'add_css', None) + if add_css is not None: + # session has no add_css, only http request + add_css('pygments.css') + return origconvert(self, trdata) + cls._convert = _convert + patch_convert(pygmentstransforms.PygmentsHTMLTransform) + + HAS_PYGMENTS_TRANSFORMS = True +except ImportError: + HAS_PYGMENTS_TRANSFORMS = False + +register_base_transforms(ENGINE, verb=False) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/multipart.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/multipart.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,416 @@ +# -*- coding: utf-8 -*- +''' +Parser for multipart/form-data +============================== + +This module provides a parser for the multipart/form-data format. It can read +from a file, a socket or a WSGI environment. 
The parser can be used to replace +cgi.FieldStorage (without the bugs) and works with Python 2.5+ and 3.x (2to3). + +Licence (MIT) +------------- + + Copyright (c) 2010, Marcel Hellkamp. + Inspired by the Werkzeug library: http://werkzeug.pocoo.org/ + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + +''' + +__author__ = 'Marcel Hellkamp' +__version__ = '0.1' +__license__ = 'MIT' + +from tempfile import TemporaryFile +from wsgiref.headers import Headers +import re, sys +try: + from io import BytesIO +except ImportError: # pragma: no cover (fallback for Python 2.5) + from StringIO import StringIO as BytesIO + +from six import PY3, text_type +from six.moves.urllib.parse import parse_qs + +############################################################################## +################################ Helper & Misc ################################ +############################################################################## +# Some of these were copied from bottle: http://bottle.paws.de/ + +try: + from collections import MutableMapping as DictMixin +except ImportError: # pragma: no cover (fallback for Python 2.5) + from UserDict import DictMixin + +class MultiDict(DictMixin): + """ A dict that remembers old values for each key """ + def __init__(self, *a, **k): + self.dict = dict() + for k, v in dict(*a, **k).items(): + self[k] = v + + def __len__(self): return len(self.dict) + def __iter__(self): return iter(self.dict) + def __contains__(self, key): return key in self.dict + def __delitem__(self, key): del self.dict[key] + def keys(self): return self.dict.keys() + def __getitem__(self, key): return self.get(key, KeyError, -1) + def __setitem__(self, key, value): self.append(key, value) + + def append(self, key, value): self.dict.setdefault(key, []).append(value) + def replace(self, key, value): self.dict[key] = [value] + def getall(self, key): return self.dict.get(key) or [] + + def get(self, key, default=None, index=-1): + if key not in self.dict and default != KeyError: + return [default][index] + return self.dict[key][index] + + def iterallitems(self): + for key, values in self.dict.items(): + for value in values: + yield key, value + +def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3) + return data.encode(enc) if isinstance(data, text_type) else data + +def copy_file(stream, target, maxread=-1, buffer_size=2*16): + ''' Read from :stream and write to :target until :maxread or EOF. 
''' + size, read = 0, stream.read + while 1: + to_read = buffer_size if maxread < 0 else min(buffer_size, maxread-size) + part = read(to_read) + if not part: return size + target.write(part) + size += len(part) + +############################################################################## +################################ Header Parser ################################ +############################################################################## + +_special = re.escape('()<>@,;:\\"/[]?={} \t') +_re_special = re.compile('[%s]' % _special) +_qstr = '"(?:\\\\.|[^"])*"' # Quoted string +_value = '(?:[^%s]+|%s)' % (_special, _qstr) # Save or quoted string +_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value) +_re_option = re.compile(_option) # key=value part of an Content-Type like header + +def header_quote(val): + if not _re_special.search(val): + return val + return '"' + val.replace('\\','\\\\').replace('"','\\"') + '"' + +def header_unquote(val, filename=False): + if val[0] == val[-1] == '"': + val = val[1:-1] + if val[1:3] == ':\\' or val[:2] == '\\\\': + val = val.split('\\')[-1] # fix ie6 bug: full path --> filename + return val.replace('\\\\','\\').replace('\\"','"') + return val + +def parse_options_header(header, options=None): + if ';' not in header: + return header.lower().strip(), {} + ctype, tail = header.split(';', 1) + options = options or {} + for match in _re_option.finditer(tail): + key = match.group(1).lower() + value = header_unquote(match.group(2), key=='filename') + options[key] = value + return ctype, options + +############################################################################## +################################## Multipart ################################## +############################################################################## + + +class MultipartError(ValueError): pass + + +class MultipartParser(object): + + def __init__(self, stream, boundary, content_length=-1, + disk_limit=2**30, mem_limit=2**20, memfile_limit=2**18, + buffer_size=2**16, charset='latin1'): + ''' Parse a multipart/form-data byte stream. This object is an iterator + over the parts of the message. + + :param stream: A file-like stream. Must implement ``.read(size)``. + :param boundary: The multipart boundary as a byte string. + :param content_length: The maximum number of bytes to read. + ''' + self.stream, self.boundary = stream, boundary + self.content_length = content_length + self.disk_limit = disk_limit + self.memfile_limit = memfile_limit + self.mem_limit = min(mem_limit, self.disk_limit) + self.buffer_size = min(buffer_size, self.mem_limit) + self.charset = charset + if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n" + raise MultipartError('Boundary does not fit into buffer_size.') + self._done = [] + self._part_iter = None + + def __iter__(self): + ''' Iterate over the parts of the multipart message. ''' + if not self._part_iter: + self._part_iter = self._iterparse() + for part in self._done: + yield part + for part in self._part_iter: + self._done.append(part) + yield part + + def parts(self): + ''' Returns a list with all parts of the multipart message. ''' + return list(iter(self)) + + def get(self, name, default=None): + ''' Return the first part with that name or a default value (None). ''' + for part in self: + if name == part.name: + return part + return default + + def get_all(self, name): + ''' Return a list of parts with that name. 
''' + return [p for p in self if p.name == name] + + def _lineiter(self): + ''' Iterate over a binary file-like object line by line. Each line is + returned as a (line, line_ending) tuple. If the line does not fit + into self.buffer_size, line_ending is empty and the rest of the line + is returned with the next iteration. + ''' + read = self.stream.read + maxread, maxbuf = self.content_length, self.buffer_size + _bcrnl = tob('\r\n') + _bcr = _bcrnl[:1] + _bnl = _bcrnl[1:] + _bempty = _bcrnl[:0] # b'rn'[:0] -> b'' + buffer = _bempty # buffer for the last (partial) line + while 1: + data = read(maxbuf if maxread < 0 else min(maxbuf, maxread)) + maxread -= len(data) + lines = (buffer+data).splitlines(True) + len_first_line = len(lines[0]) + # be sure that the first line does not become too big + if len_first_line > self.buffer_size: + # at the same time don't split a '\r\n' accidentally + if (len_first_line == self.buffer_size+1 and + lines[0].endswith(_bcrnl)): + splitpos = self.buffer_size - 1 + else: + splitpos = self.buffer_size + lines[:1] = [lines[0][:splitpos], + lines[0][splitpos:]] + if data: + buffer = lines[-1] + lines = lines[:-1] + for line in lines: + if line.endswith(_bcrnl): yield line[:-2], _bcrnl + elif line.endswith(_bnl): yield line[:-1], _bnl + elif line.endswith(_bcr): yield line[:-1], _bcr + else: yield line, _bempty + if not data: + break + + def _iterparse(self): + lines, line = self._lineiter(), '' + separator = tob('--') + tob(self.boundary) + terminator = tob('--') + tob(self.boundary) + tob('--') + # Consume first boundary. Ignore leading blank lines + for line, nl in lines: + if line: break + if line != separator: + raise MultipartError("Stream does not start with boundary") + # For each part in stream... + mem_used, disk_used = 0, 0 # Track used resources to prevent DoS + is_tail = False # True if the last line was incomplete (cutted) + opts = {'buffer_size': self.buffer_size, + 'memfile_limit': self.memfile_limit, + 'charset': self.charset} + part = MultipartPart(**opts) + for line, nl in lines: + if line == terminator and not is_tail: + part.file.seek(0) + yield part + break + elif line == separator and not is_tail: + if part.is_buffered(): mem_used += part.size + else: disk_used += part.size + part.file.seek(0) + yield part + part = MultipartPart(**opts) + else: + is_tail = not nl # The next line continues this one + part.feed(line, nl) + if part.is_buffered(): + if part.size + mem_used > self.mem_limit: + raise MultipartError("Memory limit reached.") + elif part.size + disk_used > self.disk_limit: + raise MultipartError("Disk limit reached.") + if line != terminator: + raise MultipartError("Unexpected end of multipart stream.") + + +class MultipartPart(object): + + def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'): + self.headerlist = [] + self.headers = None + self.file = False + self.size = 0 + self._buf = tob('') + self.disposition, self.name, self.filename = None, None, None + self.content_type, self.charset = None, charset + self.memfile_limit = memfile_limit + self.buffer_size = buffer_size + + def feed(self, line, nl=''): + if self.file: + return self.write_body(line, nl) + return self.write_header(line, nl) + + def write_header(self, line, nl): + line = line.decode(self.charset or 'latin1') + if not nl: raise MultipartError('Unexpected end of line in header.') + if not line.strip(): # blank line -> end of header segment + self.finish_header() + elif line[0] in ' \t' and self.headerlist: + name, value = self.headerlist.pop() 
+ self.headerlist.append((name, value+line.strip())) + else: + if ':' not in line: + raise MultipartError("Syntax error in header: No colon.") + name, value = line.split(':', 1) + self.headerlist.append((name.strip(), value.strip())) + + def write_body(self, line, nl): + if not line and not nl: return # This does not even flush the buffer + self.size += len(line) + len(self._buf) + self.file.write(self._buf + line) + self._buf = nl + if self.content_length > 0 and self.size > self.content_length: + raise MultipartError('Size of body exceeds Content-Length header.') + if self.size > self.memfile_limit and isinstance(self.file, BytesIO): + # TODO: What about non-file uploads that exceed the memfile_limit? + self.file, old = TemporaryFile(mode='w+b'), self.file + old.seek(0) + copy_file(old, self.file, self.size, self.buffer_size) + + def finish_header(self): + self.file = BytesIO() + self.headers = Headers(self.headerlist) + cdis = self.headers.get('Content-Disposition','') + ctype = self.headers.get('Content-Type','') + clen = self.headers.get('Content-Length','-1') + if not cdis: + raise MultipartError('Content-Disposition header is missing.') + self.disposition, self.options = parse_options_header(cdis) + self.name = self.options.get('name') + self.filename = self.options.get('filename') + self.content_type, options = parse_options_header(ctype) + self.charset = options.get('charset') or self.charset + self.content_length = int(self.headers.get('Content-Length','-1')) + + def is_buffered(self): + ''' Return true if the data is fully buffered in memory.''' + return isinstance(self.file, BytesIO) + + @property + def value(self): + ''' Data decoded with the specified charset ''' + pos = self.file.tell() + self.file.seek(0) + val = self.file.read() + self.file.seek(pos) + return val.decode(self.charset) + + def save_as(self, path): + fp = open(path, 'wb') + pos = self.file.tell() + try: + self.file.seek(0) + size = copy_file(self.file, fp) + finally: + self.file.seek(pos) + return size + +############################################################################## +#################################### WSGI #################################### +############################################################################## + +def parse_form_data(environ, charset='utf8', strict=False, **kw): + ''' Parse form data from an environ dict and return a (forms, files) tuple. + Both tuple values are dictionaries with the form-field name as a key + (unicode) and lists as values (multiple values per key are possible). + The forms-dictionary contains form-field values as unicode strings. + The files-dictionary contains :class:`MultipartPart` instances, either + because the form-field was a file-upload or the value is to big to fit + into memory limits. + + :param environ: An WSGI environment dict. + :param charset: The charset to use if unsure. (default: utf8) + :param strict: If True, raise :exc:`MultipartError` on any parsing + errors. These are silently ignored by default. 
+ ''' + + forms, files = MultiDict(), MultiDict() + try: + if environ.get('REQUEST_METHOD','GET').upper() not in ('POST', 'PUT'): + raise MultipartError("Request method other than POST or PUT.") + content_length = int(environ.get('CONTENT_LENGTH', '-1')) + content_type = environ.get('CONTENT_TYPE', '') + if not content_type: + raise MultipartError("Missing Content-Type header.") + content_type, options = parse_options_header(content_type) + stream = environ.get('wsgi.input') or BytesIO() + kw['charset'] = charset = options.get('charset', charset) + if content_type == 'multipart/form-data': + boundary = options.get('boundary','') + if not boundary: + raise MultipartError("No boundary for multipart/form-data.") + for part in MultipartParser(stream, boundary, content_length, **kw): + if part.filename or not part.is_buffered(): + files[part.name] = part + else: # TODO: Big form-fields are in the files dict. really? + forms[part.name] = part.value + elif content_type in ('application/x-www-form-urlencoded', + 'application/x-url-encoded'): + mem_limit = kw.get('mem_limit', 2**20) + if content_length > mem_limit: + raise MultipartError("Request too big. Increase MAXMEM.") + data = stream.read(mem_limit) + if stream.read(1): # These is more that does not fit mem_limit + raise MultipartError("Request too big. Increase MAXMEM.") + if PY3: + data = data.decode('ascii') + data = parse_qs(data, keep_blank_values=True) + for key, values in data.items(): + for value in values: + if PY3: + forms[key] = value + else: + forms[key.decode(charset)] = value.decode(charset) + else: + raise MultipartError("Unsupported content type.") + except MultipartError: + if strict: raise + return forms, files diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/predicates.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/predicates.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1421 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
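A minimal usage sketch for the parse_form_data() helper from the multipart module added above (illustrative only: the boundary, field names and payload are made up and are not part of the changeset):

from io import BytesIO
from cubicweb.multipart import parse_form_data

# a multipart/form-data POST carrying one regular field and one file upload
body = (b'--xyz\r\n'
        b'Content-Disposition: form-data; name="name"\r\n'
        b'\r\n'
        b'Babar\r\n'
        b'--xyz\r\n'
        b'Content-Disposition: form-data; name="upload"; filename="hello.txt"\r\n'
        b'Content-Type: text/plain\r\n'
        b'\r\n'
        b'hello world\r\n'
        b'--xyz--\r\n')
environ = {
    'REQUEST_METHOD': 'POST',
    'CONTENT_TYPE': 'multipart/form-data; boundary=xyz',
    'CONTENT_LENGTH': str(len(body)),
    'wsgi.input': BytesIO(body),
}
forms, files = parse_form_data(environ)
assert forms['name'] == 'Babar'                  # plain fields end up in `forms`
assert files['upload'].filename == 'hello.txt'   # uploads end up in `files`
assert files['upload'].value == 'hello world'

Regular fields come back as decoded strings, while uploads are MultipartPart instances whose payload can be read from .file or written to disk with .save_as().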
+"""Predicate classes +""" + +__docformat__ = "restructuredtext en" + +import logging +from warnings import warn +from operator import eq + +from six import string_types, integer_types +from six.moves import range + +from logilab.common.deprecation import deprecated +from logilab.common.registry import Predicate, objectify_predicate, yes + +from yams.schema import BASE_TYPES, role_name +from rql.nodes import Function + +from cubicweb import (Unauthorized, NoSelectableObject, NotAnEntity, + CW_EVENT_MANAGER, role) +from cubicweb.uilib import eid_param +from cubicweb.schema import split_expression + +yes = deprecated('[3.15] import yes() from use logilab.common.registry')(yes) + + +# abstract predicates / mixin helpers ########################################### + +class PartialPredicateMixIn(object): + """convenience mix-in for predicates that will look into the containing + class to find missing information. + + cf. `cubicweb.web.action.LinkToEntityAction` for instance + """ + def __call__(self, cls, *args, **kwargs): + self.complete(cls) + return super(PartialPredicateMixIn, self).__call__(cls, *args, **kwargs) + + +class EClassPredicate(Predicate): + """abstract class for predicates working on *entity class(es)* specified + explicitly or found of the result set. + + Here are entity lookup / scoring rules: + + * if `entity` is specified, return score for this entity's class + + * elif `rset`, `select` and `filtered_variable` are specified, return score + for the possible classes for variable in the given rql :class:`Select` + node + + * elif `rset` and `row` are specified, return score for the class of the + entity found in the specified cell, using column specified by `col` or 0 + + * elif `rset` is specified return score for each entity class found in the + column specified specified by the `col` argument or in column 0 if not + specified + + When there are several classes to be evaluated, return the sum of scores for + each entity class unless: + + - `mode` == 'all' (the default) and some entity class is scored + to 0, in which case 0 is returned + + - `mode` == 'any', in which case the first non-zero score is + returned + + - `accept_none` is False and some cell in the column has a None value + (this may occurs with outer join) + """ + def __init__(self, once_is_enough=None, accept_none=True, mode='all'): + if once_is_enough is not None: + warn("[3.14] once_is_enough is deprecated, use mode='any'", + DeprecationWarning, stacklevel=2) + if once_is_enough: + mode = 'any' + assert mode in ('any', 'all'), 'bad mode %s' % mode + self.once_is_enough = mode == 'any' + self.accept_none = accept_none + + def __call__(self, cls, req, rset=None, row=None, col=0, entity=None, + select=None, filtered_variable=None, + accept_none=None, + **kwargs): + if entity is not None: + return self.score_class(entity.__class__, req) + if not rset: + return 0 + if select is not None and filtered_variable is not None: + etypes = set(sol[filtered_variable.name] for sol in select.solutions) + elif row is None: + if accept_none is None: + accept_none = self.accept_none + if not accept_none and \ + any(row[col] is None for row in rset): + return 0 + etypes = rset.column_types(col) + else: + etype = rset.description[row][col] + # may have None in rset.description on outer join + if etype is None or rset.rows[row][col] is None: + return 0 + etypes = (etype,) + score = 0 + for etype in etypes: + escore = self.score(cls, req, etype) + if not escore and not self.once_is_enough: + return 0 + elif self.once_is_enough: + 
return escore + score += escore + return score + + def score(self, cls, req, etype): + if etype in BASE_TYPES: + return 0 + return self.score_class(req.vreg['etypes'].etype_class(etype), req) + + def score_class(self, eclass, req): + raise NotImplementedError() + + +class EntityPredicate(EClassPredicate): + """abstract class for predicates working on *entity instance(s)* specified + explicitly or found of the result set. + + Here are entity lookup / scoring rules: + + * if `entity` is specified, return score for this entity + + * elif `row` is specified, return score for the entity found in the + specified cell, using column specified by `col` or 0 + + * else return the sum of scores for each entity found in the column + specified specified by the `col` argument or in column 0 if not specified, + unless: + + - `mode` == 'all' (the default) and some entity class is scored + to 0, in which case 0 is returned + + - `mode` == 'any', in which case the first non-zero score is + returned + + - `accept_none` is False and some cell in the column has a None value + (this may occurs with outer join) + + .. Note:: + using :class:`EntityPredicate` or :class:`EClassPredicate` as base predicate + class impacts performance, since when no entity or row is specified the + later works on every different *entity class* found in the result set, + while the former works on each *entity* (eg each row of the result set), + which may be much more costly. + """ + + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + entity=None, **kwargs): + if not rset and entity is None: + return 0 + score = 0 + if entity is not None: + score = self.score_entity(entity) + elif row is None: + col = col or 0 + if accept_none is None: + accept_none = self.accept_none + for row, rowvalue in enumerate(rset.rows): + if rowvalue[col] is None: # outer join + if not accept_none: + return 0 + continue + escore = self.score(req, rset, row, col) + if not escore and not self.once_is_enough: + return 0 + elif self.once_is_enough: + return escore + score += escore + else: + col = col or 0 + etype = rset.description[row][col] + if etype is not None: # outer join + score = self.score(req, rset, row, col) + return score + + def score(self, req, rset, row, col): + try: + return self.score_entity(rset.get_entity(row, col)) + except NotAnEntity: + return 0 + + def score_entity(self, entity): + raise NotImplementedError() + + +class ExpectedValuePredicate(Predicate): + """Take a list of expected values as initializer argument and store them + into the :attr:`expected` set attribute. You may also give a set as single + argument, which will then be referenced as set of expected values, + allowing modifications to the given set to be considered. + + You should implement one of :meth:`_values_set(cls, req, **kwargs)` or + :meth:`_get_value(cls, req, **kwargs)` method which should respectively + return the set of values or the unique possible value for the given context. + + You may also specify a `mode` behaviour as argument, as explained below. + + Returned score is: + + - 0 if `mode` == 'all' (the default) and at least one expected + values isn't found + + - 0 if `mode` == 'any' and no expected values isn't found at all + + - else the number of matching values + + Notice `mode` = 'any' with a single expected value has no effect at all. 
+ """ + def __init__(self, *expected, **kwargs): + assert expected, self + if len(expected) == 1 and isinstance(expected[0], (set, dict)): + self.expected = expected[0] + else: + self.expected = frozenset(expected) + mode = kwargs.pop('mode', 'all') + assert mode in ('any', 'all'), 'bad mode %s' % mode + self.once_is_enough = mode == 'any' + assert not kwargs, 'unexpected arguments %s' % kwargs + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(sorted(str(s) for s in self.expected))) + + def __call__(self, cls, req, **kwargs): + values = self._values_set(cls, req, **kwargs) + if isinstance(values, dict): + if isinstance(self.expected, dict): + matching = 0 + for key, expected_value in self.expected.items(): + if key in values: + if (isinstance(expected_value, (list, tuple, frozenset, set)) + and values[key] in expected_value): + matching += 1 + elif values[key] == expected_value: + matching += 1 + if isinstance(self.expected, (set, frozenset)): + values = frozenset(values) + matching = len(values & self.expected) + else: + matching = len(values & self.expected) + if self.once_is_enough: + return matching + if matching == len(self.expected): + return matching + return 0 + + def _values_set(self, cls, req, **kwargs): + return frozenset( (self._get_value(cls, req, **kwargs),) ) + + def _get_value(self, cls, req, **kwargs): + raise NotImplementedError() + + +# bare predicates ############################################################## + +class match_kwargs(ExpectedValuePredicate): + """Return non-zero score if parameter names specified as initializer + arguments are specified in the input context. + + + Return a score corresponding to the number of expected parameters. + + When multiple parameters are expected, all of them should be found in + the input context unless `mode` keyword argument is given to 'any', + in which case a single matching parameter is enough. + """ + + def _values_set(self, cls, req, **kwargs): + return kwargs + + +class appobject_selectable(Predicate): + """Return 1 if another appobject is selectable using the same input context. + + Initializer arguments: + + * `registry`, a registry name + + * `regids`, object identifiers in this registry, one of them should be + selectable. + """ + selectable_score = 1 + def __init__(self, registry, *regids): + self.registry = registry + self.regids = regids + + def __call__(self, cls, req, **kwargs): + for regid in self.regids: + if req.vreg[self.registry].select_or_none(regid, req, **kwargs) is not None: + return self.selectable_score + return 0 + + +class adaptable(appobject_selectable): + """Return 1 if another appobject is selectable using the same input context. + + Initializer arguments: + + * `regids`, adapter identifiers (e.g. interface names) to which the context + (usually entities) should be adaptable. One of them should be selectable + when multiple identifiers are given. 
+ """ + def __init__(self, *regids): + super(adaptable, self).__init__('adapters', *regids) + + def __call__(self, cls, req, **kwargs): + kwargs.setdefault('accept_none', False) + score = super(adaptable, self).__call__(cls, req, **kwargs) + if score == 0 and kwargs.get('rset') and len(kwargs['rset']) > 1 and not 'row' in kwargs: + # on rset containing several entity types, each row may be + # individually adaptable, while the whole rset won't be if the + # same adapter can't be used for each type + for row in range(len(kwargs['rset'])): + kwargs.setdefault('col', 0) + _score = super(adaptable, self).__call__(cls, req, row=row, **kwargs) + if not _score: + return 0 + # adjust score per row as expected by default adjust_score + # implementation + score += self.adjust_score(_score) + else: + score = self.adjust_score(score) + return score + + @staticmethod + def adjust_score(score): + # being adaptable to an interface should takes precedence other + # is_instance('Any'), but not other explicit + # is_instance('SomeEntityType'), and, for **a single entity**: + # * is_instance('Any') score is 1 + # * is_instance('SomeEntityType') score is at least 2 + if score >= 2: + return score - 0.5 + if score == 1: + return score + 0.5 + return score + + +class configuration_values(Predicate): + """Return 1 if the instance has an option set to a given value(s) in its + configuration file. + """ + # XXX this predicate could be evaluated on startup + def __init__(self, key, values): + self._key = key + if not isinstance(values, (tuple, list)): + values = (values,) + self._values = frozenset(values) + + def __call__(self, cls, req, **kwargs): + try: + return self._score + except AttributeError: + if req is None: + config = kwargs['repo'].config + else: + config = req.vreg.config + self._score = config[self._key] in self._values + return self._score + + +# rset predicates ############################################################## + +@objectify_predicate +def none_rset(cls, req, rset=None, **kwargs): + """Return 1 if the result set is None (eg usually not specified).""" + if rset is None: + return 1 + return 0 + + +# XXX == ~ none_rset +@objectify_predicate +def any_rset(cls, req, rset=None, **kwargs): + """Return 1 for any result set, whatever the number of rows in it, even 0.""" + if rset is not None: + return 1 + return 0 + + +@objectify_predicate +def nonempty_rset(cls, req, rset=None, **kwargs): + """Return 1 for result set containing one ore more rows.""" + if rset: + return 1 + return 0 + + +# XXX == ~ nonempty_rset +@objectify_predicate +def empty_rset(cls, req, rset=None, **kwargs): + """Return 1 for result set which doesn't contain any row.""" + if rset is not None and len(rset) == 0: + return 1 + return 0 + + +# XXX == multi_lines_rset(1) +@objectify_predicate +def one_line_rset(cls, req, rset=None, row=None, **kwargs): + """Return 1 if the result set is of size 1, or greater but a specific row in + the result set is specified ('row' argument). + """ + if rset is None and 'entity' in kwargs: + return 1 + if rset is not None and (row is not None or len(rset) == 1): + return 1 + return 0 + + +class multi_lines_rset(Predicate): + """Return 1 if the operator expression matches between `num` elements + in the result set and the `expected` value if defined. 
+ + By default, multi_lines_rset(expected) matches equality expression: + `nb` row(s) in result set equals to expected value + But, you can perform richer comparisons by overriding default operator: + multi_lines_rset(expected, operator.gt) + + If `expected` is None, return 1 if the result set contains *at least* + two rows. + If rset is None, return 0. + """ + def __init__(self, expected=None, operator=eq): + self.expected = expected + self.operator = operator + + def match_expected(self, num): + if self.expected is None: + return num > 1 + return self.operator(num, self.expected) + + def __call__(self, cls, req, rset=None, **kwargs): + return int(rset is not None and self.match_expected(len(rset))) + + +class multi_columns_rset(multi_lines_rset): + """If `nb` is specified, return 1 if the result set has exactly `nb` column + per row. Else (`nb` is None), return 1 if the result set contains *at least* + two columns per row. Return 0 for empty result set. + """ + + def __call__(self, cls, req, rset=None, **kwargs): + # 'or 0' since we *must not* return None. Also don't use rset.rows so + # this selector will work if rset is a simple list of list. + return rset and self.match_expected(len(rset[0])) or 0 + + +class paginated_rset(Predicate): + """Return 1 or more for result set with more rows than one or more page + size. You can specify expected number of pages to the initializer (default + to one), and you'll get that number of pages as score if the result set is + big enough. + + Page size is searched in (respecting order): + * a `page_size` argument + * a `page_size` form parameters + * the `navigation.page-size` property (see :ref:`PersistentProperties`) + """ + def __init__(self, nbpages=1): + assert nbpages > 0 + self.nbpages = nbpages + + def __call__(self, cls, req, rset=None, **kwargs): + if rset is None: + return 0 + page_size = kwargs.get('page_size') + if page_size is None: + page_size = req.form.get('page_size') + if page_size is not None: + try: + page_size = int(page_size) + except ValueError: + page_size = None + if page_size is None: + page_size = req.property_value('navigation.page-size') + if len(rset) <= (page_size*self.nbpages): + return 0 + return self.nbpages + + +@objectify_predicate +def sorted_rset(cls, req, rset=None, **kwargs): + """Return 1 for sorted result set (e.g. from an RQL query containing an + ORDERBY clause), with exception that it will return 0 if the rset is + 'ORDERBY FTIRANK(VAR)' (eg sorted by rank value of the has_text index). + """ + if rset is None: + return 0 + selects = rset.syntax_tree().children + if (len(selects) > 1 or + not selects[0].orderby or + (isinstance(selects[0].orderby[0].term, Function) and + selects[0].orderby[0].term.name == 'FTIRANK') + ): + return 0 + return 2 + + +# XXX == multi_etypes_rset(1) +@objectify_predicate +def one_etype_rset(cls, req, rset=None, col=0, **kwargs): + """Return 1 if the result set contains entities which are all of the same + type in the column specified by the `col` argument of the input context, or + in column 0. + """ + if rset is None: + return 0 + if len(rset.column_types(col)) != 1: + return 0 + return 1 + + +class multi_etypes_rset(multi_lines_rset): + """If `nb` is specified, return 1 if the result set contains `nb` different + types of entities in the column specified by the `col` argument of the input + context, or in column 0. If `nb` is None, return 1 if the result set contains + *at least* two different types of entities. 
+ """ + + def __call__(self, cls, req, rset=None, col=0, **kwargs): + # 'or 0' since we *must not* return None + return rset and self.match_expected(len(rset.column_types(col))) or 0 + + +@objectify_predicate +def logged_user_in_rset(cls, req, rset=None, row=None, col=0, **kwargs): + """Return positive score if the result set at the specified row / col + contains the eid of the logged user. + """ + if rset is None: + return 0 + return req.user.eid == rset[row or 0][col] + + +# entity predicates ############################################################# + +class composite_etype(Predicate): + """Return 1 for composite entities. + + A composite entity has an etype for which at least one relation + definition points in its direction with the + composite='subject'/'object' notation. + """ + + def __call__(self, cls, req, **kwargs): + entity = kwargs.pop('entity', None) + if entity is None: + return 0 + return entity.e_schema.is_composite + + + +class non_final_entity(EClassPredicate): + """Return 1 for entity of a non final entity type(s). Remember, "final" + entity types are String, Int, etc... This is equivalent to + `is_instance('Any')` but more optimized. + + See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity + class lookup / score rules according to the input context. + """ + def score(self, cls, req, etype): + if etype in BASE_TYPES: + return 0 + return 1 + + def score_class(self, eclass, req): + return 1 # necessarily true if we're there + + + +def _reset_is_instance_cache(vreg): + vreg._is_instance_predicate_cache = {} + +CW_EVENT_MANAGER.bind('before-registry-reset', _reset_is_instance_cache) + +class is_instance(EClassPredicate): + """Return non-zero score for entity that is an instance of the one of given + type(s). If multiple arguments are given, matching one of them is enough. + + Entity types should be given as string, the corresponding class will be + fetched from the registry at selection time. + + See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity + class lookup / score rules according to the input context. + + .. note:: the score will reflect class proximity so the most specific object + will be selected. 
+ """ + + def __init__(self, *expected_etypes, **kwargs): + super(is_instance, self).__init__(**kwargs) + self.expected_etypes = expected_etypes + for etype in self.expected_etypes: + assert isinstance(etype, string_types), etype + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.expected_etypes)) + + def score_class(self, eclass, req): + # cache on vreg to avoid reloading issues + try: + cache = req.vreg._is_instance_predicate_cache + except AttributeError: + # XXX 'before-registry-reset' not called for db-api connections + cache = req.vreg._is_instance_predicate_cache = {} + try: + expected_eclasses = cache[self] + except KeyError: + # turn list of entity types as string into a list of + # (entity class, parent classes) + etypesreg = req.vreg['etypes'] + expected_eclasses = cache[self] = [] + for etype in self.expected_etypes: + try: + expected_eclasses.append(etypesreg.etype_class(etype)) + except KeyError: + continue # entity type not in the schema + parents, any = req.vreg['etypes'].parent_classes(eclass.__regid__) + score = 0 + for expectedcls in expected_eclasses: + # adjust score according to class proximity + if expectedcls is eclass: + score += len(parents) + 4 + elif expectedcls is any: # Any + score += 1 + else: + for index, basecls in enumerate(reversed(parents)): + if expectedcls is basecls: + score += index + 3 + break + return score + + +class score_entity(EntityPredicate): + """Return score according to an arbitrary function given as argument which + will be called with input content entity as argument. + + This is a very useful predicate that will usually interest you since it + allows a lot of things without having to write a specific predicate. + + The function can return arbitrary value which will be casted to an integer + value at the end. + + See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity + lookup / score rules according to the input context. + """ + def __init__(self, scorefunc, once_is_enough=None, mode='all'): + super(score_entity, self).__init__(mode=mode, once_is_enough=once_is_enough) + def intscore(*args, **kwargs): + score = scorefunc(*args, **kwargs) + if not score: + return 0 + if isinstance(score, integer_types): + return score + return 1 + self.score_entity = intscore + + +class has_mimetype(EntityPredicate): + """Return 1 if the entity adapt to IDownloadable and has the given MIME type. + + You can give 'image/' to match any image for instance, or 'image/png' to match + only PNG images. + """ + def __init__(self, mimetype, once_is_enough=None, mode='all'): + super(has_mimetype, self).__init__(mode=mode, once_is_enough=once_is_enough) + self.mimetype = mimetype + + def score_entity(self, entity): + idownloadable = entity.cw_adapt_to('IDownloadable') + if idownloadable is None: + return 0 + mt = idownloadable.download_content_type() + if not (mt and mt.startswith(self.mimetype)): + return 0 + return 1 + + +class relation_possible(EntityPredicate): + """Return 1 for entity that supports the relation, provided that the + request's user may do some `action` on it (see below). + + The relation is specified by the following initializer arguments: + + * `rtype`, the name of the relation + + * `role`, the role of the entity in the relation, either 'subject' or + 'object', default to 'subject' + + * `target_etype`, optional name of an entity type that should be supported + at the other end of the relation + + * `action`, a relation schema action (e.g. 
one of 'read', 'add', 'delete', + default to 'read') which must be granted to the user, else a 0 score will + be returned. Give None if you don't want any permission checking. + + * `strict`, boolean (default to False) telling what to do when the user has + not globally the permission for the action (eg the action is not granted + to one of the user's groups) + + - when strict is False, if there are some local role defined for this + action (e.g. using rql expressions), then the permission will be + considered as granted + + - when strict is True, then the permission will be actually checked for + each entity + + Setting `strict` to True impacts performance for large result set since + you'll then get the :class:`~cubicweb.predicates.EntityPredicate` behaviour + while otherwise you get the :class:`~cubicweb.predicates.EClassPredicate`'s + one. See those classes documentation for entity lookup / score rules + according to the input context. + """ + + def __init__(self, rtype, role='subject', target_etype=None, + action='read', strict=False, **kwargs): + super(relation_possible, self).__init__(**kwargs) + self.rtype = rtype + self.role = role + self.target_etype = target_etype + self.action = action + self.strict = strict + + # hack hack hack + def __call__(self, cls, req, **kwargs): + # hack hack hack + if self.strict: + return EntityPredicate.__call__(self, cls, req, **kwargs) + return EClassPredicate.__call__(self, cls, req, **kwargs) + + def score(self, *args): + if self.strict: + return EntityPredicate.score(self, *args) + return EClassPredicate.score(self, *args) + + def _get_rschema(self, eclass): + eschema = eclass.e_schema + try: + if self.role == 'object': + return eschema.objrels[self.rtype] + else: + return eschema.subjrels[self.rtype] + except KeyError: + return None + + def score_class(self, eclass, req): + rschema = self._get_rschema(eclass) + if rschema is None: + return 0 # relation not supported + eschema = eclass.e_schema + if self.target_etype is not None: + try: + rdef = rschema.role_rdef(eschema, self.target_etype, self.role) + except KeyError: + return 0 + if self.action and not rdef.may_have_permission(self.action, req): + return 0 + teschema = req.vreg.schema.eschema(self.target_etype) + if not teschema.may_have_permission('read', req): + return 0 + elif self.action: + return rschema.may_have_permission(self.action, req, eschema, self.role) + return 1 + + def score_entity(self, entity): + rschema = self._get_rschema(entity) + if rschema is None: + return 0 # relation not supported + if self.action: + if self.target_etype is not None: + try: + rschema = rschema.role_rdef(entity.e_schema, + self.target_etype, self.role) + except KeyError: + return 0 + if self.role == 'subject': + if not rschema.has_perm(entity._cw, self.action, fromeid=entity.eid): + return 0 + elif not rschema.has_perm(entity._cw, self.action, toeid=entity.eid): + return 0 + if self.target_etype is not None: + req = entity._cw + teschema = req.vreg.schema.eschema(self.target_etype) + if not teschema.may_have_permission('read', req): + return 0 + return 1 + + +class partial_relation_possible(PartialPredicateMixIn, relation_possible): + """Same as :class:~`cubicweb.predicates.relation_possible`, but will look for + attributes of the selected class to get information which is otherwise + expected by the initializer, except for `action` and `strict` which are kept + as initializer arguments. + + This is useful to predefine predicate of an abstract class designed to be + customized. 
+ """ + def __init__(self, action='read', **kwargs): + super(partial_relation_possible, self).__init__(None, None, None, + action, **kwargs) + + def complete(self, cls): + self.rtype = cls.rtype + self.role = role(cls) + self.target_etype = getattr(cls, 'target_etype', None) + + +class has_related_entities(EntityPredicate): + """Return 1 if entity support the specified relation and has some linked + entities by this relation , optionally filtered according to the specified + target type. + + The relation is specified by the following initializer arguments: + + * `rtype`, the name of the relation + + * `role`, the role of the entity in the relation, either 'subject' or + 'object', default to 'subject'. + + * `target_etype`, optional name of an entity type that should be found + at the other end of the relation + + See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity + lookup / score rules according to the input context. + """ + def __init__(self, rtype, role='subject', target_etype=None, **kwargs): + super(has_related_entities, self).__init__(**kwargs) + self.rtype = rtype + self.role = role + self.target_etype = target_etype + + def score_entity(self, entity): + relpossel = relation_possible(self.rtype, self.role, self.target_etype) + if not relpossel.score_class(entity.__class__, entity._cw): + return 0 + rset = entity.related(self.rtype, self.role) + if self.target_etype: + return any(r for r in rset.description if r[0] == self.target_etype) + return rset and 1 or 0 + + +class partial_has_related_entities(PartialPredicateMixIn, has_related_entities): + """Same as :class:~`cubicweb.predicates.has_related_entity`, but will look + for attributes of the selected class to get information which is otherwise + expected by the initializer. + + This is useful to predefine predicate of an abstract class designed to be + customized. + """ + def __init__(self, **kwargs): + super(partial_has_related_entities, self).__init__(None, None, None, + **kwargs) + + def complete(self, cls): + self.rtype = cls.rtype + self.role = role(cls) + self.target_etype = getattr(cls, 'target_etype', None) + + +class has_permission(EntityPredicate): + """Return non-zero score if request's user has the permission to do the + requested action on the entity. `action` is an entity schema action (eg one + of 'read', 'add', 'delete', 'update'). 
+ + Here are entity lookup / scoring rules: + + * if `entity` is specified, check permission is granted for this entity + + * elif `row` is specified, check permission is granted for the entity found + in the specified cell + + * else check permission is granted for each entity found in the column + specified specified by the `col` argument or in column 0 + """ + def __init__(self, action): + self.action = action + + # don't use EntityPredicate.__call__ but this optimized implementation to + # avoid considering each entity when it's not necessary + def __call__(self, cls, req, rset=None, row=None, col=0, entity=None, **kwargs): + if entity is not None: + return self.score_entity(entity) + if rset is None: + return 0 + if row is None: + score = 0 + need_local_check = [] + geteschema = req.vreg.schema.eschema + user = req.user + action = self.action + for etype in rset.column_types(0): + if etype in BASE_TYPES: + return 0 + eschema = geteschema(etype) + if not user.matching_groups(eschema.get_groups(action)): + if eschema.has_local_role(action): + # have to ckeck local roles + need_local_check.append(eschema) + continue + else: + # even a local role won't be enough + return 0 + score += 1 + if need_local_check: + # check local role for entities of necessary types + for i, row in enumerate(rset): + if not rset.description[i][col] in need_local_check: + continue + # micro-optimisation instead of calling self.score(req, + # rset, i, col): rset may be large + if not rset.get_entity(i, col).cw_has_perm(action): + return 0 + score += 1 + return score + return self.score(req, rset, row, col) + + def score_entity(self, entity): + if entity.cw_has_perm(self.action): + return 1 + return 0 + + +class has_add_permission(EClassPredicate): + """Return 1 if request's user has the add permission on entity type + specified in the `etype` initializer argument, or according to entity found + in the input content if not specified. + + It also check that then entity type is not a strict subobject (e.g. may only + be used as a composed of another entity). + + See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity + class lookup / score rules according to the input context when `etype` is + not specified. + """ + def __init__(self, etype=None, **kwargs): + super(has_add_permission, self).__init__(**kwargs) + self.etype = etype + + def __call__(self, cls, req, **kwargs): + if self.etype is None: + return super(has_add_permission, self).__call__(cls, req, **kwargs) + return self.score(cls, req, self.etype) + + def score_class(self, eclass, req): + eschema = eclass.e_schema + if eschema.final or eschema.is_subobject(strict=True) \ + or not eschema.has_perm(req, 'add'): + return 0 + return 1 + + +class rql_condition(EntityPredicate): + """Return non-zero score if arbitrary rql specified in `expression` + initializer argument return some results for entity found in the input + context. Returned score is the number of items returned by the rql + condition. + + `expression` is expected to be a string containing an rql expression, which + must use 'X' variable to represent the context entity and may use 'U' to + represent the request's user. + + .. warning:: + If simply testing value of some attribute/relation of context entity (X), + you should rather use the :class:`score_entity` predicate which will + benefit from the ORM's request entities cache. + + See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity + lookup / score rules according to the input context. 
+ """ + def __init__(self, expression, once_is_enough=None, mode='all', user_condition=False): + super(rql_condition, self).__init__(mode=mode, once_is_enough=once_is_enough) + self.user_condition = user_condition + if user_condition: + rql = 'Any COUNT(U) WHERE U eid %%(u)s, %s' % expression + elif 'U' in frozenset(split_expression(expression)): + rql = 'Any COUNT(X) WHERE X eid %%(x)s, U eid %%(u)s, %s' % expression + else: + rql = 'Any COUNT(X) WHERE X eid %%(x)s, %s' % expression + self.rql = rql + + def __str__(self): + return '%s(%r)' % (self.__class__.__name__, self.rql) + + def __call__(self, cls, req, **kwargs): + if self.user_condition: + try: + return req.execute(self.rql, {'u': req.user.eid})[0][0] + except Unauthorized: + return 0 + else: + return super(rql_condition, self).__call__(cls, req, **kwargs) + + def _score(self, req, eid): + try: + return req.execute(self.rql, {'x': eid, 'u': req.user.eid})[0][0] + except Unauthorized: + return 0 + + def score(self, req, rset, row, col): + return self._score(req, rset[row][col]) + + def score_entity(self, entity): + return self._score(entity._cw, entity.eid) + + +# workflow predicates ########################################################### + +class is_in_state(score_entity): + """Return 1 if entity is in one of the states given as argument list + + You should use this instead of your own :class:`score_entity` predicate to + avoid some gotchas: + + * possible views gives a fake entity with no state + * you must use the latest tr info thru the workflow adapter for repository + side checking of the current state + + In debug mode, this predicate can raise :exc:`ValueError` for unknown states names + (only checked on entities without a custom workflow) + + :rtype: int + """ + def __init__(self, *expected): + assert expected, self + self.expected = frozenset(expected) + def score(entity, expected=self.expected): + adapted = entity.cw_adapt_to('IWorkflowable') + # in debug mode only (time consuming) + if entity._cw.vreg.config.debugmode: + # validation can only be done for generic etype workflow because + # expected transition list could have been changed for a custom + # workflow (for the current entity) + if not entity.custom_workflow: + self._validate(adapted) + return self._score(adapted) + super(is_in_state, self).__init__(score) + + def _score(self, adapted): + trinfo = adapted.latest_trinfo() + if trinfo is None: # entity is probably in it's initial state + statename = adapted.state + else: + statename = trinfo.new_state.name + return statename in self.expected + + def _validate(self, adapted): + wf = adapted.current_workflow + valid = [n.name for n in wf.reverse_state_of] + unknown = sorted(self.expected.difference(valid)) + if unknown: + raise ValueError("%s: unknown state(s): %s" + % (wf.name, ",".join(unknown))) + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.expected)) + + +def on_fire_transition(etype, tr_names, from_state_name=None): + """Return 1 when entity of the type `etype` is going through transition of + a name included in `tr_names`. + + You should use this predicate on 'after_add_entity' hook, since it's actually + looking for addition of `TrInfo` entities. Hence in the hook, `self.entity` + will reference the matching `TrInfo` entity, allowing to get all the + transition details (including the entity to which is applied the transition + but also its original state, transition, destination state, user...). 
+ + See :class:`cubicweb.entities.wfobjs.TrInfo` for more information. + """ + if from_state_name is not None: + warn("on_fire_transition's from_state_name argument is unused", DeprecationWarning) + if isinstance(tr_names, string_types): + tr_names = set((tr_names,)) + def match_etype_and_transition(trinfo): + # take care trinfo.transition is None when calling change_state + return (trinfo.transition and trinfo.transition.name in tr_names + # is_instance() first two arguments are 'cls' (unused, so giving + # None is fine) and the request/session + and is_instance(etype)(None, trinfo._cw, entity=trinfo.for_entity)) + + return is_instance('TrInfo') & score_entity(match_etype_and_transition) + + +class match_transition(ExpectedValuePredicate): + """Return 1 if `transition` argument is found in the input context which has + a `.name` attribute matching one of the expected names given to the + initializer. + + This predicate is expected to be used to customise the status change form in + the web ui. + """ + def __call__(self, cls, req, transition=None, **kwargs): + # XXX check this is a transition that apply to the object? + if transition is None: + treid = req.form.get('treid', None) + if treid: + transition = req.entity_from_eid(treid) + if transition is not None and getattr(transition, 'name', None) in self.expected: + return 1 + return 0 + + +# logged user predicates ######################################################## + +@objectify_predicate +def no_cnx(cls, req, **kwargs): + """Return 1 if the web session has no connection set. This occurs when + anonymous access is not allowed and user isn't authenticated. + """ + if not req.cnx: + return 1 + return 0 + + +@objectify_predicate +def authenticated_user(cls, req, **kwargs): + """Return 1 if the user is authenticated (i.e. not the anonymous user). + """ + if req.session.anonymous_session: + return 0 + return 1 + + +@objectify_predicate +def anonymous_user(cls, req, **kwargs): + """Return 1 if the user is not authenticated (i.e. is the anonymous user). + """ + if req.session.anonymous_session: + return 1 + return 0 + + +class match_user_groups(ExpectedValuePredicate): + """Return a non-zero score if request's user is in at least one of the + groups given as initializer argument. Returned score is the number of groups + in which the user is. + + If the special 'owners' group is given and `rset` is specified in the input + context: + + * if `row` is specified check the entity at the given `row`/`col` (default + to 0) is owned by the user + + * else check all entities in `col` (default to 0) are owned by the user + """ + + def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + if not getattr(req, 'cnx', True): # default to True for repo session instances + return 0 + user = req.user + if user is None: + return int('guests' in self.expected) + score = user.matching_groups(self.expected) + if not score and 'owners' in self.expected and rset: + if row is not None: + if not user.owns(rset[row][col]): + return 0 + score = 1 + else: + score = all(user.owns(r[col]) for r in rset) + return score + +# Web request predicates ######################################################## + +# XXX deprecate +@objectify_predicate +def primary_view(cls, req, view=None, **kwargs): + """Return 1 if: + + * *no view is specified* in the input context + + * a view is specified and its `.is_primary()` method return True + + This predicate is usually used by contextual components that only want to + appears for the primary view of an entity. 
+ """ + if view is not None and not view.is_primary(): + return 0 + return 1 + + +@objectify_predicate +def contextual(cls, req, view=None, **kwargs): + """Return 1 if view's contextual property is true""" + if view is not None and view.contextual: + return 1 + return 0 + + +class match_view(ExpectedValuePredicate): + """Return 1 if a view is specified an as its registry id is in one of the + expected view id given to the initializer. + """ + def __call__(self, cls, req, view=None, **kwargs): + if view is None or not view.__regid__ in self.expected: + return 0 + return 1 + + +class match_context(ExpectedValuePredicate): + + def __call__(self, cls, req, context=None, **kwargs): + if not context in self.expected: + return 0 + return 1 + + +# XXX deprecate +@objectify_predicate +def match_context_prop(cls, req, context=None, **kwargs): + """Return 1 if: + + * no `context` is specified in input context (take care to confusion, here + `context` refers to a string given as an argument to the input context...) + + * specified `context` is matching the context property value for the + appobject using this predicate + + * the appobject's context property value is None + + This predicate is usually used by contextual components that want to appears + in a configurable place. + """ + if context is None: + return 1 + propval = req.property_value('%s.%s.context' % (cls.__registry__, + cls.__regid__)) + if propval and context != propval: + return 0 + return 1 + + +class match_search_state(ExpectedValuePredicate): + """Return 1 if the current request search state is in one of the expected + states given to the initializer. + + Known search states are either 'normal' or 'linksearch' (eg searching for an + object to create a relation with another). + + This predicate is usually used by action that want to appears or not according + to the ui search state. + """ + + def __call__(self, cls, req, **kwargs): + try: + if not req.search_state[0] in self.expected: + return 0 + except AttributeError: + return 1 # class doesn't care about search state, accept it + return 1 + + +class match_form_params(ExpectedValuePredicate): + """Return non-zero score if parameter names specified as initializer + arguments are specified in request's form parameters. + + Return a score corresponding to the number of expected parameters. + + When multiple parameters are expected, all of them should be found in + the input context unless `mode` keyword argument is given to 'any', + in which case a single matching parameter is enough. + """ + + def __init__(self, *expected, **kwargs): + """override default __init__ to allow either named or positional + parameters. + """ + if kwargs and expected: + raise ValueError("match_form_params() can't be called with both " + "positional and named arguments") + if expected: + if len(expected) == 1 and not isinstance(expected[0], string_types): + raise ValueError("match_form_params() positional arguments " + "must be strings") + super(match_form_params, self).__init__(*expected) + else: + super(match_form_params, self).__init__(kwargs) + + def _values_set(self, cls, req, **kwargs): + return req.form + + +class match_http_method(ExpectedValuePredicate): + """Return non-zero score if one of the HTTP methods specified as + initializer arguments is the HTTP method of the request (GET, POST, ...). 
+ """ + + def __call__(self, cls, req, **kwargs): + return int(req.http_method() in self.expected) + + +class match_edited_type(ExpectedValuePredicate): + """return non-zero if main edited entity type is the one specified as + initializer argument, or is among initializer arguments if `mode` == 'any'. + """ + + def _values_set(self, cls, req, **kwargs): + try: + return frozenset((req.form['__type:%s' % req.form['__maineid']],)) + except KeyError: + return frozenset() + + +class match_form_id(ExpectedValuePredicate): + """return non-zero if request form identifier is the one specified as + initializer argument, or is among initializer arguments if `mode` == 'any'. + """ + + def _values_set(self, cls, req, **kwargs): + try: + return frozenset((req.form['__form_id'],)) + except KeyError: + return frozenset() + + +class specified_etype_implements(is_instance): + """Return non-zero score if the entity type specified by an 'etype' key + searched in (by priority) input context kwargs and request form parameters + match a known entity type (case insensitivly), and it's associated entity + class is of one of the type(s) given to the initializer. If multiple + arguments are given, matching one of them is enough. + + .. note:: as with :class:`~cubicweb.predicates.is_instance`, entity types + should be given as string and the score will reflect class + proximity so the most specific object will be selected. + + This predicate is usually used by views holding entity creation forms (since + we've no result set to work on). + """ + + def __call__(self, cls, req, **kwargs): + try: + etype = kwargs['etype'] + except KeyError: + try: + etype = req.form['etype'] + except KeyError: + return 0 + else: + # only check this is a known type if etype comes from req.form, + # else we want the error to propagate + try: + etype = req.vreg.case_insensitive_etypes[etype.lower()] + req.form['etype'] = etype + except KeyError: + return 0 + score = self.score_class(req.vreg['etypes'].etype_class(etype), req) + if score: + eschema = req.vreg.schema.eschema(etype) + if eschema.may_have_permission('add', req): + return score + return 0 + + +class attribute_edited(EntityPredicate): + """Scores if the specified attribute has been edited This is useful for + selection of forms by the edit controller. + + The initial use case is on a form, in conjunction with match_transition, + which will not score at edit time:: + + is_instance('Version') & (match_transition('ready') | + attribute_edited('publication_date')) + """ + def __init__(self, attribute, once_is_enough=None, mode='all'): + super(attribute_edited, self).__init__(mode=mode, once_is_enough=once_is_enough) + self._attribute = attribute + + def score_entity(self, entity): + return eid_param(role_name(self._attribute, 'subject'), entity.eid) in entity._cw.form + + +# Other predicates ############################################################## + +class match_exception(ExpectedValuePredicate): + """Return 1 if exception given as `exc` in the input context is an instance + of one of the class given on instanciation of this predicate. 
+ """ + def __init__(self, *expected): + assert expected, self + # we want a tuple, not a set as done in the parent class + self.expected = expected + + def __call__(self, cls, req, exc=None, **kwargs): + if exc is not None and isinstance(exc, self.expected): + return 1 + return 0 + + +@objectify_predicate +def debug_mode(cls, req, rset=None, **kwargs): + """Return 1 if running in debug mode.""" + return req.vreg.config.debugmode and 1 or 0 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/pylintext.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/pylintext.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,50 @@ +"""https://pastebin.logilab.fr/show/860/""" + +from astroid import MANAGER, InferenceError, nodes, scoped_nodes +from astroid.builder import AstroidBuilder + +def turn_function_to_class(node): + """turn a Function node into a Class node (in-place)""" + node.__class__ = scoped_nodes.Class + node.bases = () + # remove return nodes so that we don't get warned about 'return outside + # function' by pylint + for rnode in node.nodes_of_class(nodes.Return): + rnode.parent.body.remove(rnode) + # that seems to be enough :) + + +def cubicweb_transform(module): + # handle objectify_predicate decorator (and its former name until bw compat + # is kept). Only look at module level functions, should be enough. + for assnodes in module.locals.values(): + for node in assnodes: + if isinstance(node, scoped_nodes.Function) and node.decorators: + for decorator in node.decorators.nodes: + try: + for infered in decorator.infer(): + if infered.name in ('objectify_predicate', 'objectify_selector'): + turn_function_to_class(node) + break + else: + continue + break + except InferenceError: + continue + # add yams base types into 'yams.buildobjs', astng doesn't grasp globals() + # magic in there + if module.name == 'yams.buildobjs': + from yams import BASE_TYPES + for etype in BASE_TYPES: + module.locals[etype] = [scoped_nodes.Class(etype, None)] + # add data() to uiprops module + if module.name.split('.')[-1] == 'uiprops': + fake = AstroidBuilder(MANAGER).string_build(''' +def data(string): + return u'' +''') + module.locals['data'] = fake.locals['data'] + +def register(linter): + """called when loaded by pylint --load-plugins, nothing to do here""" + MANAGER.register_transform(nodes.Module, cubicweb_transform) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/pytestconf.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/pytestconf.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,48 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""pytest configuration file: we need this to properly remove ressources +cached on test classes, at least until we've proper support for teardown_class +""" +import sys +from os.path import split, splitext +from logilab.common.pytest import PyTester + +class CustomPyTester(PyTester): + def testfile(self, filename, batchmode=False): + try: + return super(CustomPyTester, self).testfile(filename, batchmode) + finally: + modname = splitext(split(filename)[1])[0] + try: + module = sys.modules[modname] + except KeyError: + # error during test module import + return + for cls in vars(module).values(): + if getattr(cls, '__module__', None) != modname: + continue + clean_repo_test_cls(cls) + +def clean_repo_test_cls(cls): + if 'repo' in cls.__dict__: + if not cls.repo.shutting_down: + cls.repo.shutdown() + del cls.repo + for clsattr in ('cnx', 'config', '_config', 'vreg', 'schema'): + if clsattr in cls.__dict__: + delattr(cls, clsattr) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/repoapi.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/repoapi.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,71 @@ +# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Official API to access the content of a repository +""" +from warnings import warn + +from six import add_metaclass + +from logilab.common.deprecation import class_deprecated + +from cubicweb.utils import parse_repo_uri +from cubicweb import AuthenticationError +from cubicweb.server.session import Connection + + +### public API ###################################################### + +def get_repository(uri=None, config=None, vreg=None): + """get a repository for the given URI or config/vregistry (in case we're + loading the repository for a client, eg web server, configuration). + + The returned repository may be an in-memory repository or a proxy object + using a specific RPC method, depending on the given URI. + """ + if uri is not None: + warn('[3.22] get_repository only wants a config') + + assert config is not None, 'get_repository(config=config)' + return config.repository(vreg) + +def connect(repo, login, **kwargs): + """Take credential and return associated Connection. + + raise AuthenticationError if the credential are invalid.""" + sessionid = repo.connect(login, **kwargs) + session = repo._get_session(sessionid) + # XXX the autoclose_session should probably be handle on the session directly + # this is something to consider once we have proper server side Connection. + return Connection(session) + +def anonymous_cnx(repo): + """return a Connection for Anonymous user. 
+ + raises an AuthenticationError if anonymous usage is not allowed + """ + anoninfo = getattr(repo.config, 'anonymous_user', lambda: None)() + if anoninfo is None: # no anonymous user + raise AuthenticationError('anonymous access is not authorized') + anon_login, anon_password = anoninfo + # use vreg's repository cache + return connect(repo, anon_login, password=anon_password) + + +@add_metaclass(class_deprecated) +class ClientConnection(Connection): + __deprecation_warning__ = '[3.20] %(cls)s is deprecated, use Connection instead' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/req.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/req.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,508 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Base class for request/session""" + +__docformat__ = "restructuredtext en" + +from warnings import warn +from datetime import time, datetime, timedelta + +from six import PY2, PY3, text_type +from six.moves.urllib.parse import parse_qs, parse_qsl, quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit + +from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated +from logilab.common.date import ustrftime, strptime, todate, todatetime + +from rql.utils import rqlvar_maker + +from cubicweb import (Unauthorized, NoSelectableObject, NoResultError, + MultipleResultsError, uilib) +from cubicweb.rset import ResultSet + +ONESECOND = timedelta(0, 1, 0) +CACHE_REGISTRY = {} + +class FindEntityError(Exception): + """raised when find_one_entity() can not return one and only one entity""" + +class Cache(dict): + def __init__(self): + super(Cache, self).__init__() + _now = datetime.now() + self.cache_creation_date = _now + self.latest_cache_lookup = _now + + +class RequestSessionBase(object): + """base class containing stuff shared by server session and web request + + request/session is the main resources accessor, mainly through it's vreg + attribute: + + :attribute vreg: the instance's registry + :attribute vreg.schema: the instance's schema + :attribute vreg.config: the instance's configuration + """ + is_request = True # False for repository session + + def __init__(self, vreg): + self.vreg = vreg + try: + encoding = vreg.property_value('ui.encoding') + except Exception: # no vreg or property not registered + encoding = 'utf-8' + self.encoding = encoding + # cache result of execution for (rql expr / eids), + # should be emptied on commit/rollback of the server session / web + # connection + self.user = None + self.local_perm_cache = {} + self._ = text_type + + def _set_user(self, orig_user): + """set the user for this req_session_base + + A special method is needed to ensure the linked user is linked to the + connection too. 
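A related hedged sketch: falling back to the anonymous connection defined above when credentials are rejected (this only works if the instance configuration defines an anonymous user):

    from cubicweb import AuthenticationError, repoapi

    try:
        cnx = repoapi.connect(repo, login, password=password)
    except AuthenticationError:
        cnx = repoapi.anonymous_cnx(repo)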
+ """ + rset = self.eid_rset(orig_user.eid, 'CWUser') + user_cls = self.vreg['etypes'].etype_class('CWUser') + user = user_cls(self, rset, row=0, groups=orig_user.groups, + properties=orig_user.properties) + user.cw_attr_cache['login'] = orig_user.login # cache login + self.user = user + self.set_entity_cache(user) + self.set_language(user.prefered_language()) + + + def set_language(self, lang): + """install i18n configuration for `lang` translation. + + Raises :exc:`KeyError` if translation doesn't exist. + """ + self.lang = lang + gettext, pgettext = self.vreg.config.translations[lang] + # use _cw.__ to translate a message without registering it to the catalog + self._ = self.__ = gettext + self.pgettext = pgettext + + def get_option_value(self, option): + raise NotImplementedError + + def property_value(self, key): + """return value of the property with the given key, giving priority to + user specific value if any, else using site value + """ + if self.user: + val = self.user.property_value(key) + if val is not None: + return val + return self.vreg.property_value(key) + + def etype_rset(self, etype, size=1): + """return a fake result set for a particular entity type""" + rset = ResultSet([('A',)]*size, '%s X' % etype, + description=[(etype,)]*size) + def get_entity(row, col=0, etype=etype, req=self, rset=rset): + return req.vreg['etypes'].etype_class(etype)(req, rset, row, col) + rset.get_entity = get_entity + rset.req = self + return rset + + def eid_rset(self, eid, etype=None): + """return a result set for the given eid without doing actual query + (we have the eid, we can suppose it exists and user has access to the + entity) + """ + eid = int(eid) + if etype is None: + etype = self.entity_metas(eid)['type'] + rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid}, + [(etype,)]) + rset.req = self + return rset + + def empty_rset(self): + """ return a guaranteed empty result """ + rset = ResultSet([], 'Any X WHERE X eid -1') + rset.req = self + return rset + + def entity_from_eid(self, eid, etype=None): + """return an entity instance for the given eid. No query is done""" + try: + return self.entity_cache(eid) + except KeyError: + rset = self.eid_rset(eid, etype) + entity = rset.get_entity(0, 0) + self.set_entity_cache(entity) + return entity + + def entity_cache(self, eid): + raise KeyError + + def set_entity_cache(self, entity): + pass + + def create_entity(self, etype, **kwargs): + """add a new entity of the given type + + Example (in a shell session): + + >>> c = create_entity('Company', name=u'Logilab') + >>> create_entity('Person', firstname=u'John', surname=u'Doe', + ... works_for=c) + + """ + cls = self.vreg['etypes'].etype_class(etype) + return cls.cw_instantiate(self.execute, **kwargs) + + @deprecated('[3.18] use find(etype, **kwargs).entities()') + def find_entities(self, etype, **kwargs): + """find entities of the given type and attribute values. + + >>> users = find_entities('CWGroup', name=u'users') + >>> groups = find_entities('CWGroup') + """ + return self.find(etype, **kwargs).entities() + + @deprecated('[3.18] use find(etype, **kwargs).one()') + def find_one_entity(self, etype, **kwargs): + """find one entity of the given type and attribute values. + raise :exc:`FindEntityError` if can not return one and only one entity. 
+ + >>> users = find_one_entity('CWGroup', name=u'users') + >>> groups = find_one_entity('CWGroup') + Exception() + """ + try: + return self.find(etype, **kwargs).one() + except (NoResultError, MultipleResultsError) as e: + raise FindEntityError("%s: (%s, %s)" % (str(e), etype, kwargs)) + + def find(self, etype, **kwargs): + """find entities of the given type and attribute values. + + :returns: A :class:`ResultSet` + + >>> users = find('CWGroup', name=u"users").one() + >>> groups = find('CWGroup').entities() + """ + parts = ['Any X WHERE X is %s' % etype] + varmaker = rqlvar_maker(defined='X') + eschema = self.vreg.schema.eschema(etype) + for attr, value in kwargs.items(): + if isinstance(value, list) or isinstance(value, tuple): + raise NotImplementedError("List of values are not supported") + if hasattr(value, 'eid'): + kwargs[attr] = value.eid + if attr.startswith('reverse_'): + attr = attr[8:] + assert attr in eschema.objrels, \ + '%s not in %s object relations' % (attr, eschema) + parts.append( + '%(varname)s %(attr)s X, ' + '%(varname)s eid %%(reverse_%(attr)s)s' + % {'attr': attr, 'varname': next(varmaker)}) + else: + assert attr in eschema.subjrels, \ + '%s not in %s subject relations' % (attr, eschema) + parts.append('X %(attr)s %%(%(attr)s)s' % {'attr': attr}) + + rql = ', '.join(parts) + + return self.execute(rql, kwargs) + + def ensure_ro_rql(self, rql): + """raise an exception if the given rql is not a select query""" + first = rql.split(None, 1)[0].lower() + if first in ('insert', 'set', 'delete'): + raise Unauthorized(self._('only select queries are authorized')) + + def get_cache(self, cachename): + """cachename should be dotted names as in : + + - cubicweb.mycache + - cubes.blog.mycache + - etc. + """ + warn.warning('[3.19] .get_cache will disappear soon. ' + 'Distributed caching mechanisms are being introduced instead.' + 'Other caching mechanism can be used more reliably ' + 'to the same effect.', + DeprecationWarning) + if cachename in CACHE_REGISTRY: + cache = CACHE_REGISTRY[cachename] + else: + cache = CACHE_REGISTRY[cachename] = Cache() + _now = datetime.now() + if _now > cache.latest_cache_lookup + ONESECOND: + ecache = self.execute( + 'Any C,T WHERE C is CWCache, C name %(name)s, C timestamp T', + {'name':cachename}).get_entity(0,0) + cache.latest_cache_lookup = _now + if not ecache.valid(cache.cache_creation_date): + cache.clear() + cache.cache_creation_date = _now + return cache + + # url generation methods ################################################## + + def build_url(self, *args, **kwargs): + """return an absolute URL using params dictionary key/values as URL + parameters. Values are automatically URL quoted, and the + publishing method to use may be specified or will be guessed. + + if ``__secure__`` argument is True, the request will try to build a + https url. 
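Building on find() above, relation criteria (including reverse_* keywords) translate into the RQL built by the loop; a hedged sketch assuming a hypothetical Person works_for Company schema:

    company = req.find('Company', name=u'Logilab').one()
    # subject relation: persons working for that company
    employees = list(req.find('Person', works_for=company).entities())
    # object relation, via the reverse_ prefix: companies a given person works for
    companies = list(req.find('Company', reverse_works_for=employees[0]).entities())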
+ + raises :exc:`ValueError` if None is found in arguments + """ + # use *args since we don't want first argument to be "anonymous" to + # avoid potential clash with kwargs + method = None + if args: + assert len(args) == 1, 'only 0 or 1 non-named-argument expected' + method = args[0] + if method is None: + method = 'view' + # XXX I (adim) think that if method is passed explicitly, we should + # not try to process it and directly call req.build_url() + base_url = kwargs.pop('base_url', None) + if base_url is None: + secure = kwargs.pop('__secure__', None) + base_url = self.base_url(secure=secure) + if '_restpath' in kwargs: + assert method == 'view', repr(method) + path = kwargs.pop('_restpath') + else: + path = method + if not kwargs: + return u'%s%s' % (base_url, path) + return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs)) + + def build_url_params(self, **kwargs): + """return encoded params to incorporate them in a URL""" + args = [] + for param, values in kwargs.items(): + if not isinstance(values, (list, tuple)): + values = (values,) + for value in values: + assert value is not None + args.append(u'%s=%s' % (param, self.url_quote(value))) + return '&'.join(args) + + def url_quote(self, value, safe=''): + """urllib.quote is not unicode safe, use this method to do the + necessary encoding / decoding. Also it's designed to quote each + part of a url path and so the '/' character will be encoded as well. + """ + if PY2 and isinstance(value, unicode): + quoted = urlquote(value.encode(self.encoding), safe=safe) + return unicode(quoted, self.encoding) + return urlquote(str(value), safe=safe) + + def url_unquote(self, quoted): + """returns a unicode unquoted string + + decoding is based on `self.encoding` which is the encoding + used in `url_quote` + """ + if PY3: + return urlunquote(quoted) + if isinstance(quoted, unicode): + quoted = quoted.encode(self.encoding) + try: + return unicode(urlunquote(quoted), self.encoding) + except UnicodeDecodeError: # might occurs on manually typed URLs + return unicode(urlunquote(quoted), 'iso-8859-1') + + def url_parse_qsl(self, querystring): + """return a list of (key, val) found in the url quoted query string""" + if PY3: + for key, val in parse_qsl(querystring): + yield key, val + return + if isinstance(querystring, unicode): + querystring = querystring.encode(self.encoding) + for key, val in parse_qsl(querystring): + try: + yield unicode(key, self.encoding), unicode(val, self.encoding) + except UnicodeDecodeError: # might occurs on manually typed URLs + yield unicode(key, 'iso-8859-1'), unicode(val, 'iso-8859-1') + + + def rebuild_url(self, url, **newparams): + """return the given url with newparams inserted. If any new params + is already specified in the url, it's overriden by the new value + + newparams may only be mono-valued. + """ + if PY2 and isinstance(url, unicode): + url = url.encode(self.encoding) + schema, netloc, path, query, fragment = urlsplit(url) + query = parse_qs(query) + # sort for testing predictability + for key, val in sorted(newparams.items()): + query[key] = (self.url_quote(val),) + query = '&'.join(u'%s=%s' % (param, value) + for param, values in sorted(query.items()) + for value in values) + return urlunsplit((schema, netloc, path, query, fragment)) + + # bound user related methods ############################################### + + @cached + def user_data(self): + """returns a dictionary with this user's information. 
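An illustrative sketch of the URL helpers above (hypothetical base-url; parameter order may vary):

    url = req.build_url('view', rql='Any X WHERE X is CWUser', vid='list')
    # e.g. u'http://example.org/view?rql=Any%20X%20WHERE%20X%20is%20CWUser&vid=list'
    url = req.rebuild_url(url, vid='table')   # overrides the existing vid parameter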
+ + The keys are : + + login + The user login + + name + The user name, returned by user.name() + + email + The user principal email + + """ + userinfo = {} + user = self.user + userinfo['login'] = user.login + userinfo['name'] = user.name() + userinfo['email'] = user.cw_adapt_to('IEmailable').get_email() + return userinfo + + # formating methods ####################################################### + + def view(self, __vid, rset=None, __fallback_oid=None, __registry='views', + initargs=None, w=None, **kwargs): + """Select object with the given id (`__oid`) then render it. If the + object isn't selectable, try to select fallback object if + `__fallback_oid` is specified. + + If specified `initargs` is expected to be a dictionary containing + arguments that should be given to selection (hence to object's __init__ + as well), but not to render(). Other arbitrary keyword arguments will be + given to selection *and* to render(), and so should be handled by + object's call or cell_call method.. + """ + if initargs is None: + initargs = kwargs + else: + initargs.update(kwargs) + try: + view = self.vreg[__registry].select(__vid, self, rset=rset, **initargs) + except NoSelectableObject: + if __fallback_oid is None: + raise + view = self.vreg[__registry].select(__fallback_oid, self, + rset=rset, **initargs) + return view.render(w=w, **kwargs) + + def printable_value(self, attrtype, value, props=None, displaytime=True, + formatters=uilib.PRINTERS): + """return a displayablye value (i.e. unicode string)""" + if value is None: + return u'' + try: + as_string = formatters[attrtype] + except KeyError: + self.error('given bad attrtype %s', attrtype) + return unicode(value) + return as_string(value, self, props, displaytime) + + def format_date(self, date, date_format=None, time=False): + """return a string for a date time according to instance's + configuration + """ + if date is not None: + if date_format is None: + if time: + date_format = self.property_value('ui.datetime-format') + else: + date_format = self.property_value('ui.date-format') + return ustrftime(date, date_format) + return u'' + + def format_time(self, time): + """return a string for a time according to instance's + configuration + """ + if time is not None: + return ustrftime(time, self.property_value('ui.time-format')) + return u'' + + def format_float(self, num): + """return a string for floating point number according to instance's + configuration + """ + if num is not None: + return self.property_value('ui.float-format') % num + return u'' + + def parse_datetime(self, value, etype='Datetime'): + """get a datetime or time from a string (according to etype) + Datetime formatted as Date are accepted + """ + assert etype in ('Datetime', 'Date', 'Time'), etype + # XXX raise proper validation error + if etype == 'Datetime': + format = self.property_value('ui.datetime-format') + try: + return todatetime(strptime(value, format)) + except ValueError: + pass + elif etype == 'Time': + format = self.property_value('ui.time-format') + try: + # (adim) I can't find a way to parse a Time with a custom format + date = strptime(value, format) # this returns a DateTime + return time(date.hour, date.minute, date.second) + except ValueError: + raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') + % {'value': value, 'format': format}) + try: + format = self.property_value('ui.date-format') + dt = strptime(value, format) + if etype == 'Datetime': + return todatetime(dt) + return todate(dt) + except ValueError: + raise 
ValueError(self._('can\'t parse %(value)r (expected %(format)s)') + % {'value': value, 'format': format}) + + def _base_url(self, secure=None): + if secure: + return self.vreg.config.get('https-url') or self.vreg.config['base-url'] + return self.vreg.config['base-url'] + + def base_url(self, secure=None): + """return the root url of the instance + """ + url = self._base_url(secure=secure) + return url if url is None else url.rstrip('/') + '/' + + # abstract methods to override according to the web front-end ############# + + def describe(self, eid, asdict=False): + """return a tuple (type, sourceuri, extid) for the entity with id """ + raise NotImplementedError diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/rqlrewrite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/rqlrewrite.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,933 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""RQL rewriting utilities : insert rql expression snippets into rql syntax +tree. + +This is used for instance for read security checking in the repository. +""" +__docformat__ = "restructuredtext en" + +from six import text_type, string_types + +from rql import nodes as n, stmts, TypeResolverException +from rql.utils import common_parent + +from yams import BadSchemaDefinition + +from logilab.common import tempattr +from logilab.common.graph import has_path + +from cubicweb import Unauthorized +from cubicweb.schema import RRQLExpression + +def cleanup_solutions(rqlst, solutions): + for sol in solutions: + for vname in list(sol): + if not (vname in rqlst.defined_vars or vname in rqlst.aliases): + del sol[vname] + + +def add_types_restriction(schema, rqlst, newroot=None, solutions=None): + if newroot is None: + assert solutions is None + if hasattr(rqlst, '_types_restr_added'): + return + solutions = rqlst.solutions + newroot = rqlst + rqlst._types_restr_added = True + else: + assert solutions is not None + rqlst = rqlst.stmt + eschema = schema.eschema + allpossibletypes = {} + for solution in solutions: + for varname, etype in solution.items(): + # XXX not considering aliases by design, right ? 
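A hedged sketch of the formatting helpers defined just above (output depends on the instance's ui.* properties; the default '%Y/%m/%d' date format is assumed):

    from datetime import date

    text = req.format_date(date(2016, 1, 16))          # e.g. u'2016/01/16'
    when = req.parse_datetime(u'2016/01/16', 'Date')   # back to a date object
    price = req.format_float(3.14159)                  # e.g. u'3.14'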
+ if varname not in newroot.defined_vars or eschema(etype).final: + continue + allpossibletypes.setdefault(varname, set()).add(etype) + # XXX could be factorized with add_etypes_restriction from rql 0.31 + for varname in sorted(allpossibletypes): + var = newroot.defined_vars[varname] + stinfo = var.stinfo + if stinfo.get('uidrel') is not None: + continue # eid specified, no need for additional type specification + try: + typerel = rqlst.defined_vars[varname].stinfo.get('typerel') + except KeyError: + assert varname in rqlst.aliases + continue + if newroot is rqlst and typerel is not None: + mytyperel = typerel + else: + for vref in var.references(): + rel = vref.relation() + if rel and rel.is_types_restriction(): + mytyperel = rel + break + else: + mytyperel = None + possibletypes = allpossibletypes[varname] + if mytyperel is not None: + if mytyperel.r_type == 'is_instance_of': + # turn is_instance_of relation into a is relation since we've + # all possible solutions and don't want to bother with + # potential is_instance_of incompatibility + mytyperel.r_type = 'is' + if len(possibletypes) > 1: + node = n.Function('IN') + for etype in sorted(possibletypes): + node.append(n.Constant(etype, 'etype')) + else: + etype = next(iter(possibletypes)) + node = n.Constant(etype, 'etype') + comp = mytyperel.children[1] + comp.replace(comp.children[0], node) + else: + # variable has already some strict types restriction. new + # possible types can only be a subset of existing ones, so only + # remove no more possible types + for cst in mytyperel.get_nodes(n.Constant): + if not cst.value in possibletypes: + cst.parent.remove(cst) + else: + # we have to add types restriction + if stinfo.get('scope') is not None: + rel = var.scope.add_type_restriction(var, possibletypes) + else: + # tree is not annotated yet, no scope set so add the restriction + # to the root + rel = newroot.add_type_restriction(var, possibletypes) + stinfo['typerel'] = rel + stinfo['possibletypes'] = possibletypes + + +def remove_solutions(origsolutions, solutions, defined): + """when a rqlst has been generated from another by introducing security + assertions, this method returns solutions which are contained in orig + solutions + """ + newsolutions = [] + for origsol in origsolutions: + for newsol in solutions[:]: + for var, etype in origsol.items(): + try: + if newsol[var] != etype: + try: + defined[var].stinfo['possibletypes'].remove(newsol[var]) + except KeyError: + pass + break + except KeyError: + # variable has been rewritten + continue + else: + newsolutions.append(newsol) + solutions.remove(newsol) + return newsolutions + + +def _add_noinvariant(noinvariant, restricted, select, nbtrees): + # a variable can actually be invariant if it has not been restricted for + # security reason or if security assertion hasn't modified the possible + # solutions for the query + for vname in restricted: + try: + var = select.defined_vars[vname] + except KeyError: + # this is an alias + continue + if nbtrees != 1 or len(var.stinfo['possibletypes']) != 1: + noinvariant.add(var) + + +def _expand_selection(terms, selected, aliases, select, newselect): + for term in terms: + for vref in term.iget_nodes(n.VariableRef): + if not vref.name in selected: + select.append_selected(vref) + colalias = newselect.get_variable(vref.name, len(aliases)) + aliases.append(n.VariableRef(colalias)) + selected.add(vref.name) + +def _has_multiple_cardinality(etypes, rdef, ttypes_func, cardindex): + """return True if relation definitions from entity types (`etypes`) 
to + target types returned by the `ttypes_func` function all have single (1 or ?) + cardinality. + """ + for etype in etypes: + for ttype in ttypes_func(etype): + if rdef(etype, ttype).cardinality[cardindex] in '+*': + return True + return False + +def _compatible_relation(relations, stmt, sniprel): + """Search among given rql relation nodes if there is one 'compatible' with the + snippet relation, and return it if any, else None. + + A relation is compatible if it: + * belongs to the currently processed statement, + * isn't negged (i.e. direct parent is a NOT node) + * isn't optional (outer join) or similarly as the snippet relation + """ + for rel in relations: + # don't share if relation's scope is not the current statement + if rel.scope is not stmt: + continue + # don't share neged relation + if rel.neged(strict=True): + continue + # don't share optional relation, unless the snippet relation is + # similarly optional + if rel.optional and rel.optional != sniprel.optional: + continue + return rel + return None + + +def iter_relations(stinfo): + # this is a function so that test may return relation in a predictable order + return stinfo['relations'] - stinfo['rhsrelations'] + + +class Unsupported(Exception): + """raised when an rql expression can't be inserted in some rql query + because it create an unresolvable query (eg no solutions found) + """ + +class VariableFromSubQuery(Exception): + """flow control exception to indicate that a variable is coming from a + subquery, and let parent act accordingly + """ + def __init__(self, variable): + self.variable = variable + + +class RQLRewriter(object): + """Insert some rql snippets into another rql syntax tree, for security / + relation vocabulary. This implies that it should only restrict results of + the original query, not generate new ones. Hence, inserted snippets are + inserted under an EXISTS node. + + This class *isn't thread safe*. 
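To make the rewriter's job concrete, a hedged illustration with a hypothetical Ticket type whose read permission is guarded by the RQL expression 'X owned_by U':

    original query:   Any X WHERE X is Ticket
    rewritten query:  Any X WHERE X is Ticket, EXISTS(X owned_by %(A)s)

where the generated %(A)s substitute is bound to the current user's eid, so only tickets owned by that user are returned (actual variable names are generated).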
+ """ + + def __init__(self, session): + self.session = session + vreg = session.vreg + self.schema = vreg.schema + self.annotate = vreg.rqlhelper.annotate + self._compute_solutions = vreg.solutions + + def compute_solutions(self): + self.annotate(self.select) + try: + self._compute_solutions(self.session, self.select, self.kwargs) + except TypeResolverException: + raise Unsupported(str(self.select)) + if len(self.select.solutions) < len(self.solutions): + raise Unsupported() + + def insert_local_checks(self, select, kwargs, + localchecks, restricted, noinvariant): + """ + select: the rql syntax tree Select node + kwargs: query arguments + + localchecks: {(('Var name', (rqlexpr1, rqlexpr2)), + ('Var name1', (rqlexpr1, rqlexpr23))): [solution]} + + (see querier._check_permissions docstring for more information) + + restricted: set of variable names to which an rql expression has to be + applied + + noinvariant: set of variable names that can't be considered has + invariant due to security reason (will be filed by this method) + """ + nbtrees = len(localchecks) + myunion = union = select.parent + # transform in subquery when len(localchecks)>1 and groups + if nbtrees > 1 and (select.orderby or select.groupby or + select.having or select.has_aggregat or + select.distinct or + select.limit or select.offset): + newselect = stmts.Select() + # only select variables in subqueries + origselection = select.selection + select.select_only_variables() + select.has_aggregat = False + # create subquery first so correct node are used on copy + # (eg ColumnAlias instead of Variable) + aliases = [n.VariableRef(newselect.get_variable(vref.name, i)) + for i, vref in enumerate(select.selection)] + selected = set(vref.name for vref in aliases) + # now copy original selection and groups + for term in origselection: + newselect.append_selected(term.copy(newselect)) + if select.orderby: + sortterms = [] + for sortterm in select.orderby: + sortterms.append(sortterm.copy(newselect)) + for fnode in sortterm.get_nodes(n.Function): + if fnode.name == 'FTIRANK': + # we've to fetch the has_text relation as well + var = fnode.children[0].variable + rel = next(iter(var.stinfo['ftirels'])) + assert not rel.ored(), 'unsupported' + newselect.add_restriction(rel.copy(newselect)) + # remove relation from the orig select and + # cleanup variable stinfo + rel.parent.remove(rel) + var.stinfo['ftirels'].remove(rel) + var.stinfo['relations'].remove(rel) + # XXX not properly re-annotated after security insertion? + newvar = newselect.get_variable(var.name) + newvar.stinfo.setdefault('ftirels', set()).add(rel) + newvar.stinfo.setdefault('relations', set()).add(rel) + newselect.set_orderby(sortterms) + _expand_selection(select.orderby, selected, aliases, select, newselect) + select.orderby = () # XXX dereference? + if select.groupby: + newselect.set_groupby([g.copy(newselect) for g in select.groupby]) + _expand_selection(select.groupby, selected, aliases, select, newselect) + select.groupby = () # XXX dereference? + if select.having: + newselect.set_having([g.copy(newselect) for g in select.having]) + _expand_selection(select.having, selected, aliases, select, newselect) + select.having = () # XXX dereference? 
+ if select.limit: + newselect.limit = select.limit + select.limit = None + if select.offset: + newselect.offset = select.offset + select.offset = 0 + myunion = stmts.Union() + newselect.set_with([n.SubQuery(aliases, myunion)], check=False) + newselect.distinct = select.distinct + solutions = [sol.copy() for sol in select.solutions] + cleanup_solutions(newselect, solutions) + newselect.set_possible_types(solutions) + # if some solutions doesn't need rewriting, insert original + # select as first union subquery + if () in localchecks: + myunion.append(select) + # we're done, replace original select by the new select with + # subqueries (more added in the loop below) + union.replace(select, newselect) + elif not () in localchecks: + union.remove(select) + for lcheckdef, lchecksolutions in localchecks.items(): + if not lcheckdef: + continue + myrqlst = select.copy(solutions=lchecksolutions) + myunion.append(myrqlst) + # in-place rewrite + annotation / simplification + lcheckdef = [({var: 'X'}, rqlexprs) for var, rqlexprs in lcheckdef] + self.rewrite(myrqlst, lcheckdef, kwargs) + _add_noinvariant(noinvariant, restricted, myrqlst, nbtrees) + if () in localchecks: + select.set_possible_types(localchecks[()]) + add_types_restriction(self.schema, select) + _add_noinvariant(noinvariant, restricted, select, nbtrees) + self.annotate(union) + + def rewrite(self, select, snippets, kwargs, existingvars=None): + """ + snippets: (varmap, list of rql expression) + with varmap a *dict* {select var: snippet var} + """ + self.select = select + # remove_solutions used below require a copy + self.solutions = solutions = select.solutions[:] + self.kwargs = kwargs + self.u_varname = None + self.removing_ambiguity = False + self.exists_snippet = {} + self.pending_keys = [] + self.existingvars = existingvars + # we have to annotate the rqlst before inserting snippets, even though + # we'll have to redo it later + self.annotate(select) + self.insert_snippets(snippets) + if not self.exists_snippet and self.u_varname: + # U has been inserted than cancelled, cleanup + select.undefine_variable(select.defined_vars[self.u_varname]) + # clean solutions according to initial solutions + newsolutions = remove_solutions(solutions, select.solutions, + select.defined_vars) + assert len(newsolutions) >= len(solutions), ( + 'rewritten rql %s has lost some solutions, there is probably ' + 'something wrong in your schema permission (for instance using a ' + 'RQLExpression which inserts a relation which doesn\'t exist in ' + 'the schema)\nOrig solutions: %s\nnew solutions: %s' % ( + select, solutions, newsolutions)) + if len(newsolutions) > len(solutions): + newsolutions = self.remove_ambiguities(snippets, newsolutions) + assert newsolutions + select.solutions = newsolutions + add_types_restriction(self.schema, select) + + def insert_snippets(self, snippets, varexistsmap=None): + self.rewritten = {} + for varmap, rqlexprs in snippets: + if isinstance(varmap, dict): + varmap = tuple(sorted(varmap.items())) + else: + assert isinstance(varmap, tuple), varmap + if varexistsmap is not None and not varmap in varexistsmap: + continue + self.insert_varmap_snippets(varmap, rqlexprs, varexistsmap) + + def init_from_varmap(self, varmap, varexistsmap=None): + self.varmap = varmap + self.revvarmap = {} + self.varinfos = [] + for i, (selectvar, snippetvar) in enumerate(varmap): + assert snippetvar in 'SOX' + self.revvarmap[snippetvar] = (selectvar, i) + vi = {} + self.varinfos.append(vi) + try: + vi['const'] = int(selectvar) + vi['rhs_rels'] = 
vi['lhs_rels'] = {} + except ValueError: + try: + vi['stinfo'] = sti = self.select.defined_vars[selectvar].stinfo + except KeyError: + vi['stinfo'] = sti = self._subquery_variable(selectvar) + if varexistsmap is None: + # build an index for quick access to relations + vi['rhs_rels'] = {} + for rel in sti.get('rhsrelations', []): + vi['rhs_rels'].setdefault(rel.r_type, []).append(rel) + vi['lhs_rels'] = {} + for rel in sti.get('relations', []): + if not rel in sti.get('rhsrelations', []): + vi['lhs_rels'].setdefault(rel.r_type, []).append(rel) + else: + vi['rhs_rels'] = vi['lhs_rels'] = {} + + def _subquery_variable(self, selectvar): + raise VariableFromSubQuery(selectvar) + + def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap): + try: + self.init_from_varmap(varmap, varexistsmap) + except VariableFromSubQuery as ex: + # variable may have been moved to a newly inserted subquery + # we should insert snippet in that subquery + subquery = self.select.aliases[ex.variable].query + assert len(subquery.children) == 1, subquery + subselect = subquery.children[0] + RQLRewriter(self.session).rewrite(subselect, [(varmap, rqlexprs)], + self.kwargs) + return + self._insert_scope = None + previous = None + inserted = False + for rqlexpr in rqlexprs: + self.current_expr = rqlexpr + if varexistsmap is None: + try: + new = self.insert_snippet(varmap, rqlexpr.snippet_rqlst, previous) + except Unsupported: + continue + inserted = True + if new is not None and self._insert_scope is None: + self.exists_snippet[rqlexpr] = new + previous = previous or new + else: + # called to reintroduce snippet due to ambiguity creation, + # so skip snippets which are not introducing this ambiguity + exists = varexistsmap[varmap] + if self.exists_snippet.get(rqlexpr) is exists: + self.insert_snippet(varmap, rqlexpr.snippet_rqlst, exists) + if varexistsmap is None and not inserted: + # no rql expression found matching rql solutions. User has no access right + raise Unauthorized() # XXX may also be because of bad constraints in schema definition + + def insert_snippet(self, varmap, snippetrqlst, previous=None): + new = snippetrqlst.where.accept(self) + existing = self.existingvars + self.existingvars = None + try: + return self._insert_snippet(varmap, previous, new) + finally: + self.existingvars = existing + + def _inserted_root(self, new): + if not isinstance(new, (n.Exists, n.Not)): + new = n.Exists(new) + return new + + def _insert_snippet(self, varmap, previous, new): + """insert `new` snippet into the syntax tree, which have been rewritten + using `varmap`. In cases where an action is protected by several rql + expresssion, `previous` will be the first rql expression which has been + inserted, and so should be ORed with the following expressions. 
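For instance (hypothetical permissions), if two read expressions 'X owned_by U' and 'X is_public TRUE' guard the same variable, the inserted EXISTS nodes end up ORed, roughly:

    Any X WHERE X is Ticket, (EXISTS(X owned_by %(A)s) OR EXISTS(X is_public TRUE))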
+ """ + if new is not None: + if self._insert_scope is None: + insert_scope = None + for vi in self.varinfos: + scope = vi.get('stinfo', {}).get('scope', self.select) + if insert_scope is None: + insert_scope = scope + else: + insert_scope = common_parent(scope, insert_scope) + else: + insert_scope = self._insert_scope + if self._insert_scope is None and any(vi.get('stinfo', {}).get('optrelations') + for vi in self.varinfos): + assert previous is None + self._insert_scope, new = self.snippet_subquery(varmap, new) + self.insert_pending() + #self._insert_scope = None + return new + new = self._inserted_root(new) + if previous is None: + insert_scope.add_restriction(new) + else: + grandpa = previous.parent + or_ = n.Or(previous, new) + grandpa.replace(previous, or_) + if not self.removing_ambiguity: + try: + self.compute_solutions() + except Unsupported: + # some solutions have been lost, can't apply this rql expr + if previous is None: + self.current_statement().remove_node(new, undefine=True) + else: + grandpa.replace(or_, previous) + self._cleanup_inserted(new) + raise + else: + with tempattr(self, '_insert_scope', new): + self.insert_pending() + return new + self.insert_pending() + + def insert_pending(self): + """pending_keys hold variable referenced by U has__permission X + relation. + + Once the snippet introducing this has been inserted and solutions + recomputed, we have to insert snippet defined for of entity + types taken by X + """ + stmt = self.current_statement() + while self.pending_keys: + key, action = self.pending_keys.pop() + try: + varname = self.rewritten[key] + except KeyError: + try: + varname = self.revvarmap[key[-1]][0] + except KeyError: + # variable isn't used anywhere else, we can't insert security + raise Unauthorized() + ptypes = stmt.defined_vars[varname].stinfo['possibletypes'] + if len(ptypes) > 1: + # XXX dunno how to handle this + self.session.error( + 'cant check security of %s, ambigous type for %s in %s', + stmt, varname, key[0]) # key[0] == the rql expression + raise Unauthorized() + etype = next(iter(ptypes)) + eschema = self.schema.eschema(etype) + if not eschema.has_perm(self.session, action): + rqlexprs = eschema.get_rqlexprs(action) + if not rqlexprs: + raise Unauthorized() + self.insert_snippets([({varname: 'X'}, rqlexprs)]) + + def snippet_subquery(self, varmap, transformedsnippet): + """introduce the given snippet in a subquery""" + subselect = stmts.Select() + snippetrqlst = n.Exists(transformedsnippet.copy(subselect)) + get_rschema = self.schema.rschema + aliases = [] + done = set() + for i, (selectvar, _) in enumerate(varmap): + need_null_test = False + subselectvar = subselect.get_variable(selectvar) + subselect.append_selected(n.VariableRef(subselectvar)) + aliases.append(selectvar) + todo = [(selectvar, self.varinfos[i]['stinfo'])] + while todo: + varname, stinfo = todo.pop() + done.add(varname) + for rel in iter_relations(stinfo): + if rel in done: + continue + done.add(rel) + rschema = get_rschema(rel.r_type) + if rschema.final or rschema.inlined: + rel.children[0].name = varname # XXX explain why + subselect.add_restriction(rel.copy(subselect)) + for vref in rel.children[1].iget_nodes(n.VariableRef): + if isinstance(vref.variable, n.ColumnAlias): + # XXX could probably be handled by generating the + # subquery into the detected subquery + raise BadSchemaDefinition( + "cant insert security because of usage two inlined " + "relations in this query. 
You should probably at " + "least uninline %s" % rel.r_type) + subselect.append_selected(vref.copy(subselect)) + aliases.append(vref.name) + self.select.remove_node(rel) + # when some inlined relation has to be copied in the + # subquery and that relation is optional, we need to + # test that either value is NULL or that the snippet + # condition is satisfied + if varname == selectvar and rel.optional and rschema.inlined: + need_null_test = True + # also, if some attributes or inlined relation of the + # object variable are accessed, we need to get all those + # from the subquery as well + if vref.name not in done and rschema.inlined: + # we can use vref here define in above for loop + ostinfo = vref.variable.stinfo + for orel in iter_relations(ostinfo): + orschema = get_rschema(orel.r_type) + if orschema.final or orschema.inlined: + todo.append( (vref.name, ostinfo) ) + break + if need_null_test: + snippetrqlst = n.Or( + n.make_relation(subselect.get_variable(selectvar), 'is', + (None, None), n.Constant, + operator='='), + snippetrqlst) + subselect.add_restriction(snippetrqlst) + if self.u_varname: + # generate an identifier for the substitution + argname = subselect.allocate_varname() + while argname in self.kwargs: + argname = subselect.allocate_varname() + subselect.add_constant_restriction(subselect.get_variable(self.u_varname), + 'eid', text_type(argname), 'Substitute') + self.kwargs[argname] = self.session.user.eid + add_types_restriction(self.schema, subselect, subselect, + solutions=self.solutions) + myunion = stmts.Union() + myunion.append(subselect) + aliases = [n.VariableRef(self.select.get_variable(name, i)) + for i, name in enumerate(aliases)] + self.select.add_subquery(n.SubQuery(aliases, myunion), check=False) + self._cleanup_inserted(transformedsnippet) + try: + self.compute_solutions() + except Unsupported: + # some solutions have been lost, can't apply this rql expr + self.select.remove_subquery(self.select.with_[-1]) + raise + return subselect, snippetrqlst + + def remove_ambiguities(self, snippets, newsolutions): + # the snippet has introduced some ambiguities, we have to resolve them + # "manually" + variantes = self.build_variantes(newsolutions) + # insert "is" where necessary + varexistsmap = {} + self.removing_ambiguity = True + for (erqlexpr, varmap, oldvarname), etype in variantes[0].items(): + varname = self.rewritten[(erqlexpr, varmap, oldvarname)] + var = self.select.defined_vars[varname] + exists = var.references()[0].scope + exists.add_constant_restriction(var, 'is', etype, 'etype') + varexistsmap[varmap] = exists + # insert ORED exists where necessary + for variante in variantes[1:]: + self.insert_snippets(snippets, varexistsmap) + for key, etype in variante.items(): + varname = self.rewritten[key] + try: + var = self.select.defined_vars[varname] + except KeyError: + # not a newly inserted variable + continue + exists = var.references()[0].scope + exists.add_constant_restriction(var, 'is', etype, 'etype') + # recompute solutions + self.compute_solutions() + # clean solutions according to initial solutions + return remove_solutions(self.solutions, self.select.solutions, + self.select.defined_vars) + + def build_variantes(self, newsolutions): + variantes = set() + for sol in newsolutions: + variante = [] + for key, newvar in self.rewritten.items(): + variante.append( (key, sol[newvar]) ) + variantes.add(tuple(variante)) + # rebuild variantes as dict + variantes = [dict(variante) for variante in variantes] + # remove variable which have always the same type + 
for key in self.rewritten: + it = iter(variantes) + etype = next(it)[key] + for variante in it: + if variante[key] != etype: + break + else: + for variante in variantes: + del variante[key] + return variantes + + def _cleanup_inserted(self, node): + # cleanup inserted variable references + removed = set() + for vref in node.iget_nodes(n.VariableRef): + vref.unregister_reference() + if not vref.variable.stinfo['references']: + # no more references, undefine the variable + del self.select.defined_vars[vref.name] + removed.add(vref.name) + for key, newvar in list(self.rewritten.items()): + if newvar in removed: + del self.rewritten[key] + + + def _may_be_shared_with(self, sniprel, target): + """if the snippet relation can be skipped to use a relation from the + original query, return that relation node + """ + if sniprel.neged(strict=True): + return None # no way + rschema = self.schema.rschema(sniprel.r_type) + stmt = self.current_statement() + for vi in self.varinfos: + try: + if target == 'object': + orels = vi['lhs_rels'][sniprel.r_type] + cardindex = 0 + ttypes_func = rschema.objects + rdef = rschema.rdef + else: # target == 'subject': + orels = vi['rhs_rels'][sniprel.r_type] + cardindex = 1 + ttypes_func = rschema.subjects + rdef = lambda x, y: rschema.rdef(y, x) + except KeyError: + # may be raised by vi['xhs_rels'][sniprel.r_type] + continue + # if cardinality isn't in '?1', we can't ignore the snippet relation + # and use variable from the original query + if _has_multiple_cardinality(vi['stinfo']['possibletypes'], rdef, + ttypes_func, cardindex): + continue + orel = _compatible_relation(orels, stmt, sniprel) + if orel is not None: + return orel + return None + + def _use_orig_term(self, snippet_varname, term): + key = (self.current_expr, self.varmap, snippet_varname) + if key in self.rewritten: + stmt = self.current_statement() + insertedvar = stmt.defined_vars.pop(self.rewritten[key]) + for inserted_vref in insertedvar.references(): + inserted_vref.parent.replace(inserted_vref, term.copy(stmt)) + self.rewritten[key] = term.name + + def _get_varname_or_term(self, vname): + stmt = self.current_statement() + if vname == 'U': + stmt = self.select + if self.u_varname is None: + self.u_varname = stmt.allocate_varname() + # generate an identifier for the substitution + argname = stmt.allocate_varname() + while argname in self.kwargs: + argname = stmt.allocate_varname() + # insert "U eid %(u)s" + stmt.add_constant_restriction( + stmt.get_variable(self.u_varname), + 'eid', text_type(argname), 'Substitute') + self.kwargs[argname] = self.session.user.eid + return self.u_varname + key = (self.current_expr, self.varmap, vname) + try: + return self.rewritten[key] + except KeyError: + self.rewritten[key] = newvname = stmt.allocate_varname() + return newvname + + # visitor methods ########################################################## + + def _visit_binary(self, node, cls): + newnode = cls() + for c in node.children: + new = c.accept(self) + if new is None: + continue + newnode.append(new) + if len(newnode.children) == 0: + return None + if len(newnode.children) == 1: + return newnode.children[0] + return newnode + + def _visit_unary(self, node, cls): + newc = node.children[0].accept(self) + if newc is None: + return None + newnode = cls() + newnode.append(newc) + return newnode + + def visit_and(self, node): + return self._visit_binary(node, n.And) + + def visit_or(self, node): + return self._visit_binary(node, n.Or) + + def visit_not(self, node): + return self._visit_unary(node, n.Not) + + 
def visit_exists(self, node): + return self._visit_unary(node, n.Exists) + + def keep_var(self, varname): + if varname in 'SO': + return varname in self.existingvars + if varname == 'U': + return True + vargraph = self.current_expr.vargraph + for existingvar in self.existingvars: + #path = has_path(vargraph, varname, existingvar) + if not varname in vargraph or has_path(vargraph, varname, existingvar): + return True + # no path from this variable to an existing variable + return False + + def visit_relation(self, node): + lhs, rhs = node.get_variable_parts() + # remove relations where an unexistant variable and or a variable linked + # to an unexistant variable is used. + if self.existingvars: + if not self.keep_var(lhs.name): + return + if node.r_type in ('has_add_permission', 'has_update_permission', + 'has_delete_permission', 'has_read_permission'): + assert lhs.name == 'U' + action = node.r_type.split('_')[1] + key = (self.current_expr, self.varmap, rhs.name) + self.pending_keys.append( (key, action) ) + return + if isinstance(rhs, n.VariableRef): + if self.existingvars and not self.keep_var(rhs.name): + return + if lhs.name in self.revvarmap and rhs.name != 'U': + orel = self._may_be_shared_with(node, 'object') + if orel is not None: + self._use_orig_term(rhs.name, orel.children[1].children[0]) + return + elif rhs.name in self.revvarmap and lhs.name != 'U': + orel = self._may_be_shared_with(node, 'subject') + if orel is not None: + self._use_orig_term(lhs.name, orel.children[0]) + return + rel = n.Relation(node.r_type, node.optional) + for c in node.children: + rel.append(c.accept(self)) + return rel + + def visit_comparison(self, node): + cmp_ = n.Comparison(node.operator) + for c in node.children: + cmp_.append(c.accept(self)) + return cmp_ + + def visit_mathexpression(self, node): + cmp_ = n.MathExpression(node.operator) + for c in node.children: + cmp_.append(c.accept(self)) + return cmp_ + + def visit_function(self, node): + """generate filter name for a function""" + function_ = n.Function(node.name) + for c in node.children: + function_.append(c.accept(self)) + return function_ + + def visit_constant(self, node): + """generate filter name for a constant""" + return n.Constant(node.value, node.type) + + def visit_variableref(self, node): + """get the sql name for a variable reference""" + stmt = self.current_statement() + if node.name in self.revvarmap: + selectvar, index = self.revvarmap[node.name] + vi = self.varinfos[index] + if vi.get('const') is not None: + return n.Constant(vi['const'], 'Int') + return n.VariableRef(stmt.get_variable(selectvar)) + vname_or_term = self._get_varname_or_term(node.name) + if isinstance(vname_or_term, string_types): + return n.VariableRef(stmt.get_variable(vname_or_term)) + # shared term + return vname_or_term.copy(stmt) + + def current_statement(self): + if self._insert_scope is None: + return self.select + return self._insert_scope.stmt + + +class RQLRelationRewriter(RQLRewriter): + """Insert some rql snippets into another rql syntax tree, replacing computed + relations by their associated rule. + + This class *isn't thread safe*. 
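As a hedged illustration of this class (hypothetical schema), a computed relation declared in a cube as:

    from yams.buildobjs import ComputedRelation

    class works_in_region(ComputedRelation):
        rule = 'S works_for C, C located_in O'

lets a query such as 'Any R WHERE P works_in_region R' be expanded by the rewriter into roughly 'Any R WHERE P works_for C, C located_in R', the rule's S and O variables being mapped onto the query's own variables.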
+ """ + def __init__(self, session): + super(RQLRelationRewriter, self).__init__(session) + self.rules = {} + for rschema in self.schema.iter_computed_relations(): + self.rules[rschema.type] = RRQLExpression(rschema.rule) + + def rewrite(self, union, kwargs=None): + self.kwargs = kwargs + self.removing_ambiguity = False + self.existingvars = None + self.pending_keys = None + for relation in union.iget_nodes(n.Relation): + if relation.r_type in self.rules: + self.select = relation.stmt + self.solutions = solutions = self.select.solutions[:] + self.current_expr = self.rules[relation.r_type] + self._insert_scope = relation.scope + self.rewritten = {} + lhs, rhs = relation.get_variable_parts() + varmap = {lhs.name: 'S', rhs.name: 'O'} + self.init_from_varmap(tuple(sorted(varmap.items()))) + self.insert_snippet(varmap, self.current_expr.snippet_rqlst) + self.select.remove_node(relation) + + def _subquery_variable(self, selectvar): + return self.select.aliases[selectvar].stinfo + + def _inserted_root(self, new): + return new diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/rset.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/rset.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,730 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""The `ResultSet` class which is returned as result of an rql query""" +__docformat__ = "restructuredtext en" + +from warnings import warn + +from six import PY3 +from six.moves import range + +from logilab.common import nullobject +from logilab.common.decorators import cached, clear_cache, copy_cache +from rql import nodes, stmts + +from cubicweb import NotAnEntity, NoResultError, MultipleResultsError + + +_MARKER = nullobject() + + +class ResultSet(object): + """A result set wraps a RQL query result. This object implements + partially the list protocol to allow direct use as a list of + result rows. 
+ + :type rowcount: int + :param rowcount: number of rows in the result + + :type rows: list + :param rows: list of rows of result + + :type description: list + :param description: + result's description, using the same structure as the result itself + + :type rql: str or unicode + :param rql: the original RQL query string + """ + + def __init__(self, results, rql, args=None, description=None, rqlst=None): + if rqlst is not None: + warn('[3.20] rqlst parameter is deprecated', + DeprecationWarning, stacklevel=2) + self.rows = results + self.rowcount = results and len(results) or 0 + # original query and arguments + self.rql = rql + self.args = args + # entity types for each cell (same shape as rows) + # maybe discarded if specified when the query has been executed + if description is None: + self.description = [] + else: + self.description = description + # set to (limit, offset) when a result set is limited using the + # .limit method + self.limited = None + # set by the cursor which returned this resultset + self.req = None + # actions cache + self._rsetactions = None + + def __str__(self): + if not self.rows: + return '' % self.rql + return '' % (self.rql, len(self.rows)) + + def __repr__(self): + if not self.rows: + return '' % self.rql + rows = self.rows + if len(rows) > 10: + rows = rows[:10] + ['...'] + if len(rows) > 1: + # add a line break before first entity if more that one. + pattern = '' + else: + pattern = '' + + if not self.description: + return pattern % (self.rql, len(self.rows), + '\n'.join(str(r) for r in rows)) + return pattern % (self.rql, len(self.rows), + '\n'.join('%s (%s)' % (r, d) + for r, d in zip(rows, self.description))) + + def possible_actions(self, **kwargs): + if self._rsetactions is None: + self._rsetactions = {} + if kwargs: + key = tuple(sorted(kwargs.items())) + else: + key = None + try: + return self._rsetactions[key] + except KeyError: + actions = self.req.vreg['actions'].poss_visible_objects( + self.req, rset=self, **kwargs) + self._rsetactions[key] = actions + return actions + + def __len__(self): + """returns the result set's size""" + return self.rowcount + + def __getitem__(self, i): + """returns the ith element of the result set""" + return self.rows[i] #ResultSetRow(self.rows[i]) + + def __iter__(self): + """Returns an iterator over rows""" + return iter(self.rows) + + def __add__(self, rset): + # XXX buggy implementation (.rql and .args attributes at least much + # probably differ) + # at least rql could be fixed now that we have union and sub-queries + # but I tend to think that since we have that, we should not need this + # method anymore (syt) + rset = ResultSet(self.rows+rset.rows, self.rql, self.args, + self.description + rset.description) + rset.req = self.req + return rset + + def copy(self, rows=None, descr=None): + if rows is None: + rows = self.rows[:] + descr = self.description[:] + rset = ResultSet(rows, self.rql, self.args, descr) + rset.req = self.req + return rset + + def transformed_rset(self, transformcb): + """ the result set according to a given column types + + :type transormcb: callable(row, desc) + :param transformcb: + a callable which should take a row and its type description as + parameters, and return the transformed row and type description. 
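+
+        A hedged sketch of such a callback (names are illustrative only);
+        returning a false value as the new description drops the row:
+
+        .. sourcecode:: python
+
+            def keep_string_rows(row, desc):
+                if desc[0] != 'String':
+                    return None, None   # row will be skipped
+                return row, desc
+
+            string_rset = rset.transformed_rset(keep_string_rows)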
+ + + :type col: int + :param col: the column index + + :rtype: `ResultSet` + """ + rows, descr = [], [] + rset = self.copy(rows, descr) + for row, desc in zip(self.rows, self.description): + nrow, ndesc = transformcb(row, desc) + if ndesc: # transformcb returns None for ndesc to skip that row + rows.append(nrow) + descr.append(ndesc) + rset.rowcount = len(rows) + return rset + + def filtered_rset(self, filtercb, col=0): + """filter the result set according to a given filtercb + + :type filtercb: callable(entity) + :param filtercb: + a callable which should take an entity as argument and return + False if it should be skipped, else True + + :type col: int + :param col: the column index + + :rtype: `ResultSet` + """ + rows, descr = [], [] + rset = self.copy(rows, descr) + for i in range(len(self)): + if not filtercb(self.get_entity(i, col)): + continue + rows.append(self.rows[i]) + descr.append(self.description[i]) + rset.rowcount = len(rows) + return rset + + + def sorted_rset(self, keyfunc, reverse=False, col=0): + """sorts the result set according to a given keyfunc + + :type keyfunc: callable(entity) + :param keyfunc: + a callable which should take an entity as argument and return + the value used to compare and sort + + :type reverse: bool + :param reverse: if the result should be reversed + + :type col: int + :param col: the column index. if col = -1, the whole row are used + + :rtype: `ResultSet` + """ + rows, descr = [], [] + rset = self.copy(rows, descr) + if col >= 0: + entities = sorted(enumerate(self.entities(col)), + key=lambda t: keyfunc(t[1]), reverse=reverse) + else: + entities = sorted(enumerate(self), + key=lambda t: keyfunc(t[1]), reverse=reverse) + for index, _ in entities: + rows.append(self.rows[index]) + descr.append(self.description[index]) + rset.rowcount = len(rows) + return rset + + def split_rset(self, keyfunc=None, col=0, return_dict=False): + """splits the result set in multiple result sets according to + a given key + + :type keyfunc: callable(entity or FinalType) + :param keyfunc: + a callable which should take a value of the rset in argument and + return the value used to group the value. If not define, raw value + of the specified columns is used. + + :type col: int + :param col: the column index. 
if col = -1, the whole row are used + + :type return_dict: Boolean + :param return_dict: If true, the function return a mapping + (key -> rset) instead of a list of rset + + :rtype: List of `ResultSet` or mapping of `ResultSet` + + """ + result = [] + mapping = {} + for idx, line in enumerate(self): + if col >= 0: + try: + key = self.get_entity(idx, col) + except NotAnEntity: + key = line[col] + else: + key = line + if keyfunc is not None: + key = keyfunc(key) + + if key not in mapping: + rows, descr = [], [] + rset = self.copy(rows, descr) + mapping[key] = rset + result.append(rset) + else: + rset = mapping[key] + rset.rows.append(self.rows[idx]) + rset.description.append(self.description[idx]) + for rset in result: + rset.rowcount = len(rset.rows) + if return_dict: + return mapping + else: + return result + + def limited_rql(self): + """returns a printable rql for the result set associated to the object, + with limit/offset correctly set according to maximum page size and + currently displayed page when necessary + """ + # try to get page boundaries from the navigation component + # XXX we should probably not have a ref to this component here (eg in + # cubicweb) + nav = self.req.vreg['components'].select_or_none('navigation', self.req, + rset=self) + if nav: + start, stop = nav.page_boundaries() + rql = self._limit_offset_rql(stop - start, start) + # result set may have be limited manually in which case navigation won't + # apply + elif self.limited: + rql = self._limit_offset_rql(*self.limited) + # navigation component doesn't apply and rset has not been limited, no + # need to limit query + else: + rql = self.printable_rql() + return rql + + def _limit_offset_rql(self, limit, offset): + rqlst = self.syntax_tree() + if len(rqlst.children) == 1: + select = rqlst.children[0] + olimit, ooffset = select.limit, select.offset + select.limit, select.offset = limit, offset + rql = rqlst.as_string(kwargs=self.args) + # restore original limit/offset + select.limit, select.offset = olimit, ooffset + else: + newselect = stmts.Select() + newselect.limit = limit + newselect.offset = offset + aliases = [nodes.VariableRef(newselect.get_variable(chr(65+i), i)) + for i in range(len(rqlst.children[0].selection))] + for vref in aliases: + newselect.append_selected(nodes.VariableRef(vref.variable)) + newselect.set_with([nodes.SubQuery(aliases, rqlst)], check=False) + newunion = stmts.Union() + newunion.append(newselect) + rql = newunion.as_string(kwargs=self.args) + rqlst.parent = None + return rql + + def limit(self, limit, offset=0, inplace=False): + """limit the result set to the given number of rows optionally starting + from an index different than 0 + + :type limit: int + :param limit: the maximum number of results + + :type offset: int + :param offset: the offset index + + :type inplace: bool + :param inplace: + if true, the result set is modified in place, else a new result set + is returned and the original is left unmodified + + :rtype: `ResultSet` + """ + stop = limit+offset + rows = self.rows[offset:stop] + descr = self.description[offset:stop] + if inplace: + rset = self + rset.rows, rset.description = rows, descr + rset.rowcount = len(rows) + clear_cache(rset, 'description_struct') + if offset: + clear_cache(rset, 'get_entity') + # we also have to fix/remove from the request entity cache entities + # which get a wrong rset reference by this limit call + for entity in self.req.cached_entities(): + if entity.cw_rset is self: + if offset <= entity.cw_row < stop: + entity.cw_row = entity.cw_row - 
offset + else: + entity.cw_rset = entity.as_rset() + entity.cw_row = entity.cw_col = 0 + else: + rset = self.copy(rows, descr) + if not offset: + # can copy built entity caches + copy_cache(rset, 'get_entity', self) + rset.limited = (limit, offset) + return rset + + def printable_rql(self, encoded=_MARKER): + """return the result set's origin rql as a string, with arguments + substitued + """ + if encoded is not _MARKER: + warn('[3.21] the "encoded" argument is deprecated', DeprecationWarning) + encoding = self.req.encoding + rqlstr = self.syntax_tree().as_string(kwargs=self.args) + if PY3: + return rqlstr + # sounds like we get encoded or unicode string due to a bug in as_string + if not encoded: + if isinstance(rqlstr, unicode): + return rqlstr + return unicode(rqlstr, encoding) + else: + if isinstance(rqlstr, unicode): + return rqlstr.encode(encoding) + return rqlstr + + # client helper methods ################################################### + + def entities(self, col=0): + """iter on entities with eid in the `col` column of the result set""" + for i in range(len(self)): + # may have None values in case of outer join (or aggregat on eid + # hacks) + if self.rows[i][col] is not None: + yield self.get_entity(i, col) + + def iter_rows_with_entities(self): + """ iterates over rows, and for each row + eids are converted to plain entities + """ + for i, row in enumerate(self): + _row = [] + for j, col in enumerate(row): + try: + _row.append(self.get_entity(i, j) if col is not None else col) + except NotAnEntity: + _row.append(col) + yield _row + + def complete_entity(self, row, col=0, skip_bytes=True): + """short cut to get an completed entity instance for a particular + row (all instance's attributes have been fetched) + """ + entity = self.get_entity(row, col) + entity.complete(skip_bytes=skip_bytes) + return entity + + @cached + def get_entity(self, row, col): + """convenience method for query retrieving a single entity, returns a + partially initialized Entity instance. + + .. warning:: + + Due to the cache wrapping this function, you should NEVER give row as + a named parameter (i.e. `rset.get_entity(0, 1)` is OK but + `rset.get_entity(row=0, col=1)` isn't) + + :type row,col: int, int + :param row,col: + row and col numbers localizing the entity among the result's table + + :return: the partially initialized `Entity` instance + """ + etype = self.description[row][col] + try: + eschema = self.req.vreg.schema.eschema(etype) + if eschema.final: + raise NotAnEntity(etype) + except KeyError: + raise NotAnEntity(etype) + return self._build_entity(row, col) + + def one(self, col=0): + """Retrieve exactly one entity from the query. + + If the result set is empty, raises :exc:`NoResultError`. + If the result set has more than one row, raises + :exc:`MultipleResultsError`. + + :type col: int + :param col: The column localising the entity in the unique row + + :return: the partially initialized `Entity` instance + """ + if len(self) == 1: + return self.get_entity(0, col) + elif len(self) == 0: + raise NoResultError("No row was found for one()") + else: + raise MultipleResultsError("Multiple rows were found for one()") + + def _build_entity(self, row, col): + """internal method to get a single entity, returns a partially + initialized Entity instance. + + partially means that only attributes selected in the RQL query will be + directly assigned to the entity. 
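+
+        For instance (hedged illustration, using the public ``get_entity``
+        wrapper and an arbitrary query), selecting an attribute column along
+        with the entity pre-fills its attribute cache:
+
+        .. sourcecode:: python
+
+            rset = req.execute('Any X, L WHERE X is CWUser, X login L')
+            user = rset.get_entity(0, 0)
+            # 'login' is already in user.cw_attr_cache, no extra query needed
+
+        while attributes absent from the selection are only fetched later, on
+        access.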
+ + :type row,col: int, int + :param row,col: + row and col numbers localizing the entity among the result's table + + :return: the partially initialized `Entity` instance + """ + req = self.req + if req is None: + raise AssertionError('dont call get_entity with no req on the result set') + rowvalues = self.rows[row] + eid = rowvalues[col] + assert eid is not None + # return cached entity if exists. This also avoids potential recursion + # XXX should we consider updating a cached entity with possible + # new attributes found in this resultset ? + try: + entity = req.entity_cache(eid) + except KeyError: + pass + else: + if entity.cw_rset is None: + # entity has no rset set, this means entity has been created by + # the querier (req is a repository session) and so jas no rset + # info. Add it. + entity.cw_rset = self + entity.cw_row = row + entity.cw_col = col + return entity + # build entity instance + etype = self.description[row][col] + entity = self.req.vreg['etypes'].etype_class(etype)(req, rset=self, + row=row, col=col) + entity.eid = eid + # cache entity + req.set_entity_cache(entity) + # try to complete the entity if there are some additional columns + if len(rowvalues) > 1: + eschema = entity.e_schema + eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col) + entity.eid = rowvalues[eid_col] + for attr, col_idx in attr_cols.items(): + entity.cw_attr_cache[attr] = rowvalues[col_idx] + for (rtype, role), col_idx in rel_cols.items(): + value = rowvalues[col_idx] + if value is None: + if role == 'subject': + rql = 'Any Y WHERE X %s Y, X eid %s' + else: + rql = 'Any Y WHERE Y %s X, X eid %s' + rrset = ResultSet([], rql % (rtype, entity.eid)) + rrset.req = req + else: + rrset = self._build_entity(row, col_idx).as_rset() + entity.cw_set_relation_cache(rtype, role, rrset) + return entity + + @cached + def _rset_structure(self, eschema, entity_col): + eid_col = col = entity_col + rqlst = self.syntax_tree() + get_rschema = eschema.schema.rschema + attr_cols = {} + rel_cols = {} + if rqlst.TYPE == 'select': + # UNION query, find the subquery from which this entity has been + # found + select, col = rqlst.locate_subquery(entity_col, eschema.type, self.args) + else: + select = rqlst + # take care, due to outer join support, we may find None + # values for non final relation + for i, attr, role in attr_desc_iterator(select, col, entity_col): + rschema = get_rschema(attr) + if rschema.final: + if attr == 'eid': + eid_col = i + else: + attr_cols[attr] = i + else: + # XXX takefirst=True to remove warning triggered by ambiguous relations + rdef = eschema.rdef(attr, role, takefirst=True) + # only keep value if it can't be multivalued + if rdef.role_cardinality(role) in '1?': + rel_cols[(attr, role)] = i + return eid_col, attr_cols, rel_cols + + @cached + def syntax_tree(self): + """return the syntax tree (:class:`rql.stmts.Union`) for the + originating query. You can expect it to have solutions + computed and it will be properly annotated. + """ + return self.req.vreg.parse(self.req, self.rql, self.args) + + @cached + def column_types(self, col): + """return the list of different types in the column with the given col + + :type col: int + :param col: the index of the desired column + + :rtype: list + :return: the different entities type found in the column + """ + return frozenset(struc[-1][col] for struc in self.description_struct()) + + @cached + def description_struct(self): + """return a list describing sequence of results with the same + description, e.g. 
:
+        [[0, 4, ('Bug',)]]
+        [[0, 4, ('Bug',)], [5, 8, ('Story',)]]
+        [[0, 3, ('Project', 'Version',)]]
+        """
+        result = []
+        last = None
+        for i, row in enumerate(self.description):
+            if row != last:
+                if last is not None:
+                    result[-1][1] = i - 1
+                result.append( [i, None, row] )
+                last = row
+        if last is not None:
+            result[-1][1] = i
+        return result
+
+    def _locate_query_params(self, rqlst, row, col):
+        locate_query_col = col
+        etype = self.description[row][col]
+        # final type, find a better one to locate the correct subquery
+        # (ambiguous if possible)
+        eschema = self.req.vreg.schema.eschema
+        if eschema(etype).final:
+            for select in rqlst.children:
+                try:
+                    myvar = select.selection[col].variable
+                except AttributeError:
+                    # not a variable
+                    continue
+                for i in range(len(select.selection)):
+                    if i == col:
+                        continue
+                    coletype = self.description[row][i]
+                    # None description possible on column resulting from an
+                    # outer join
+                    if coletype is None or eschema(coletype).final:
+                        continue
+                    try:
+                        ivar = select.selection[i].variable
+                    except AttributeError:
+                        # not a variable
+                        continue
+                    # check variables don't come from a subquery or are both
+                    # coming from the same subquery
+                    if getattr(ivar, 'query', None) is getattr(myvar, 'query', None):
+                        etype = coletype
+                        locate_query_col = i
+                        if len(self.column_types(i)) > 1:
+                            return etype, locate_query_col
+        return etype, locate_query_col
+
+    @cached
+    def related_entity(self, row, col):
+        """given a cell of the result set, try to return an (entity, relation
+        name) tuple to which this cell is linked.
+
+        This is especially useful when the cell is an attribute of an entity,
+        to get the entity to which this attribute belongs.
+        """
+        rqlst = self.syntax_tree()
+        # UNION query, we first have to find a 'pivot' column to use to get the
+        # actual query from which the row is coming
+        etype, locate_query_col = self._locate_query_params(rqlst, row, col)
+        # now find the query from which this entity has been found. Returned
+        # select node may be a subquery with different column indexes.
+        select = rqlst.locate_subquery(locate_query_col, etype, self.args)[0]
+        # then get the index of root query's col in the subquery
+        col = rqlst.subquery_selection_index(select, col)
+        if col is None:
+            # XXX unexpected, should fix subquery_selection_index ?
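+            # (note) the root query column could not be mapped onto the located
+            # subquery's selection, so conservatively report no related entity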
+ return None, None + try: + myvar = select.selection[col].variable + except AttributeError: + # not a variable + return None, None + rel = myvar.main_relation() + if rel is not None: + index = rel.children[0].root_selection_index() + if index is not None and self.rows[row][index]: + try: + entity = self.get_entity(row, index) + return entity, rel.r_type + except NotAnEntity as exc: + return None, None + return None, None + + @cached + def searched_text(self): + """returns the searched text in case of full-text search + + :return: searched text or `None` if the query is not + a full-text query + """ + rqlst = self.syntax_tree() + for rel in rqlst.iget_nodes(nodes.Relation): + if rel.r_type == 'has_text': + __, rhs = rel.get_variable_parts() + return rhs.eval(self.args) + return None + +def _get_variable(term): + # XXX rewritten const + # use iget_nodes for (hack) case where we have things like MAX(V) + for vref in term.iget_nodes(nodes.VariableRef): + return vref.variable + +def attr_desc_iterator(select, selectidx, rootidx): + """return an iterator on a list of 2-uple (index, attr_relation) + localizing attribute relations of the main variable in a result's row + + :type rqlst: rql.stmts.Select + :param rqlst: the RQL syntax tree to describe + + :return: + a generator on (index, relation, target) describing column being + attribute of the main variable + """ + rootselect = select + while rootselect.parent.parent is not None: + rootselect = rootselect.parent.parent.parent + rootmain = rootselect.selection[selectidx] + rootmainvar = _get_variable(rootmain) + assert rootmainvar + root = rootselect.parent + selectmain = select.selection[selectidx] + for i, term in enumerate(rootselect.selection): + try: + # don't use _get_variable here: if the term isn't a variable + # (function...), we don't want it to be used as an entity attribute + # or relation's value (XXX beside MAX/MIN trick?) + rootvar = term.variable + except AttributeError: + continue + if rootvar.name == rootmainvar.name: + continue + if select is not rootselect and isinstance(rootvar, nodes.ColumnAlias): + term = select.selection[root.subquery_selection_index(select, i)] + var = _get_variable(term) + if var is None: + continue + for ref in var.references(): + rel = ref.relation() + if rel is None or rel.is_types_restriction(): + continue + lhs, rhs = rel.get_variable_parts() + if selectmain.is_equivalent(lhs): + if rhs.is_equivalent(term): + yield (i, rel.r_type, 'subject') + elif selectmain.is_equivalent(rhs): + if lhs.is_equivalent(term): + yield (i, rel.r_type, 'object') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/rtags.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/rtags.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,270 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+""" +A RelationTag object is an object which allows to link a configuration +information to a relation definition. For instance, the standard +primary view uses a RelationTag object (uicfg.primaryview_section) to +get the section to display relations. + +.. sourcecode:: python + + # display ``entry_of`` relations in the ``relations`` section in the ``BlogEntry`` primary view + uicfg.primaryview_section.tag_subject_of(('BlogEntry', 'entry_of', '*'), + 'relations') + + # hide every relation ``entry_of`` in the ``Blog`` primary view + uicfg.primaryview_section.tag_object_of(('*', 'entry_of', 'Blog'), 'hidden') + +Three primitives are defined: + * ``tag_subject_of`` tag a relation in the subject's context + * ``tag_object_of`` tag a relation in the object's context + * ``tag_attribute`` shortcut for tag_subject_of +""" +__docformat__ = "restructuredtext en" + +import logging +from warnings import warn + +from six import string_types + +from logilab.common.logging_ext import set_log_methods +from logilab.common.registry import RegistrableInstance, yes + +def _ensure_str_key(key): + return tuple(str(k) for k in key) + +class RegistrableRtags(RegistrableInstance): + __registry__ = 'uicfg' + __select__ = yes() + + +class RelationTags(RegistrableRtags): + """a tag store for full relation definitions : + + (subject type, relation type, object type, tagged) + + allowing to set tags using wildcard (eg '*') as subject type / object type + + This class associates a single tag to each key. + """ + _allowed_values = None + # _init expected to be a method (introduced in 3.17), while _initfunc a + # function given as __init__ argument and kept for bw compat + _init = _initfunc = None + + def __init__(self): + self._tagdefs = {} + + def __repr__(self): + # find a way to have more infos but keep it readable + # (in error messages in case of an ambiguity for instance) + return '%s (%s): %s' % (id(self), self.__regid__, self.__class__) + + # dict compat + def __getitem__(self, key): + return self.get(*key) + __contains__ = __getitem__ + + def clear(self): + self._tagdefs.clear() + + def _get_keys(self, stype, rtype, otype, tagged): + keys = [] + if '*' not in (stype, otype): + keys.append(('*', rtype, '*', tagged)) + if '*' != stype: + keys.append(('*', rtype, otype, tagged)) + if '*' != otype: + keys.append((stype, rtype, '*', tagged)) + keys.append((stype, rtype, otype, tagged)) + return keys + + def init(self, schema, check=True): + # XXX check existing keys against schema + if check: + for (stype, rtype, otype, tagged), value in list(self._tagdefs.items()): + for ertype in (stype, rtype, otype): + if ertype != '*' and not ertype in schema: + self.warning('removing rtag %s: %s, %s undefined in schema', + (stype, rtype, otype, tagged), value, ertype) + self.del_rtag(stype, rtype, otype, tagged) + break + if self._init is not None: + self.apply(schema, self._init) + + def apply(self, schema, func): + for eschema in schema.entities(): + if eschema.final: + continue + for rschema, tschemas, role in eschema.relation_definitions(True): + for tschema in tschemas: + if role == 'subject': + sschema, oschema = eschema, tschema + else: + sschema, oschema = tschema, eschema + func(sschema, rschema, oschema, role) + + # rtag declaration api #################################################### + + def tag_attribute(self, key, *args, **kwargs): + key = list(key) + key.append('*') + key.append('subject') + self.tag_relation(key, *args, **kwargs) + + def tag_subject_of(self, key, *args, **kwargs): + key = list(key) + 
key.append('subject') + self.tag_relation(key, *args, **kwargs) + + def tag_object_of(self, key, *args, **kwargs): + key = list(key) + key.append('object') + self.tag_relation(key, *args, **kwargs) + + def tag_relation(self, key, tag): + assert len(key) == 4, 'bad key: %s' % list(key) + if self._allowed_values is not None: + assert tag in self._allowed_values, \ + '%r is not an allowed tag (should be in %s)' % ( + tag, self._allowed_values) + self._tagdefs[_ensure_str_key(key)] = tag + return tag + + def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs): + if isinstance(attr, string_types): + attr, role = attr, 'subject' + else: + attr, role = attr + if role == 'subject': + self.tag_subject_of((etype, attr, desttype), *args, **kwargs) + else: + self.tag_object_of((desttype, attr, etype), *args, **kwargs) + + + # rtag runtime api ######################################################## + + def del_rtag(self, *key): + del self._tagdefs[key] + + def get(self, *key): + for key in reversed(self._get_keys(*key)): + try: + return self._tagdefs[key] + except KeyError: + continue + return None + + def etype_get(self, etype, rtype, role, ttype='*'): + if role == 'subject': + return self.get(etype, rtype, ttype, role) + return self.get(ttype, rtype, etype, role) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + + +class RelationTagsSet(RelationTags): + """This class associates a set of tags to each key. + """ + tag_container_cls = set + + def tag_relation(self, key, tag): + rtags = self._tagdefs.setdefault(_ensure_str_key(key), + self.tag_container_cls()) + rtags.add(tag) + return rtags + + def get(self, stype, rtype, otype, tagged): + rtags = self.tag_container_cls() + for key in self._get_keys(stype, rtype, otype, tagged): + try: + rtags.update(self._tagdefs[key]) + except KeyError: + continue + return rtags + + +class RelationTagsDict(RelationTagsSet): + """This class associates a set of tags to each key.""" + tag_container_cls = dict + + def tag_relation(self, key, tag): + key = _ensure_str_key(key) + try: + rtags = self._tagdefs[key] + rtags.update(tag) + return rtags + except KeyError: + self._tagdefs[key] = tag + return tag + + def setdefault(self, key, tagkey, tagvalue): + key = _ensure_str_key(key) + try: + rtags = self._tagdefs[key] + rtags.setdefault(tagkey, tagvalue) + return rtags + except KeyError: + self._tagdefs[key] = {tagkey: tagvalue} + return self._tagdefs[key] + + +class RelationTagsBool(RelationTags): + _allowed_values = frozenset((True, False)) + + +class NoTargetRelationTagsDict(RelationTagsDict): + + @property + def name(self): + return self.__class__.name + + # tag_subject_of / tag_object_of issue warning if '*' is not given as target + # type, while tag_relation handle it silently since it may be used during + # initialization + def tag_subject_of(self, key, tag): + subj, rtype, obj = key + if obj != '*': + self.warning('using explict target type in %s.tag_subject_of() ' + 'has no effect, use (%s, %s, "*") instead of (%s, %s, %s)', + self.name, subj, rtype, subj, rtype, obj) + super(NoTargetRelationTagsDict, self).tag_subject_of((subj, rtype, '*'), tag) + + def tag_object_of(self, key, tag): + subj, rtype, obj = key + if subj != '*': + self.warning('using explict subject type in %s.tag_object_of() ' + 'has no effect, use ("*", %s, %s) instead of (%s, %s, %s)', + self.name, rtype, obj, subj, rtype, obj) + 
super(NoTargetRelationTagsDict, self).tag_object_of(('*', rtype, obj), tag) + + def tag_relation(self, key, tag): + if key[-1] == 'subject' and key[-2] != '*': + if isinstance(key, tuple): + key = list(key) + key[-2] = '*' + elif key[-1] == 'object' and key[0] != '*': + if isinstance(key, tuple): + key = list(key) + key[0] = '*' + super(NoTargetRelationTagsDict, self).tag_relation(key, tag) + + +set_log_methods(RelationTags, logging.getLogger('cubicweb.rtags')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1458 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""classes to define schemas for CubicWeb""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import re +from os.path import join, basename +from logging import getLogger +from warnings import warn + +from six import PY2, text_type, string_types, add_metaclass +from six.moves import range + +from logilab.common import tempattr +from logilab.common.decorators import cached, clear_cache, monkeypatch, cachedproperty +from logilab.common.logging_ext import set_log_methods +from logilab.common.deprecation import deprecated, class_moved, moved +from logilab.common.textutils import splitstrip +from logilab.common.graph import get_cycles + +import yams +from yams import BadSchemaDefinition, buildobjs as ybo +from yams.schema import Schema, ERSchema, EntitySchema, RelationSchema, \ + RelationDefinitionSchema, PermissionMixIn, role_name +from yams.constraints import (BaseConstraint, FormatConstraint, BoundaryConstraint, + IntervalBoundConstraint, StaticVocabularyConstraint) +from yams.reader import (CONSTRAINTS, PyFileReader, SchemaLoader, + cleanup_sys_modules, fill_schema_from_namespace) + +from rql import parse, nodes, RQLSyntaxError, TypeResolverException +from rql.analyze import ETypeResolver + +import cubicweb +from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized, _ + +try: + from cubicweb import server +except ImportError: + # We need to lookup DEBUG from there, + # however a pure dbapi client may not have it. 
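+    # the minimal stub below only mirrors the DEBUG flag, which is read by the
+    # security debugging code in check_perm() further down in this module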
+ class server(object): pass + server.DEBUG = False + + +PURE_VIRTUAL_RTYPES = set(('identity', 'has_text',)) +VIRTUAL_RTYPES = set(('eid', 'identity', 'has_text',)) + +# set of meta-relations available for every entity types +META_RTYPES = set(( + 'owned_by', 'created_by', 'is', 'is_instance_of', 'identity', + 'eid', 'creation_date', 'cw_source', 'modification_date', 'has_text', 'cwuri', + )) +WORKFLOW_RTYPES = set(('custom_workflow', 'in_state', 'wf_info_for')) +WORKFLOW_DEF_RTYPES = set(('workflow_of', 'state_of', 'transition_of', + 'initial_state', 'default_workflow', + 'allowed_transition', 'destination_state', + 'from_state', 'to_state', 'condition', + 'subworkflow', 'subworkflow_state', 'subworkflow_exit', + 'by_transition', + )) +SYSTEM_RTYPES = set(('in_group', 'require_group', + # cwproperty + 'for_user', + 'cw_schema', 'cw_import_of', 'cw_for_source', + 'cw_host_config_of', + )) | WORKFLOW_RTYPES +NO_I18NCONTEXT = META_RTYPES | WORKFLOW_RTYPES + +SKIP_COMPOSITE_RELS = [('cw_source', 'subject')] + +# set of entity and relation types used to build the schema +SCHEMA_TYPES = set(( + 'CWEType', 'CWRType', 'CWComputedRType', 'CWAttribute', 'CWRelation', + 'CWConstraint', 'CWConstraintType', 'CWUniqueTogetherConstraint', + 'RQLExpression', + 'specializes', + 'relation_type', 'from_entity', 'to_entity', + 'constrained_by', 'cstrtype', + 'constraint_of', 'relations', + 'read_permission', 'add_permission', + 'delete_permission', 'update_permission', + )) + +WORKFLOW_TYPES = set(('Transition', 'State', 'TrInfo', 'Workflow', + 'WorkflowTransition', 'BaseTransition', + 'SubWorkflowExitPoint')) + +INTERNAL_TYPES = set(('CWProperty', 'CWCache', 'ExternalUri', 'CWDataImport', + 'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig')) + +UNIQUE_CONSTRAINTS = ('SizeConstraint', 'FormatConstraint', + 'StaticVocabularyConstraint', + 'RQLVocabularyConstraint') + +_LOGGER = getLogger('cubicweb.schemaloader') + +# entity and relation schema created from serialized schema have an eid +ybo.ETYPE_PROPERTIES += ('eid',) +ybo.RTYPE_PROPERTIES += ('eid',) + +def build_schema_from_namespace(items): + schema = CubicWebSchema('noname') + fill_schema_from_namespace(schema, items, register_base_types=False) + return schema + +# Bases for manipulating RQL in schema ######################################### + +def guess_rrqlexpr_mainvars(expression): + defined = set(split_expression(expression)) + mainvars = set() + if 'S' in defined: + mainvars.add('S') + if 'O' in defined: + mainvars.add('O') + if 'U' in defined: + mainvars.add('U') + if not mainvars: + raise BadSchemaDefinition('unable to guess selection variables in %r' + % expression) + return mainvars + +def split_expression(rqlstring): + for expr in rqlstring.split(','): + for noparen1 in expr.split('('): + for noparen2 in noparen1.split(')'): + for word in noparen2.split(): + yield word + +def normalize_expression(rqlstring): + """normalize an rql expression to ease schema synchronization (avoid + suppressing and reinserting an expression if only a space has been + added/removed for instance) + """ + union = parse(u'Any 1 WHERE %s' % rqlstring).as_string() + if PY2 and isinstance(union, str): + union = union.decode('utf-8') + return union.split(' WHERE ', 1)[1] + + +def _check_valid_formula(rdef, formula_rqlst): + """Check the formula is a valid RQL query with some restriction (no union, + single selected node, etc.), raise BadSchemaDefinition if not + """ + if len(formula_rqlst.children) != 1: + raise BadSchemaDefinition('computed attribute %(attr)s on 
%(etype)s: ' + 'can not use UNION in formula %(form)r' % + {'attr' : rdef.rtype, + 'etype' : rdef.subject.type, + 'form' : rdef.formula}) + select = formula_rqlst.children[0] + if len(select.selection) != 1: + raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: ' + 'can only select one term in formula %(form)r' % + {'attr' : rdef.rtype, + 'etype' : rdef.subject.type, + 'form' : rdef.formula}) + term = select.selection[0] + types = set(term.get_type(sol) for sol in select.solutions) + if len(types) != 1: + raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: ' + 'multiple possible types (%(types)s) for formula %(form)r' % + {'attr' : rdef.rtype, + 'etype' : rdef.subject.type, + 'types' : list(types), + 'form' : rdef.formula}) + computed_type = types.pop() + expected_type = rdef.object.type + if computed_type != expected_type: + raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: ' + 'computed attribute type (%(comp_type)s) mismatch with ' + 'specified type (%(attr_type)s)' % + {'attr' : rdef.rtype, + 'etype' : rdef.subject.type, + 'comp_type' : computed_type, + 'attr_type' : expected_type}) + + +class RQLExpression(object): + """Base class for RQL expression used in schema (constraints and + permissions) + """ + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + # to be defined in concrete classes + rqlst = None + predefined_variables = None + full_rql = None + + def __init__(self, expression, mainvars, eid): + """ + :type mainvars: sequence of RQL variables' names. Can be provided as a + comma separated string. + :param mainvars: names of the variables being selected. + + """ + self.eid = eid # eid of the entity representing this rql expression + assert mainvars, 'bad mainvars %s' % mainvars + if isinstance(mainvars, string_types): + mainvars = set(splitstrip(mainvars)) + elif not isinstance(mainvars, set): + mainvars = set(mainvars) + self.mainvars = mainvars + self.expression = normalize_expression(expression) + try: + self.full_rql = self.rqlst.as_string() + except RQLSyntaxError: + raise RQLSyntaxError(expression) + for mainvar in mainvars: + # if variable is predefined, an extra reference is inserted + # automatically (`VAR eid %(v)s`) + if mainvar in self.predefined_variables: + min_refs = 3 + else: + min_refs = 2 + if len(self.rqlst.defined_vars[mainvar].references()) < min_refs: + _LOGGER.warn('You did not use the %s variable in your RQL ' + 'expression %s', mainvar, self) + # syntax tree used by read security (inserted in queries when necessary) + self.snippet_rqlst = parse(self.minimal_rql, print_errors=False).children[0] + # graph of links between variables, used by rql rewriter + self.vargraph = vargraph(self.rqlst) + # useful for some instrumentation, e.g. 
localperms permcheck command + self.package = ybo.PACKAGE + + def __str__(self): + return self.full_rql + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, self.full_rql) + + def __lt__(self, other): + if hasattr(other, 'expression'): + return self.expression < other.expression + return True + + def __eq__(self, other): + if hasattr(other, 'expression'): + return self.expression == other.expression + return False + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash(self.expression) + + def __deepcopy__(self, memo): + return self.__class__(self.expression, self.mainvars) + def __getstate__(self): + return (self.expression, self.mainvars) + def __setstate__(self, state): + self.__init__(*state) + + @cachedproperty + def rqlst(self): + select = parse(self.minimal_rql, print_errors=False).children[0] + defined = set(split_expression(self.expression)) + for varname in self.predefined_variables: + if varname in defined: + select.add_eid_restriction(select.get_variable(varname), varname.lower(), 'Substitute') + return select + + # permission rql expression specific stuff ################################# + + @cached + def transform_has_permission(self): + found = None + rqlst = self.rqlst + for var in rqlst.defined_vars.values(): + for varref in var.references(): + rel = varref.relation() + if rel is None: + continue + try: + prefix, action, suffix = rel.r_type.split('_') + except ValueError: + continue + if prefix != 'has' or suffix != 'permission' or \ + not action in ('add', 'delete', 'update', 'read'): + continue + if found is None: + found = [] + rqlst.save_state() + assert rel.children[0].name == 'U' + objvar = rel.children[1].children[0].variable + rqlst.remove_node(rel) + selected = [v.name for v in rqlst.get_selected_variables()] + if objvar.name not in selected: + colindex = len(selected) + rqlst.add_selected(objvar) + else: + colindex = selected.index(objvar.name) + found.append((action, colindex)) + # remove U eid %(u)s if U is not used in any other relation + uvrefs = rqlst.defined_vars['U'].references() + if len(uvrefs) == 1: + rqlst.remove_node(uvrefs[0].relation()) + if found is not None: + rql = rqlst.as_string() + if len(rqlst.selection) == 1 and isinstance(rqlst.where, nodes.Relation): + # only "Any X WHERE X eid %(x)s" remaining, no need to execute the rql + keyarg = rqlst.selection[0].name.lower() + else: + keyarg = None + rqlst.recover() + return rql, found, keyarg + return rqlst.as_string(), None, None + + def _check(self, _cw, **kwargs): + """return True if the rql expression is matching the given relation + between fromeid and toeid + + _cw may be a request or a server side transaction + """ + creating = kwargs.get('creating') + if not creating and self.eid is not None: + key = (self.eid, tuple(sorted(kwargs.items()))) + try: + return _cw.local_perm_cache[key] + except KeyError: + pass + rql, has_perm_defs, keyarg = self.transform_has_permission() + # when creating an entity, expression related to X satisfied + if creating and 'X' in self.rqlst.defined_vars: + return True + if keyarg is None: + kwargs.setdefault('u', _cw.user.eid) + try: + rset = _cw.execute(rql, kwargs, build_descr=True) + except NotImplementedError: + self.critical('cant check rql expression, unsupported rql %s', rql) + if self.eid is not None: + _cw.local_perm_cache[key] = False + return False + except TypeResolverException as ex: + # some expression may not be resolvable with current kwargs + # (type conflict) + self.warning('%s: %s', rql, 
str(ex)) + if self.eid is not None: + _cw.local_perm_cache[key] = False + return False + except Unauthorized as ex: + self.debug('unauthorized %s: %s', rql, str(ex)) + if self.eid is not None: + _cw.local_perm_cache[key] = False + return False + else: + rset = _cw.eid_rset(kwargs[keyarg]) + # if no special has_*_permission relation in the rql expression, just + # check the result set contains something + if has_perm_defs is None: + if rset: + if self.eid is not None: + _cw.local_perm_cache[key] = True + return True + elif rset: + # check every special has_*_permission relation is satisfied + get_eschema = _cw.vreg.schema.eschema + try: + for eaction, col in has_perm_defs: + for i in range(len(rset)): + eschema = get_eschema(rset.description[i][col]) + eschema.check_perm(_cw, eaction, eid=rset[i][col]) + if self.eid is not None: + _cw.local_perm_cache[key] = True + return True + except Unauthorized: + pass + if self.eid is not None: + _cw.local_perm_cache[key] = False + return False + + @property + def minimal_rql(self): + return 'Any %s WHERE %s' % (','.join(sorted(self.mainvars)), + self.expression) + + + +# rql expressions for use in permission definition ############################# + +class ERQLExpression(RQLExpression): + predefined_variables = 'XU' + + def __init__(self, expression, mainvars=None, eid=None): + RQLExpression.__init__(self, expression, mainvars or 'X', eid) + + def check(self, _cw, eid=None, creating=False, **kwargs): + if 'X' in self.rqlst.defined_vars: + if eid is None: + if creating: + return self._check(_cw, creating=True, **kwargs) + return False + assert creating == False + return self._check(_cw, x=eid, **kwargs) + return self._check(_cw, **kwargs) + + +class CubicWebRelationDefinitionSchema(RelationDefinitionSchema): + def constraint_by_eid(self, eid): + for cstr in self.constraints: + if cstr.eid == eid: + return cstr + raise ValueError('No constraint with eid %d' % eid) + + def rql_expression(self, expression, mainvars=None, eid=None): + """rql expression factory""" + if self.rtype.final: + return ERQLExpression(expression, mainvars, eid) + return RRQLExpression(expression, mainvars, eid) + + def check_permission_definitions(self): + super(CubicWebRelationDefinitionSchema, self).check_permission_definitions() + schema = self.subject.schema + for action, groups in self.permissions.items(): + for group_or_rqlexpr in groups: + if action == 'read' and \ + isinstance(group_or_rqlexpr, RQLExpression): + msg = "can't use rql expression for read permission of %s" + raise BadSchemaDefinition(msg % self) + if self.final and isinstance(group_or_rqlexpr, RRQLExpression): + msg = "can't use RRQLExpression on %s, use an ERQLExpression" + raise BadSchemaDefinition(msg % self) + if not self.final and isinstance(group_or_rqlexpr, ERQLExpression): + msg = "can't use ERQLExpression on %s, use a RRQLExpression" + raise BadSchemaDefinition(msg % self) + +def vargraph(rqlst): + """ builds an adjacency graph of variables from the rql syntax tree, e.g: + Any O,S WHERE T subworkflow_exit S, T subworkflow WF, O state_of WF + => {'WF': ['O', 'T'], 'S': ['T'], 'T': ['WF', 'S'], 'O': ['WF']} + """ + vargraph = {} + for relation in rqlst.get_nodes(nodes.Relation): + try: + rhsvarname = relation.children[1].children[0].variable.name + lhsvarname = relation.children[0].name + except AttributeError: + pass + else: + vargraph.setdefault(lhsvarname, []).append(rhsvarname) + vargraph.setdefault(rhsvarname, []).append(lhsvarname) + #vargraph[(lhsvarname, rhsvarname)] = relation.r_type + return 
vargraph + + +class GeneratedConstraint(object): + def __init__(self, rqlst, mainvars): + self.snippet_rqlst = rqlst + self.mainvars = mainvars + self.vargraph = vargraph(rqlst) + + +class RRQLExpression(RQLExpression): + predefined_variables = 'SOU' + + def __init__(self, expression, mainvars=None, eid=None): + if mainvars is None: + mainvars = guess_rrqlexpr_mainvars(expression) + RQLExpression.__init__(self, expression, mainvars, eid) + + def check(self, _cw, fromeid=None, toeid=None): + kwargs = {} + if 'S' in self.rqlst.defined_vars: + if fromeid is None: + return False + kwargs['s'] = fromeid + if 'O' in self.rqlst.defined_vars: + if toeid is None: + return False + kwargs['o'] = toeid + return self._check(_cw, **kwargs) + + +# In yams, default 'update' perm for attributes granted to managers and owners. +# Within cw, we want to default to users who may edit the entity holding the +# attribute. +# These default permissions won't be checked by the security hooks: +# since they delegate checking to the entity, we can skip actual checks. +ybo.DEFAULT_ATTRPERMS['update'] = ('managers', ERQLExpression('U has_update_permission X')) +ybo.DEFAULT_ATTRPERMS['add'] = ('managers', ERQLExpression('U has_add_permission X')) + +# we don't want 'add' or 'delete' permissions on computed relation types +# (they're hardcoded to '()' on computed relation definitions) +if 'add' in yams.DEFAULT_COMPUTED_RELPERMS: + del yams.DEFAULT_COMPUTED_RELPERMS['add'] +if 'delete' in yams.DEFAULT_COMPUTED_RELPERMS: + del yams.DEFAULT_COMPUTED_RELPERMS['delete'] + + +PUB_SYSTEM_ENTITY_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + 'update': ('managers',), + } +PUB_SYSTEM_REL_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + } +PUB_SYSTEM_ATTR_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'update': ('managers',), + } +RO_REL_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': (), + 'delete': (), + } +RO_ATTR_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': ybo.DEFAULT_ATTRPERMS['add'], + 'update': (), + } + +# XXX same algorithm as in reorder_cubes and probably other place, +# may probably extract a generic function +def order_eschemas(eschemas): + """return entity schemas ordered such that entity types which specializes an + other one appears after that one + """ + graph = {} + for eschema in eschemas: + if eschema.specializes(): + graph[eschema] = set((eschema.specializes(),)) + else: + graph[eschema] = set() + cycles = get_cycles(graph) + if cycles: + cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles) + raise Exception('cycles in entity schema specialization: %s' + % cycles) + eschemas = [] + while graph: + # sorted to get predictable results + for eschema, deps in sorted(graph.items()): + if not deps: + eschemas.append(eschema) + del graph[eschema] + for deps in graph.values(): + try: + deps.remove(eschema) + except KeyError: + continue + return eschemas + +def bw_normalize_etype(etype): + if etype in ETYPE_NAME_MAP: + msg = '%s has been renamed to %s, please update your code' % ( + etype, ETYPE_NAME_MAP[etype]) + warn(msg, DeprecationWarning, stacklevel=4) + etype = ETYPE_NAME_MAP[etype] + return etype + +def display_name(req, key, form='', context=None): + """return a internationalized string for the key (schema entity or relation + name) in a given form + """ + assert form in ('', 'plural', 'subject', 'object') + if form == 
'subject': + form = '' + if form: + key = key + '_' + form + # ensure unicode + if context is not None: + return text_type(req.pgettext(context, key)) + else: + return text_type(req._(key)) + + +# Schema objects definition ################################################### + +def ERSchema_display_name(self, req, form='', context=None): + """return a internationalized string for the entity/relation type name in + a given form + """ + return display_name(req, self.type, form, context) +ERSchema.display_name = ERSchema_display_name + +@cached +def get_groups(self, action): + """return the groups authorized to perform on entities of + this type + + :type action: str + :param action: the name of a permission + + :rtype: tuple + :return: names of the groups with the given permission + """ + assert action in self.ACTIONS, action + #assert action in self._groups, '%s %s' % (self, action) + try: + return frozenset(g for g in self.permissions[action] if isinstance(g, string_types)) + except KeyError: + return () +PermissionMixIn.get_groups = get_groups + +@cached +def get_rqlexprs(self, action): + """return the rql expressions representing queries to check the user is allowed + to perform on entities of this type + + :type action: str + :param action: the name of a permission + + :rtype: tuple + :return: the rql expressions with the given permission + """ + assert action in self.ACTIONS, action + #assert action in self._rqlexprs, '%s %s' % (self, action) + try: + return tuple(g for g in self.permissions[action] if not isinstance(g, string_types)) + except KeyError: + return () +PermissionMixIn.get_rqlexprs = get_rqlexprs + +orig_set_action_permissions = PermissionMixIn.set_action_permissions +def set_action_permissions(self, action, permissions): + """set the groups and rql expressions allowing to perform on + entities of this type + + :type action: str + :param action: the name of a permission + + :type permissions: tuple + :param permissions: the groups and rql expressions allowing the given action + """ + orig_set_action_permissions(self, action, tuple(permissions)) + clear_cache(self, 'get_rqlexprs') + clear_cache(self, 'get_groups') +PermissionMixIn.set_action_permissions = set_action_permissions + +def has_local_role(self, action): + """return true if the action *may* be granted locally (i.e. either rql + expressions or the owners group are used in security definition) + + XXX this method is only there since we don't know well how to deal with + 'add' action checking. Also find a better name would be nice. + """ + assert action in self.ACTIONS, action + if self.get_rqlexprs(action): + return True + if action in ('update', 'delete'): + return 'owners' in self.get_groups(action) + return False +PermissionMixIn.has_local_role = has_local_role + +def may_have_permission(self, action, req): + if action != 'read' and not (self.has_local_role('read') or + self.has_perm(req, 'read')): + return False + return self.has_local_role(action) or self.has_perm(req, action) +PermissionMixIn.may_have_permission = may_have_permission + +def has_perm(self, _cw, action, **kwargs): + """return true if the action is granted globally or locally""" + try: + self.check_perm(_cw, action, **kwargs) + return True + except Unauthorized: + return False +PermissionMixIn.has_perm = has_perm + + +def check_perm(self, _cw, action, **kwargs): + # NB: _cw may be a server transaction or a request object. 
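+    #
+    # as a hedged illustration (hypothetical schema), given a permission
+    # definition such as:
+    #     __permissions__ = {'update': ('managers', ERQLExpression('X owned_by U'))}
+    # get_groups('update') returns the plain group names ('managers') while
+    # get_rqlexprs('update') returns the ERQLExpression, which is only
+    # evaluated as a last resort below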
+ # + # check user is in an allowed group, if so that's enough internal + # transactions should always stop there + DBG = False + if server.DEBUG & server.DBG_SEC: + if action in server._SECURITY_CAPS: + _self_str = str(self) + if server._SECURITY_ITEMS: + if any(item in _self_str for item in server._SECURITY_ITEMS): + DBG = True + else: + DBG = True + groups = self.get_groups(action) + if _cw.user.matching_groups(groups): + if DBG: + print('check_perm: %r %r: user matches %s' % (action, _self_str, groups)) + return + # if 'owners' in allowed groups, check if the user actually owns this + # object, if so that's enough + # + # NB: give _cw to user.owns since user is not be bound to a transaction on + # the repository side + if 'owners' in groups and ( + kwargs.get('creating') + or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))): + if DBG: + print('check_perm: %r %r: user is owner or creation time' % + (action, _self_str)) + return + # else if there is some rql expressions, check them + if DBG: + print('check_perm: %r %r %s' % + (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs)) + for rqlexpr in self.get_rqlexprs(action)])) + if any(rqlexpr.check(_cw, **kwargs) + for rqlexpr in self.get_rqlexprs(action)): + return + raise Unauthorized(action, str(self)) +PermissionMixIn.check_perm = check_perm + + +CubicWebRelationDefinitionSchema._RPROPERTIES['eid'] = None +# remember rproperties defined at this point. Others will have to be serialized in +# CWAttribute.extra_props +KNOWN_RPROPERTIES = CubicWebRelationDefinitionSchema.ALL_PROPERTIES() + + +class CubicWebEntitySchema(EntitySchema): + """a entity has a type, a set of subject and or object relations + the entity schema defines the possible relations for a given type and some + constraints on those relations + """ + def __init__(self, schema=None, edef=None, eid=None, **kwargs): + super(CubicWebEntitySchema, self).__init__(schema, edef, **kwargs) + if eid is None and edef is not None: + eid = getattr(edef, 'eid', None) + self.eid = eid + + def targets(self, role): + assert role in ('subject', 'object') + if role == 'subject': + return self.subjrels.values() + return self.objrels.values() + + @cachedproperty + def composite_rdef_roles(self): + """Return all relation definitions that define the current entity + type as a composite. 
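+
+        For example (hedged, the schema names are invented): if a ``comments``
+        relation from ``Comment`` to ``BlogEntry`` is declared with
+        ``composite='object'``, this property on the ``BlogEntry`` schema
+        contains the corresponding (rdef, 'object') pair, and ``is_composite``
+        is consequently True.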
+ """ + rdef_roles = [] + for role in ('subject', 'object'): + for rschema in self.targets(role): + if rschema.final: + continue + for rdef in rschema.rdefs.values(): + if (role == 'subject' and rdef.subject == self) or \ + (role == 'object' and rdef.object == self): + crole = rdef.composite + if crole == role: + rdef_roles.append((rdef, role)) + return rdef_roles + + @cachedproperty + def is_composite(self): + return bool(len(self.composite_rdef_roles)) + + def check_permission_definitions(self): + super(CubicWebEntitySchema, self).check_permission_definitions() + for groups in self.permissions.values(): + for group_or_rqlexpr in groups: + if isinstance(group_or_rqlexpr, RRQLExpression): + msg = "can't use RRQLExpression on %s, use an ERQLExpression" + raise BadSchemaDefinition(msg % self.type) + + def is_subobject(self, strict=False, skiprels=None): + if skiprels is None: + skiprels = SKIP_COMPOSITE_RELS + else: + skiprels += SKIP_COMPOSITE_RELS + return super(CubicWebEntitySchema, self).is_subobject(strict, + skiprels=skiprels) + + def attribute_definitions(self): + """return an iterator on attribute definitions + + attribute relations are a subset of subject relations where the + object's type is a final entity + + an attribute definition is a 2-uple : + * name of the relation + * schema of the destination entity type + """ + iter = super(CubicWebEntitySchema, self).attribute_definitions() + for rschema, attrschema in iter: + if rschema.type == 'has_text': + continue + yield rschema, attrschema + + def main_attribute(self): + """convenience method that returns the *main* (i.e. the first non meta) + attribute defined in the entity schema + """ + for rschema, _ in self.attribute_definitions(): + if not (rschema in META_RTYPES + or self.is_metadata(rschema)): + return rschema + + def add_subject_relation(self, rschema): + """register the relation schema as possible subject relation""" + super(CubicWebEntitySchema, self).add_subject_relation(rschema) + if rschema.final: + if self.rdef(rschema).get('fulltextindexed'): + self._update_has_text() + elif rschema.fulltext_container: + self._update_has_text() + + def add_object_relation(self, rschema): + """register the relation schema as possible object relation""" + super(CubicWebEntitySchema, self).add_object_relation(rschema) + if rschema.fulltext_container: + self._update_has_text() + + def del_subject_relation(self, rtype): + super(CubicWebEntitySchema, self).del_subject_relation(rtype) + if 'has_text' in self.subjrels: + self._update_has_text(deletion=True) + + def del_object_relation(self, rtype): + super(CubicWebEntitySchema, self).del_object_relation(rtype) + if 'has_text' in self.subjrels: + self._update_has_text(deletion=True) + + def _update_has_text(self, deletion=False): + may_need_has_text, has_has_text = False, False + need_has_text = None + for rschema in self.subject_relations(): + if rschema.final: + if rschema == 'has_text': + has_has_text = True + elif self.rdef(rschema).get('fulltextindexed'): + may_need_has_text = True + elif rschema.fulltext_container: + if rschema.fulltext_container == 'subject': + may_need_has_text = True + else: + need_has_text = False + for rschema in self.object_relations(): + if rschema.fulltext_container: + if rschema.fulltext_container == 'object': + may_need_has_text = True + else: + need_has_text = False + if need_has_text is None: + need_has_text = may_need_has_text + if need_has_text and not has_has_text and not deletion: + rdef = ybo.RelationDefinition(self.type, 'has_text', 'String', + 
__permissions__=RO_ATTR_PERMS) + self.schema.add_relation_def(rdef) + elif not need_has_text and has_has_text: + # use rschema.del_relation_def and not schema.del_relation_def to + # avoid deleting the relation type accidentally... + self.schema['has_text'].del_relation_def(self, self.schema['String']) + + def schema_entity(self): # XXX @property for consistency with meta + """return True if this entity type is used to build the schema""" + return self.type in SCHEMA_TYPES + + def rql_expression(self, expression, mainvars=None, eid=None): + """rql expression factory""" + return ERQLExpression(expression, mainvars, eid) + + +class CubicWebRelationSchema(PermissionMixIn, RelationSchema): + permissions = {} + ACTIONS = () + rdef_class = CubicWebRelationDefinitionSchema + + def __init__(self, schema=None, rdef=None, eid=None, **kwargs): + if rdef is not None: + # if this relation is inlined + self.inlined = rdef.inlined + super(CubicWebRelationSchema, self).__init__(schema, rdef, **kwargs) + if eid is None and rdef is not None: + eid = getattr(rdef, 'eid', None) + self.eid = eid + + def init_computed_relation(self, rdef): + self.ACTIONS = ('read',) + super(CubicWebRelationSchema, self).init_computed_relation(rdef) + + def advertise_new_add_permission(self): + pass + + def check_permission_definitions(self): + RelationSchema.check_permission_definitions(self) + PermissionMixIn.check_permission_definitions(self) + + @property + def meta(self): + return self.type in META_RTYPES + + def schema_relation(self): # XXX @property for consistency with meta + """return True if this relation type is used to build the schema""" + return self.type in SCHEMA_TYPES + + def may_have_permission(self, action, req, eschema=None, role=None): + if eschema is not None: + for tschema in self.targets(eschema, role): + rdef = self.role_rdef(eschema, tschema, role) + if rdef.may_have_permission(action, req): + return True + else: + for rdef in self.rdefs.values(): + if rdef.may_have_permission(action, req): + return True + return False + + def has_perm(self, _cw, action, **kwargs): + """return true if the action is granted globally or locally""" + if self.final: + assert not ('fromeid' in kwargs or 'toeid' in kwargs), kwargs + assert action in ('read', 'update') + if 'eid' in kwargs: + subjtype = _cw.entity_metas(kwargs['eid'])['type'] + else: + subjtype = objtype = None + else: + assert not 'eid' in kwargs, kwargs + assert action in ('read', 'add', 'delete') + if 'fromeid' in kwargs: + subjtype = _cw.entity_metas(kwargs['fromeid'])['type'] + elif 'frometype' in kwargs: + subjtype = kwargs.pop('frometype') + else: + subjtype = None + if 'toeid' in kwargs: + objtype = _cw.entity_metas(kwargs['toeid'])['type'] + elif 'toetype' in kwargs: + objtype = kwargs.pop('toetype') + else: + objtype = None + if objtype and subjtype: + return self.rdef(subjtype, objtype).has_perm(_cw, action, **kwargs) + elif subjtype: + for tschema in self.targets(subjtype, 'subject'): + rdef = self.rdef(subjtype, tschema) + if not rdef.has_perm(_cw, action, **kwargs): + return False + elif objtype: + for tschema in self.targets(objtype, 'object'): + rdef = self.rdef(tschema, objtype) + if not rdef.has_perm(_cw, action, **kwargs): + return False + else: + for rdef in self.rdefs.values(): + if not rdef.has_perm(_cw, action, **kwargs): + return False + return True + + @deprecated('use .rdef(subjtype, objtype).role_cardinality(role)') + def cardinality(self, subjtype, objtype, target): + return self.rdef(subjtype, objtype).role_cardinality(target) + + 
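The `has_perm` method introduced just above either consults the single relation definition matching the supplied `fromeid`/`toeid` (or `frometype`/`toetype`) pair, or, when no end point is given, requires every definition of the relation type to grant the action. A minimal usage sketch follows (illustrative only, not part of this changeset): it assumes `schema` is a loaded `CubicWebSchema`, `cnx` an open repository connection, and `concerns`, `ticket_eid` and `project_eid` are hypothetical placeholders.

    # Illustrative sketch, not part of the patch. `schema`, `cnx`,
    # 'concerns', `ticket_eid` and `project_eid` are assumptions.
    from cubicweb import Unauthorized

    rschema = schema.rschema('concerns')   # a CubicWebRelationSchema instance

    # No end point supplied: 'add' must be granted by *every* relation
    # definition of 'concerns' for this to return True.
    globally_allowed = rschema.has_perm(cnx, 'add')

    # Both end points supplied: only the relation definition linking the two
    # entity types (resolved through cnx.entity_metas) is consulted.
    if not rschema.has_perm(cnx, 'add', fromeid=ticket_eid, toeid=project_eid):
        raise Unauthorized('add', 'concerns')
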
+class CubicWebSchema(Schema): + """set of entities and relations schema defining the possible data sets + used in an application + + :type name: str + :ivar name: name of the schema, usually the instance identifier + + :type base: str + :ivar base: path of the directory where the schema is defined + """ + reading_from_database = False + entity_class = CubicWebEntitySchema + relation_class = CubicWebRelationSchema + no_specialization_inference = ('identity',) + + def __init__(self, *args, **kwargs): + self._eid_index = {} + super(CubicWebSchema, self).__init__(*args, **kwargs) + ybo.register_base_types(self) + rschema = self.add_relation_type(ybo.RelationType('eid')) + rschema.final = True + rschema = self.add_relation_type(ybo.RelationType('has_text')) + rschema.final = True + rschema = self.add_relation_type(ybo.RelationType('identity')) + rschema.final = False + + etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$' + def add_entity_type(self, edef): + edef.name = str(edef.name) + edef.name = bw_normalize_etype(edef.name) + if not re.match(self.etype_name_re, edef.name): + raise BadSchemaDefinition( + '%r is not a valid name for an entity type. It should start ' + 'with an upper cased letter and be followed by at least a ' + 'lower cased letter' % edef.name) + eschema = super(CubicWebSchema, self).add_entity_type(edef) + if not eschema.final: + # automatically add the eid relation to non final entity types + rdef = ybo.RelationDefinition(eschema.type, 'eid', 'Int', + cardinality='11', uid=True, + __permissions__=RO_ATTR_PERMS) + self.add_relation_def(rdef) + rdef = ybo.RelationDefinition(eschema.type, 'identity', eschema.type, + __permissions__=RO_REL_PERMS) + self.add_relation_def(rdef) + self._eid_index[eschema.eid] = eschema + return eschema + + def add_relation_type(self, rdef): + if not rdef.name.islower(): + raise BadSchemaDefinition( + '%r is not a valid name for a relation type. It should be ' + 'lower cased' % rdef.name) + rdef.name = str(rdef.name) + rschema = super(CubicWebSchema, self).add_relation_type(rdef) + self._eid_index[rschema.eid] = rschema + return rschema + + def add_relation_def(self, rdef): + """build a part of a relation schema + (i.e. add a relation between two specific entity's types) + + :type subject: str + :param subject: entity's type that is subject of the relation + + :type rtype: str + :param rtype: the relation's type (i.e. 
the name of the relation) + + :type obj: str + :param obj: entity's type that is object of the relation + + :rtype: RelationSchema + :param: the newly created or just completed relation schema + """ + rdef.name = rdef.name.lower() + rdef.subject = bw_normalize_etype(rdef.subject) + rdef.object = bw_normalize_etype(rdef.object) + rdefs = super(CubicWebSchema, self).add_relation_def(rdef) + if rdefs: + try: + self._eid_index[rdef.eid] = rdefs + except AttributeError: + pass # not a serialized schema + return rdefs + + def del_relation_type(self, rtype): + rschema = self.rschema(rtype) + self._eid_index.pop(rschema.eid, None) + super(CubicWebSchema, self).del_relation_type(rtype) + + def del_relation_def(self, subjtype, rtype, objtype): + for k, v in self._eid_index.items(): + if not isinstance(v, RelationDefinitionSchema): + continue + if v.subject == subjtype and v.rtype == rtype and v.object == objtype: + del self._eid_index[k] + break + super(CubicWebSchema, self).del_relation_def(subjtype, rtype, objtype) + + def del_entity_type(self, etype): + eschema = self.eschema(etype) + self._eid_index.pop(eschema.eid, None) + # deal with has_text first, else its automatic deletion (see above) + # may trigger an error in ancestor's del_entity_type method + if 'has_text' in eschema.subject_relations(): + self.del_relation_def(etype, 'has_text', 'String') + super(CubicWebSchema, self).del_entity_type(etype) + + def schema_by_eid(self, eid): + return self._eid_index[eid] + + def iter_computed_attributes(self): + for relation in self.relations(): + for rdef in relation.rdefs.values(): + if rdef.final and rdef.formula is not None: + yield rdef + + def iter_computed_relations(self): + for relation in self.relations(): + if relation.rule: + yield relation + + def finalize(self): + super(CubicWebSchema, self).finalize() + self.finalize_computed_attributes() + self.finalize_computed_relations() + + def finalize_computed_attributes(self): + """Check computed attributes validity (if any), else raise + `BadSchemaDefinition` + """ + analyzer = ETypeResolver(self) + for rdef in self.iter_computed_attributes(): + rqlst = parse(rdef.formula) + select = rqlst.children[0] + select.add_type_restriction(select.defined_vars['X'], str(rdef.subject)) + analyzer.visit(select) + _check_valid_formula(rdef, rqlst) + rdef.formula_select = select # avoid later recomputation + + + def finalize_computed_relations(self): + """Build relation definitions for computed relations + + The subject and object types are infered using rql analyzer. 
+ """ + analyzer = ETypeResolver(self) + for rschema in self.iter_computed_relations(): + # XXX rule is valid if both S and O are defined and not in an exists + rqlexpr = RRQLExpression(rschema.rule) + rqlst = rqlexpr.snippet_rqlst + analyzer.visit(rqlst) + couples = set((sol['S'], sol['O']) for sol in rqlst.solutions) + for subjtype, objtype in couples: + if self[objtype].final: + raise BadSchemaDefinition('computed relations cannot be final') + rdef = ybo.RelationDefinition( + subjtype, rschema.type, objtype, + __permissions__={'add': (), + 'delete': (), + 'read': rschema.permissions['read']}) + rdef.infered = True + self.add_relation_def(rdef) + + def rebuild_infered_relations(self): + super(CubicWebSchema, self).rebuild_infered_relations() + self.finalize_computed_attributes() + self.finalize_computed_relations() + + +# additional cw specific constraints ########################################### + +# these are implemented as CHECK constraints in sql, don't do the work +# twice +StaticVocabularyConstraint.check = lambda *args: True +IntervalBoundConstraint.check = lambda *args: True +BoundaryConstraint.check = lambda *args: True + +class BaseRQLConstraint(RRQLExpression, BaseConstraint): + """base class for rql constraints""" + distinct_query = None + + def serialize(self): + # start with a semicolon for bw compat, see below + return ';' + ','.join(sorted(self.mainvars)) + ';' + self.expression + + @classmethod + def deserialize(cls, value): + _, mainvars, expression = value.split(';', 2) + return cls(expression, mainvars) + + def check(self, entity, rtype, value): + """return true if the value satisfy the constraint, else false""" + # implemented as a hook in the repository + return 1 + + def __str__(self): + if self.distinct_query: + selop = 'Any' + else: + selop = 'DISTINCT Any' + return '%s(%s %s WHERE %s)' % (self.__class__.__name__, selop, + ','.join(sorted(self.mainvars)), + self.expression) + + def __repr__(self): + return '<%s @%#x>' % (self.__str__(), id(self)) + + +class RQLVocabularyConstraint(BaseRQLConstraint): + """the rql vocabulary constraint: + + limits the proposed values to a set of entities returned by an rql query, + but this is not enforced at the repository level + + `expression` is an additional rql restriction that will be added to + a predefined query, where the S and O variables respectively represent + the subject and the object of the relation + + `mainvars` is a set of variables that should be used as selection variables + (i.e. `'Any %s WHERE ...' % mainvars`). If not specified, an attempt will be + made to guess it based on the variables used in the expression. 
+ """ + + def repo_check(self, session, eidfrom, rtype, eidto): + """raise ValidationError if the relation doesn't satisfy the constraint + """ + pass # this is a vocabulary constraint, not enforced + + +class RepoEnforcedRQLConstraintMixIn(object): + + def __init__(self, expression, mainvars=None, msg=None): + super(RepoEnforcedRQLConstraintMixIn, self).__init__(expression, mainvars) + self.msg = msg + + def serialize(self): + # start with a semicolon for bw compat, see below + return ';%s;%s\n%s' % (','.join(sorted(self.mainvars)), self.expression, + self.msg or '') + + @classmethod + def deserialize(cls, value): + value, msg = value.split('\n', 1) + _, mainvars, expression = value.split(';', 2) + return cls(expression, mainvars, msg) + + def repo_check(self, session, eidfrom, rtype, eidto=None): + """raise ValidationError if the relation doesn't satisfy the constraint + """ + if not self.match_condition(session, eidfrom, eidto): + # XXX at this point if both or neither of S and O are in mainvar we + # dunno if the validation error `occurred` on eidfrom or eidto (from + # user interface point of view) + # + # possible enhancement: check entity being created, it's probably + # the main eid unless this is a composite relation + if eidto is None or 'S' in self.mainvars or not 'O' in self.mainvars: + maineid = eidfrom + qname = role_name(rtype, 'subject') + else: + maineid = eidto + qname = role_name(rtype, 'object') + if self.msg: + msg = session._(self.msg) + else: + msg = '%(constraint)s %(expression)s failed' % { + 'constraint': session._(self.type()), + 'expression': self.expression} + raise ValidationError(maineid, {qname: msg}) + + def exec_query(self, _cw, eidfrom, eidto): + if eidto is None: + # checking constraint for an attribute relation + expression = 'S eid %(s)s, ' + self.expression + args = {'s': eidfrom} + else: + expression = 'S eid %(s)s, O eid %(o)s, ' + self.expression + args = {'s': eidfrom, 'o': eidto} + if 'U' in self.rqlst.defined_vars: + expression = 'U eid %(u)s, ' + expression + args['u'] = _cw.user.eid + rql = 'Any %s WHERE %s' % (','.join(sorted(self.mainvars)), expression) + if self.distinct_query: + rql = 'DISTINCT ' + rql + return _cw.execute(rql, args, build_descr=False) + + +class RQLConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint): + """the rql constraint is similar to the RQLVocabularyConstraint but + are also enforced at the repository level + """ + distinct_query = False + + def match_condition(self, session, eidfrom, eidto): + return self.exec_query(session, eidfrom, eidto) + + +class RQLUniqueConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint): + """the unique rql constraint check that the result of the query isn't + greater than one. + + You *must* specify `mainvars` when instantiating the constraint since there + is no way to guess it correctly (e.g. if using S,O or U the constraint will + always be satisfied because we've to use a DISTINCT query). + """ + # XXX turns mainvars into a required argument in __init__ + distinct_query = True + + def match_condition(self, session, eidfrom, eidto): + return len(self.exec_query(session, eidfrom, eidto)) <= 1 + + +# workflow extensions ######################################################### + +from yams.buildobjs import _add_relation as yams_add_relation + +class workflowable_definition(ybo.metadefinition): + """extends default EntityType's metaclass to add workflow relations + (i.e. in_state, wf_info_for and custom_workflow). 
This is the default + metaclass for WorkflowableEntityType. + """ + def __new__(mcs, name, bases, classdict): + abstract = classdict.pop('__abstract__', False) + cls = super(workflowable_definition, mcs).__new__(mcs, name, bases, + classdict) + if not abstract: + make_workflowable(cls) + return cls + + +@add_metaclass(workflowable_definition) +class WorkflowableEntityType(ybo.EntityType): + """Use this base class instead of :class:`EntityType` to have workflow + relations (i.e. `in_state`, `wf_info_for` and `custom_workflow`) on your + entity type. + """ + __abstract__ = True + + +def make_workflowable(cls, in_state_descr=None): + """Adds workflow relations as :class:`WorkflowableEntityType`, but usable on + existing classes which are not using that base class. + """ + existing_rels = set(rdef.name for rdef in cls.__relations__) + # let relation types defined in cw.schemas.workflow carrying + # cardinality, constraints and other relation definition properties + etype = getattr(cls, 'name', cls.__name__) + if 'custom_workflow' not in existing_rels: + rdef = ybo.RelationDefinition(etype, 'custom_workflow', 'Workflow') + yams_add_relation(cls.__relations__, rdef) + if 'in_state' not in existing_rels: + rdef = ybo.RelationDefinition(etype, 'in_state', 'State', + description=in_state_descr) + yams_add_relation(cls.__relations__, rdef) + if 'wf_info_for' not in existing_rels: + rdef = ybo.RelationDefinition('TrInfo', 'wf_info_for', etype) + yams_add_relation(cls.__relations__, rdef) + + +# schema loading ############################################################## + +CONSTRAINTS['RQLConstraint'] = RQLConstraint +CONSTRAINTS['RQLUniqueConstraint'] = RQLUniqueConstraint +CONSTRAINTS['RQLVocabularyConstraint'] = RQLVocabularyConstraint +CONSTRAINTS.pop('MultipleStaticVocabularyConstraint', None) # don't want this in cw yams schema +PyFileReader.context.update(CONSTRAINTS) + + +class BootstrapSchemaLoader(SchemaLoader): + """cubicweb specific schema loader, loading only schema necessary to read + the persistent schema + """ + schemacls = CubicWebSchema + + def load(self, config, path=(), **kwargs): + """return a Schema instance from the schema definition read + from + """ + return super(BootstrapSchemaLoader, self).load( + path, config.appid, register_base_types=False, **kwargs) + + def _load_definition_files(self, cubes=None): + # bootstraping, ignore cubes + filepath = join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'bootstrap.py') + self.info('loading %s', filepath) + with tempattr(ybo, 'PACKAGE', 'cubicweb'): # though we don't care here + self.handle_file(filepath) + + def unhandled_file(self, filepath): + """called when a file without handler associated has been found""" + self.warning('ignoring file %r', filepath) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +class CubicWebSchemaLoader(BootstrapSchemaLoader): + """cubicweb specific schema loader, automatically adding metadata to the + instance's schema + """ + + def load(self, config, **kwargs): + """return a Schema instance from the schema definition read + from + """ + self.info('loading %s schemas', ', '.join(config.cubes())) + self.extrapath = {} + for cubesdir in config.cubes_search_path(): + if cubesdir != config.CUBES_DIR: + self.extrapath[cubesdir] = 'cubes' + if config.apphome: + path = tuple(reversed([config.apphome] + config.cubes_path())) + else: + path = 
tuple(reversed(config.cubes_path())) + try: + return super(CubicWebSchemaLoader, self).load(config, path=path, **kwargs) + finally: + # we've to cleanup modules imported from cubicweb.schemas as well + cleanup_sys_modules([join(cubicweb.CW_SOFTWARE_ROOT, 'schemas')]) + + def _load_definition_files(self, cubes): + for filepath in (join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'bootstrap.py'), + join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'base.py'), + join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'workflow.py'), + join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'Bookmark.py')): + self.info('loading %s', filepath) + with tempattr(ybo, 'PACKAGE', 'cubicweb'): + self.handle_file(filepath) + for cube in cubes: + for filepath in self.get_schema_files(cube): + with tempattr(ybo, 'PACKAGE', basename(cube)): + self.handle_file(filepath) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + + +set_log_methods(CubicWebSchemaLoader, getLogger('cubicweb.schemaloader')) +set_log_methods(BootstrapSchemaLoader, getLogger('cubicweb.bootstrapschemaloader')) +set_log_methods(RQLExpression, getLogger('cubicweb.schema')) + +# _() is just there to add messages to the catalog, don't care about actual +# translation +MAY_USE_TEMPLATE_FORMAT = set(('managers',)) +NEED_PERM_FORMATS = [_('text/cubicweb-page-template')] + +@monkeypatch(FormatConstraint) +def vocabulary(self, entity=None, form=None): + cw = None + if form is None and entity is not None: + cw = entity._cw + elif form is not None: + cw = form._cw + if cw is not None: + if hasattr(cw, 'write_security'): # test it's a session and not a request + # cw is a server session + hasperm = not cw.write_security or \ + not cw.is_hook_category_activated('integrity') or \ + cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT) + else: + hasperm = cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT) + if hasperm: + return self.regular_formats + tuple(NEED_PERM_FORMATS) + return self.regular_formats + +# XXX itou for some Statement methods +from rql import stmts +orig_get_etype = stmts.ScopeNode.get_etype +def bw_get_etype(self, name): + return orig_get_etype(self, bw_normalize_etype(name)) +stmts.ScopeNode.get_etype = bw_get_etype + +orig_add_main_variable_delete = stmts.Delete.add_main_variable +def bw_add_main_variable_delete(self, etype, vref): + return orig_add_main_variable_delete(self, bw_normalize_etype(etype), vref) +stmts.Delete.add_main_variable = bw_add_main_variable_delete + +orig_add_main_variable_insert = stmts.Insert.add_main_variable +def bw_add_main_variable_insert(self, etype, vref): + return orig_add_main_variable_insert(self, bw_normalize_etype(etype), vref) +stmts.Insert.add_main_variable = bw_add_main_variable_insert + +orig_set_statement_type = stmts.Select.set_statement_type +def bw_set_statement_type(self, etype): + return orig_set_statement_type(self, bw_normalize_etype(etype)) +stmts.Select.set_statement_type = bw_set_statement_type diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/Bookmark.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/Bookmark.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,49 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""the Bookmark entity type for internal links + +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from yams.buildobjs import EntityType, RelationType, SubjectRelation, String +from cubicweb.schema import RRQLExpression + +class Bookmark(EntityType): + """bookmarks are used to have user's specific internal links""" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users',), + 'delete': ('managers', 'owners',), + 'update': ('managers', 'owners',), + } + + title = String(required=True, maxsize=128, internationalizable=True) + path = String(maxsize=2048, required=True, + description=_("relative url of the bookmarked page")) + + bookmarked_by = SubjectRelation('CWUser', + description=_("users using this bookmark")) + + +class bookmarked_by(RelationType): + __permissions__ = {'read': ('managers', 'users', 'guests',), + # test user in users group to avoid granting permission to anonymous user + 'add': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')), + 'delete': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,51 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""some constants and classes to define schema permissions""" + +__docformat__ = "restructuredtext en" + +from cubicweb.schema import RO_REL_PERMS, RO_ATTR_PERMS, \ + PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, \ + ERQLExpression, RRQLExpression + +# permissions for "meta" entity type (readable by anyone, can only be +# added/deleted by managers) +META_ETYPE_PERMS = PUB_SYSTEM_ENTITY_PERMS # XXX deprecates +# permissions for "meta" relation type (readable by anyone, can only be +# added/deleted by managers) +META_RTYPE_PERMS = PUB_SYSTEM_REL_PERMS # XXX deprecates +# permissions for relation type that should only set by hooks using unsafe +# execute, readable by anyone +HOOKS_RTYPE_PERMS = RO_REL_PERMS # XXX deprecates + + +from logilab.common.modutils import LazyObject +from logilab.common.deprecation import deprecated +class MyLazyObject(LazyObject): + + def _getobj(self): + try: + return super(MyLazyObject, self)._getobj() + except ImportError: + raise ImportError('In cubicweb 3.14, function %s has been moved to ' + 'cube localperms. Install it first.' % self.obj) + +for name in ('xperm', 'xexpr', 'xrexpr', 'xorexpr', 'sexpr', 'restricted_sexpr', + 'restricted_oexpr', 'oexpr', 'relxperm', 'relxexpr', '_perm'): + msg = '[3.14] import %s from cubes.localperms' % name + globals()[name] = deprecated(msg, name=name, doc='deprecated')(MyLazyObject('cubes.localperms', name)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/_regproc.mysql.sql --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/_regproc.mysql.sql Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,22 @@ +/* -*- sql -*- + + mysql specific registered procedures, + +*/ + +/* XXX limit_size version dealing with format as postgres version does. + XXX mysql doesn't support overloading, each function should have a different name + + NOTE: fulltext renamed since it cause a mysql name conflict + */ + +CREATE FUNCTION text_limit_size(vfulltext TEXT, maxsize INT) +RETURNS TEXT +NO SQL +BEGIN + IF LENGTH(vfulltext) < maxsize THEN + RETURN vfulltext; + ELSE + RETURN SUBSTRING(vfulltext from 1 for maxsize) || '...'; + END IF; +END ;; diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/_regproc.postgres.sql --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/_regproc.postgres.sql Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,55 @@ +/* -*- sql -*- + + postgres specific registered procedures, + require the plpgsql language installed + +*/ + +DROP FUNCTION IF EXISTS comma_join (anyarray) CASCADE; +CREATE FUNCTION comma_join (anyarray) RETURNS text AS $$ + SELECT array_to_string($1, ', ') +$$ LANGUAGE SQL;; + + +DROP FUNCTION IF EXISTS cw_array_append_unique (anyarray, anyelement) CASCADE; +CREATE FUNCTION cw_array_append_unique (anyarray, anyelement) RETURNS anyarray AS $$ + SELECT array_append($1, (SELECT $2 WHERE $2 <> ALL($1))) +$$ LANGUAGE SQL;; + +DROP AGGREGATE IF EXISTS group_concat (anyelement) CASCADE; +CREATE AGGREGATE group_concat ( + basetype = anyelement, + sfunc = cw_array_append_unique, + stype = anyarray, + finalfunc = comma_join, + initcond = '{}' +);; + + +DROP FUNCTION IF EXISTS limit_size (fulltext text, format text, maxsize integer); +CREATE FUNCTION limit_size (fulltext text, format text, maxsize integer) RETURNS text AS $$ +DECLARE + plaintext text; +BEGIN + IF char_length(fulltext) < maxsize THEN + RETURN fulltext; + END IF; + IF format = 'text/html' OR format = 'text/xhtml' OR format = 'text/xml' THEN + plaintext := regexp_replace(fulltext, '<[a-zA-Z/][^>]*>', '', 'g'); + ELSE + plaintext := 
fulltext; + END IF; + IF char_length(plaintext) < maxsize THEN + RETURN plaintext; + ELSE + RETURN substring(plaintext from 1 for maxsize) || '...'; + END IF; +END +$$ LANGUAGE plpgsql;; + +DROP FUNCTION IF EXISTS text_limit_size (fulltext text, maxsize integer); +CREATE FUNCTION text_limit_size (fulltext text, maxsize integer) RETURNS text AS $$ +BEGIN + RETURN limit_size(fulltext, 'text/plain', maxsize); +END +$$ LANGUAGE plpgsql;; diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/base.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/base.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,383 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""core CubicWeb schema, but not necessary at bootstrap time""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, + SubjectRelation, + String, TZDatetime, Datetime, Password, Interval, + Boolean, UniqueConstraint) +from cubicweb.schema import ( + RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression, + PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS, + RO_ATTR_PERMS) + +class CWUser(WorkflowableEntityType): + """define a CubicWeb user""" + __permissions__ = { + 'read': ('managers', 'users', ERQLExpression('X identity U')), + 'add': ('managers',), + 'delete': ('managers',), + 'update': ('managers', ERQLExpression('X identity U, NOT U in_group G, G name "guests"'),), + } + + login = String(required=True, unique=True, maxsize=64, + description=_('unique identifier used to connect to the application')) + upassword = Password(required=True) # password is a reserved word for mysql + firstname = String(maxsize=64) + surname = String(maxsize=64) + last_login_time = TZDatetime(description=_('last connection date')) + in_group = SubjectRelation('CWGroup', cardinality='+*', + constraints=[RQLConstraint('NOT O name "owners"')], + description=_('groups grant permissions to the user')) + + +class EmailAddress(EntityType): + """an electronic mail address associated to a short alias""" + __permissions__ = { + # application that wishes public email, or use it for something else + # than users (eg Company, Person), should explicitly change permissions + 'read': ('managers', ERQLExpression('U use_email X')), + 'add': ('managers', 'users',), + 'delete': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')), + 'update': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')), + } + + alias = String(fulltextindexed=True, maxsize=56) + address = String(required=True, fulltextindexed=True, + indexed=True, unique=True, maxsize=128) + prefered_form = SubjectRelation('EmailAddress', cardinality='?*', + description=_('when multiple 
addresses are equivalent \ +(such as python-projects@logilab.org and python-projects@lists.logilab.org), set this \ +to indicate which is the preferred form.')) + +class use_email(RelationType): + fulltext_container = 'subject' + + +class use_email_relation(RelationDefinition): + """user's email account""" + name = "use_email" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', RRQLExpression('U has_update_permission S'),), + 'delete': ('managers', RRQLExpression('U has_update_permission S'),), + } + subject = "CWUser" + object = "EmailAddress" + cardinality = '*?' + composite = 'subject' + + +class primary_email(RelationDefinition): + """the prefered email""" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', RRQLExpression('U has_update_permission S'),), + 'delete': ('managers', RRQLExpression('U has_update_permission S'),), + } + subject = "CWUser" + object = "EmailAddress" + cardinality = '??' + constraints= [RQLConstraint('S use_email O')] + + +class prefered_form(RelationType): + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + # XXX should have update __permissions__ on both subject and object, + # though by doing this we will probably have no way to add + # this relation in the web ui. The easiest way to acheive this + # is probably to be able to have "U has_update_permission O" as + # RQLConstraint of the relation definition, though this is not yet + # possible + 'add': ('managers', RRQLExpression('U has_update_permission S'),), + 'delete': ('managers', RRQLExpression('U has_update_permission S'),), + } + +class in_group(RelationType): + """core relation indicating a user's groups""" + __permissions__ = PUB_SYSTEM_REL_PERMS + +class owned_by(RelationType): + """core relation indicating owners of an entity. This relation + implicitly put the owner into the owners group for the entity + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('S owned_by U'),), + 'delete': ('managers', RRQLExpression('S owned_by U'),), + } + # 0..n cardinality for entities created by internal session (no attached user) + # and to support later deletion of a user which has created some entities + cardinality = '**' + subject = '*' + object = 'CWUser' + +class created_by(RelationType): + """core relation indicating the original creator of an entity""" + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'delete': ('managers',), + } + # 0..1 cardinality for entities created by internal session (no attached user) + # and to support later deletion of a user which has created some entities + cardinality = '?*' + subject = '*' + object = 'CWUser' + + +class creation_date(RelationType): + """creation time of an entity""" + __permissions__ = PUB_SYSTEM_ATTR_PERMS + cardinality = '11' + subject = '*' + object = 'TZDatetime' + + +class modification_date(RelationType): + """latest modification time of an entity""" + __permissions__ = PUB_SYSTEM_ATTR_PERMS + cardinality = '11' + subject = '*' + object = 'TZDatetime' + + +class cwuri(RelationType): + """internal entity uri""" + __permissions__ = RO_ATTR_PERMS + cardinality = '11' + subject = '*' + object = 'String' + + +# XXX find a better relation name +class for_user(RelationType): + """link a property to the user which want this property customization. Unless + you're a site manager, this relation will be handled automatically. 
+ """ + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'delete': ('managers',), + } + inlined = True + subject = 'CWProperty' + object = 'CWUser' + composite = 'object' + cardinality = '?*' + + +class ExternalUri(EntityType): + """a URI representing an object in external data store""" + uri = String(required=True, unique=True, maxsize=256, + description=_('the URI of the object')) + + +class same_as(RelationType): + """generic relation to specify that an external entity represent the same + object as a local one: + http://www.w3.org/TR/owl-ref/#sameAs-def + """ + #NOTE: You'll have to explicitly declare which entity types can have a + #same_as relation + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users'), + 'delete': ('managers', 'owners'), + } + cardinality = '**' + symmetric = True + # NOTE: the 'object = ExternalUri' declaration will still be mandatory + # in the cube's schema. + object = 'ExternalUri' + + +class CWCache(EntityType): + """a simple cache entity characterized by a name and + a validity date. + + The target application is responsible for updating timestamp + when necessary to invalidate the cache (typically in hooks). + + Also, checkout the AppObject.get_cache() method. + """ + # XXX only handle by hooks, shouldn't be readable/editable at all through + # the ui and so no permissions should be granted, no? + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'update': ('managers', 'users',), # XXX + 'delete': ('managers',), + } + + name = String(required=True, unique=True, maxsize=128, + description=_('name of the cache')) + timestamp = TZDatetime(default='NOW') + + +class CWSource(EntityType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + name = String(required=True, unique=True, maxsize=128, + description=_('name of the source')) + type = String(required=True, maxsize=20, description=_('type of the source')) + config = String(description=_('source\'s configuration. One key=value per ' + 'line, authorized keys depending on the ' + 'source\'s type'), + __permissions__={ + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + }) + # put this here and not in a subclass even if it's only for some sources + # since having subclasses on generic relation (cw_source) double the number + # of rdef in the schema, and make ms planning harder since queries solutions + # may changes when sources are specified + url = String(description=_('URLs from which content will be imported. 
You can put one url per line')) + parser = String(description=_('parser to use to extract entities from content retrieved at given URLs.')) + latest_retrieval = TZDatetime(description=_('latest synchronization time')) + in_synchronization = TZDatetime(description=_('start timestamp of the currently in synchronization, or NULL when no synchronization in progress.')) + + +ENTITY_MANAGERS_PERMISSIONS = { + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } +RELATION_MANAGERS_PERMISSIONS = { + 'read': ('managers',), + 'add': ('managers',), + 'delete': ('managers',), + } + + +class CWSourceHostConfig(EntityType): + __permissions__ = ENTITY_MANAGERS_PERMISSIONS + __unique_together__ = [('match_host', 'cw_host_config_of')] + match_host = String(required=True, maxsize=128, + description=_('regexp matching host(s) to which this config applies')) + config = String(required=True, + description=_('Source\'s configuration for a particular host. ' + 'One key=value per line, authorized keys ' + 'depending on the source\'s type, overriding ' + 'values defined on the source.'), + __permissions__={ + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + }) + + +class cw_host_config_of(RelationDefinition): + __permissions__ = RELATION_MANAGERS_PERMISSIONS + subject = 'CWSourceHostConfig' + object = 'CWSource' + cardinality = '1*' + composite = 'object' + inlined = True + +class cw_source(RelationDefinition): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'delete': ('managers',), + } + subject = '*' + object = 'CWSource' + cardinality = '1*' + composite = 'object' + + +class CWDataImport(EntityType): + __permissions__ = ENTITY_MANAGERS_PERMISSIONS + start_timestamp = TZDatetime() + end_timestamp = TZDatetime() + log = String() + status = String(required=True, internationalizable=True, indexed=True, + default='in progress', + vocabulary=[_('in progress'), _('success'), _('failed')]) + +class cw_import_of(RelationDefinition): + __permissions__ = RELATION_MANAGERS_PERMISSIONS + subject = 'CWDataImport' + object = 'CWSource' + cardinality = '1*' + composite = 'object' + + +class CWSourceSchemaConfig(EntityType): + __permissions__ = ENTITY_MANAGERS_PERMISSIONS + cw_for_source = SubjectRelation( + 'CWSource', inlined=True, cardinality='1*', composite='object', + __permissions__=RELATION_MANAGERS_PERMISSIONS) + options = String(description=_('allowed options depends on the source type')) + + +class rtype_cw_schema(RelationDefinition): + __permissions__ = RELATION_MANAGERS_PERMISSIONS + name = 'cw_schema' + subject = 'CWSourceSchemaConfig' + object = ('CWEType', 'CWRType') + inlined = True + cardinality = '1*' + composite = 'object' + constraints = [RQLConstraint('NOT O final TRUE')] + +class rdef_cw_schema(RelationDefinition): + __permissions__ = RELATION_MANAGERS_PERMISSIONS + name = 'cw_schema' + subject = 'CWSourceSchemaConfig' + object = 'CWRelation' + inlined = True + cardinality = '1*' + composite = 'object' + +# "abtract" relation types, no definition in cubicweb itself ################### + +class identical_to(RelationType): + """identical to""" + symmetric = True + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + # XXX should have update __permissions__ on both subject and object, + # though by doing this we will probably have no way to add + # this relation in the web ui. 
The easiest way to acheive this + # is probably to be able to have "U has_update_permission O" as + # RQLConstraint of the relation definition, though this is not yet + # possible + 'add': ('managers', RRQLExpression('U has_update_permission S'),), + 'delete': ('managers', RRQLExpression('U has_update_permission S'),), + } + +class see_also(RelationType): + """generic relation to link one entity to another""" + symmetric = True + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', RRQLExpression('U has_update_permission S'),), + 'delete': ('managers', RRQLExpression('U has_update_permission S'),), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/bootstrap.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/bootstrap.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,357 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""core CubicWeb schema necessary for bootstrapping the actual instance's schema +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, Bytes, + SubjectRelation, RichString, String, Boolean, Int) +from cubicweb.schema import ( + RQLConstraint, + PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS + ) + +# not restricted since as "is" is handled as other relations, guests need +# access to this +class CWEType(EntityType): + """define an entity type, used to build the instance schema""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + name = String(required=True, indexed=True, internationalizable=True, + unique=True, maxsize=64) + description = RichString(internationalizable=True, + description=_('semantic description of this entity type')) + # necessary to filter using RQL + final = Boolean(default=False, description=_('automatic')) + + +class CWRType(EntityType): + """define a relation type, used to build the instance schema""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + name = String(required=True, indexed=True, internationalizable=True, + unique=True, maxsize=64) + description = RichString(internationalizable=True, + description=_('semantic description of this relation type')) + symmetric = Boolean(description=_('is this relation equivalent in both direction ?')) + inlined = Boolean(description=_('is this relation physically inlined? 
you should know what you\'re doing if you are changing this!')) + fulltext_container = String(description=_('if full text content of subject/object entity ' + 'should be added to other side entity (the container).'), + vocabulary=('', _('subject'), _('object')), + maxsize=8, default=None) + final = Boolean(description=_('automatic')) + + +class CWComputedRType(EntityType): + """define a virtual relation type, used to build the instance schema""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + name = String(required=True, indexed=True, internationalizable=True, + unique=True, maxsize=64) + description = RichString(internationalizable=True, + description=_('semantic description of this relation type')) + rule = String(required=True) + + +class CWAttribute(EntityType): + """define a final relation: link a final relation type from a non final + entity to a final entity type. + + used to build the instance schema + """ + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + relation_type = SubjectRelation('CWRType', cardinality='1*', + constraints=[RQLConstraint('O final TRUE')], + composite='object') + from_entity = SubjectRelation('CWEType', cardinality='1*', + constraints=[RQLConstraint('O final FALSE')], + composite='object') + to_entity = SubjectRelation('CWEType', cardinality='1*', + constraints=[RQLConstraint('O final TRUE')], + composite='object') + constrained_by = SubjectRelation('CWConstraint', cardinality='*1', composite='subject') + + cardinality = String(maxsize=2, internationalizable=True, + vocabulary=[_('?1'), _('11')], + description=_('subject/object cardinality')) + ordernum = Int(description=('control subject entity\'s relations order'), default=0) + + formula = String(maxsize=2048) + indexed = Boolean(description=_('create an index for quick search on this attribute')) + fulltextindexed = Boolean(description=_('index this attribute\'s value in the plain text index')) + internationalizable = Boolean(description=_('is this attribute\'s value translatable')) + defaultval = Bytes(description=_('default value as gziped pickled python object')) + extra_props = Bytes(description=_('additional type specific properties')) + + description = RichString(internationalizable=True, + description=_('semantic description of this attribute')) + + +CARDINALITY_VOCAB = [_('?*'), _('1*'), _('+*'), _('**'), + _('?+'), _('1+'), _('++'), _('*+'), + _('?1'), _('11'), _('+1'), _('*1'), + _('??'), _('1?'), _('+?'), _('*?')] + +class CWRelation(EntityType): + """define a non final relation: link a non final relation type from a non + final entity to a non final entity type. + + used to build the instance schema + """ + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + relation_type = SubjectRelation('CWRType', cardinality='1*', + constraints=[RQLConstraint('O final FALSE')], + composite='object') + from_entity = SubjectRelation('CWEType', cardinality='1*', + constraints=[RQLConstraint('O final FALSE')], + composite='object') + to_entity = SubjectRelation('CWEType', cardinality='1*', + constraints=[RQLConstraint('O final FALSE')], + composite='object') + constrained_by = SubjectRelation('CWConstraint', cardinality='*1', composite='subject') + + cardinality = String(maxsize=2, internationalizable=True, + vocabulary=CARDINALITY_VOCAB, + description=_('subject/object cardinality')) + ordernum = Int(description=_('control subject entity\'s relations order'), + default=0) + composite = String(description=_('is the subject/object entity of the relation ' + 'composed of the other ? 
This implies that when ' + 'the composite is deleted, composants are also ' + 'deleted.'), + vocabulary=('', _('subject'), _('object')), + maxsize=8, default=None) + + description = RichString(internationalizable=True, + description=_('semantic description of this relation')) + + +# not restricted since it has to be read when checking allowed transitions +class RQLExpression(EntityType): + """define a rql expression used to define permissions""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + exprtype = String(required=True, vocabulary=['ERQLExpression', 'RRQLExpression']) + mainvars = String(maxsize=8, + description=_('name of the main variables which should be ' + 'used in the selection if necessary (comma ' + 'separated)')) + expression = String(required=True, + description=_('restriction part of a rql query. ' + 'For entity rql expression, X and U are ' + 'predefined respectivly to the current object and to ' + 'the request user. For relation rql expression, ' + 'S, O and U are predefined respectivly to the current ' + 'relation\'subject, object and to ' + 'the request user. ')) + + +class CWConstraint(EntityType): + """define a schema constraint""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + cstrtype = SubjectRelation('CWConstraintType', cardinality='1*') + value = String(description=_('depends on the constraint type')) + + +class CWUniqueTogetherConstraint(EntityType): + """defines a sql-level multicolumn unique index""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + name = String(required=True, unique=True, maxsize=64) + constraint_of = SubjectRelation('CWEType', cardinality='1*', composite='object', + inlined=True) + relations = SubjectRelation('CWRType', cardinality='+*', + constraints=[RQLConstraint( + 'S constraint_of ET, RDEF relation_type O, RDEF from_entity ET, ' + 'O final TRUE OR (O final FALSE AND O inlined TRUE)')]) + + +class CWConstraintType(EntityType): + """define a schema constraint type""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + name = String(required=True, indexed=True, internationalizable=True, + unique=True, maxsize=64) + + +# not restricted since it has to be read when checking allowed transitions +class CWGroup(EntityType): + """define a CubicWeb users group""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + name = String(required=True, indexed=True, internationalizable=True, + unique=True, maxsize=64) + + +class CWProperty(EntityType): + """used for cubicweb configuration. Once a property has been created you + can't change the key. + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', 'users',), + 'update': ('managers', 'owners',), + 'delete': ('managers', 'owners',), + } + # key is a reserved word for mysql + pkey = String(required=True, internationalizable=True, maxsize=256, + description=_('defines what\'s the property is applied for. 
' + 'You must select this first to be able to set ' + 'value')) + value = String(internationalizable=True, maxsize=256) + +class relation_type(RelationType): + """link a relation definition to its relation type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class from_entity(RelationType): + """link a relation definition to its subject entity type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class to_entity(RelationType): + """link a relation definition to its object entity type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class constrained_by(RelationType): + """constraints applying on this relation""" + __permissions__ = PUB_SYSTEM_REL_PERMS + +class cstrtype(RelationType): + """constraint factory""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + + +class read_permission_cwgroup(RelationDefinition): + """groups allowed to read entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'read_permission' + subject = ('CWEType', 'CWAttribute', 'CWRelation', 'CWComputedRType') + object = 'CWGroup' + cardinality = '**' + +class add_permission_cwgroup(RelationDefinition): + """groups allowed to add entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'add_permission' + subject = ('CWEType', 'CWRelation', 'CWAttribute') + object = 'CWGroup' + cardinality = '**' + +class delete_permission_cwgroup(RelationDefinition): + """groups allowed to delete entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'delete_permission' + subject = ('CWEType', 'CWRelation') + object = 'CWGroup' + cardinality = '**' + +class update_permission_cwgroup(RelationDefinition): + """groups allowed to update entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'update_permission' + subject = ('CWEType', 'CWAttribute') + object = 'CWGroup' + cardinality = '**' + +class read_permission_rqlexpr(RelationDefinition): + """rql expression allowing to read entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'read_permission' + subject = ('CWEType', 'CWAttribute', 'CWRelation', 'CWComputedRType') + object = 'RQLExpression' + cardinality = '*?' + composite = 'subject' + +class add_permission_rqlexpr(RelationDefinition): + """rql expression allowing to add entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'add_permission' + subject = ('CWEType', 'CWRelation', 'CWAttribute') + object = 'RQLExpression' + cardinality = '*?' + composite = 'subject' + +class delete_permission_rqlexpr(RelationDefinition): + """rql expression allowing to delete entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'delete_permission' + subject = ('CWEType', 'CWRelation') + object = 'RQLExpression' + cardinality = '*?' + composite = 'subject' + +class update_permission_rqlexpr(RelationDefinition): + """rql expression allowing to update entities/relations of this type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + name = 'update_permission' + subject = ('CWEType', 'CWAttribute') + object = 'RQLExpression' + cardinality = '*?' 
+ composite = 'subject' + + +class is_(RelationType): + """core relation indicating the type of an entity + """ + name = 'is' + # don't explicitly set composite here, this is handled anyway + #composite = 'object' + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': (), + 'delete': (), + } + cardinality = '1*' + subject = '*' + object = 'CWEType' + +class is_instance_of(RelationType): + """core relation indicating the types (including specialized types) + of an entity + """ + # don't explicitly set composite here, this is handled anyway + #composite = 'object' + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': (), + 'delete': (), + } + cardinality = '+*' + subject = '*' + object = 'CWEType' + +class specializes(RelationType): + name = 'specializes' + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'delete': ('managers',), + } + cardinality = '?*' + subject = 'CWEType' + object = 'CWEType' + +def post_build_callback(schema): + """set attributes permissions for schema/workflow entities""" + from cubicweb.schema import SCHEMA_TYPES, WORKFLOW_TYPES, META_RTYPES + wftypes = WORKFLOW_TYPES - set(('TrInfo',)) + for eschema in schema.entities(): + if eschema in SCHEMA_TYPES or eschema in wftypes: + for rschema in eschema.subject_relations(): + if rschema.final and not rschema in META_RTYPES: + rdef = eschema.rdef(rschema) + rdef.permissions = PUB_SYSTEM_ATTR_PERMS diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/schemas/workflow.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/schemas/workflow.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,283 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""workflow related schemas + +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, + SubjectRelation, + RichString, String, Int) +from cubicweb.schema import RQLConstraint +from cubicweb.schemas import (PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, + RO_REL_PERMS) + +class Workflow(EntityType): + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + + name = String(required=True, indexed=True, internationalizable=True, + maxsize=256) + description = RichString(default_format='text/rest', + description=_('semantic description of this workflow')) + + workflow_of = SubjectRelation('CWEType', cardinality='+*', + description=_('entity types which may use this workflow'), + constraints=[RQLConstraint('O final FALSE')]) + + initial_state = SubjectRelation('State', cardinality='?*', + constraints=[RQLConstraint('O state_of S', + msg=_('state doesn\'t belong to this workflow'))], + description=_('initial state for this workflow')) + + +class default_workflow(RelationType): + """default workflow for an entity type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + + subject = 'CWEType' + object = 'Workflow' + cardinality = '?*' + constraints = [RQLConstraint('S final FALSE, O workflow_of S', + msg=_('workflow isn\'t a workflow for this type'))] + + +class State(EntityType): + """used to associate simple states to an entity type and/or to define + workflows + """ + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + __unique_together__ = [('name', 'state_of')] + name = String(required=True, indexed=True, internationalizable=True, maxsize=256) + description = RichString(default_format='text/rest', + description=_('semantic description of this state')) + + # XXX should be on BaseTransition w/ AND/OR selectors when we will + # implements #345274 + allowed_transition = SubjectRelation('BaseTransition', cardinality='**', + constraints=[RQLConstraint('S state_of WF, O transition_of WF', + msg=_('state and transition don\'t belong the the same workflow'))], + description=_('allowed transitions from this state')) + state_of = SubjectRelation('Workflow', cardinality='1*', composite='object', inlined=True, + description=_('workflow to which this state belongs')) + + +class BaseTransition(EntityType): + """abstract base class for transitions""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + __unique_together__ = [('name', 'transition_of')] + + name = String(required=True, indexed=True, internationalizable=True, maxsize=256) + type = String(vocabulary=(_('normal'), _('auto')), default='normal') + description = RichString(description=_('semantic description of this transition')) + + transition_of = SubjectRelation('Workflow', cardinality='1*', composite='object', inlined=True, + description=_('workflow to which this transition belongs')) + + +class require_group(RelationDefinition): + """group in which a user should be to be allowed to pass this transition""" + __permissions__ = PUB_SYSTEM_REL_PERMS + subject = 'BaseTransition' + object = 'CWGroup' + + +class condition(RelationDefinition): + """a RQL expression which should return some results, else the transition + won't be available. + + This query may use X and U variables that will respectivly represents the + current entity and the current user. + """ + __permissions__ = PUB_SYSTEM_REL_PERMS + subject = 'BaseTransition' + object = 'RQLExpression' + cardinality = '*?' 
+ composite = 'subject' + + +class Transition(BaseTransition): + """use to define a transition from one or multiple states to a destination + states in workflow's definitions. Transition without destination state will + go back to the state from which we arrived to the current state. + """ + __specializes_schema__ = True + + destination_state = SubjectRelation( + 'State', cardinality='?*', + constraints=[RQLConstraint('S transition_of WF, O state_of WF', + msg=_('state and transition don\'t belong the the same workflow'))], + description=_('destination state for this transition')) + + +class WorkflowTransition(BaseTransition): + """special transition allowing to go through a sub-workflow""" + __specializes_schema__ = True + + subworkflow = SubjectRelation('Workflow', cardinality='1*', + constraints=[RQLConstraint('S transition_of WF, WF workflow_of ET, O workflow_of ET', + msg=_('subworkflow isn\'t a workflow for the same types as the transition\'s workflow'))] + ) + # XXX use exit_of and inline it + subworkflow_exit = SubjectRelation('SubWorkflowExitPoint', cardinality='*1', + composite='subject') + + +class SubWorkflowExitPoint(EntityType): + """define how we get out from a sub-workflow""" + subworkflow_state = SubjectRelation( + 'State', cardinality='1*', + constraints=[RQLConstraint('T subworkflow_exit S, T subworkflow WF, O state_of WF', + msg=_('exit state must be a subworkflow state'))], + description=_('subworkflow state')) + destination_state = SubjectRelation( + 'State', cardinality='?*', + constraints=[RQLConstraint('T subworkflow_exit S, T transition_of WF, O state_of WF', + msg=_('destination state must be in the same workflow as our parent transition'))], + description=_('destination state. No destination state means that transition ' + 'should go back to the state from which we\'ve entered the ' + 'subworkflow.')) + + +class TrInfo(EntityType): + """workflow history item""" + # 'add' security actually done by hooks + __permissions__ = { + 'read': ('managers', 'users', 'guests',), # XXX U has_read_permission O ? + 'add': ('managers', 'users', 'guests',), + 'delete': (), # XXX should we allow managers to delete TrInfo? + 'update': ('managers', 'owners',), + } + # The unique_together constraint ensures that 2 repositories + # sharing the db won't be able to fire a transition simultaneously + # on the same entity tr_count is filled in the FireTransitionHook + # to the number of TrInfo attached to the entity on which we + # attempt to fire a transition. In other word, it contains the + # rank of the TrInfo for that entity, and the constraint says we + # cannot have 2 TrInfo with the same rank. 
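    # Illustrative note (an assumption, not taken from this changeset): TrInfo
    # rows are normally not created by hand; they are recorded when a transition
    # is fired through the IWorkflowable adapter, for example:
    #
    #     wfobj = entity.cw_adapt_to('IWorkflowable')
    #     wfobj.fire_transition('approve', comment=u'looks fine')  # 'approve' is hypothetical
    #     trinfo = wfobj.latest_trinfo()  # the TrInfo written for that firing
    #
    # FireTransitionHook then fills tr_count as described in the comment above.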
+ __unique_together__ = [('tr_count', 'wf_info_for')] + from_state = SubjectRelation('State', cardinality='1*', inlined=True) + to_state = SubjectRelation('State', cardinality='1*', inlined=True) + # make by_transition optional because we want to allow managers to set + # entity into an arbitrary state without having to respect wf transition + by_transition = SubjectRelation('BaseTransition', cardinality='?*') + comment = RichString(fulltextindexed=True, default_format='text/plain') + tr_count = Int(description='autocomputed attribute used to ensure transition coherency') + # get actor and date time using owned_by and creation_date + +class from_state(RelationType): + __permissions__ = RO_REL_PERMS.copy() + inlined = True + +class to_state(RelationType): + __permissions__ = RO_REL_PERMS.copy() + inlined = True + +class by_transition(RelationType): + # 'add' security actually done by hooks + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users', 'guests',), + 'delete': (), + } + inlined = True + + +class workflow_of(RelationType): + """link a workflow to one or more entity type""" + __permissions__ = PUB_SYSTEM_REL_PERMS + +class state_of(RelationType): + """link a state to one or more workflow""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class transition_of(RelationType): + """link a transition to one or more workflow""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class destination_state(RelationType): + """destination state of a transition""" + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class allowed_transition(RelationType): + """allowed transitions from this state""" + __permissions__ = PUB_SYSTEM_REL_PERMS + +class initial_state(RelationType): + """indicate which state should be used by default when an entity using + states is created + """ + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + + +class subworkflow(RelationType): + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + +class exit_point(RelationType): + __permissions__ = PUB_SYSTEM_REL_PERMS + +class subworkflow_state(RelationType): + __permissions__ = PUB_SYSTEM_REL_PERMS + inlined = True + + +# "abstract" relations, set by WorkflowableEntityType ########################## + +class custom_workflow(RelationType): + """allow to set a specific workflow for an entity""" + __permissions__ = PUB_SYSTEM_REL_PERMS + + cardinality = '?*' + constraints = [RQLConstraint('S is ET, O workflow_of ET', + msg=_('workflow isn\'t a workflow for this type'))] + object = 'Workflow' + + +class wf_info_for(RelationType): + """link a transition information to its object""" + # 'add' security actually done by hooks + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users', 'guests',), + 'delete': (), + } + inlined = True + + cardinality = '1*' + composite = 'object' + fulltext_container = composite + subject = 'TrInfo' + + +class in_state(RelationType): + """indicate the current state of an entity""" + __permissions__ = RO_REL_PERMS + + # not inlined intentionnally since when using ldap sources, user'state + # has to be stored outside the CWUser table + inlined = False + + cardinality = '1*' + constraints = [RQLConstraint('S is ET, O state_of WF, WF workflow_of ET', + msg=_('state doesn\'t apply to this entity\'s type'))] + object = 'State' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/selectors.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/selectors.py Sat Jan 16 13:48:51 2016 +0100 @@ 
-0,0 +1,107 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from warnings import warn + +from six import string_types + +from logilab.common.deprecation import deprecated, class_renamed + +from cubicweb.predicates import * + + +warn('[3.15] cubicweb.selectors renamed into cubicweb.predicates', + DeprecationWarning, stacklevel=2) + +# XXX pre 3.15 bw compat +from cubicweb.appobject import (objectify_selector, traced_selection, + lltrace, yes) + +ExpectedValueSelector = class_renamed('ExpectedValueSelector', + ExpectedValuePredicate) +EClassSelector = class_renamed('EClassSelector', EClassPredicate) +EntitySelector = class_renamed('EntitySelector', EntityPredicate) + + +class on_transition(is_in_state): + """Return 1 if entity is in one of the transitions given as argument list + + Especially useful to match passed transition to enable notifications when + your workflow allows several transition to the same states. + + Note that if workflow `change_state` adapter method is used, this predicate + will not be triggered. + + You should use this instead of your own :class:`score_entity` predicate to + avoid some gotchas: + + * possible views gives a fake entity with no state + * you must use the latest tr info thru the workflow adapter for repository + side checking of the current state + + In debug mode, this predicate can raise: + :raises: :exc:`ValueError` for unknown transition names + (etype workflow only not checked in custom workflow) + + :rtype: int + """ + @deprecated('[3.12] on_transition is deprecated, you should rather use ' + 'on_fire_transition(etype, trname)') + def __init__(self, *expected): + super(on_transition, self).__init__(*expected) + + def _score(self, adapted): + trinfo = adapted.latest_trinfo() + if trinfo and trinfo.by_transition: + return trinfo.by_transition[0].name in self.expected + + def _validate(self, adapted): + wf = adapted.current_workflow + valid = [n.name for n in wf.reverse_transition_of] + unknown = sorted(self.expected.difference(valid)) + if unknown: + raise ValueError("%s: unknown transition(s): %s" + % (wf.name, ",".join(unknown))) + + +entity_implements = class_renamed('entity_implements', is_instance) + +class _but_etype(EntityPredicate): + """accept if the given entity types are not found in the result set. + + See `EntityPredicate` documentation for behaviour when row is not specified. 
+ + :param *etypes: entity types (`string_types`) which should be refused + """ + def __init__(self, *etypes): + super(_but_etype, self).__init__() + self.but_etypes = etypes + + def score(self, req, rset, row, col): + if rset.description[row][col] in self.but_etypes: + return 0 + return 1 + +but_etype = class_renamed('but_etype', _but_etype, 'use ~is_instance(*etypes) instead') + +# XXX deprecated the one_* variants of predicates below w/ multi_xxx(nb=1)? +# take care at the implementation though (looking for the 'row' argument's +# value) +two_lines_rset = class_renamed('two_lines_rset', multi_lines_rset) +two_cols_rset = class_renamed('two_cols_rset', multi_columns_rset) +two_etypes_rset = class_renamed('two_etypes_rset', multi_etypes_rset) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,364 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Server subcube of cubicweb : defines objects used only on the server +(repository) side + +The server module contains functions to initialize a new repository. +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +from os.path import join, exists +from glob import glob +from contextlib import contextmanager + +from six import text_type, string_types +from six.moves import filter + +from logilab.common.modutils import LazyObject +from logilab.common.textutils import splitstrip +from logilab.common.registry import yes +from logilab import database + +from yams import BASE_GROUPS + +from cubicweb import CW_SOFTWARE_ROOT +from cubicweb.appobject import AppObject + +class ShuttingDown(BaseException): + """raised when trying to access some resources while the repository is + shutting down. Inherit from BaseException so that `except Exception` won't + catch it. + """ + +# server-side services ######################################################### + +class Service(AppObject): + """Base class for services. + + A service is a selectable object that performs an action server-side. + Use :class:`cubicweb.dbapi.Connection.call_service` to call them from + the web-side. + + When inheriting this class, do not forget to define at least the __regid__ + attribute (and probably __select__ too). + """ + __registry__ = 'services' + __select__ = yes() + + def call(self, **kwargs): + raise NotImplementedError + + +# server-side debugging ######################################################## + +# server debugging flags. They may be combined using binary operators. 
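# A sketch of typical usage, assuming an open repository connection `cnx`
# (DBG_RQL and DBG_SQL are defined just below, set_debug() and the debugged()
# context manager further down in this module):
#
#     from cubicweb.server import debugged, set_debug, DBG_RQL, DBG_SQL
#
#     set_debug(DBG_RQL | DBG_SQL)          # trace RQL and the generated SQL
#     set_debug(0)                          # back to silence
#
#     with debugged(DBG_RQL | DBG_SQL):     # or scoped to a block
#         cnx.execute('Any X WHERE X is CWUser')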
+ +#:no debug information +DBG_NONE = 0 #: no debug information +#: rql execution information +DBG_RQL = 1 +#: executed sql +DBG_SQL = 2 +#: repository events +DBG_REPO = 4 +#: multi-sources +DBG_MS = 8 +#: hooks +DBG_HOOKS = 16 +#: operations +DBG_OPS = 32 +#: security +DBG_SEC = 64 +#: more verbosity +DBG_MORE = 128 +#: all level enabled +DBG_ALL = DBG_RQL + DBG_SQL + DBG_REPO + DBG_MS + DBG_HOOKS + DBG_OPS + DBG_SEC + DBG_MORE + +_SECURITY_ITEMS = [] +_SECURITY_CAPS = ['read', 'add', 'update', 'delete', 'transition'] + +#: current debug mode +DEBUG = 0 + +@contextmanager +def tunesecurity(items=(), capabilities=()): + """Context manager to use in conjunction with DBG_SEC. + + This allows some tuning of: + * the monitored capabilities ('read', 'add', ....) + * the object being checked by the security checkers + + When no item is given, all of them will be watched. + By default all capabilities are monitored, unless specified. + + Example use:: + + from cubicweb.server import debugged, DBG_SEC, tunesecurity + with debugged(DBG_SEC): + with tunesecurity(items=('Elephant', 'trumps'), + capabilities=('update', 'delete')): + babar.cw_set(trumps=celeste) + flore.cw_delete() + + ==> + + check_perm: 'update' 'relation Elephant.trumps.Elephant' + [(ERQLExpression(Any X WHERE U has_update_permission X, X eid %(x)s, U eid %(u)s), + {'eid': 2167}, True)] + check_perm: 'delete' 'Elephant' + [(ERQLExpression(Any X WHERE U has_delete_permission X, X eid %(x)s, U eid %(u)s), + {'eid': 2168}, True)] + + """ + olditems = _SECURITY_ITEMS[:] + _SECURITY_ITEMS.extend(list(items)) + oldactions = _SECURITY_CAPS[:] + _SECURITY_CAPS[:] = capabilities + yield + _SECURITY_ITEMS[:] = olditems + _SECURITY_CAPS[:] = oldactions + +def set_debug(debugmode): + """change the repository debugging mode""" + global DEBUG + if not debugmode: + DEBUG = 0 + return + if isinstance(debugmode, string_types): + for mode in splitstrip(debugmode, sep='|'): + DEBUG |= globals()[mode] + else: + DEBUG |= debugmode + +class debugged(object): + """Context manager and decorator to help debug the repository. + + It can be used either as a context manager: + + >>> with debugged('DBG_RQL | DBG_REPO'): + ... # some code in which you want to debug repository activity, + ... # seing information about RQL being executed an repository events. + + or as a function decorator: + + >>> @debugged('DBG_RQL | DBG_REPO') + ... def some_function(): + ... # some code in which you want to debug repository activity, + ... # seing information about RQL being executed an repository events + + The debug mode will be reset to its original value when leaving the "with" + block or the decorated function. 
+ """ + def __init__(self, debugmode): + self.debugmode = debugmode + self._clevel = None + + def __enter__(self): + """enter with block""" + self._clevel = DEBUG + set_debug(self.debugmode) + + def __exit__(self, exctype, exc, traceback): + """leave with block""" + set_debug(self._clevel) + return traceback is None + + def __call__(self, func): + """decorate function""" + def wrapped(*args, **kwargs): + _clevel = DEBUG + set_debug(self.debugmode) + try: + return func(*args, **kwargs) + finally: + set_debug(self._clevel) + return wrapped + +# database initialization ###################################################### + +def create_user(session, login, pwd, *groups): + # monkey patch this method if you want to customize admin/anon creation + # (that maybe necessary if you change CWUser's schema) + user = session.create_entity('CWUser', login=login, upassword=pwd) + for group in groups: + session.execute('SET U in_group G WHERE U eid %(u)s, G name %(group)s', + {'u': user.eid, 'group': text_type(group)}) + return user + +def init_repository(config, interactive=True, drop=False, vreg=None, + init_config=None): + """initialise a repository database by creating tables add filling them + with the minimal set of entities (ie at least the schema, base groups and + a initial user) + """ + from cubicweb.repoapi import get_repository, connect + from cubicweb.server.repository import Repository + from cubicweb.server.utils import manager_userpasswd + from cubicweb.server.sqlutils import sqlexec, sqlschema, sql_drop_all_user_tables + from cubicweb.server.sqlutils import _SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION as drop_filter + # configuration to avoid db schema loading and user'state checking + # on connection + config.creating = True + config.consider_user_state = False + config.cubicweb_appobject_path = set(('hooks', 'entities')) + config.cube_appobject_path = set(('hooks', 'entities')) + # only enable the system source at initialization time + repo = Repository(config, vreg=vreg) + if init_config is not None: + # further config initialization once it has been bootstrapped + init_config(config) + schema = repo.schema + sourcescfg = config.read_sources_file() + source = sourcescfg['system'] + driver = source['db-driver'] + with repo.internal_cnx() as cnx: + sqlcnx = cnx.cnxset.cnx + sqlcursor = cnx.cnxset.cu + execute = sqlcursor.execute + if drop: + helper = database.get_db_helper(driver) + dropsql = sql_drop_all_user_tables(helper, sqlcursor) + # We may fail dropping some tables because of table dependencies, in a first pass. + # So, we try a second drop sequence to drop remaining tables if needed. + # Note that 2 passes is an arbitrary choice as it seems enough for our usecases + # (looping may induce infinite recursion when user have no rights for example). + # Here we try to keep code simple and backend independent. That's why we don't try to + # distinguish remaining tables (missing privileges, dependencies, ...). 
+ failed = sqlexec(dropsql, execute, cnx=sqlcnx, + pbtitle='-> dropping tables (first pass)') + if failed: + failed = sqlexec(failed, execute, cnx=sqlcnx, + pbtitle='-> dropping tables (second pass)') + remainings = list(filter(drop_filter, helper.list_tables(sqlcursor))) + assert not remainings, 'Remaining tables: %s' % ', '.join(remainings) + handler = config.migration_handler(schema, interactive=False, repo=repo, cnx=cnx) + # install additional driver specific sql files + handler.cmd_install_custom_sql_scripts() + for cube in reversed(config.cubes()): + handler.cmd_install_custom_sql_scripts(cube) + _title = '-> creating tables ' + print(_title, end=' ') + # schema entities and relations tables + # can't skip entities table even if system source doesn't support them, + # they are used sometimes by generated sql. Keeping them empty is much + # simpler than fixing this... + schemasql = sqlschema(schema, driver) + #skip_entities=[str(e) for e in schema.entities() + # if not repo.system_source.support_entity(str(e))]) + failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;') + if failed: + print('The following SQL statements failed. You should check your schema.') + print(failed) + raise Exception('execution of the sql schema failed, you should check your schema') + sqlcursor.close() + sqlcnx.commit() + with repo.internal_cnx() as cnx: + # insert entity representing the system source + ssource = cnx.create_entity('CWSource', type=u'native', name=u'system') + repo.system_source.eid = ssource.eid + cnx.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid}) + # insert base groups and default admin + print('-> inserting default user and default groups.') + try: + login = text_type(sourcescfg['admin']['login']) + pwd = sourcescfg['admin']['password'] + except KeyError: + if interactive: + msg = 'enter login and password of the initial manager account' + login, pwd = manager_userpasswd(msg=msg, confirm=True) + else: + login, pwd = text_type(source['db-user']), source['db-password'] + # sort for eid predicatability as expected in some server tests + for group in sorted(BASE_GROUPS): + cnx.create_entity('CWGroup', name=text_type(group)) + admin = create_user(cnx, login, pwd, u'managers') + cnx.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s', + {'u': admin.eid}) + cnx.commit() + repo.shutdown() + # re-login using the admin user + config._cubes = None # avoid assertion error + repo = get_repository(config=config) + with connect(repo, login, password=pwd) as cnx: + with cnx.security_enabled(False, False): + repo.system_source.eid = ssource.eid # redo this manually + handler = config.migration_handler(schema, interactive=False, + cnx=cnx, repo=repo) + # serialize the schema + initialize_schema(config, schema, handler) + # yoo ! + cnx.commit() + repo.system_source.init_creating() + cnx.commit() + repo.shutdown() + # restore initial configuration + config.creating = False + config.consider_user_state = True + # (drop instance attribute to get back to class attribute) + del config.cubicweb_appobject_path + del config.cube_appobject_path + print('-> database for instance %s initialized.' % config.appid) + + +def initialize_schema(config, schema, mhandler, event='create'): + from cubicweb.server.schemaserial import serialize_schema + cnx = mhandler.cnx + cubes = config.cubes() + # deactivate every hooks but those responsible to set metadata + # so, NO INTEGRITY CHECKS are done, to have quicker db creation. 
+ # Active integrity is kept else we may pb such as two default + # workflows for one entity type. + with cnx.deny_all_hooks_but('metadata', 'activeintegrity'): + # execute cubicweb's pre script + mhandler.cmd_exec_event_script('pre%s' % event) + # execute cubes pre script if any + for cube in reversed(cubes): + mhandler.cmd_exec_event_script('pre%s' % event, cube) + # execute instance's pre script (useful in tests) + mhandler.cmd_exec_event_script('pre%s' % event, apphome=True) + # enter instance'schema into the database + serialize_schema(cnx, schema) + cnx.commit() + # execute cubicweb's post script + mhandler.cmd_exec_event_script('post%s' % event) + # execute cubes'post script if any + for cube in reversed(cubes): + mhandler.cmd_exec_event_script('post%s' % event, cube) + # execute instance's post script (useful in tests) + mhandler.cmd_exec_event_script('post%s' % event, apphome=True) + + +# sqlite'stored procedures have to be registered at connection opening time +from logilab.database import SQL_CONNECT_HOOKS + +# add to this set relations which should have their add security checking done +# *BEFORE* adding the actual relation (done after by default) +BEFORE_ADD_RELATIONS = set(('owned_by',)) + +# add to this set relations which should have their add security checking done +# *at COMMIT TIME* (done after by default) +ON_COMMIT_ADD_RELATIONS = set(()) + +# available sources registry +SOURCE_TYPES = {'native': LazyObject('cubicweb.server.sources.native', 'NativeSQLSource'), + 'datafeed': LazyObject('cubicweb.server.sources.datafeed', 'DataFeedSource'), + 'ldapfeed': LazyObject('cubicweb.server.sources.ldapfeed', 'LDAPFeedSource'), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/checkintegrity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/checkintegrity.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,410 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Integrity checking tool for instances: + +* integrity of a CubicWeb repository. Hum actually only the system database is + checked. 
+""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +from datetime import datetime + +from logilab.common.shellutils import ProgressBar + +from cubicweb.schema import PURE_VIRTUAL_RTYPES, VIRTUAL_RTYPES, UNIQUE_CONSTRAINTS +from cubicweb.server.sqlutils import SQL_PREFIX + +def notify_fixed(fix): + if fix: + sys.stderr.write(' [FIXED]') + sys.stderr.write('\n') + +def has_eid(cnx, sqlcursor, eid, eids): + """return true if the eid is a valid eid""" + if eid in eids: + return eids[eid] + sqlcursor.execute('SELECT type FROM entities WHERE eid=%s' % eid) + try: + etype = sqlcursor.fetchone()[0] + except Exception: + eids[eid] = False + return False + if etype not in cnx.vreg.schema: + eids[eid] = False + return False + sqlcursor.execute('SELECT * FROM %s%s WHERE %seid=%s' % (SQL_PREFIX, etype, + SQL_PREFIX, eid)) + result = sqlcursor.fetchall() + if len(result) == 0: + eids[eid] = False + return False + elif len(result) > 1: + msg = (' More than one entity with eid %s exists in source!\n' + ' WARNING : Unable to fix this, do it yourself!\n') + sys.stderr.write(msg % eid) + eids[eid] = True + return True + +# XXX move to yams? +def etype_fti_containers(eschema, _done=None): + if _done is None: + _done = set() + _done.add(eschema) + containers = tuple(eschema.fulltext_containers()) + if containers: + for rschema, target in containers: + if target == 'object': + targets = rschema.objects(eschema) + else: + targets = rschema.subjects(eschema) + for targeteschema in targets: + if targeteschema in _done: + continue + _done.add(targeteschema) + for container in etype_fti_containers(targeteschema, _done): + yield container + else: + yield eschema + +def reindex_entities(schema, cnx, withpb=True, etypes=None): + """reindex all entities in the repository""" + # deactivate modification_date hook since we don't want them + # to be updated due to the reindexation + repo = cnx.repo + dbhelper = repo.system_source.dbhelper + cursor = cnx.cnxset.cu + if not dbhelper.has_fti_table(cursor): + print('no text index table') + dbhelper.init_fti(cursor) + repo.system_source.do_fti = True # ensure full-text indexation is activated + if etypes is None: + print('Reindexing entities') + etypes = set() + for eschema in schema.entities(): + if eschema.final: + continue + indexable_attrs = tuple(eschema.indexable_attributes()) # generator + if not indexable_attrs: + continue + for container in etype_fti_containers(eschema): + etypes.add(container) + # clear fti table first + cnx.system_sql('DELETE FROM %s' % dbhelper.fti_table) + else: + print('Reindexing entities of type %s' % \ + ', '.join(sorted(str(e) for e in etypes))) + # clear fti table first. 
Use subquery for sql compatibility + cnx.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES " + "WHERE eid=%s AND type IN (%s))" % ( + dbhelper.fti_table, dbhelper.fti_uid_attr, + ','.join("'%s'" % etype for etype in etypes))) + if withpb: + pb = ProgressBar(len(etypes) + 1) + pb.update() + # reindex entities by generating rql queries which set all indexable + # attribute to their current value + source = repo.system_source + for eschema in etypes: + etype_class = cnx.vreg['etypes'].etype_class(str(eschema)) + for rset in etype_class.cw_fti_index_rql_limit(cnx): + source.fti_index_entities(cnx, rset.entities()) + # clear entity cache to avoid high memory consumption on big tables + cnx.drop_entity_cache() + if withpb: + pb.update() + if withpb: + pb.finish() + + +def check_schema(schema, cnx, eids, fix=1): + """check serialized schema""" + print('Checking serialized schema') + rql = ('Any COUNT(X),RN,SN,ON,CTN GROUPBY RN,SN,ON,CTN ORDERBY 1 ' + 'WHERE X is CWConstraint, R constrained_by X, ' + 'R relation_type RT, RT name RN, R from_entity ST, ST name SN, ' + 'R to_entity OT, OT name ON, X cstrtype CT, CT name CTN') + for count, rn, sn, on, cstrname in cnx.execute(rql): + if count == 1: + continue + if cstrname in UNIQUE_CONSTRAINTS: + print("ERROR: got %s %r constraints on relation %s.%s.%s" % ( + count, cstrname, sn, rn, on)) + if fix: + print('dunno how to fix, do it yourself') + + + +def check_text_index(schema, cnx, eids, fix=1): + """check all entities registered in the text index""" + print('Checking text index') + msg = ' Entity with eid %s exists in the text index but in no source (autofix will remove from text index)' + cursor = cnx.system_sql('SELECT uid FROM appears;') + for row in cursor.fetchall(): + eid = row[0] + if not has_eid(cnx, cursor, eid, eids): + sys.stderr.write(msg % eid) + if fix: + cnx.system_sql('DELETE FROM appears WHERE uid=%s;' % eid) + notify_fixed(fix) + + +def check_entities(schema, cnx, eids, fix=1): + """check all entities registered in the repo system table""" + print('Checking entities system table') + # system table but no source + msg = ' Entity %s with eid %s exists in the system table but in no source (autofix will delete the entity)' + cursor = cnx.system_sql('SELECT eid,type FROM entities;') + for row in cursor.fetchall(): + eid, etype = row + if not has_eid(cnx, cursor, eid, eids): + sys.stderr.write(msg % (etype, eid)) + if fix: + cnx.system_sql('DELETE FROM entities WHERE eid=%s;' % eid) + notify_fixed(fix) + # source in entities, but no relation cw_source + # XXX this (get_versions) requires a second connection to the db when we already have one open + applcwversion = cnx.repo.get_versions().get('cubicweb') + if applcwversion >= (3, 13, 1): # entities.asource appeared in 3.13.1 + cursor = cnx.system_sql('SELECT e.eid FROM entities as e, cw_CWSource as s ' + 'WHERE s.cw_name=e.asource AND ' + 'NOT EXISTS(SELECT 1 FROM cw_source_relation as cs ' + ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' + 'ORDER BY e.eid') + msg = (' Entity with eid %s refers to source in entities table, ' + 'but is missing relation cw_source (autofix will create the relation)\n') + for row in cursor.fetchall(): + sys.stderr.write(msg % row[0]) + if fix: + cnx.system_sql('INSERT INTO cw_source_relation (eid_from, eid_to) ' + 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWSource as s ' + 'WHERE s.cw_name=e.asource AND NOT EXISTS(SELECT 1 FROM cw_source_relation as cs ' + ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') + notify_fixed(True) + # 
inconsistencies for 'is' + msg = ' %s #%s is missing relation "is" (autofix will create the relation)\n' + cursor = cnx.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' + 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs ' + ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' + 'ORDER BY e.eid') + for row in cursor.fetchall(): + sys.stderr.write(msg % tuple(row)) + if fix: + cnx.system_sql('INSERT INTO is_relation (eid_from, eid_to) ' + 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s ' + 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs ' + ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') + notify_fixed(True) + # inconsistencies for 'is_instance_of' + msg = ' %s #%s is missing relation "is_instance_of" (autofix will create the relation)\n' + cursor = cnx.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' + 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs ' + ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' + 'ORDER BY e.eid') + for row in cursor.fetchall(): + sys.stderr.write(msg % tuple(row)) + if fix: + cnx.system_sql('INSERT INTO is_instance_of_relation (eid_from, eid_to) ' + 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s ' + 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs ' + ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') + notify_fixed(True) + print('Checking entities tables') + msg = ' Entity with eid %s exists in the %s table but not in the system table (autofix will delete the entity)' + for eschema in schema.entities(): + if eschema.final: + continue + table = SQL_PREFIX + eschema.type + column = SQL_PREFIX + 'eid' + cursor = cnx.system_sql('SELECT %s FROM %s;' % (column, table)) + for row in cursor.fetchall(): + eid = row[0] + # eids is full since we have fetched everything from the entities table, + # no need to call has_eid + if not eid in eids or not eids[eid]: + sys.stderr.write(msg % (eid, eschema.type)) + if fix: + cnx.system_sql('DELETE FROM %s WHERE %s=%s;' % (table, column, eid)) + notify_fixed(fix) + + +def bad_related_msg(rtype, target, eid, fix): + msg = ' A relation %s with %s eid %s exists but no such entity in sources' + sys.stderr.write(msg % (rtype, target, eid)) + notify_fixed(fix) + +def bad_inlined_msg(rtype, parent_eid, eid, fix): + msg = (' An inlined relation %s from %s to %s exists but the latter ' + 'entity does not exist') + sys.stderr.write(msg % (rtype, parent_eid, eid)) + notify_fixed(fix) + + +def check_relations(schema, cnx, eids, fix=1): + """check that eids referenced by relations are registered in the repo system + table + """ + print('Checking relations') + for rschema in schema.relations(): + if rschema.final or rschema.type in PURE_VIRTUAL_RTYPES: + continue + if rschema.inlined: + for subjtype in rschema.subjects(): + table = SQL_PREFIX + str(subjtype) + column = SQL_PREFIX + str(rschema) + sql = 'SELECT cw_eid,%s FROM %s WHERE %s IS NOT NULL;' % ( + column, table, column) + cursor = cnx.system_sql(sql) + for row in cursor.fetchall(): + parent_eid, eid = row + if not has_eid(cnx, cursor, eid, eids): + bad_inlined_msg(rschema, parent_eid, eid, fix) + if fix: + sql = 'UPDATE %s SET %s=NULL WHERE %s=%s;' % ( + table, column, column, eid) + cnx.system_sql(sql) + continue + try: + cursor = cnx.system_sql('SELECT eid_from FROM %s_relation;' % rschema) + except Exception as ex: + # usually because table doesn't exist + print('ERROR', ex) + continue + for row in 
cursor.fetchall(): + eid = row[0] + if not has_eid(cnx, cursor, eid, eids): + bad_related_msg(rschema, 'subject', eid, fix) + if fix: + sql = 'DELETE FROM %s_relation WHERE eid_from=%s;' % ( + rschema, eid) + cnx.system_sql(sql) + cursor = cnx.system_sql('SELECT eid_to FROM %s_relation;' % rschema) + for row in cursor.fetchall(): + eid = row[0] + if not has_eid(cnx, cursor, eid, eids): + bad_related_msg(rschema, 'object', eid, fix) + if fix: + sql = 'DELETE FROM %s_relation WHERE eid_to=%s;' % ( + rschema, eid) + cnx.system_sql(sql) + + +def check_mandatory_relations(schema, cnx, eids, fix=1): + """check entities missing some mandatory relation""" + print('Checking mandatory relations') + msg = '%s #%s is missing mandatory %s relation %s (autofix will delete the entity)' + for rschema in schema.relations(): + if rschema.final or rschema in PURE_VIRTUAL_RTYPES or rschema in ('is', 'is_instance_of'): + continue + smandatory = set() + omandatory = set() + for rdef in rschema.rdefs.values(): + if rdef.cardinality[0] in '1+': + smandatory.add(rdef.subject) + if rdef.cardinality[1] in '1+': + omandatory.add(rdef.object) + for role, etypes in (('subject', smandatory), ('object', omandatory)): + for etype in etypes: + if role == 'subject': + rql = 'Any X WHERE NOT X %s Y, X is %s' % (rschema, etype) + else: + rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype) + for entity in cnx.execute(rql).entities(): + sys.stderr.write(msg % (entity.cw_etype, entity.eid, role, rschema)) + if fix: + #if entity.cw_describe()['source']['uri'] == 'system': XXX + entity.cw_delete() # XXX this is BRUTAL! + notify_fixed(fix) + + +def check_mandatory_attributes(schema, cnx, eids, fix=1): + """check for entities stored in the system source missing some mandatory + attribute + """ + print('Checking mandatory attributes') + msg = '%s #%s is missing mandatory attribute %s (autofix will delete the entity)' + for rschema in schema.relations(): + if not rschema.final or rschema in VIRTUAL_RTYPES: + continue + for rdef in rschema.rdefs.values(): + if rdef.cardinality[0] in '1+': + rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % ( + rschema, rdef.subject) + for entity in cnx.execute(rql).entities(): + sys.stderr.write(msg % (entity.cw_etype, entity.eid, rschema)) + if fix: + entity.cw_delete() + notify_fixed(fix) + + +def check_metadata(schema, cnx, eids, fix=1): + """check entities has required metadata + + FIXME: rewrite using RQL queries ? 
+ """ + print('Checking metadata') + cursor = cnx.system_sql("SELECT DISTINCT type FROM entities;") + eidcolumn = SQL_PREFIX + 'eid' + msg = ' %s with eid %s has no %s (autofix will set it to now)' + for etype, in cursor.fetchall(): + if etype not in cnx.vreg.schema: + sys.stderr.write('entities table references unknown type %s\n' % + etype) + if fix: + cnx.system_sql("DELETE FROM entities WHERE type = %(type)s", + {'type': etype}) + continue + table = SQL_PREFIX + etype + for rel, default in ( ('creation_date', datetime.utcnow()), + ('modification_date', datetime.utcnow()), ): + column = SQL_PREFIX + rel + cursor = cnx.system_sql("SELECT %s FROM %s WHERE %s is NULL" + % (eidcolumn, table, column)) + for eid, in cursor.fetchall(): + sys.stderr.write(msg % (etype, eid, rel)) + if fix: + cnx.system_sql("UPDATE %s SET %s=%%(v)s WHERE %s=%s ;" + % (table, column, eidcolumn, eid), + {'v': default}) + notify_fixed(fix) + + +def check(repo, cnx, checks, reindex, fix, withpb=True): + """check integrity of instance's repository, + using given user and password to locally connect to the repository + (no running cubicweb server needed) + """ + # yo, launch checks + if checks: + eids_cache = {} + with cnx.security_enabled(read=False, write=False): # ensure no read security + for check in checks: + check_func = globals()['check_%s' % check] + check_func(repo.schema, cnx, eids_cache, fix=fix) + if fix: + cnx.commit() + else: + print() + if not fix: + print('WARNING: Diagnostic run, nothing has been corrected') + if reindex: + cnx.rollback() + reindex_entities(repo.schema, cnx, withpb=withpb) + cnx.commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/cwzmq.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/cwzmq.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# copyright 2012-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from threading import Thread +from logging import getLogger + +import zmq +from zmq.eventloop import ioloop +import zmq.eventloop.zmqstream + +from cubicweb import set_log_methods + + +ctx = zmq.Context() + + +class ZMQComm(object): + """ + A simple ZMQ-based notification bus. + + There should at most one instance of this class attached to a + Repository. A typical usage may be something like:: + + def callback(msg): + self.info('received message: %s', ' '.join(msg)) + repo.app_instances_bus.subscribe('hello', callback) + + to subsribe to the 'hello' kind of message. On the other side, to + emit a notification, call:: + + repo.app_instances_bus.publish(['hello', 'world']) + + See http://docs.cubicweb.org for more details. 
+ """ + def __init__(self): + self.ioloop = ioloop.IOLoop() + self._topics = {} + self._subscribers = [] + self.publisher = None + + def add_publisher(self, address): + assert self.publisher is None, "more than one publisher is not supported" + self.publisher = Publisher(self.ioloop, address) + + def add_subscription(self, topic, callback): + for subscriber in self._subscribers: + subscriber.subscribe(topic, callback) + self._topics[topic] = callback + + def add_subscriber(self, address): + subscriber = Subscriber(self.ioloop, address) + for topic, callback in self._topics.items(): + subscriber.subscribe(topic, callback) + self._subscribers.append(subscriber) + + def publish(self, msg): + if self.publisher is None: + return + self.publisher.send(msg) + + def start(self): + Thread(target=self.ioloop.start).start() + + def stop(self): + self.ioloop.add_callback(self.ioloop.stop) + + def __del__(self): + self.ioloop.close() + + +class Publisher(object): + def __init__(self, ioloop, address): + self.address = address + self._topics = {} + self._subscribers = [] + self.ioloop = ioloop + def callback(): + s = ctx.socket(zmq.PUB) + self.stream = zmq.eventloop.zmqstream.ZMQStream(s, io_loop=ioloop) + self.stream.bind(self.address) + self.debug('start publisher on %s', self.address) + ioloop.add_callback(callback) + + def send(self, msg): + self.ioloop.add_callback(lambda:self.stream.send_multipart(msg)) + + +class Subscriber(object): + def __init__(self, ioloop, address): + self.address = address + self.dispatch_table = {} + self.ioloop = ioloop + def callback(): + s = ctx.socket(zmq.SUB) + self.stream = zmq.eventloop.zmqstream.ZMQStream(s, io_loop=ioloop) + self.stream.on_recv(self.dispatch) + self.stream.connect(self.address) + self.debug('start subscriber on %s', self.address) + ioloop.add_callback(callback) + + def dispatch(self, msg): + try: + f = self.dispatch_table[msg[0]] + except KeyError: + return + f(msg) + + def subscribe(self, topic, callback): + self.dispatch_table[topic] = callback + self.ioloop.add_callback(lambda: self.stream.setsockopt(zmq.SUBSCRIBE, topic)) + + +set_log_methods(Publisher, getLogger('cubicweb.zmq.pub')) +set_log_methods(Subscriber, getLogger('cubicweb.zmq.sub')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/edition.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/edition.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,159 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""helper classes to handle server-side edition of entities""" +__docformat__ = "restructuredtext en" + +from copy import copy +from yams import ValidationError + + +_MARKER = object() + +class dict_protocol_catcher(object): + def __init__(self, entity): + self.__entity = entity + def __getitem__(self, attr): + return self.__entity.cw_edited[attr] + def __setitem__(self, attr, value): + self.__entity.cw_edited[attr] = value + def __getattr__(self, attr): + return getattr(self.__entity, attr) + + +class EditedEntity(dict): + """encapsulate entities attributes being written by an RQL query""" + def __init__(self, entity, **kwargs): + super(EditedEntity, self).__init__(**kwargs) + self.entity = entity + self.skip_security = set() + self.querier_pending_relations = {} + self.saved = False + + def __hash__(self): + # dict|set keyable + return hash(id(self)) + + def __lt__(self, other): + # we don't want comparison by value inherited from dict + raise NotImplementedError + + def __eq__(self, other): + return self is other + + def __ne__(self, other): + return not (self == other) + + def __setitem__(self, attr, value): + assert attr != 'eid' + # don't add attribute into skip_security if already in edited + # attributes, else we may accidentally skip a desired security check + if attr not in self: + self.skip_security.add(attr) + self.edited_attribute(attr, value) + + def __delitem__(self, attr): + assert not self.saved, 'too late to modify edited attributes' + super(EditedEntity, self).__delitem__(attr) + self.entity.cw_attr_cache.pop(attr, None) + + def __copy__(self): + # default copy protocol fails in EditedEntity.__setitem__ because + # copied entity has no skip_security attribute at this point + return EditedEntity(self.entity, **self) + + def pop(self, attr, *args): + # don't update skip_security by design (think to storage api) + assert not self.saved, 'too late to modify edited attributes' + value = super(EditedEntity, self).pop(attr, *args) + self.entity.cw_attr_cache.pop(attr, *args) + return value + + def setdefault(self, attr, default): + assert attr != 'eid' + # don't add attribute into skip_security if already in edited + # attributes, else we may accidentally skip a desired security check + if attr not in self: + self[attr] = default + return self[attr] + + def update(self, values, skipsec=True): + if skipsec: + setitem = self.__setitem__ + else: + setitem = self.edited_attribute + for attr, value in values.items(): + setitem(attr, value) + + def edited_attribute(self, attr, value): + """attribute being edited by a rql query: should'nt be added to + skip_security + """ + assert not self.saved, 'too late to modify edited attributes' + super(EditedEntity, self).__setitem__(attr, value) + self.entity.cw_attr_cache[attr] = value + if self.entity._cw.vreg.schema.rschema(attr).final: + self.entity._cw_dont_cache_attribute(attr) + + def oldnewvalue(self, attr): + """returns the couple (old attr value, new attr value) + + NOTE: will only work in a before_update_entity hook + """ + assert not self.saved, 'too late to get the old value' + # get new value and remove from local dict to force a db query to + # fetch old value + newvalue = self.entity.cw_attr_cache.pop(attr, _MARKER) + oldvalue = getattr(self.entity, attr) + if newvalue is not _MARKER: + self.entity.cw_attr_cache[attr] = newvalue + else: + newvalue = oldvalue + return oldvalue, newvalue + + def set_defaults(self): + """set default values according to the schema""" + for attr, value in self.entity.e_schema.defaults(): + if not 
attr in self: + self[str(attr)] = value + + def check(self, creation=False): + """check the entity edition against its schema. Only final relation + are checked here, constraint on actual relations are checked in hooks + """ + entity = self.entity + if creation: + # on creations, we want to check all relations, especially + # required attributes + relations = [rschema for rschema in entity.e_schema.subject_relations() + if rschema.final and rschema.type != 'eid'] + else: + relations = [entity._cw.vreg.schema.rschema(rtype) + for rtype in self] + try: + entity.e_schema.check(dict_protocol_catcher(entity), + creation=creation, relations=relations) + except ValidationError as ex: + ex.entity = self.entity.eid + raise + + def clone(self): + thecopy = EditedEntity(copy(self.entity)) + thecopy.entity.cw_attr_cache = copy(self.entity.cw_attr_cache) + thecopy.entity._cw_related_cache = {} + thecopy.update(self, skipsec=False) + return thecopy diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/hook.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/hook.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1024 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +Generalities +------------ + +Paraphrasing the `emacs`_ documentation, let us say that hooks are an important +mechanism for customizing an application. A hook is basically a list of +functions to be called on some well-defined occasion (this is called `running +the hook`). + +.. _`emacs`: http://www.gnu.org/software/emacs/manual/html_node/emacs/Hooks.html + +Hooks +~~~~~ + +In |cubicweb|, hooks are subclasses of the :class:`~cubicweb.server.hook.Hook` +class. They are selected over a set of pre-defined `events` (and possibly more +conditions, hooks being selectable appobjects like views and components). They +should implement a :meth:`~cubicweb.server.hook.Hook.__call__` method that will +be called when the hook is triggered. + +There are two families of events: data events (before / after any individual +update of an entity / or a relation in the repository) and server events (such +as server startup or shutdown). In a typical application, most of the hooks are +defined over data events. + +Also, some :class:`~cubicweb.server.hook.Operation` may be registered by hooks, +which will be fired when the transaction is commited or rolled back. + +The purpose of data event hooks is usually to complement the data model as +defined in the schema, which is static by nature and only provide a restricted +builtin set of dynamic constraints, with dynamic or value driven behaviours. +For instance they can serve the following purposes: + +* enforcing constraints that the static schema cannot express (spanning several + entities/relations, exotic value ranges and cardinalities, etc.) 
+ +* implement computed attributes + +It is functionally equivalent to a `database trigger`_, except that database +triggers definition languages are not standardized, hence not portable (for +instance, PL/SQL works with Oracle and PostgreSQL but not SqlServer nor Sqlite). + +.. _`database trigger`: http://en.wikipedia.org/wiki/Database_trigger + + +.. hint:: + + It is a good practice to write unit tests for each hook. See an example in + :ref:`hook_test` + +Operations +~~~~~~~~~~ + +Operations are subclasses of the :class:`~cubicweb.server.hook.Operation` class +that may be created by hooks and scheduled to happen on `precommit`, +`postcommit` or `rollback` event (i.e. respectivly before/after a commit or +before a rollback of a transaction). + +Hooks are being fired immediately on data operations, and it is sometime +necessary to delay the actual work down to a time where we can expect all +information to be there, or when all other hooks have run (though take case +since operations may themselves trigger hooks). Also while the order of +execution of hooks is data dependant (and thus hard to predict), it is possible +to force an order on operations. + +So, for such case where you may miss some information that may be set later in +the transaction, you should instantiate an operation in the hook. + +Operations may be used to: + +* implements a validation check which needs that all relations be already set on + an entity + +* process various side effects associated with a transaction such as filesystem + udpates, mail notifications, etc. + + +Events +------ + +Hooks are mostly defined and used to handle `dataflow`_ operations. It +means as data gets in (entities added, updated, relations set or +unset), specific events are issued and the Hooks matching these events +are called. + +You can get the event that triggered a hook by accessing its `event` +attribute. + +.. _`dataflow`: http://en.wikipedia.org/wiki/Dataflow + + +Entity modification related events +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When called for one of these events, hook will have an `entity` attribute +containing the entity instance. + +- `before_add_entity`, `before_update_entity`: + + On those events, you can access the modified attributes of the entity using + the `entity.cw_edited` dictionary. The values can be modified and the old + values can be retrieved. + + If you modify the `entity.cw_edited` dictionary in the hook, that is before + the database operations take place, you will avoid the need to process a whole + new rql query and the underlying backend query (eg usually sql) will contain + the modified data. For example: + + .. sourcecode:: python + + self.entity.cw_edited['age'] = 42 + + will modify the age before it is written to the backend storage. + + Similarly, removing an attribute from `cw_edited` will cancel its + modification: + + .. sourcecode:: python + + del self.entity.cw_edited['age'] + + On a `before_update_entity` event, you can access the old and new values: + + .. sourcecode:: python + + old, new = entity.cw_edited.oldnewvalue('age') + +- `after_add_entity`, `after_update_entity` + + On those events, you can get the list of attributes that were modified using + the `entity.cw_edited` dictionary, but you can not modify it or get the old + value of an attribute. + +- `before_delete_entity`, `after_delete_entity` + + On those events, the entity has no `cw_edited` dictionary. + +.. note:: `self.entity.cw_set(age=42)` will set the `age` attribute to + 42. 
But to do so, it will generate a rql query that will have to be processed, + hence may trigger some hooks, etc. This could lead to infinitely looping hooks. + +Relation modification related events +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When called for one of these events, hook will have `eidfrom`, `rtype`, `eidto` +attributes containing respectively the eid of the subject entity, the relation +type and the eid of the object entity. + +* `before_add_relation`, `before_delete_relation` + + On those events, you can still get the original relation by issuing a rql query. + +* `after_add_relation`, `after_delete_relation` + +Specific selectors are shipped for these kinds of events, see in particular +:class:`~cubicweb.server.hook.match_rtype`. + +Also note that relations can be added or deleted, but not updated. + +Non data events +~~~~~~~~~~~~~~~ + +Hooks called on server start/maintenance/stop event (e.g. +`server_startup`, `server_maintenance`, `before_server_shutdown`, +`server_shutdown`) have a `repo` attribute, but *their `_cw` attribute +is None*. The `server_startup` is called on regular startup, while +`server_maintenance` is called on cubicweb-ctl upgrade or shell +commands. `server_shutdown` is called anyway but connections to the +native source is impossible; `before_server_shutdown` handles that. + +Hooks called on backup/restore event (eg `server_backup`, +`server_restore`) have a `repo` and a `timestamp` attributes, but +*their `_cw` attribute is None*. + +Hooks called on session event (eg `session_open`, `session_close`) have no +special attribute. + + +API +--- + +Hooks control +~~~~~~~~~~~~~ + +It is sometimes convenient to explicitly enable or disable some hooks. For +instance if you want to disable some integrity checking hook. This can be +controlled more finely through the `category` class attribute, which is a string +giving a category name. One can then uses the +:meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` and +:meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` context managers to +explicitly enable or disable some categories. + +The existing categories are: + +* ``security``, security checking hooks + +* ``worfklow``, workflow handling hooks + +* ``metadata``, hooks setting meta-data on newly created entities + +* ``notification``, email notification hooks + +* ``integrity``, data integrity checking hooks + +* ``activeintegrity``, data integrity consistency hooks, that you should **never** + want to disable + +* ``syncsession``, hooks synchronizing existing sessions + +* ``syncschema``, hooks synchronizing instance schema (including the physical database) + +* ``email``, email address handling hooks + +* ``bookmark``, bookmark entities handling hooks + + +Nothing precludes one to invent new categories and use existing mechanisms to +filter them in or out. + + +Hooks specific predicates +~~~~~~~~~~~~~~~~~~~~~~~~~ +.. autoclass:: cubicweb.server.hook.match_rtype +.. autoclass:: cubicweb.server.hook.match_rtype_sets + + +Hooks and operations classes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. autoclass:: cubicweb.server.hook.Hook +.. autoclass:: cubicweb.server.hook.Operation +.. autoclass:: cubicweb.server.hook.LateOperation +.. 
autoclass:: cubicweb.server.hook.DataOperationMixIn +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from warnings import warn +from logging import getLogger +from itertools import chain + +from logilab.common.decorators import classproperty, cached +from logilab.common.deprecation import deprecated, class_renamed +from logilab.common.logging_ext import set_log_methods +from logilab.common.registry import (NotPredicate, OrPredicate, + objectify_predicate) + +from cubicweb import RegistryNotFound, server +from cubicweb.cwvreg import CWRegistry, CWRegistryStore +from cubicweb.predicates import ExpectedValuePredicate, is_instance +from cubicweb.appobject import AppObject + +ENTITIES_HOOKS = set(('before_add_entity', 'after_add_entity', + 'before_update_entity', 'after_update_entity', + 'before_delete_entity', 'after_delete_entity')) +RELATIONS_HOOKS = set(('before_add_relation', 'after_add_relation' , + 'before_delete_relation','after_delete_relation')) +SYSTEM_HOOKS = set(('server_backup', 'server_restore', + 'server_startup', 'server_maintenance', + 'server_shutdown', 'before_server_shutdown', + 'session_open', 'session_close')) +ALL_HOOKS = ENTITIES_HOOKS | RELATIONS_HOOKS | SYSTEM_HOOKS + +def _iter_kwargs(entities, eids_from_to, kwargs): + if not entities and not eids_from_to: + yield kwargs + elif entities: + for entity in entities: + kwargs['entity'] = entity + yield kwargs + else: + for subject, object in eids_from_to: + kwargs.update({'eidfrom': subject, 'eidto': object}) + yield kwargs + + +class HooksRegistry(CWRegistry): + + def register(self, obj, **kwargs): + obj.check_events() + super(HooksRegistry, self).register(obj, **kwargs) + + def call_hooks(self, event, cnx=None, **kwargs): + """call `event` hooks for an entity or a list of entities (passed + respectively as the `entity` or ``entities`` keyword argument). + """ + kwargs['event'] = event + if cnx is None: # True for events such as server_start + for hook in sorted(self.possible_objects(cnx, **kwargs), + key=lambda x: x.order): + hook() + else: + if 'entities' in kwargs: + assert 'entity' not in kwargs, \ + 'can\'t pass "entities" and "entity" arguments simultaneously' + assert 'eids_from_to' not in kwargs, \ + 'can\'t pass "entities" and "eids_from_to" arguments simultaneously' + entities = kwargs.pop('entities') + eids_from_to = [] + elif 'eids_from_to' in kwargs: + entities = [] + eids_from_to = kwargs.pop('eids_from_to') + else: + entities = [] + eids_from_to = [] + pruned = self.get_pruned_hooks(cnx, event, + entities, eids_from_to, kwargs) + + # by default, hooks are executed with security turned off + with cnx.security_enabled(read=False): + for _kwargs in _iter_kwargs(entities, eids_from_to, kwargs): + hooks = sorted(self.filtered_possible_objects(pruned, cnx, **_kwargs), + key=lambda x: x.order) + debug = server.DEBUG & server.DBG_HOOKS + with cnx.security_enabled(write=False): + with cnx.running_hooks_ops(): + for hook in hooks: + if debug: + print(event, _kwargs, hook) + hook() + + def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs): + """return a set of hooks that should not be considered by filtered_possible objects + + the idea is to make a first pass over all the hooks in the + registry and to mark put some of them in a pruned list. 
The + pruned hooks are the one which: + + * are disabled at the connection level + + * have a selector containing a :class:`match_rtype` or an + :class:`is_instance` predicate which does not match the rtype / etype + of the relations / entities for which we are calling the hooks. This + works because the repository calls the hooks grouped by rtype or by + etype when using the entities or eids_to_from keyword arguments + + Only hooks with a simple predicate or an AndPredicate of simple + predicates are considered for disabling. + + """ + if 'entity' in kwargs: + entities = [kwargs['entity']] + if len(entities): + look_for_selector = is_instance + etype = entities[0].__regid__ + elif 'rtype' in kwargs: + look_for_selector = match_rtype + etype = None + else: # nothing to prune, how did we get there ??? + return set() + cache_key = (event, kwargs.get('rtype'), etype) + pruned = cnx.pruned_hooks_cache.get(cache_key) + if pruned is not None: + return pruned + pruned = set() + cnx.pruned_hooks_cache[cache_key] = pruned + if look_for_selector is not None: + for id, hooks in self.items(): + for hook in hooks: + enabled_cat, main_filter = hook.filterable_selectors() + if enabled_cat is not None: + if not enabled_cat(hook, cnx): + pruned.add(hook) + continue + if main_filter is not None: + if isinstance(main_filter, match_rtype) and \ + (main_filter.frometypes is not None or \ + main_filter.toetypes is not None): + continue + first_kwargs = next(_iter_kwargs(entities, eids_from_to, kwargs)) + if not main_filter(hook, cnx, **first_kwargs): + pruned.add(hook) + return pruned + + + def filtered_possible_objects(self, pruned, *args, **kwargs): + for appobjects in self.values(): + if pruned: + filtered_objects = [obj for obj in appobjects if obj not in pruned] + if not filtered_objects: + continue + else: + filtered_objects = appobjects + obj = self._select_best(filtered_objects, + *args, **kwargs) + if obj is None: + continue + yield obj + +class HooksManager(object): + def __init__(self, vreg): + self.vreg = vreg + + def call_hooks(self, event, cnx=None, **kwargs): + try: + registry = self.vreg['%s_hooks' % event] + except RegistryNotFound: + return # no hooks for this event + registry.call_hooks(event, cnx, **kwargs) + + +for event in ALL_HOOKS: + CWRegistryStore.REGISTRY_FACTORY['%s_hooks' % event] = HooksRegistry + + +# some hook specific predicates ################################################# + +@objectify_predicate +def enabled_category(cls, req, **kwargs): + if req is None: + return True # XXX how to deactivate server startup / shutdown event + return req.is_hook_activated(cls) + +@objectify_predicate +def issued_from_user_query(cls, req, **kwargs): + return 0 if req.hooks_in_progress else 1 + +from_dbapi_query = class_renamed('from_dbapi_query', + issued_from_user_query, + message='[3.21] ') + + +class rechain(object): + def __init__(self, *iterators): + self.iterators = iterators + def __iter__(self): + return iter(chain(*self.iterators)) + + +class match_rtype(ExpectedValuePredicate): + """accept if the relation type is found in expected ones. Optional + named parameters `frometypes` and `toetypes` can be used to restrict + target subject and/or object entity types of the relation. 
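+
+    A minimal sketch of a relation hook using this predicate (the relation
+    type ``works_for`` and the entity types below are illustrative
+    assumptions, not part of the framework):
+
+    .. sourcecode:: python
+
+        class WorksForAddedHook(Hook):
+            __regid__ = 'myapp.works-for-added'
+            __select__ = Hook.__select__ & match_rtype('works_for',
+                                                       frometypes=('Person',),
+                                                       toetypes=('Company',))
+            events = ('after_add_relation',)
+
+            def __call__(self):
+                # eidfrom / eidto hold the eids of the subject / object entities
+                self.info('%s now works for %s', self.eidfrom, self.eidto)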
+ + :param \*expected: possible relation types + :param frometypes: candidate entity types as subject of relation + :param toetypes: candidate entity types as object of relation + """ + def __init__(self, *expected, **more): + self.expected = expected + self.frometypes = more.pop('frometypes', None) + self.toetypes = more.pop('toetypes', None) + assert not more, "unexpected kwargs in match_rtype: %s" % more + + def __call__(self, cls, req, *args, **kwargs): + if kwargs.get('rtype') not in self.expected: + return 0 + if self.frometypes is not None and \ + req.entity_metas(kwargs['eidfrom'])['type'] not in self.frometypes: + return 0 + if self.toetypes is not None and \ + req.entity_metas(kwargs['eidto'])['type'] not in self.toetypes: + return 0 + return 1 + + +class match_rtype_sets(ExpectedValuePredicate): + """accept if the relation type is in one of the sets given as initializer + argument. The goal of this predicate is that it keeps reference to original sets, + so modification to thoses sets are considered by the predicate. For instance + + .. sourcecode:: python + + MYSET = set() + + class Hook1(Hook): + __regid__ = 'hook1' + __select__ = Hook.__select__ & match_rtype_sets(MYSET) + ... + + class Hook2(Hook): + __regid__ = 'hook2' + __select__ = Hook.__select__ & match_rtype_sets(MYSET) + + Client code can now change `MYSET`, this will changes the selection criteria + of :class:`Hook1` and :class:`Hook1`. + """ + + def __init__(self, *expected): + self.expected = expected + + def __call__(self, cls, req, *args, **kwargs): + for rel_set in self.expected: + if kwargs.get('rtype') in rel_set: + return 1 + return 0 + + +# base class for hook ########################################################## + +class Hook(AppObject): + """Base class for hook. + + Hooks being appobjects like views, they have a `__regid__` and a `__select__` + class attribute. Like all appobjects, hooks have the `self._cw` attribute which + represents the current connection. In entity hooks, a `self.entity` attribute is + also present. + + The `events` tuple is used by the base class selector to dispatch the hook + on the right events. It is possible to dispatch on multiple events at once + if needed (though take care as hook attribute may vary as described above). + + .. Note:: + + Do not forget to extend the base class selectors as in: + + .. sourcecode:: python + + class MyHook(Hook): + __regid__ = 'whatever' + __select__ = Hook.__select__ & is_instance('Person') + + else your hooks will be called madly, whatever the event. 
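+
+    A minimal sketch of a complete hook (the ``Person`` entity type and the
+    log message are illustrative assumptions, not part of the framework):
+
+    .. sourcecode:: python
+
+        class PersonAddedHook(Hook):
+            __regid__ = 'myapp.person-added'
+            __select__ = Hook.__select__ & is_instance('Person')
+            events = ('after_add_entity',)
+
+            def __call__(self):
+                # self.entity holds the entity this event is about
+                self.info('person #%s created', self.entity.eid)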
+ """ + __select__ = enabled_category() + # set this in derivated classes + events = None + category = None + order = 0 + # stop pylint from complaining about missing attributes in Hooks classes + eidfrom = eidto = entity = rtype = repo = None + + @classmethod + @cached + def filterable_selectors(cls): + search = cls.__select__.search_selector + if search((NotPredicate, OrPredicate)): + return None, None + enabled_cat = search(enabled_category) + main_filter = search((is_instance, match_rtype)) + return enabled_cat, main_filter + + @classmethod + def check_events(cls): + try: + for event in cls.events: + if event not in ALL_HOOKS: + raise Exception('bad event %s on %s.%s' % ( + event, cls.__module__, cls.__name__)) + except AttributeError: + raise + except TypeError: + raise Exception('bad .events attribute %s on %s.%s' % ( + cls.events, cls.__module__, cls.__name__)) + + @classmethod + def __registered__(cls, reg): + cls.check_events() + + @classproperty + def __registries__(cls): + if cls.events is None: + return [] + return ['%s_hooks' % ev for ev in cls.events] + + known_args = set(('entity', 'rtype', 'eidfrom', 'eidto', 'repo', 'timestamp')) + def __init__(self, req, event, **kwargs): + for arg in self.known_args: + if arg in kwargs: + setattr(self, arg, kwargs.pop(arg)) + super(Hook, self).__init__(req, **kwargs) + self.event = event + +set_log_methods(Hook, getLogger('cubicweb.hook')) + + +# abtract hooks for relation propagation ####################################### +# See example usage in hooks of the nosylist cube + +class PropagateRelationHook(Hook): + """propagate some `main_rtype` relation on entities linked as object of + `subject_relations` or as subject of `object_relations` (the watched + relations). + + This hook ensure that when one of the watched relation is added, the + `main_rtype` relation is added to the target entity of the relation. + Notice there are no default behaviour defined when a watched relation is + deleted, you'll have to handle this by yourself. + + You usually want to use the :class:`match_rtype_sets` predicate on concrete + classes. + """ + events = ('after_add_relation',) + + # to set in concrete class + main_rtype = None + subject_relations = None + object_relations = None + + def __call__(self): + assert self.main_rtype + for eid in (self.eidfrom, self.eidto): + etype = self._cw.entity_metas(eid)['type'] + if self.main_rtype not in self._cw.vreg.schema.eschema(etype).subjrels: + return + if self.rtype in self.subject_relations: + meid, seid = self.eidfrom, self.eidto + else: + assert self.rtype in self.object_relations + meid, seid = self.eidto, self.eidfrom + self._cw.execute( + 'SET E %s P WHERE X %s P, X eid %%(x)s, E eid %%(e)s, NOT E %s P' + % (self.main_rtype, self.main_rtype, self.main_rtype), + {'x': meid, 'e': seid}) + + +class PropagateRelationAddHook(Hook): + """Propagate to entities at the end of watched relations when a `main_rtype` + relation is added. + + `subject_relations` and `object_relations` attributes should be specified on + subclasses and are usually shared references with attributes of the same + name on :class:`PropagateRelationHook`. + + Because of those shared references, you can use `skip_subject_relations` and + `skip_object_relations` attributes when you don't want to propagate to + entities linked through some particular relations. 
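+
+    A minimal sketch of a concrete subclass (the relation names are
+    illustrative assumptions, typically shared with a companion
+    :class:`PropagateRelationHook` subclass):
+
+    .. sourcecode:: python
+
+        WATCHED_SUBJECT_RELS = set(('in_team',))
+        WATCHED_OBJECT_RELS = set(('manages',))
+
+        class PropagateInterestAddHook(PropagateRelationAddHook):
+            __regid__ = 'myapp.propagate-interest-add'
+            __select__ = (PropagateRelationAddHook.__select__
+                          & match_rtype('interested_in'))
+            subject_relations = WATCHED_SUBJECT_RELS
+            object_relations = WATCHED_OBJECT_RELS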
+ """ + events = ('after_add_relation',) + + # to set in concrete class (mandatory) + subject_relations = None + object_relations = None + # to set in concrete class (optionally) + skip_subject_relations = () + skip_object_relations = () + + def __call__(self): + eschema = self._cw.vreg.schema.eschema(self._cw.entity_metas(self.eidfrom)['type']) + execute = self._cw.execute + for rel in self.subject_relations: + if rel in eschema.subjrels and not rel in self.skip_subject_relations: + execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'X %s R, NOT R %s P' % (self.rtype, rel, self.rtype), + {'x': self.eidfrom, 'p': self.eidto}) + for rel in self.object_relations: + if rel in eschema.objrels and not rel in self.skip_object_relations: + execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'R %s X, NOT R %s P' % (self.rtype, rel, self.rtype), + {'x': self.eidfrom, 'p': self.eidto}) + + +class PropagateRelationDelHook(PropagateRelationAddHook): + """Propagate to entities at the end of watched relations when a `main_rtype` + relation is deleted. + + This is the opposite of the :class:`PropagateRelationAddHook`, see its + documentation for how to use this class. + """ + events = ('after_delete_relation',) + + def __call__(self): + eschema = self._cw.vreg.schema.eschema(self._cw.entity_metas(self.eidfrom)['type']) + execute = self._cw.execute + for rel in self.subject_relations: + if rel in eschema.subjrels and not rel in self.skip_subject_relations: + execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'X %s R' % (self.rtype, rel), + {'x': self.eidfrom, 'p': self.eidto}) + for rel in self.object_relations: + if rel in eschema.objrels and not rel in self.skip_object_relations: + execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'R %s X' % (self.rtype, rel), + {'x': self.eidfrom, 'p': self.eidto}) + + + +# abstract classes for operation ############################################### + +class Operation(object): + """Base class for operations. + + Operation may be instantiated in the hooks' `__call__` method. It always + takes a connection object as first argument (accessible as `.cnx` from the + operation instance), and optionally all keyword arguments needed by the + operation. These keyword arguments will be accessible as attributes from the + operation instance. + + An operation is triggered on connections set events related to commit / + rollback transations. Possible events are: + + * `precommit`: + + the transaction is being prepared for commit. You can freely do any heavy + computation, raise an exception if the commit can't go. or even add some + new operations during this phase. If you do anything which has to be + reverted if the commit fails afterwards (eg altering the file system for + instance), you'll have to support the 'revertprecommit' event to revert + things by yourself + + * `revertprecommit`: + + if an operation failed while being pre-commited, this event is triggered + for all operations which had their 'precommit' event already fired to let + them revert things (including the operation which made the commit fail) + + * `rollback`: + + the transaction has been either rolled back either: + + * intentionally + * a 'precommit' event failed, in which case all operations are rolled back + once 'revertprecommit'' has been called + + * `postcommit`: + + the transaction is over. All the ORM entities accessed by the earlier + transaction are invalid. 
If you need to work on the database, you need to + start a new transaction, for instance using a new internal connection, + which you will need to commit. + + For an operation to support an event, one has to implement the `_event` method with no arguments. + + The order of operations may be important, and is controlled according to + the insert_index's method output (whose implementation vary according to the + base hook class used). + """ + + def __init__(self, cnx, **kwargs): + self.cnx = cnx + self.__dict__.update(kwargs) + self.register(cnx) + # execution information + self.processed = None # 'precommit', 'commit' + self.failed = False + + @property + @deprecated('[3.19] Operation.session is deprecated, use Operation.cnx instead') + def session(self): + return self.cnx + + def register(self, cnx): + cnx.add_operation(self, self.insert_index()) + + def insert_index(self): + """return the index of the latest instance which is not a + LateOperation instance + """ + # faster by inspecting operation in reverse order for heavy transactions + i = None + for i, op in enumerate(reversed(self.cnx.pending_operations)): + if isinstance(op, (LateOperation, SingleLastOperation)): + continue + return -i or None + if i is None: + return None + return -(i + 1) + + def handle_event(self, event): + """delegate event handling to the opertaion""" + getattr(self, event)() + + def precommit_event(self): + """the observed connections set is preparing a commit""" + + def revertprecommit_event(self): + """an error went when pre-commiting this operation or a later one + + should revert pre-commit's changes but take care, they may have not + been all considered if it's this operation which failed + """ + + def rollback_event(self): + """the observed connections set has been rolled back + + do nothing by default + """ + + def postcommit_event(self): + """the observed connections set has committed""" + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +set_log_methods(Operation, getLogger('cubicweb.session')) + +def _container_add(container, value): + {set: set.add, list: list.append}[container.__class__](container, value) + + +class DataOperationMixIn(object): + """Mix-in class to ease applying a single operation on a set of data, + avoiding to create as many as operation as they are individual modification. + The body of the operation must then iterate over the values that have been + stored in a single operation instance. + + You should try to use this instead of creating on operation for each + `value`, since handling operations becomes costly on massive data import. + + Usage looks like: + + .. sourcecode:: python + + class MyEntityHook(Hook): + __regid__ = 'my.entity.hook' + __select__ = Hook.__select__ & is_instance('MyEntity') + events = ('after_add_entity',) + + def __call__(self): + MyOperation.get_instance(self._cw).add_data(self.entity) + + + class MyOperation(DataOperationMixIn, Operation): + def precommit_event(self): + for bucket in self.get_data(): + process(bucket) + + You can modify the `containercls` class attribute, which defines the + container class that should be instantiated to hold payloads. An instance is + created on instantiation, and then the :meth:`add_data` method will add the + given data to the existing container. Default to a `set`. Give `list` if you + want to keep arrival ordering. 
You can also use another kind of container + by redefining :meth:`_build_container` and :meth:`add_data` + + More optional parameters can be given to the `get_instance` operation, that + will be given to the operation constructor (for obvious reasons those + parameters should not vary accross different calls to this method for a + given operation). + + .. Note:: + For sanity reason `get_data` will reset the operation, so that once + the operation has started its treatment, if some hook want to push + additional data to this same operation, a new instance will be created + (else that data has a great chance to be never treated). This implies: + + * you should **always** call `get_data` when starting treatment + + * you should **never** call `get_data` for another reason. + """ + containercls = set + + @classproperty + def data_key(cls): + return ('cw.dataops', cls.__name__) + + @classmethod + def get_instance(cls, cnx, **kwargs): + # no need to lock: transaction_data already comes from thread's local storage + try: + return cnx.transaction_data[cls.data_key] + except KeyError: + op = cnx.transaction_data[cls.data_key] = cls(cnx, **kwargs) + return op + + def __init__(self, *args, **kwargs): + super(DataOperationMixIn, self).__init__(*args, **kwargs) + self._container = self._build_container() + self._processed = False + + def __contains__(self, value): + return value in self._container + + def _build_container(self): + return self.containercls() + + def union(self, data): + """only when container is a set""" + assert not self._processed, """Trying to add data to a closed operation. +Iterating over operation data closed it and should be reserved to precommit / +postcommit method of the operation.""" + self._container |= data + + def add_data(self, data): + assert not self._processed, """Trying to add data to a closed operation. +Iterating over operation data closed it and should be reserved to precommit / +postcommit method of the operation.""" + _container_add(self._container, data) + + def remove_data(self, data): + assert not self._processed, """Trying to add data to a closed operation. +Iterating over operation data closed it and should be reserved to precommit / +postcommit method of the operation.""" + self._container.remove(data) + + def get_data(self): + assert not self._processed, """Trying to get data from a closed operation. 
+Iterating over operation data closed it and should be reserved to precommit / +postcommit method of the operation.""" + self._processed = True + op = self.cnx.transaction_data.pop(self.data_key) + assert op is self, "Bad handling of operation data, found %s instead of %s for key %s" % ( + op, self, self.data_key) + return self._container + + + +class LateOperation(Operation): + """special operation which should be called after all possible (ie non late) + operations + """ + def insert_index(self): + """return the index of the lastest instance which is not a + SingleLastOperation instance + """ + # faster by inspecting operation in reverse order for heavy transactions + i = None + for i, op in enumerate(reversed(self.cnx.pending_operations)): + if isinstance(op, SingleLastOperation): + continue + return -i or None + if i is None: + return None + return -(i + 1) + + + +class SingleLastOperation(Operation): + """special operation which should be called once and after all other + operations + """ + + def register(self, cnx): + """override register to handle cases where this operation has already + been added + """ + operations = cnx.pending_operations + index = self.equivalent_index(operations) + if index is not None: + equivalent = operations.pop(index) + else: + equivalent = None + cnx.add_operation(self, self.insert_index()) + return equivalent + + def equivalent_index(self, operations): + """return the index of the equivalent operation if any""" + for i, op in enumerate(reversed(operations)): + if op.__class__ is self.__class__: + return -(i+1) + return None + + def insert_index(self): + return None + + +class SendMailOp(SingleLastOperation): + def __init__(self, cnx, msg=None, recipients=None, **kwargs): + # may not specify msg yet, as + # `cubicweb.sobjects.supervision.SupervisionMailOp` + if msg is not None: + assert recipients + self.to_send = [(msg, recipients)] + else: + assert recipients is None + self.to_send = [] + super(SendMailOp, self).__init__(cnx, **kwargs) + + def register(self, cnx): + previous = super(SendMailOp, self).register(cnx) + if previous: + self.to_send = previous.to_send + self.to_send + + def postcommit_event(self): + self.cnx.repo.threaded_task(self.sendmails) + + def sendmails(self): + self.cnx.vreg.config.sendmails(self.to_send) + + +class RQLPrecommitOperation(Operation): + # to be defined in concrete classes + rqls = None + + def precommit_event(self): + execute = self.cnx.execute + for rql in self.rqls: + execute(*rql) + + +class CleanupNewEidsCacheOp(DataOperationMixIn, SingleLastOperation): + """on rollback of a insert query we have to remove from repository's + type/source cache eids of entities added in that transaction. + + NOTE: querier's rqlst/solutions cache may have been polluted too with + queries such as Any X WHERE X eid 32 if 32 has been rolled back however + generated queries are unpredictable and analysing all the cache probably + too expensive. Notice that there is no pb when using args to specify eids + instead of giving them into the rql string. + """ + data_key = 'neweids' + + def rollback_event(self): + """the observed connections set has been rolled back, + remove inserted eid from repository type/source cache + """ + try: + self.cnx.repo.clear_caches(self.get_data()) + except KeyError: + pass + +class CleanupDeletedEidsCacheOp(DataOperationMixIn, SingleLastOperation): + """on commit of delete query, we have to remove from repository's + type/source cache eids of entities deleted in that transaction. 
+ """ + data_key = 'pendingeids' + def postcommit_event(self): + """the observed connections set has been rolled back, + remove inserted eid from repository type/source cache + """ + try: + eids = self.get_data() + self.cnx.repo.clear_caches(eids) + self.cnx.repo.app_instances_bus.publish(['delete'] + list(str(eid) for eid in eids)) + except KeyError: + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/migractions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/migractions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1603 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""a class implementing basic actions used in migration scripts. + +The following schema actions are supported for now: +* add/drop/rename attribute +* add/drop entity/relation type +* rename entity type + +The following data actions are supported for now: +* add an entity +* execute raw RQL queries +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import os +import tarfile +import tempfile +import shutil +import os.path as osp +from datetime import datetime +from glob import glob +from copy import copy +from warnings import warn +from contextlib import contextmanager + +from six import PY2, text_type + +from logilab.common.deprecation import deprecated +from logilab.common.decorators import cached, clear_cache + +from yams.buildobjs import EntityType +from yams.constraints import SizeConstraint +from yams.schema import RelationDefinitionSchema + +from cubicweb import CW_SOFTWARE_ROOT, AuthenticationError, ExecutionError +from cubicweb.predicates import is_instance +from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES, + PURE_VIRTUAL_RTYPES, + CubicWebRelationSchema, order_eschemas) +from cubicweb.cwvreg import CW_EVENT_MANAGER +from cubicweb import repoapi +from cubicweb.migration import MigrationHelper, yes +from cubicweb.server import hook, schemaserial as ss +from cubicweb.server.schema2sql import eschema2sql, rschema2sql, unique_index_name, sql_type +from cubicweb.server.utils import manager_userpasswd +from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX + + +class ClearGroupMap(hook.Hook): + __regid__ = 'cw.migration.clear_group_mapping' + __select__ = hook.Hook.__select__ & is_instance('CWGroup') + events = ('after_add_entity', 'after_update_entity',) + def __call__(self): + clear_cache(self.mih, 'group_mapping') + self.mih._synchronized.clear() + + @classmethod + def mih_register(cls, repo): + # may be already registered in tests (e.g. 
unittest_migractions at + # least) + if not cls.__regid__ in repo.vreg['after_add_entity_hooks']: + repo.vreg.register(ClearGroupMap) + + +class ServerMigrationHelper(MigrationHelper): + """specific migration helper for server side migration scripts, + providing actions related to schema/data migration + """ + + def __init__(self, config, schema, interactive=True, + repo=None, cnx=None, verbosity=1, connect=True): + MigrationHelper.__init__(self, config, interactive, verbosity) + if not interactive: + assert cnx + assert repo + if cnx is not None: + assert repo + self.cnx = cnx + self.repo = repo + self.session = cnx.session + elif connect: + self.repo = config.repository() + self.set_cnx() + else: + self.session = None + # no config on shell to a remote instance + if config is not None and (cnx or connect): + repo = self.repo + # register a hook to clear our group_mapping cache and the + # self._synchronized set when some group is added or updated + ClearGroupMap.mih = self + ClearGroupMap.mih_register(repo) + CW_EVENT_MANAGER.bind('after-registry-reload', + ClearGroupMap.mih_register, repo) + # notify we're starting maintenance (called instead of server_start + # which is called on regular start + repo.hm.call_hooks('server_maintenance', repo=repo) + if not schema and not config.quick_start: + insert_lperms = self.repo.get_versions()['cubicweb'] < (3, 14, 0) and 'localperms' in config.available_cubes() + if insert_lperms: + cubes = config._cubes + config._cubes += ('localperms',) + try: + schema = config.load_schema(expand_cubes=True) + finally: + if insert_lperms: + config._cubes = cubes + self.fs_schema = schema + self._synchronized = set() + + # overriden from base MigrationHelper ###################################### + + def set_cnx(self): + try: + login = self.repo.config.default_admin_config['login'] + pwd = self.repo.config.default_admin_config['password'] + except KeyError: + login, pwd = manager_userpasswd() + while True: + try: + self.cnx = repoapi.connect(self.repo, login, password=pwd) + if not 'managers' in self.cnx.user.groups: + print('migration need an account in the managers group') + else: + break + except AuthenticationError: + print('wrong user/password') + except (KeyboardInterrupt, EOFError): + print('aborting...') + sys.exit(0) + try: + login, pwd = manager_userpasswd() + except (KeyboardInterrupt, EOFError): + print('aborting...') + sys.exit(0) + self.session = self.repo._get_session(self.cnx.sessionid) + + def cube_upgraded(self, cube, version): + self.cmd_set_property('system.version.%s' % cube.lower(), + text_type(version)) + self.commit() + + def shutdown(self): + if self.repo is not None: + self.repo.shutdown() + + def migrate(self, vcconf, toupgrade, options): + if not options.fs_only: + if options.backup_db is None: + self.backup_database() + elif options.backup_db: + self.backup_database(askconfirm=False) + # disable notification during migration + with self.cnx.allow_all_hooks_but('notification'): + super(ServerMigrationHelper, self).migrate(vcconf, toupgrade, options) + + def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): + try: + return super(ServerMigrationHelper, self).cmd_process_script( + migrscript, funcname, *args, **kwargs) + except ExecutionError as err: + sys.stderr.write("-> %s\n" % err) + except BaseException: + self.rollback() + raise + + # Adjust docstring + cmd_process_script.__doc__ = MigrationHelper.cmd_process_script.__doc__ + + # server specific migration methods ######################################## + + def 
backup_database(self, backupfile=None, askconfirm=True, format='native'): + config = self.config + repo = self.repo + # paths + timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + instbkdir = osp.join(config.appdatahome, 'backup') + if not osp.exists(instbkdir): + os.makedirs(instbkdir) + backupfile = backupfile or osp.join(instbkdir, '%s-%s.tar.gz' + % (config.appid, timestamp)) + # check backup has to be done + if osp.exists(backupfile) and not \ + self.confirm('Backup file %s exists, overwrite it?' % backupfile): + print('-> no backup done.') + return + elif askconfirm and not self.confirm('Backup %s database?' % config.appid): + print('-> no backup done.') + return + open(backupfile,'w').close() # kinda lock + os.chmod(backupfile, 0o600) + # backup + source = repo.system_source + tmpdir = tempfile.mkdtemp() + try: + failed = False + try: + source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format) + except Exception as ex: + print('-> error trying to backup %s [%s]' % (source.uri, ex)) + if not self.confirm('Continue anyway?', default='n'): + raise SystemExit(1) + else: + failed = True + with open(osp.join(tmpdir, 'format.txt'), 'w') as format_file: + format_file.write('%s\n' % format) + with open(osp.join(tmpdir, 'versions.txt'), 'w') as version_file: + versions = repo.get_versions() + for cube, version in versions.items(): + version_file.write('%s %s\n' % (cube, version)) + if not failed: + bkup = tarfile.open(backupfile, 'w|gz') + for filename in os.listdir(tmpdir): + bkup.add(osp.join(tmpdir, filename), filename) + bkup.close() + # call hooks + repo.hm.call_hooks('server_backup', repo=repo, timestamp=timestamp) + # done + print('-> backup file', backupfile) + finally: + shutil.rmtree(tmpdir) + + def restore_database(self, backupfile, drop=True, askconfirm=True, format='native'): + # check + if not osp.exists(backupfile): + raise ExecutionError("Backup file %s doesn't exist" % backupfile) + if askconfirm and not self.confirm('Restore %s database from %s ?' + % (self.config.appid, backupfile)): + return + # unpack backup + tmpdir = tempfile.mkdtemp() + try: + bkup = tarfile.open(backupfile, 'r|gz') + except tarfile.ReadError: + # assume restoring old backup + shutil.copy(backupfile, osp.join(tmpdir, 'system')) + else: + for name in bkup.getnames(): + if name[0] in '/.': + raise ExecutionError('Security check failed, path starts with "/" or "."') + bkup.close() # XXX seek error if not close+open !?! 
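+            # the archive was opened in stream mode ('r|gz'), which does not
+            # support seeking back, hence the close/reopen before extraction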
+ bkup = tarfile.open(backupfile, 'r|gz') + bkup.extractall(path=tmpdir) + bkup.close() + if osp.isfile(osp.join(tmpdir, 'format.txt')): + with open(osp.join(tmpdir, 'format.txt')) as format_file: + written_format = format_file.readline().strip() + if written_format in ('portable', 'native'): + format = written_format + self.config.init_cnxset_pool = False + repo = self.repo = self.config.repository() + source = repo.system_source + try: + source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format) + except Exception as exc: + print('-> error trying to restore %s [%s]' % (source.uri, exc)) + if not self.confirm('Continue anyway?', default='n'): + raise SystemExit(1) + shutil.rmtree(tmpdir) + # call hooks + repo.init_cnxset_pool() + repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile) + print('-> database restored.') + + def commit(self): + self.cnx.commit() + + def rollback(self): + self.cnx.rollback() + + def rqlexecall(self, rqliter, ask_confirm=False): + for rql, kwargs in rqliter: + self.rqlexec(rql, kwargs, ask_confirm=ask_confirm) + + @cached + def _create_context(self): + """return a dictionary to use as migration script execution context""" + context = super(ServerMigrationHelper, self)._create_context() + context.update({'commit': self.checkpoint, + 'rollback': self.rollback, + 'sql': self.sqlexec, + 'rql': self.rqlexec, + 'rqliter': self.rqliter, + 'schema': self.repo.get_schema(), + 'cnx': self.cnx, + 'fsschema': self.fs_schema, + 'session' : self.cnx, + 'repo' : self.repo, + }) + return context + + @cached + def group_mapping(self): + """cached group mapping""" + return ss.group_mapping(self.cnx) + + def cstrtype_mapping(self): + """cached constraint types mapping""" + return ss.cstrtype_mapping(self.cnx) + + def cmd_exec_event_script(self, event, cube=None, funcname=None, + *args, **kwargs): + """execute a cube event scripts `migration/.py` where event + is one of 'precreate', 'postcreate', 'preremove' and 'postremove'. + """ + assert event in ('precreate', 'postcreate', 'preremove', 'postremove') + if cube: + cubepath = self.config.cube_dir(cube) + apc = osp.join(cubepath, 'migration', '%s.py' % event) + elif kwargs.pop('apphome', False): + apc = osp.join(self.config.apphome, 'migration', '%s.py' % event) + else: + apc = osp.join(self.config.migration_scripts_dir(), '%s.py' % event) + if osp.exists(apc): + if self.config.free_wheel: + self.cmd_deactivate_verification_hooks() + self.info('executing %s', apc) + confirm = self.confirm + execscript_confirm = self.execscript_confirm + self.confirm = yes + self.execscript_confirm = yes + try: + if event == 'postcreate': + with self.cnx.allow_all_hooks_but(): + return self.cmd_process_script(apc, funcname, *args, **kwargs) + return self.cmd_process_script(apc, funcname, *args, **kwargs) + finally: + self.confirm = confirm + self.execscript_confirm = execscript_confirm + if self.config.free_wheel: + self.cmd_reactivate_verification_hooks() + + def cmd_install_custom_sql_scripts(self, cube=None): + """install a cube custom sql scripts `schema/*..sql` where + depends on the instance main database backend (eg 'postgres', + 'mysql'...) 
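+
+        For instance, a script named ``schema/mytable.postgres.sql``
+        (illustrative file name) is only executed when the instance runs on a
+        PostgreSQL backend.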
+ """ + driver = self.repo.system_source.dbdriver + if cube is None: + directory = osp.join(CW_SOFTWARE_ROOT, 'schemas') + else: + directory = osp.join(self.config.cube_dir(cube), 'schema') + sql_scripts = glob(osp.join(directory, '*.%s.sql' % driver)) + for fpath in sql_scripts: + print('-> installing', fpath) + failed = sqlexec(open(fpath).read(), self.cnx.system_sql, False, + delimiter=';;') + if failed: + print('-> ERROR, skipping', fpath) + + # schema synchronization internals ######################################## + + def _synchronize_permissions(self, erschema, teid): + """permission synchronization for an entity or relation type""" + assert teid, erschema + if 'update' in erschema.ACTIONS or erschema.final: + # entity type + exprtype = u'ERQLExpression' + else: + # relation type + exprtype = u'RRQLExpression' + gm = self.group_mapping() + confirm = self.verbosity >= 2 + # * remove possibly deprecated permission (eg in the persistent schema + # but not in the new schema) + # * synchronize existing expressions + # * add new groups/expressions + for action in erschema.ACTIONS: + perm = '%s_permission' % action + # handle groups + newgroups = list(erschema.get_groups(action)) + for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, ' + 'T eid %%(x)s' % perm, {'x': teid}, + ask_confirm=False): + if not gname in newgroups: + if not confirm or self.confirm('Remove %s permission of %s to %s?' + % (action, erschema, gname)): + self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s' + % (perm, teid), + {'x': geid}, ask_confirm=False) + else: + newgroups.remove(gname) + for gname in newgroups: + if not confirm or self.confirm('Grant %s permission of %s to %s?' + % (action, erschema, gname)): + try: + self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' + % (perm, teid), + {'x': gm[gname]}, ask_confirm=False) + except KeyError: + self.error('can grant %s perm to unexistant group %s', + action, gname) + # handle rql expressions + newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action)) + for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, ' + 'T eid %s' % (perm, teid), + ask_confirm=False): + if not expression in newexprs: + if not confirm or self.confirm('Remove %s expression for %s permission of %s?' + % (expression, action, erschema)): + # deleting the relation will delete the expression entity + self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s' + % (perm, teid), + {'x': expreid}, ask_confirm=False) + else: + newexprs.pop(expression) + for expression in newexprs.values(): + expr = expression.expression + if not confirm or self.confirm('Add %s expression for %s permission of %s?' 
+ % (expr, action, erschema)): + self.rqlexec('INSERT RQLExpression X: X exprtype %%(exprtype)s, ' + 'X expression %%(expr)s, X mainvars %%(vars)s, T %s X ' + 'WHERE T eid %%(x)s' % perm, + {'expr': expr, 'exprtype': exprtype, + 'vars': u','.join(sorted(expression.mainvars)), + 'x': teid}, + ask_confirm=False) + + def _synchronize_rschema(self, rtype, syncrdefs=True, + syncperms=True, syncprops=True): + """synchronize properties of the persistent relation schema against its + current definition: + + * description + * symmetric, meta + * inlined + * relation definitions if `syncrdefs` + * permissions if `syncperms` + + physical schema changes should be handled by repository's schema hooks + """ + rtype = str(rtype) + if rtype in self._synchronized: + return + if syncrdefs and syncperms and syncprops: + self._synchronized.add(rtype) + rschema = self.fs_schema.rschema(rtype) + reporschema = self.repo.schema.rschema(rtype) + if syncprops: + assert reporschema.eid, reporschema + self.rqlexecall(ss.updaterschema2rql(rschema, reporschema.eid), + ask_confirm=self.verbosity>=2) + if rschema.rule: + if syncperms: + self._synchronize_permissions(rschema, reporschema.eid) + elif syncrdefs: + for subj, obj in rschema.rdefs: + if (subj, obj) not in reporschema.rdefs: + continue + if rschema in VIRTUAL_RTYPES: + continue + self._synchronize_rdef_schema(subj, rschema, obj, + syncprops=syncprops, + syncperms=syncperms) + + def _synchronize_eschema(self, etype, syncrdefs=True, + syncperms=True, syncprops=True): + """synchronize properties of the persistent entity schema against + its current definition: + + * description + * internationalizable, fulltextindexed, indexed, meta + * relations from/to this entity + * __unique_together__ + * permissions if `syncperms` + """ + etype = str(etype) + if etype in self._synchronized: + return + if syncrdefs and syncperms and syncprops: + self._synchronized.add(etype) + repoeschema = self.repo.schema.eschema(etype) + try: + eschema = self.fs_schema.eschema(etype) + except KeyError: + return # XXX somewhat unexpected, no?... 
+ if syncprops: + repospschema = repoeschema.specializes() + espschema = eschema.specializes() + if repospschema and not espschema: + self.rqlexec('DELETE X specializes Y WHERE X is CWEType, X name %(x)s', + {'x': str(repoeschema)}, ask_confirm=False) + elif not repospschema and espschema: + self.rqlexec('SET X specializes Y WHERE X is CWEType, X name %(x)s, ' + 'Y is CWEType, Y name %(y)s', + {'x': str(repoeschema), 'y': str(espschema)}, + ask_confirm=False) + self.rqlexecall(ss.updateeschema2rql(eschema, repoeschema.eid), + ask_confirm=self.verbosity >= 2) + if syncperms: + self._synchronize_permissions(eschema, repoeschema.eid) + if syncrdefs: + for rschema, targettypes, role in eschema.relation_definitions(True): + if rschema in VIRTUAL_RTYPES: + continue + if role == 'subject': + if not rschema in repoeschema.subject_relations(): + continue + subjtypes, objtypes = [etype], targettypes + else: # role == 'object' + if not rschema in repoeschema.object_relations(): + continue + subjtypes, objtypes = targettypes, [etype] + self._synchronize_rschema(rschema, syncrdefs=False, + syncprops=syncprops, syncperms=syncperms) + if rschema.rule: # rdef for computed rtype are infered hence should not be + # synchronized + continue + reporschema = self.repo.schema.rschema(rschema) + for subj in subjtypes: + for obj in objtypes: + if (subj, obj) not in reporschema.rdefs: + continue + self._synchronize_rdef_schema(subj, rschema, obj, + syncprops=syncprops, syncperms=syncperms) + if syncprops: # need to process __unique_together__ after rdefs were processed + # mappings from constraint name to columns + # filesystem (fs) and repository (repo) wise + fs = {} + repo = {} + for cols in eschema._unique_together or (): + fs[unique_index_name(repoeschema, cols)] = sorted(cols) + schemaentity = self.cnx.entity_from_eid(repoeschema.eid) + for entity in schemaentity.related('constraint_of', 'object', + targettypes=('CWUniqueTogetherConstraint',)).entities(): + repo[entity.name] = sorted(rel.name for rel in entity.relations) + added = set(fs) - set(repo) + removed = set(repo) - set(fs) + + for name in removed: + self.rqlexec('DELETE CWUniqueTogetherConstraint C WHERE C name %(name)s', + {'name': name}) + + def possible_unique_constraint(cols): + for name in cols: + rschema = repoeschema.subjrels.get(name) + if rschema is None: + print('dont add %s unique constraint on %s, missing %s' % ( + ','.join(cols), eschema, name)) + return False + if not (rschema.final or rschema.inlined): + print('dont add %s unique constraint on %s, %s is neither final nor inlined' % ( + ','.join(cols), eschema, name)) + return False + return True + + for name in added: + if possible_unique_constraint(fs[name]): + rql, substs = ss._uniquetogether2rql(eschema, fs[name]) + substs['x'] = repoeschema.eid + substs['name'] = name + self.rqlexec(rql, substs) + + def _synchronize_rdef_schema(self, subjtype, rtype, objtype, + syncperms=True, syncprops=True): + """synchronize properties of the persistent relation definition schema + against its current definition: + * order and other properties + * constraints + * permissions + """ + subjtype, objtype = str(subjtype), str(objtype) + rschema = self.fs_schema.rschema(rtype) + if rschema.rule: + raise ExecutionError('Cannot synchronize a relation definition for a ' + 'computed relation (%s)' % rschema) + reporschema = self.repo.schema.rschema(rschema) + if (subjtype, rschema, objtype) in self._synchronized: + return + if syncperms and syncprops: + self._synchronized.add((subjtype, rschema, objtype)) 
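+            # a symmetric relation holds in both directions, so consider the
+            # reverse definition synchronized as well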
+ if rschema.symmetric: + self._synchronized.add((objtype, rschema, subjtype)) + rdef = rschema.rdef(subjtype, objtype) + if rdef.infered: + return # don't try to synchronize infered relation defs + repordef = reporschema.rdef(subjtype, objtype) + confirm = self.verbosity >= 2 + if syncprops: + # properties + self.rqlexecall(ss.updaterdef2rql(rdef, repordef.eid), + ask_confirm=confirm) + # constraints + # 0. eliminate the set of unmodified constraints from the sets of + # old/new constraints + newconstraints = set(rdef.constraints) + oldconstraints = set(repordef.constraints) + unchanged_constraints = newconstraints & oldconstraints + newconstraints -= unchanged_constraints + oldconstraints -= unchanged_constraints + # 1. remove old constraints and update constraints of the same type + # NOTE: don't use rschema.constraint_by_type because it may be + # out of sync with newconstraints when multiple + # constraints of the same type are used + for cstr in oldconstraints: + self.rqlexec('DELETE CWConstraint C WHERE C eid %(x)s', + {'x': cstr.eid}, ask_confirm=confirm) + # 2. add new constraints + cstrtype_map = self.cstrtype_mapping() + self.rqlexecall(ss.constraints2rql(cstrtype_map, newconstraints, + repordef.eid), + ask_confirm=confirm) + if syncperms and not rschema in VIRTUAL_RTYPES: + self._synchronize_permissions(rdef, repordef.eid) + + # base actions ############################################################ + + def checkpoint(self, ask_confirm=True): + """checkpoint action""" + if not ask_confirm or self.confirm('Commit now ?', shell=False): + self.commit() + + def cmd_add_cube(self, cube, update_database=True): + self.cmd_add_cubes( (cube,), update_database) + + def cmd_add_cubes(self, cubes, update_database=True): + """update_database is telling if the database schema should be updated + or if only the relevant eproperty should be inserted (for the case where + a cube has been extracted from an existing instance, so the + cube schema is already in there) + """ + newcubes = super(ServerMigrationHelper, self).cmd_add_cubes(cubes) + if not newcubes: + return + for cube in newcubes: + self.cmd_set_property('system.version.'+cube, + self.config.cube_version(cube)) + # ensure added cube is in config cubes + # XXX worth restoring on error? + if not cube in self.config._cubes: + self.config._cubes += (cube,) + if not update_database: + self.commit() + return + newcubes_schema = self.config.load_schema(construction_mode='non-strict') + # XXX we have to replace fs_schema, used in cmd_add_relation_type + # etc. 
and fsschema of migration script contexts + self.fs_schema = newcubes_schema + self.update_context('fsschema', self.fs_schema) + new = set() + # execute pre-create files + driver = self.repo.system_source.dbdriver + for cube in reversed(newcubes): + self.cmd_install_custom_sql_scripts(cube) + self.cmd_exec_event_script('precreate', cube) + # add new entity and relation types + for rschema in newcubes_schema.relations(): + if not rschema in self.repo.schema: + self.cmd_add_relation_type(rschema.type) + new.add(rschema.type) + toadd = [eschema for eschema in newcubes_schema.entities() + if not eschema in self.repo.schema] + for eschema in order_eschemas(toadd): + self.cmd_add_entity_type(eschema.type) + new.add(eschema.type) + # check if attributes has been added to existing entities + for rschema in newcubes_schema.relations(): + existingschema = self.repo.schema.rschema(rschema.type) + for (fromtype, totype) in rschema.rdefs: + # if rdef already exists or is infered from inheritance, + # don't add it + if (fromtype, totype) in existingschema.rdefs \ + or rschema.rdefs[(fromtype, totype)].infered: + continue + # check we should actually add the relation definition + if not (fromtype in new or totype in new or rschema in new): + continue + self.cmd_add_relation_definition(str(fromtype), rschema.type, + str(totype)) + # execute post-create files + for cube in reversed(newcubes): + with self.cnx.allow_all_hooks_but(): + self.cmd_exec_event_script('postcreate', cube) + self.commit() + + def cmd_drop_cube(self, cube, removedeps=False): + removedcubes = super(ServerMigrationHelper, self).cmd_drop_cube( + cube, removedeps) + if not removedcubes: + return + fsschema = self.fs_schema + removedcubes_schema = self.config.load_schema(construction_mode='non-strict') + reposchema = self.repo.schema + # execute pre-remove files + for cube in reversed(removedcubes): + self.cmd_exec_event_script('preremove', cube) + # remove cubes'entity and relation types + for rschema in fsschema.relations(): + if not rschema in removedcubes_schema and rschema in reposchema: + self.cmd_drop_relation_type(rschema.type) + toremove = [eschema for eschema in fsschema.entities() + if not eschema in removedcubes_schema + and eschema in reposchema] + for eschema in reversed(order_eschemas(toremove)): + self.cmd_drop_entity_type(eschema.type) + for rschema in fsschema.relations(): + if rschema in removedcubes_schema and rschema in reposchema: + # check if attributes/relations has been added to entities from + # other cubes + for fromtype, totype in rschema.rdefs: + if (fromtype, totype) not in removedcubes_schema[rschema.type].rdefs and \ + (fromtype, totype) in reposchema[rschema.type].rdefs: + self.cmd_drop_relation_definition( + str(fromtype), rschema.type, str(totype)) + # execute post-remove files + for cube in reversed(removedcubes): + self.cmd_exec_event_script('postremove', cube) + self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s', + {'pk': u'system.version.'+cube}, ask_confirm=False) + self.commit() + + # schema migration actions ################################################ + + def cmd_add_attribute(self, etype, attrname, attrtype=None, commit=True): + """add a new attribute on the given entity type""" + if attrtype is None: + rschema = self.fs_schema.rschema(attrname) + attrtype = rschema.objects(etype)[0] + self.cmd_add_relation_definition(etype, attrname, attrtype, commit=commit) + + def cmd_drop_attribute(self, etype, attrname, commit=True): + """drop an existing attribute from the given entity type + + 
`attrname` is a string giving the name of the attribute to drop + """ + try: + rschema = self.repo.schema.rschema(attrname) + attrtype = rschema.objects(etype)[0] + except KeyError: + print('warning: attribute %s %s is not known, skip deletion' % ( + etype, attrname)) + else: + self.cmd_drop_relation_definition(etype, attrname, attrtype, + commit=commit) + + def cmd_rename_attribute(self, etype, oldname, newname, commit=True): + """rename an existing attribute of the given entity type + + `oldname` is a string giving the name of the existing attribute + `newname` is a string giving the name of the renamed attribute + """ + eschema = self.fs_schema.eschema(etype) + attrtype = eschema.destination(newname) + # have to commit this first step anyway to get the definition + # actually in the schema + self.cmd_add_attribute(etype, newname, attrtype, commit=True) + # skipp NULL values if the attribute is required + rql = 'SET X %s VAL WHERE X is %s, X %s VAL' % (newname, etype, oldname) + card = eschema.rdef(newname).cardinality[0] + if card == '1': + rql += ', NOT X %s NULL' % oldname + self.rqlexec(rql, ask_confirm=self.verbosity>=2) + # XXX if both attributes fulltext indexed, should skip fti rebuild + # XXX if old attribute was fti indexed but not the new one old value + # won't be removed from the index (this occurs on other kind of + # fulltextindexed change...) + self.cmd_drop_attribute(etype, oldname, commit=commit) + + def cmd_add_entity_type(self, etype, auto=True, commit=True): + """register a new entity type + + in auto mode, automatically register entity's relation where the + targeted type is known + """ + instschema = self.repo.schema + eschema = self.fs_schema.eschema(etype) + if etype in instschema and not (eschema.final and eschema.eid is None): + print('warning: %s already known, skip addition' % etype) + return + confirm = self.verbosity >= 2 + groupmap = self.group_mapping() + cstrtypemap = self.cstrtype_mapping() + # register the entity into CWEType + execute = self.cnx.execute + if eschema.final and eschema not in instschema: + # final types are expected to be in the living schema by default, but they are not if + # the type is defined in a cube that is being added + edef = EntityType(eschema.type, __permissions__=eschema.permissions) + instschema.add_entity_type(edef) + ss.execschemarql(execute, eschema, ss.eschema2rql(eschema, groupmap)) + # add specializes relation if needed + specialized = eschema.specializes() + if specialized: + try: + specialized.eid = instschema[specialized].eid + except KeyError: + raise ExecutionError('trying to add entity type but parent type is ' + 'not yet in the database schema') + self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm) + # register entity's attributes + for rschema, attrschema in eschema.attribute_definitions(): + # ignore those meta relations, they will be automatically added + if rschema.type in META_RTYPES: + continue + if not attrschema.type in instschema: + self.cmd_add_entity_type(attrschema.type, False, False) + if not rschema.type in instschema: + # need to add the relation type and to commit to get it + # actually in the schema + self.cmd_add_relation_type(rschema.type, False, commit=True) + # register relation definition + rdef = self._get_rdef(rschema, eschema, eschema.destination(rschema)) + ss.execschemarql(execute, rdef, ss.rdef2rql(rdef, cstrtypemap, groupmap),) + # take care to newly introduced base class + # XXX some part of this should probably be under the "if auto" block + for spschema in 
eschema.specialized_by(recursive=False): + try: + instspschema = instschema[spschema] + except KeyError: + # specialized entity type not in schema, ignore + continue + if instspschema.specializes() != eschema: + self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s', + {'d': instspschema.eid, 'pn': eschema.type}, + ask_confirm=confirm) + for rschema, tschemas, role in spschema.relation_definitions(True): + for tschema in tschemas: + if not tschema in instschema: + continue + if role == 'subject': + subjschema = spschema + objschema = tschema + if rschema.final and rschema in instspschema.subjrels: + # attribute already set, has_rdef would check if + # it's of the same type, we don't want this so + # simply skip here + continue + elif role == 'object': + subjschema = tschema + objschema = spschema + if (rschema.rdef(subjschema, objschema).infered + or (instschema.has_relation(rschema) and + (subjschema, objschema) in instschema[rschema].rdefs)): + continue + self.cmd_add_relation_definition( + subjschema.type, rschema.type, objschema.type) + if auto: + # we have commit here to get relation types actually in the schema + self.commit() + added = [] + for rschema in eschema.subject_relations(): + # attribute relation have already been processed and + # 'owned_by'/'created_by' will be automatically added + if rschema.final or rschema.type in META_RTYPES: + continue + rtypeadded = rschema.type in instschema + for targetschema in rschema.objects(etype): + # ignore relations where the targeted type is not in the + # current instance schema + targettype = targetschema.type + if not targettype in instschema and targettype != etype: + continue + if not rtypeadded: + # need to add the relation type and to commit to get it + # actually in the schema + added.append(rschema.type) + self.cmd_add_relation_type(rschema.type, False, commit=True) + rtypeadded = True + # register relation definition + # remember this two avoid adding twice non symmetric relation + # such as "Emailthread forked_from Emailthread" + added.append((etype, rschema.type, targettype)) + rdef = self._get_rdef(rschema, eschema, targetschema) + ss.execschemarql(execute, rdef, + ss.rdef2rql(rdef, cstrtypemap, groupmap)) + for rschema in eschema.object_relations(): + if rschema.type in META_RTYPES: + continue + rtypeadded = rschema.type in instschema or rschema.type in added + for targetschema in rschema.subjects(etype): + # ignore relations where the targeted type is not in the + # current instance schema + targettype = targetschema.type + # don't check targettype != etype since in this case the + # relation has already been added as a subject relation + if not targettype in instschema: + continue + if not rtypeadded: + # need to add the relation type and to commit to get it + # actually in the schema + self.cmd_add_relation_type(rschema.type, False, commit=True) + rtypeadded = True + elif (targettype, rschema.type, etype) in added: + continue + # register relation definition + rdef = self._get_rdef(rschema, targetschema, eschema) + ss.execschemarql(execute, rdef, + ss.rdef2rql(rdef, cstrtypemap, groupmap)) + if commit: + self.commit() + + def cmd_drop_entity_type(self, etype, commit=True): + """Drop an existing entity type. + + This will trigger deletion of necessary relation types and definitions. + Note that existing entities of the given type will be deleted without + any hooks called. 
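+
+        In a migration script this is typically invoked as
+        ``drop_entity_type('MyEtype')``, where ``MyEtype`` is an illustrative
+        entity type name.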
+ """ + # XXX what if we delete an entity type which is specialized by other types + # unregister the entity from CWEType + self.rqlexec('DELETE CWEType X WHERE X name %(etype)s', {'etype': etype}, + ask_confirm=self.verbosity>=2) + if commit: + self.commit() + + def cmd_rename_entity_type(self, oldname, newname, attrs=None, commit=True): + """rename an existing entity type in the persistent schema + + `oldname` is a string giving the name of the existing entity type + `newname` is a string giving the name of the renamed entity type + """ + schema = self.repo.schema + if oldname not in schema: + print('warning: entity type %s is unknown, skip renaming' % oldname) + return + # if merging two existing entity types + if newname in schema: + assert oldname in ETYPE_NAME_MAP, \ + '%s should be mapped to %s in ETYPE_NAME_MAP' % (oldname, + newname) + if attrs is None: + attrs = ','.join(SQL_PREFIX + rschema.type + for rschema in schema[newname].subject_relations() + if (rschema.final or rschema.inlined) + and not rschema in PURE_VIRTUAL_RTYPES) + else: + attrs += ('eid', 'creation_date', 'modification_date', 'cwuri') + attrs = ','.join(SQL_PREFIX + attr for attr in attrs) + self.sqlexec('INSERT INTO %s%s(%s) SELECT %s FROM %s%s' % ( + SQL_PREFIX, newname, attrs, attrs, SQL_PREFIX, oldname), + ask_confirm=False) + # old entity type has not been added to the schema, can't gather it + new = schema.eschema(newname) + oldeid = self.rqlexec('CWEType ET WHERE ET name %(on)s', + {'on': oldname}, ask_confirm=False)[0][0] + # backport old type relations to new type + # XXX workflows, other relations? + for r1, rr1 in [('from_entity', 'to_entity'), + ('to_entity', 'from_entity')]: + self.rqlexec('SET X %(r1)s NET WHERE X %(r1)s OET, ' + 'NOT EXISTS(X2 %(r1)s NET, X relation_type XRT, ' + 'X2 relation_type XRT, X %(rr1)s XTE, X2 %(rr1)s XTE), ' + 'OET eid %%(o)s, NET eid %%(n)s' % locals(), + {'o': oldeid, 'n': new.eid}, ask_confirm=False) + # backport is / is_instance_of relation to new type + for rtype in ('is', 'is_instance_of'): + self.sqlexec('UPDATE %s_relation SET eid_to=%s WHERE eid_to=%s' + % (rtype, new.eid, oldeid), ask_confirm=False) + # delete relations using SQL to avoid relations content removal + # triggered by schema synchronization hooks. + for rdeftype in ('CWRelation', 'CWAttribute'): + thispending = set( (eid for eid, in self.sqlexec( + 'SELECT cw_eid FROM cw_%s WHERE cw_from_entity=%%(eid)s OR ' + ' cw_to_entity=%%(eid)s' % rdeftype, + {'eid': oldeid}, ask_confirm=False)) ) + # we should add deleted eids into pending eids else we may + # get some validation error on commit since integrity hooks + # may think some required relation is missing... 
This also ensure + # repository caches are properly cleanup + hook.CleanupDeletedEidsCacheOp.get_instance(self.cnx).union(thispending) + # and don't forget to remove record from system tables + entities = [self.cnx.entity_from_eid(eid, rdeftype) for eid in thispending] + self.repo.system_source.delete_info_multi(self.cnx, entities) + self.sqlexec('DELETE FROM cw_%s WHERE cw_from_entity=%%(eid)s OR ' + 'cw_to_entity=%%(eid)s' % rdeftype, + {'eid': oldeid}, ask_confirm=False) + # now we have to manually cleanup relations pointing to deleted + # entities + thiseids = ','.join(str(eid) for eid in thispending) + for rschema, ttypes, role in schema[rdeftype].relation_definitions(): + if rschema.type in VIRTUAL_RTYPES: + continue + sqls = [] + if role == 'object': + if rschema.inlined: + for eschema in ttypes: + sqls.append('DELETE FROM cw_%s WHERE cw_%s IN(%%s)' + % (eschema, rschema)) + else: + sqls.append('DELETE FROM %s_relation WHERE eid_to IN(%%s)' + % rschema) + elif not rschema.inlined: + sqls.append('DELETE FROM %s_relation WHERE eid_from IN(%%s)' + % rschema) + for sql in sqls: + self.sqlexec(sql % thiseids, ask_confirm=False) + # remove the old type: use rql to propagate deletion + self.rqlexec('DELETE CWEType ET WHERE ET name %(on)s', {'on': oldname}, + ask_confirm=False) + # elif simply renaming an entity type + else: + self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(on)s', + {'newname' : text_type(newname), 'on' : oldname}, + ask_confirm=False) + if commit: + self.commit() + + def cmd_add_relation_type(self, rtype, addrdef=True, commit=True): + """register a new relation type named `rtype`, as described in the + schema description file. + + `addrdef` is a boolean value; when True, it will also add all relations + of the type just added found in the schema definition file. Note that it + implies an intermediate "commit" which commits the relation type + creation (but not the relation definitions themselves, for which + committing depends on the `commit` argument value). 
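Usage sketch (illustrative, assuming the same migration-script context; ``filed_under``, ``Book`` and ``Folder`` are hypothetical names)::

    # register the relation type and, since addrdef is True by default,
    # every definition found in the filesystem schema (with the
    # intermediate commit described above)
    add_relation_type('filed_under')
    # -- or -- register the type alone, then pick definitions explicitly
    add_relation_type('filed_under', addrdef=False)
    add_relation_definition('Book', 'filed_under', 'Folder')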
+ + """ + reposchema = self.repo.schema + rschema = self.fs_schema.rschema(rtype) + execute = self.cnx.execute + if rtype in reposchema: + print('warning: relation type %s is already known, skip addition' % ( + rtype)) + elif rschema.rule: + gmap = self.group_mapping() + ss.execschemarql(execute, rschema, ss.crschema2rql(rschema, gmap)) + else: + # register the relation into CWRType and insert necessary relation + # definitions + ss.execschemarql(execute, rschema, ss.rschema2rql(rschema, addrdef=False)) + if not rschema.rule and addrdef: + self.commit() + gmap = self.group_mapping() + cmap = self.cstrtype_mapping() + done = set() + for subj, obj in rschema.rdefs: + if not (reposchema.has_entity(subj) + and reposchema.has_entity(obj)): + continue + # symmetric relations appears twice + if (subj, obj) in done: + continue + done.add( (subj, obj) ) + self.cmd_add_relation_definition(subj, rtype, obj) + if rtype in META_RTYPES: + # if the relation is in META_RTYPES, ensure we're adding it for + # all entity types *in the persistent schema*, not only those in + # the fs schema + for etype in self.repo.schema.entities(): + if not etype in self.fs_schema: + # get sample object type and rproperties + objtypes = rschema.objects() + assert len(objtypes) == 1, objtypes + objtype = objtypes[0] + rdef = copy(rschema.rdef(rschema.subjects(objtype)[0], objtype)) + rdef.subject = etype + rdef.rtype = self.repo.schema.rschema(rschema) + rdef.object = self.repo.schema.eschema(objtype) + ss.execschemarql(execute, rdef, + ss.rdef2rql(rdef, cmap, gmap)) + if commit: + self.commit() + + def cmd_drop_relation_type(self, rtype, commit=True): + """Drop an existing relation type. + + Note that existing relations of the given type will be deleted without + any hooks called. + """ + self.rqlexec('DELETE CWRType X WHERE X name %r' % rtype, + ask_confirm=self.verbosity>=2) + self.rqlexec('DELETE CWComputedRType X WHERE X name %r' % rtype, + ask_confirm=self.verbosity>=2) + if commit: + self.commit() + + def cmd_rename_relation_type(self, oldname, newname, commit=True, force=False): + """rename an existing relation + + `oldname` is a string giving the name of the existing relation + `newname` is a string giving the name of the renamed relation + + If `force` is True, proceed even if `oldname` still appears in the fs schema + """ + if oldname in self.fs_schema and not force: + if not self.confirm('Relation %s is still present in the filesystem schema,' + ' do you really want to drop it?' 
% oldname, + default='n'): + return + self.cmd_add_relation_type(newname, commit=True) + if not self.repo.schema[oldname].rule: + self.rqlexec('SET X %s Y WHERE X %s Y' % (newname, oldname), + ask_confirm=self.verbosity>=2) + self.cmd_drop_relation_type(oldname, commit=commit) + + def cmd_add_relation_definition(self, subjtype, rtype, objtype, commit=True): + """register a new relation definition, from its definition found in the + schema definition file + """ + rschema = self.fs_schema.rschema(rtype) + if rschema.rule: + raise ExecutionError('Cannot add a relation definition for a ' + 'computed relation (%s)' % rschema) + if not rtype in self.repo.schema: + self.cmd_add_relation_type(rtype, addrdef=False, commit=True) + if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs: + print('warning: relation %s %s %s is already known, skip addition' % ( + subjtype, rtype, objtype)) + return + rdef = self._get_rdef(rschema, subjtype, objtype) + ss.execschemarql(self.cnx.execute, rdef, + ss.rdef2rql(rdef, self.cstrtype_mapping(), + self.group_mapping())) + if commit: + self.commit() + + def _get_rdef(self, rschema, subjtype, objtype): + return self._set_rdef_eid(rschema.rdefs[(subjtype, objtype)]) + + def _set_rdef_eid(self, rdef): + for attr in ('rtype', 'subject', 'object'): + schemaobj = getattr(rdef, attr) + if getattr(schemaobj, 'eid', None) is None: + schemaobj.eid = self.repo.schema[schemaobj].eid + assert schemaobj.eid is not None, schemaobj + return rdef + + def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True): + """Drop an existing relation definition. + + Note that existing relations of the given definition will be deleted + without any hooks called. + """ + rschema = self.repo.schema.rschema(rtype) + if rschema.rule: + raise ExecutionError('Cannot drop a relation definition for a ' + 'computed relation (%s)' % rschema) + # unregister the definition from CWAttribute or CWRelation + if rschema.final: + etype = 'CWAttribute' + else: + etype = 'CWRelation' + rql = ('DELETE %s X WHERE X from_entity FE, FE name "%s",' + 'X relation_type RT, RT name "%s", X to_entity TE, TE name "%s"') + self.rqlexec(rql % (etype, subjtype, rtype, objtype), + ask_confirm=self.verbosity>=2) + if commit: + self.commit() + + def cmd_sync_schema_props_perms(self, ertype=None, syncperms=True, + syncprops=True, syncrdefs=True, commit=True): + """synchronize the persistent schema against the current definition + schema. + + `ertype` can be : + - None, in that case everything will be synced ; + - a string, it should be an entity type or + a relation type. In that case, only the corresponding + entities / relations will be synced ; + - an rdef object to synchronize only this specific relation definition + + It will synch common stuff between the definition schema and the + actual persistent schema, it won't add/remove any entity or relation. 
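Sketch of the accepted forms of `ertype` (besides a type name or None, the code also accepts a 3-tuple describing a relation definition; ``Book``, ``filed_under`` and ``Folder`` are hypothetical)::

    sync_schema_props_perms()                        # the whole schema
    sync_schema_props_perms('Book')                  # a single entity type
    sync_schema_props_perms('filed_under')           # a single relation type
    sync_schema_props_perms(('Book', 'filed_under', 'Folder'),
                            syncperms=False)         # one relation definition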
+ """ + assert syncperms or syncprops, 'nothing to do' + if ertype is not None: + if isinstance(ertype, RelationDefinitionSchema): + ertype = ertype.as_triple() + if isinstance(ertype, (tuple, list)): + assert len(ertype) == 3, 'not a relation definition' + self._synchronize_rdef_schema(ertype[0], ertype[1], ertype[2], + syncperms=syncperms, + syncprops=syncprops) + else: + erschema = self.repo.schema[ertype] + if isinstance(erschema, CubicWebRelationSchema): + self._synchronize_rschema(erschema, syncrdefs=syncrdefs, + syncperms=syncperms, + syncprops=syncprops) + else: + self._synchronize_eschema(erschema, syncrdefs=syncrdefs, + syncperms=syncperms, + syncprops=syncprops) + else: + for etype in self.repo.schema.entities(): + if etype.eid is None: + # not yet added final etype (thing to BigInt defined in + # yams though 3.13 migration not done yet) + continue + self._synchronize_eschema(etype, syncrdefs=syncrdefs, + syncprops=syncprops, syncperms=syncperms) + if commit: + self.commit() + + def cmd_change_relation_props(self, subjtype, rtype, objtype, + commit=True, **kwargs): + """change some properties of a relation definition + + you usually want to use sync_schema_props_perms instead. + """ + assert kwargs + restriction = [] + if subjtype and subjtype != 'Any': + restriction.append('X from_entity FE, FE name "%s"' % subjtype) + if objtype and objtype != 'Any': + restriction.append('X to_entity TE, TE name "%s"' % objtype) + if rtype and rtype != 'Any': + restriction.append('X relation_type RT, RT name "%s"' % rtype) + assert restriction + values = [] + for k, v in kwargs.items(): + values.append('X %s %%(%s)s' % (k, k)) + if PY2 and isinstance(v, str): + kwargs[k] = unicode(v) + rql = 'SET %s WHERE %s' % (','.join(values), ','.join(restriction)) + self.rqlexec(rql, kwargs, ask_confirm=self.verbosity>=2) + if commit: + self.commit() + + def cmd_set_size_constraint(self, etype, rtype, size, commit=True): + """set change size constraint of a string attribute + + if size is None any size constraint will be removed. + + you usually want to use sync_schema_props_perms instead. 
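Illustrative calls (hypothetical ``Book`` entity type with a ``summary`` String attribute)::

    # tweak a single property of the CWAttribute describing Book.summary
    change_relation_props('Book', 'summary', 'String', cardinality=u'?1')
    # change (or, with None, remove) the SizeConstraint of that attribute
    set_size_constraint('Book', 'summary', 1024)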
+ """ + oldvalue = None + for constr in self.repo.schema.eschema(etype).rdef(rtype).constraints: + if isinstance(constr, SizeConstraint): + oldvalue = constr.max + if oldvalue == size: + return + if oldvalue is None and not size is None: + ceid = self.rqlexec('INSERT CWConstraint C: C value %(v)s, C cstrtype CT ' + 'WHERE CT name "SizeConstraint"', + {'v': SizeConstraint(size).serialize()}, + ask_confirm=self.verbosity>=2)[0][0] + self.rqlexec('SET X constrained_by C WHERE X from_entity S, X relation_type R, ' + 'S name "%s", R name "%s", C eid %s' % (etype, rtype, ceid), + ask_confirm=self.verbosity>=2) + elif not oldvalue is None: + if not size is None: + self.rqlexec('SET C value %%(v)s WHERE X from_entity S, X relation_type R,' + 'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",' + 'S name "%s", R name "%s"' % (etype, rtype), + {'v': text_type(SizeConstraint(size).serialize())}, + ask_confirm=self.verbosity>=2) + else: + self.rqlexec('DELETE X constrained_by C WHERE X from_entity S, X relation_type R,' + 'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",' + 'S name "%s", R name "%s"' % (etype, rtype), + ask_confirm=self.verbosity>=2) + # cleanup unused constraints + self.rqlexec('DELETE CWConstraint C WHERE NOT X constrained_by C') + if commit: + self.commit() + + # Workflows handling ###################################################### + + def cmd_make_workflowable(self, etype): + """add workflow relations to an entity type to make it workflowable""" + self.cmd_add_relation_definition(etype, 'in_state', 'State') + self.cmd_add_relation_definition(etype, 'custom_workflow', 'Workflow') + self.cmd_add_relation_definition('TrInfo', 'wf_info_for', etype) + + def cmd_add_workflow(self, name, wfof, default=True, commit=False, + ensure_workflowable=True, **kwargs): + """ + create a new workflow and links it to entity types + :type name: unicode + :param name: name of the workflow + + :type wfof: string or list/tuple of strings + :param wfof: entity type(s) having this workflow + + :type default: bool + :param default: tells wether this is the default workflow + for the specified entity type(s); set it to false in + the case of a subworkflow + + :rtype: `Workflow` + """ + wf = self.cmd_create_entity('Workflow', name=text_type(name), + **kwargs) + if not isinstance(wfof, (list, tuple)): + wfof = (wfof,) + def _missing_wf_rel(etype): + return 'missing workflow relations, see make_workflowable(%s)' % etype + for etype in wfof: + eschema = self.repo.schema[etype] + etype = text_type(etype) + if ensure_workflowable: + assert 'in_state' in eschema.subjrels, _missing_wf_rel(etype) + assert 'custom_workflow' in eschema.subjrels, _missing_wf_rel(etype) + assert 'wf_info_for' in eschema.objrels, _missing_wf_rel(etype) + rset = self.rqlexec( + 'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s', + {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False) + assert rset, 'unexistant entity type %s' % etype + if default: + self.rqlexec( + 'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s', + {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False) + if commit: + self.commit() + return wf + + def cmd_get_workflow_for(self, etype): + """return default workflow for the given entity type""" + rset = self.rqlexec('Workflow X WHERE ET default_workflow X, ET name %(et)s', + {'et': etype}) + return rset.get_entity(0, 0) + + # CWProperty handling ###################################################### + + def cmd_property_value(self, pkey): + """retreive the site-wide 
persistent property value for the given key. + + To get a user specific property value, use appropriate method on CWUser + instance. + """ + rset = self.rqlexec( + 'Any V WHERE X is CWProperty, X pkey %(k)s, X value V, NOT X for_user U', + {'k': pkey}, ask_confirm=False) + return rset[0][0] + + def cmd_set_property(self, pkey, value): + """set the site-wide persistent property value for the given key to the + given value. + + To set a user specific property value, use appropriate method on CWUser + instance. + """ + value = text_type(value) + try: + prop = self.rqlexec( + 'CWProperty X WHERE X pkey %(k)s, NOT X for_user U', + {'k': text_type(pkey)}, ask_confirm=False).get_entity(0, 0) + except Exception: + self.cmd_create_entity('CWProperty', pkey=text_type(pkey), value=value) + else: + prop.cw_set(value=value) + + # other data migration commands ########################################### + + def cmd_storage_changed(self, etype, attribute): + """migrate entities to a custom storage. The new storage is expected to + be set, it will be temporarily removed for the migration. + """ + from logilab.common.shellutils import ProgressBar + source = self.repo.system_source + storage = source.storage(etype, attribute) + source.unset_storage(etype, attribute) + rset = self.rqlexec('Any X WHERE X is %s' % etype, ask_confirm=False) + pb = ProgressBar(len(rset)) + for entity in rset.entities(): + # fill cache. Do not fetch that attribute using the global rql query + # since we may exhaust memory doing that.... + getattr(entity, attribute) + storage.migrate_entity(entity, attribute) + # remove from entity cache to avoid memory exhaustion + del entity.cw_attr_cache[attribute] + pb.update() + print() + source.set_storage(etype, attribute, storage) + + def cmd_create_entity(self, etype, commit=False, **kwargs): + """add a new entity of the given type""" + entity = self.cnx.create_entity(etype, **kwargs) + if commit: + self.commit() + return entity + + def cmd_find(self, etype, **kwargs): + """find entities of the given type and attribute values""" + return self.cnx.find(etype, **kwargs) + + @deprecated("[3.19] use find(*args, **kwargs).entities() instead") + def cmd_find_entities(self, etype, **kwargs): + """find entities of the given type and attribute values""" + return self.cnx.find(etype, **kwargs).entities() + + @deprecated("[3.19] use find(*args, **kwargs).one() instead") + def cmd_find_one_entity(self, etype, **kwargs): + """find one entity of the given type and attribute values. + + raise :exc:`cubicweb.req.FindEntityError` if can not return one and only + one entity. 
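Illustrative migration-script calls (the property key and login below are only examples)::

    # site-wide persistent properties
    set_property('ui.site-title', u'My site')
    assert property_value('ui.site-title') == u'My site'
    # entity lookup, replacing the deprecated find_entities()/find_one_entity()
    admin = find('CWUser', login=u'admin').one()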
+ """ + return self.cnx.find(etype, **kwargs).one() + + def cmd_update_etype_fti_weight(self, etype, weight): + if self.repo.system_source.dbdriver == 'postgres': + self.sqlexec('UPDATE appears SET weight=%(weight)s ' + 'FROM entities as X ' + 'WHERE X.eid=appears.uid AND X.type=%(type)s', + {'type': etype, 'weight': weight}, ask_confirm=False) + + def cmd_reindex_entities(self, etypes=None): + """force reindexaction of entities of the given types or of all + indexable entity types + """ + from cubicweb.server.checkintegrity import reindex_entities + reindex_entities(self.repo.schema, self.cnx, etypes=etypes) + + @contextmanager + def cmd_dropped_constraints(self, etype, attrname, cstrtype=None, + droprequired=False): + """context manager to drop constraints temporarily on fs_schema + + `cstrtype` should be a constraint class (or a tuple of classes) + and will be passed to isinstance directly + + For instance:: + + >>> with dropped_constraints('MyType', 'myattr', + ... UniqueConstraint, droprequired=True): + ... add_attribute('MyType', 'myattr') + ... # + instructions to fill MyType.myattr column + ... + >>> + + """ + rdef = self.fs_schema.eschema(etype).rdef(attrname) + original_constraints = rdef.constraints + # remove constraints + if cstrtype: + rdef.constraints = [cstr for cstr in original_constraints + if not (cstrtype and isinstance(cstr, cstrtype))] + if droprequired: + original_cardinality = rdef.cardinality + rdef.cardinality = '?' + rdef.cardinality[1] + yield + # restore original constraints + rdef.constraints = original_constraints + if droprequired: + rdef.cardinality = original_cardinality + # update repository schema + self.cmd_sync_schema_props_perms(rdef, syncperms=False) + + def sqlexec(self, sql, args=None, ask_confirm=True): + """execute the given sql if confirmed + + should only be used for low level stuff undoable with existing higher + level actions + """ + if not ask_confirm or self.confirm('Execute sql: %s ?' % sql): + try: + cu = self.cnx.system_sql(sql, args) + except Exception: + ex = sys.exc_info()[1] + if self.confirm('Error: %s\nabort?' % ex, pdb=True): + raise + return + try: + return cu.fetchall() + except Exception: + # no result to fetch + return + + def rqlexec(self, rql, kwargs=None, build_descr=True, + ask_confirm=False): + """rql action""" + if not isinstance(rql, (tuple, list)): + rql = ( (rql, kwargs), ) + res = None + execute = self.cnx.execute + for rql, kwargs in rql: + if kwargs: + msg = '%s (%s)' % (rql, kwargs) + else: + msg = rql + if not ask_confirm or self.confirm('Execute rql: %s ?' % msg): + try: + res = execute(rql, kwargs, build_descr=build_descr) + except Exception as ex: + if self.confirm('Error: %s\nabort?' % ex, pdb=True): + raise + return res + + def rqliter(self, rql, kwargs=None, ask_confirm=True): + return ForRqlIterator(self, rql, kwargs, ask_confirm) + + # low-level commands to repair broken system database ###################### + + def cmd_change_attribute_type(self, etype, attr, newtype, commit=True): + """low level method to change the type of an entity attribute. 
This is + a quick hack which has some drawback: + * only works when the old type can be changed to the new type by the + underlying rdbms (eg using ALTER TABLE) + * the actual schema won't be updated until next startup + """ + rschema = self.repo.schema.rschema(attr) + oldschema = rschema.objects(etype)[0] + rdef = rschema.rdef(etype, oldschema) + sql = ("UPDATE cw_CWAttribute " + "SET cw_to_entity=(SELECT cw_eid FROM cw_CWEType WHERE cw_name='%s')" + "WHERE cw_eid=%s") % (newtype, rdef.eid) + self.sqlexec(sql, ask_confirm=False) + dbhelper = self.repo.system_source.dbhelper + newrdef = self.fs_schema.rschema(attr).rdef(etype, newtype) + sqltype = sql_type(dbhelper, newrdef) + cursor = self.cnx.cnxset.cu + # consider former cardinality by design, since cardinality change is not handled here + allownull = rdef.cardinality[0] != '1' + dbhelper.change_col_type(cursor, 'cw_%s' % etype, 'cw_%s' % attr, sqltype, allownull) + if commit: + self.commit() + # manually update live schema + eschema = self.repo.schema[etype] + rschema._subj_schemas[eschema].remove(oldschema) + rschema._obj_schemas[oldschema].remove(eschema) + newschema = self.repo.schema[newtype] + rschema._update(eschema, newschema) + rdef.object = newschema + del rschema.rdefs[(eschema, oldschema)] + rschema.rdefs[(eschema, newschema)] = rdef + + def cmd_add_entity_type_table(self, etype, commit=True): + """low level method to create the sql table for an existing entity. + This may be useful on accidental desync between the repository schema + and a sql database + """ + dbhelper = self.repo.system_source.dbhelper + tablesql = eschema2sql(dbhelper, self.repo.schema.eschema(etype), + prefix=SQL_PREFIX) + for sql in tablesql.split(';'): + if sql.strip(): + self.sqlexec(sql) + if commit: + self.commit() + + def cmd_add_relation_type_table(self, rtype, commit=True): + """low level method to create the sql table for an existing relation. + This may be useful on accidental desync between the repository schema + and a sql database + """ + tablesql = rschema2sql(self.repo.schema.rschema(rtype)) + for sql in tablesql.split(';'): + if sql.strip(): + self.sqlexec(sql) + if commit: + self.commit() + + @deprecated("[3.15] use rename_relation_type(oldname, newname)") + def cmd_rename_relation(self, oldname, newname, commit=True): + self.cmd_rename_relation_type(oldname, newname, commit) + + +class ForRqlIterator: + """specific rql iterator to make the loop skipable""" + def __init__(self, helper, rql, kwargs, ask_confirm): + self._h = helper + self.rql = rql + self.kwargs = kwargs + self.ask_confirm = ask_confirm + self._rsetit = None + + def __iter__(self): + return self + + def _get_rset(self): + rql, kwargs = self.rql, self.kwargs + if kwargs: + msg = '%s (%s)' % (rql, kwargs) + else: + msg = rql + if self.ask_confirm: + if not self._h.confirm('Execute rql: %s ?' % msg): + raise StopIteration + try: + return self._h._cw.execute(rql, kwargs) + except Exception as ex: + if self._h.confirm('Error: %s\nabort?' 
% ex): + raise + else: + raise StopIteration + + def __next__(self): + if self._rsetit is not None: + return next(self._rsetit) + rset = self._get_rset() + self._rsetit = iter(rset) + return next(self._rsetit) + + next = __next__ + + def entities(self): + try: + rset = self._get_rset() + except StopIteration: + return [] + return rset.entities() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/querier.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/querier.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,737 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Helper classes to execute RQL queries on a set of sources, performing +security checking and data aggregation. +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from itertools import repeat + +from six import text_type, string_types, integer_types +from six.moves import range + +from rql import RQLSyntaxError, CoercionError +from rql.stmts import Union +from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj, Relation, Exists, Not +from yams import BASE_TYPES + +from cubicweb import ValidationError, Unauthorized, UnknownEid +from cubicweb.rqlrewrite import RQLRelationRewriter +from cubicweb import Binary, server +from cubicweb.rset import ResultSet + +from cubicweb.utils import QueryCache, RepeatList +from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata +from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction +from cubicweb.server.edition import EditedEntity +from cubicweb.server.ssplanner import SSPlanner +from cubicweb.statsd_logger import statsd_timeit, statsd_c + +ETYPE_PYOBJ_MAP[Binary] = 'Bytes' + + +def empty_rset(rql, args, rqlst=None): + """build an empty result set object""" + return ResultSet([], rql, args, rqlst=rqlst) + +def update_varmap(varmap, selected, table): + """return a sql schema to store RQL query result""" + for i, term in enumerate(selected): + key = term.as_string() + value = '%s.C%s' % (table, i) + if varmap.get(key, value) != value: + raise Exception('variable name conflict on %s: got %s / %s' + % (key, value, varmap)) + varmap[key] = value + +# permission utilities ######################################################## + +def check_no_password_selected(rqlst): + """check that Password entities are not selected""" + for solution in rqlst.solutions: + for var, etype in solution.items(): + if etype == 'Password': + raise Unauthorized('Password selection is not allowed (%s)' % var) + +def term_etype(cnx, term, solution, args): + """return the entity type for the given term (a VariableRef or a Constant + node) + """ + try: + return solution[term.name] + except AttributeError: + return cnx.entity_metas(term.eval(args))['type'] + +def check_relations_read_access(cnx, 
select, args): + """Raise :exc:`Unauthorized` if the given user doesn't have credentials to + read relations used in the given syntax tree + """ + # use `term_etype` since we've to deal with rewritten constants here, + # when used as an external source by another repository. + # XXX what about local read security w/ those rewritten constants... + # XXX constants can also happen in some queries generated by req.find() + DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS + schema = cnx.repo.schema + user = cnx.user + if select.where is not None: + for rel in select.where.iget_nodes(Relation): + for solution in select.solutions: + # XXX has_text may have specific perm ? + if rel.r_type in READ_ONLY_RTYPES: + continue + rschema = schema.rschema(rel.r_type) + if rschema.final: + eschema = schema.eschema(term_etype(cnx, rel.children[0], + solution, args)) + rdef = eschema.rdef(rschema) + else: + rdef = rschema.rdef(term_etype(cnx, rel.children[0], + solution, args), + term_etype(cnx, rel.children[1].children[0], + solution, args)) + if not user.matching_groups(rdef.get_groups('read')): + if DBG: + print('check_read_access: %s %s does not match %s' % + (rdef, user.groups, rdef.get_groups('read'))) + # XXX rqlexpr not allowed + raise Unauthorized('read', rel.r_type) + if DBG: + print('check_read_access: %s %s matches %s' % + (rdef, user.groups, rdef.get_groups('read'))) + +def get_local_checks(cnx, rqlst, solution): + """Check that the given user has credentials to access data read by the + query and return a dict defining necessary "local checks" (i.e. rql + expression in read permission defined in the schema) where no group grants + him the permission. + + Returned dictionary's keys are variable names and values the rql expressions + for this variable (with the given solution). + + Raise :exc:`Unauthorized` if access is known to be defined, i.e. if there is + no matching group and no local permissions. 
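Background sketch: the RQL expressions returned as "local checks" come from read permissions declared in the schema. A minimal, hypothetical declaration (``Book`` is not a real type) for which users outside the ``managers`` group would trigger a local check on the selecting variable::

    from yams.buildobjs import EntityType, String
    from cubicweb.schema import ERQLExpression

    class Book(EntityType):
        __permissions__ = {
            'read':   ('managers', ERQLExpression('X owned_by U')),
            'add':    ('managers', 'users'),
            'update': ('managers', 'owners'),
            'delete': ('managers', 'owners'),
        }
        title = String(maxsize=128)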
+ """ + DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS + schema = cnx.repo.schema + user = cnx.user + localchecks = {} + # iterate on defined_vars and not on solutions to ignore column aliases + for varname in rqlst.defined_vars: + eschema = schema.eschema(solution[varname]) + if eschema.final: + continue + if not user.matching_groups(eschema.get_groups('read')): + erqlexprs = eschema.get_rqlexprs('read') + if not erqlexprs: + ex = Unauthorized('read', solution[varname]) + ex.var = varname + if DBG: + print('check_read_access: %s %s %s %s' % + (varname, eschema, user.groups, eschema.get_groups('read'))) + raise ex + # don't insert security on variable only referenced by 'NOT X relation Y' or + # 'NOT EXISTS(X relation Y)' + varinfo = rqlst.defined_vars[varname].stinfo + if varinfo['selected'] or ( + len([r for r in varinfo['relations'] + if (not schema.rschema(r.r_type).final + and ((isinstance(r.parent, Exists) and r.parent.neged(strict=True)) + or isinstance(r.parent, Not)))]) + != + len(varinfo['relations'])): + localchecks[varname] = erqlexprs + return localchecks + + +# Plans ####################################################################### + +class ExecutionPlan(object): + """the execution model of a rql query, composed of querier steps""" + + def __init__(self, querier, rqlst, args, cnx): + # original rql syntax tree + self.rqlst = rqlst + self.args = args or {} + # cnx executing the query + self.cnx = cnx + # quick reference to the system source + self.syssource = cnx.repo.system_source + # execution steps + self.steps = [] + # various resource accesors + self.querier = querier + self.schema = querier.schema + self.sqlannotate = querier.sqlgen_annotate + self.rqlhelper = cnx.vreg.rqlhelper + + def annotate_rqlst(self): + if not self.rqlst.annotated: + self.rqlhelper.annotate(self.rqlst) + + def add_step(self, step): + """add a step to the plan""" + self.steps.append(step) + + def sqlexec(self, sql, args=None): + return self.syssource.sqlexec(self.cnx, sql, args) + + def execute(self): + """execute a plan and return resulting rows""" + for step in self.steps: + result = step.execute() + # the latest executed step contains the full query result + return result + + def preprocess(self, union, security=True): + """insert security when necessary then annotate rql st for sql generation + + return rqlst to actually execute + """ + cached = None + if security and self.cnx.read_security: + # ensure security is turned of when security is inserted, + # else we may loop for ever... 
+ if self.cnx.transaction_data.get('security-rqlst-cache'): + key = self.cache_key + else: + key = None + if key is not None and key in self.cnx.transaction_data: + cachedunion, args = self.cnx.transaction_data[key] + union.children[:] = [] + for select in cachedunion.children: + union.append(select) + union.has_text_query = cachedunion.has_text_query + args.update(self.args) + self.args = args + cached = True + else: + with self.cnx.security_enabled(read=False): + noinvariant = self._insert_security(union) + if key is not None: + self.cnx.transaction_data[key] = (union, self.args) + else: + noinvariant = () + if cached is None: + self.rqlhelper.simplify(union) + self.sqlannotate(union) + set_qdata(self.schema.rschema, union, noinvariant) + if union.has_text_query: + self.cache_key = None + + def _insert_security(self, union): + noinvariant = set() + for select in union.children[:]: + for subquery in select.with_: + self._insert_security(subquery.query) + localchecks, restricted = self._check_permissions(select) + if any(localchecks): + self.cnx.rql_rewriter.insert_local_checks( + select, self.args, localchecks, restricted, noinvariant) + return noinvariant + + def _check_permissions(self, rqlst): + """Return a dict defining "local checks", i.e. RQLExpression defined in + the schema that should be inserted in the original query, together with + a set of variable names which requires some security to be inserted. + + Solutions where a variable has a type which the user can't definitly + read are removed, else if the user *may* read it (i.e. if an rql + expression is defined for the "read" permission of the related type), + the local checks dict is updated. + + The local checks dict has entries for each different local check + necessary, with associated solutions as value, a local check being + defined by a list of 2-uple (variable name, rql expressions) for each + variable which has to be checked. Solutions which don't require local + checks will be associated to the empty tuple key. + + Note rqlst should not have been simplified at this point. + """ + cnx = self.cnx + msgs = [] + # dict(varname: eid), allowing to check rql expression for variables + # which have a known eid + varkwargs = {} + if not cnx.transaction_data.get('security-rqlst-cache'): + for var in rqlst.defined_vars.values(): + if var.stinfo['constnode'] is not None: + eid = var.stinfo['constnode'].eval(self.args) + varkwargs[var.name] = int(eid) + # dictionary of variables restricted for security reason + localchecks = {} + restricted_vars = set() + newsolutions = [] + for solution in rqlst.solutions: + try: + localcheck = get_local_checks(cnx, rqlst, solution) + except Unauthorized as ex: + msg = 'remove %s from solutions since %s has no %s access to %s' + msg %= (solution, cnx.user.login, ex.args[0], ex.args[1]) + msgs.append(msg) + LOGGER.info(msg) + else: + newsolutions.append(solution) + # try to benefit of rqlexpr.check cache for entities which + # are specified by eid in query'args + for varname, eid in varkwargs.items(): + try: + rqlexprs = localcheck.pop(varname) + except KeyError: + continue + # if entity has been added in the current transaction, the + # user can read it whatever rql expressions are associated + # to its type + if cnx.added_in_transaction(eid): + continue + for rqlexpr in rqlexprs: + if rqlexpr.check(cnx, eid): + break + else: + raise Unauthorized('No read acces on %r with eid %i.' 
% (var, eid)) + # mark variables protected by an rql expression + restricted_vars.update(localcheck) + # turn local check into a dict key + localcheck = tuple(sorted(localcheck.items())) + localchecks.setdefault(localcheck, []).append(solution) + # raise Unautorized exception if the user can't access to any solution + if not newsolutions: + raise Unauthorized('\n'.join(msgs)) + # if there is some message, solutions have been modified and must be + # reconsidered by the syntax treee + if msgs: + rqlst.set_possible_types(newsolutions) + return localchecks, restricted_vars + + def finalize(self, select, solutions, insertedvars): + rqlst = Union() + rqlst.append(select) + for mainvarname, rschema, newvarname in insertedvars: + nvartype = str(rschema.objects(solutions[0][mainvarname])[0]) + for sol in solutions: + sol[newvarname] = nvartype + select.clean_solutions(solutions) + add_types_restriction(self.schema, select) + self.rqlhelper.annotate(rqlst) + self.preprocess(rqlst, security=False) + return rqlst + + +class InsertPlan(ExecutionPlan): + """an execution model specific to the INSERT rql query + """ + + def __init__(self, querier, rqlst, args, cnx): + ExecutionPlan.__init__(self, querier, rqlst, args, cnx) + # save originally selected variable, we may modify this + # dictionary for substitution (query parameters) + self.selected = rqlst.selection + # list of rows of entities definition (ssplanner.EditedEntity) + self.e_defs = [[]] + # list of new relation definition (3-uple (from_eid, r_type, to_eid) + self.r_defs = set() + # indexes to track entity definitions bound to relation definitions + self._r_subj_index = {} + self._r_obj_index = {} + self._expanded_r_defs = {} + + def add_entity_def(self, edef): + """add an entity definition to build""" + self.e_defs[-1].append(edef) + + def add_relation_def(self, rdef): + """add an relation definition to build""" + self.r_defs.add(rdef) + if not isinstance(rdef[0], int): + self._r_subj_index.setdefault(rdef[0], []).append(rdef) + if not isinstance(rdef[2], int): + self._r_obj_index.setdefault(rdef[2], []).append(rdef) + + def substitute_entity_def(self, edef, edefs): + """substitute an incomplete entity definition by a list of complete + equivalents + + e.g. 
on queries such as :: + INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y + WHERE U login 'admin', U login N + + X will be inserted as many times as U exists, and so the X travaille Y + relations as to be added as many time as X is inserted + """ + if not edefs or not self.e_defs: + # no result, no entity will be created + self.e_defs = () + return + # first remove the incomplete entity definition + colidx = self.e_defs[0].index(edef) + for i, row in enumerate(self.e_defs[:]): + self.e_defs[i][colidx] = edefs[0] + samplerow = self.e_defs[i] + for edef_ in edefs[1:]: + row = [ed.clone() for i, ed in enumerate(samplerow) + if i != colidx] + row.insert(colidx, edef_) + self.e_defs.append(row) + # now, see if this entity def is referenced as subject in some relation + # definition + if edef in self._r_subj_index: + for rdef in self._r_subj_index[edef]: + expanded = self._expanded(rdef) + result = [] + for exp_rdef in expanded: + for edef_ in edefs: + result.append( (edef_, exp_rdef[1], exp_rdef[2]) ) + self._expanded_r_defs[rdef] = result + # and finally, see if this entity def is referenced as object in some + # relation definition + if edef in self._r_obj_index: + for rdef in self._r_obj_index[edef]: + expanded = self._expanded(rdef) + result = [] + for exp_rdef in expanded: + for edef_ in edefs: + result.append( (exp_rdef[0], exp_rdef[1], edef_) ) + self._expanded_r_defs[rdef] = result + + def _expanded(self, rdef): + """return expanded value for the given relation definition""" + try: + return self._expanded_r_defs[rdef] + except KeyError: + self.r_defs.remove(rdef) + return [rdef] + + def relation_defs(self): + """return the list for relation definitions to insert""" + for rdefs in self._expanded_r_defs.values(): + for rdef in rdefs: + yield rdef + for rdef in self.r_defs: + yield rdef + + def insert_entity_defs(self): + """return eids of inserted entities in a suitable form for the resulting + result set, e.g.: + + e.g. 
on queries such as :: + INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y + WHERE U login 'admin', U login N + + if there is two entities matching U, the result set will look like + [(eidX1, eidY1), (eidX2, eidY2)] + """ + cnx = self.cnx + repo = cnx.repo + results = [] + for row in self.e_defs: + results.append([repo.glob_add_entity(cnx, edef) + for edef in row]) + return results + + def insert_relation_defs(self): + cnx = self.cnx + repo = cnx.repo + edited_entities = {} + relations = {} + for subj, rtype, obj in self.relation_defs(): + # if a string is given into args instead of an int, we get it here + if isinstance(subj, string_types): + subj = int(subj) + elif not isinstance(subj, integer_types): + subj = subj.entity.eid + if isinstance(obj, string_types): + obj = int(obj) + elif not isinstance(obj, integer_types): + obj = obj.entity.eid + if repo.schema.rschema(rtype).inlined: + if subj not in edited_entities: + entity = cnx.entity_from_eid(subj) + edited = EditedEntity(entity) + edited_entities[subj] = edited + else: + edited = edited_entities[subj] + edited.edited_attribute(rtype, obj) + else: + if rtype in relations: + relations[rtype].append((subj, obj)) + else: + relations[rtype] = [(subj, obj)] + repo.glob_add_relations(cnx, relations) + for edited in edited_entities.values(): + repo.glob_update_entity(cnx, edited) + + +class QuerierHelper(object): + """helper class to execute rql queries, putting all things together""" + + def __init__(self, repo, schema): + # system info helper + self._repo = repo + # instance schema + self.set_schema(schema) + + def set_schema(self, schema): + self.schema = schema + repo = self._repo + # rql st and solution cache. + self._rql_cache = QueryCache(repo.config['rql-cache-size']) + # rql cache key cache. 
Don't bother using a Cache instance: we should + # have a limited number of queries in there, since there are no entries + # in this cache for user queries (which have no args) + self._rql_ck_cache = {} + # some cache usage stats + self.cache_hit, self.cache_miss = 0, 0 + # rql parsing / analysing helper + self.solutions = repo.vreg.solutions + rqlhelper = repo.vreg.rqlhelper + # set backend on the rql helper, will be used for function checking + rqlhelper.backend = repo.config.system_source_config['db-driver'] + self._parse = rqlhelper.parse + self._annotate = rqlhelper.annotate + # rql planner + self._planner = SSPlanner(schema, rqlhelper) + # sql generation annotator + self.sqlgen_annotate = SQLGenAnnotator(schema).annotate + + def parse(self, rql, annotate=False): + """return a rql syntax tree for the given rql""" + try: + return self._parse(text_type(rql), annotate=annotate) + except UnicodeError: + raise RQLSyntaxError(rql) + + def plan_factory(self, rqlst, args, cnx): + """create an execution plan for an INSERT RQL query""" + if rqlst.TYPE == 'insert': + return InsertPlan(self, rqlst, args, cnx) + return ExecutionPlan(self, rqlst, args, cnx) + + @statsd_timeit + def execute(self, cnx, rql, args=None, build_descr=True): + """execute a rql query, return resulting rows and their description in + a `ResultSet` object + + * `rql` should be a Unicode string or a plain ASCII string + * `args` the optional parameters dictionary associated to the query + * `build_descr` is a boolean flag indicating if the description should + be built on select queries (if false, the description will be en empty + list) + + on INSERT queries, there will be one row with the eid of each inserted + entity + + result for DELETE and SET queries is undefined yet + + to maximize the rql parsing/analyzing cache performance, you should + always use substitute arguments in queries (i.e. avoid query such as + 'Any X WHERE X eid 123'!) + """ + if server.DEBUG & (server.DBG_RQL | server.DBG_SQL): + if server.DEBUG & (server.DBG_MORE | server.DBG_SQL): + print('*'*80) + print('querier input', repr(rql), repr(args)) + # parse the query and binds variables + cachekey = (rql,) + try: + if args: + # search for named args in query which are eids (hence + # influencing query's solutions) + eidkeys = self._rql_ck_cache[rql] + if eidkeys: + # if there are some, we need a better cache key, eg (rql + + # entity type of each eid) + try: + cachekey = self._repo.querier_cache_key(cnx, rql, + args, eidkeys) + except UnknownEid: + # we want queries such as "Any X WHERE X eid 9999" + # return an empty result instead of raising UnknownEid + return empty_rset(rql, args) + rqlst = self._rql_cache[cachekey] + self.cache_hit += 1 + statsd_c('cache_hit') + except KeyError: + self.cache_miss += 1 + statsd_c('cache_miss') + rqlst = self.parse(rql) + try: + # compute solutions for rqlst and return named args in query + # which are eids. 
Notice that if you may not need `eidkeys`, we + # have to compute solutions anyway (kept as annotation on the + # tree) + eidkeys = self.solutions(cnx, rqlst, args) + except UnknownEid: + # we want queries such as "Any X WHERE X eid 9999" return an + # empty result instead of raising UnknownEid + return empty_rset(rql, args) + if args and rql not in self._rql_ck_cache: + self._rql_ck_cache[rql] = eidkeys + if eidkeys: + cachekey = self._repo.querier_cache_key(cnx, rql, args, + eidkeys) + self._rql_cache[cachekey] = rqlst + if rqlst.TYPE != 'select': + if cnx.read_security: + check_no_password_selected(rqlst) + cachekey = None + else: + if cnx.read_security: + for select in rqlst.children: + check_no_password_selected(select) + check_relations_read_access(cnx, select, args) + # on select query, always copy the cached rqlst so we don't have to + # bother modifying it. This is not necessary on write queries since + # a new syntax tree is built from them. + rqlst = rqlst.copy() + # Rewrite computed relations + rewriter = RQLRelationRewriter(cnx) + rewriter.rewrite(rqlst, args) + self._annotate(rqlst) + if args: + # different SQL generated when some argument is None or not (IS + # NULL). This should be considered when computing sql cache key + cachekey += tuple(sorted([k for k, v in args.items() + if v is None])) + # make an execution plan + plan = self.plan_factory(rqlst, args, cnx) + plan.cache_key = cachekey + self._planner.build_plan(plan) + # execute the plan + try: + results = plan.execute() + except (Unauthorized, ValidationError): + # getting an Unauthorized/ValidationError exception means the + # transaction must be rolled back + # + # notes: + # * we should not reset the connections set here, since we don't want the + # connection to loose it during processing + # * don't rollback if we're in the commit process, will be handled + # by the connection + if cnx.commit_state is None: + cnx.commit_state = 'uncommitable' + raise + # build a description for the results if necessary + descr = () + if build_descr: + if rqlst.TYPE == 'select': + # sample selection + if len(rqlst.children) == 1 and len(rqlst.children[0].solutions) == 1: + # easy, all lines are identical + selected = rqlst.children[0].selection + solution = rqlst.children[0].solutions[0] + description = _make_description(selected, args, solution) + descr = RepeatList(len(results), tuple(description)) + else: + # hard, delegate the work :o) + descr = manual_build_descr(cnx, rqlst, args, results) + elif rqlst.TYPE == 'insert': + # on insert plan, some entities may have been auto-casted, + # so compute description manually even if there is only + # one solution + basedescr = [None] * len(plan.selected) + todetermine = list(zip(range(len(plan.selected)), repeat(False))) + descr = _build_descr(cnx, results, basedescr, todetermine) + # FIXME: get number of affected entities / relations on non + # selection queries ? 
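Illustration of the caching advice given in the docstring above (the eid values are made up)::

    # good: one parse/annotation cache entry shared by all eids
    cnx.execute('Any X WHERE X eid %(x)s', {'x': 1234})
    # bad: every literal eid produces a distinct cache entry
    cnx.execute('Any X WHERE X eid 1234')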
+ # return a result set object + return ResultSet(results, rql, args, descr) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +from logging import getLogger +from cubicweb import set_log_methods +LOGGER = getLogger('cubicweb.querier') +set_log_methods(QuerierHelper, LOGGER) + + +def manual_build_descr(cnx, rqlst, args, result): + """build a description for a given result by analysing each row + + XXX could probably be done more efficiently during execution of query + """ + # not so easy, looks for variable which changes from one solution + # to another + unstables = rqlst.get_variable_indices() + basedescr = [] + todetermine = [] + for i in range(len(rqlst.children[0].selection)): + ttype = _selection_idx_type(i, rqlst, args) + if ttype is None or ttype == 'Any': + ttype = None + isfinal = True + else: + isfinal = ttype in BASE_TYPES + if ttype is None or i in unstables: + basedescr.append(None) + todetermine.append( (i, isfinal) ) + else: + basedescr.append(ttype) + if not todetermine: + return RepeatList(len(result), tuple(basedescr)) + return _build_descr(cnx, result, basedescr, todetermine) + +def _build_descr(cnx, result, basedescription, todetermine): + description = [] + entity_metas = cnx.entity_metas + todel = [] + for i, row in enumerate(result): + row_descr = basedescription[:] + for index, isfinal in todetermine: + value = row[index] + if value is None: + # None value inserted by an outer join, no type + row_descr[index] = None + continue + if isfinal: + row_descr[index] = etype_from_pyobj(value) + else: + try: + row_descr[index] = entity_metas(value)['type'] + except UnknownEid: + cnx.error('wrong eid %s in repository, you should ' + 'db-check the database' % value) + todel.append(i) + break + else: + description.append(tuple(row_descr)) + for i in reversed(todel): + del result[i] + return description + +def _make_description(selected, args, solution): + """return a description for a result set""" + description = [] + for term in selected: + description.append(term.get_type(solution, args)) + return description + +def _selection_idx_type(i, rqlst, args): + """try to return type of term at index `i` of the rqlst's selection""" + for select in rqlst.children: + term = select.selection[i] + for solution in select.solutions: + try: + ttype = term.get_type(solution, args) + if ttype is not None: + return ttype + except CoercionError: + return None diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/repository.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1133 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
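Purely illustrative: shape of the description built by the helpers above for a simple selection (actual rows depend on the data)::

    rset = cnx.execute('Any X, L WHERE X is CWUser, X login L')
    # one tuple of types per result row, e.g.:
    # rset.description == [('CWUser', 'String'), ('CWUser', 'String')]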
+"""Defines the central class for the CubicWeb RQL server: the repository. + +The repository is an abstraction allowing execution of rql queries against +data sources. Most of the work is actually done in helper classes. The +repository mainly: + +* brings these classes all together to provide a single access + point to a cubicweb instance. +* handles session management +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import threading +from warnings import warn +from itertools import chain +from time import time, localtime, strftime +from contextlib import contextmanager + +from six.moves import range, queue + +from logilab.common.decorators import cached, clear_cache +from logilab.common.deprecation import deprecated + +from yams import BadSchemaDefinition +from rql.utils import rqlvar_maker + +from cubicweb import (CW_MIGRATION_MAP, QueryError, + UnknownEid, AuthenticationError, ExecutionError, + BadConnectionId, ValidationError, Unauthorized, + UniqueTogetherError, onevent, ViolatedConstraint) +from cubicweb import cwvreg, schema, server +from cubicweb.server import ShuttingDown, utils, hook, querier, sources +from cubicweb.server.session import Session, InternalManager + +NO_CACHE_RELATIONS = set( [('owned_by', 'object'), + ('created_by', 'object'), + ('cw_source', 'object'), + ]) + +def prefill_entity_caches(entity): + cnx = entity._cw + # prefill entity relation caches + for rschema in entity.e_schema.subject_relations(): + rtype = str(rschema) + if rtype in schema.VIRTUAL_RTYPES or (rtype, 'subject') in NO_CACHE_RELATIONS: + continue + if rschema.final: + entity.cw_attr_cache.setdefault(rtype, None) + else: + entity.cw_set_relation_cache(rtype, 'subject', + cnx.empty_rset()) + for rschema in entity.e_schema.object_relations(): + rtype = str(rschema) + if rtype in schema.VIRTUAL_RTYPES or (rtype, 'object') in NO_CACHE_RELATIONS: + continue + entity.cw_set_relation_cache(rtype, 'object', cnx.empty_rset()) + +def del_existing_rel_if_needed(cnx, eidfrom, rtype, eidto): + """delete existing relation when adding a new one if card is 1 or ? + + have to be done once the new relation has been inserted to avoid having + an entity without a relation for some time + + this kind of behaviour has to be done in the repository so we don't have + hooks order hazardness + """ + # skip that if integrity explicitly disabled + if not cnx.is_hook_category_activated('activeintegrity'): + return + rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto) + card = rdef.cardinality + # one may be tented to check for neweids but this may cause more than one + # relation even with '1?' cardinality if thoses relations are added in the + # same transaction where the entity is being created. This never occurs from + # the web interface but may occurs during test or dbapi connection (though + # not expected for this). So: don't do it, we pretend to ensure repository + # consistency. 
+ # + # notes: + # * inlined relations will be implicitly deleted for the subject entity + # * we don't want read permissions to be applied but we want delete + # permission to be checked + if card[0] in '1?': + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE X eid %%(x)s, ' + 'NOT Y eid %%(y)s' % rtype, + {'x': eidfrom, 'y': eidto}) + if card[1] in '1?': + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s, ' + 'NOT X eid %%(x)s' % rtype, + {'x': eidfrom, 'y': eidto}) + + +def preprocess_inlined_relations(cnx, entity): + """when an entity is added, check if it has some inlined relation which + requires to be extrated for proper call hooks + """ + relations = [] + activeintegrity = cnx.is_hook_category_activated('activeintegrity') + eschema = entity.e_schema + for attr in entity.cw_edited: + rschema = eschema.subjrels[attr] + if not rschema.final: # inlined relation + value = entity.cw_edited[attr] + relations.append((attr, value)) + cnx.update_rel_cache_add(entity.eid, attr, value) + rdef = cnx.rtype_eids_rdef(attr, entity.eid, value) + if rdef.cardinality[1] in '1?' and activeintegrity: + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s' % attr, + {'x': entity.eid, 'y': value}) + return relations + + +class NullEventBus(object): + def publish(self, msg): + pass + + def add_subscription(self, topic, callback): + pass + + def start(self): + pass + + def stop(self): + pass + + +class Repository(object): + """a repository provides access to a set of persistent storages for + entities and relations + """ + + def __init__(self, config, tasks_manager=None, vreg=None): + self.config = config + if vreg is None: + vreg = cwvreg.CWRegistryStore(config) + self.vreg = vreg + self._tasks_manager = tasks_manager + + self.app_instances_bus = NullEventBus() + self.info('starting repository from %s', self.config.apphome) + # dictionary of opened sessions + self._sessions = {} + + # list of functions to be called at regular interval + # list of running threads + self._running_threads = [] + # initial schema, should be build or replaced latter + self.schema = schema.CubicWebSchema(config.appid) + self.vreg.schema = self.schema # until actual schema is loaded... + # shutdown flag + self.shutting_down = False + # sources (additional sources info in the system database) + self.system_source = self.get_source('native', 'system', + config.system_source_config.copy()) + self.sources_by_uri = {'system': self.system_source} + # querier helper, need to be created after sources initialization + self.querier = querier.QuerierHelper(self, self.schema) + # cache eid -> (type, extid, actual source) + self._type_source_cache = {} + # cache extid -> eid + self._extid_cache = {} + # open some connection sets + if config.init_cnxset_pool: + self.init_cnxset_pool() + # the hooks manager + self.hm = hook.HooksManager(self.vreg) + # registry hook to fix user class on registry reload + @onevent('after-registry-reload', self) + def fix_user_classes(self): + # After registry reload the 'CWUser' class used for CWEtype + # changed. So any existing user object have a different class than + # the new loaded one. We are hot fixing this. 
+            usercls = self.vreg['etypes'].etype_class('CWUser')
+            for session in self._sessions.values():
+                if not isinstance(session.user, InternalManager):
+                    session.user.__class__ = usercls
+
+    def init_cnxset_pool(self):
+        """initialize the connection sets pool; this should rather be called
+        bootstrap_repository, as that is what it actually does
+        """
+        config = self.config
+        self._cnxsets_pool = queue.Queue()
+        # 0. init a cnxset that will be used to fetch bootstrap information from
+        #    the database
+        self._cnxsets_pool.put_nowait(self.system_source.wrapped_connection())
+        # 1. set used cubes
+        if config.creating or not config.read_instance_schema:
+            config.bootstrap_cubes()
+        else:
+            self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False)
+            config.init_cubes(self.get_cubes())
+        # 2. load schema
+        if config.quick_start:
+            # quick start: only get a minimal repository to retrieve cubes
+            # information (e.g. dump/restore/...)
+            #
+            # restrict appobject_path to only load hooks and entity classes in
+            # the registry
+            config.cube_appobject_path = set(('hooks', 'entities'))
+            config.cubicweb_appobject_path = set(('hooks', 'entities'))
+            # limit connections pool to 1
+            config['connections-pool-size'] = 1
+        if config.quick_start or config.creating or not config.read_instance_schema:
+            # load schema from the file system
+            if not config.creating:
+                self.info("setting the instance schema from the file system")
+            self.set_schema(config.load_schema(expand_cubes=True))
+        else:
+            # normal start: load the instance schema from the database
+            self.info('loading schema from the repository')
+            self.set_schema(self.deserialize_schema())
+        # 3. initialize data sources
+        if config.creating:
+            # call init_creating so that for instance the native source can
+            # configure tsearch according to the postgres version
+            self.system_source.init_creating()
+        else:
+            self.init_sources_from_database()
+            if 'CWProperty' in self.schema:
+                self.vreg.init_properties(self.properties())
+        # 4.
close initialization connection set and reopen fresh ones for + # proper initialization + self._get_cnxset().close(True) + self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue) + for i in range(config['connections-pool-size']): + self.cnxsets.append(self.system_source.wrapped_connection()) + self._cnxsets_pool.put_nowait(self.cnxsets[-1]) + + # internals ############################################################### + + def init_sources_from_database(self): + self.sources_by_eid = {} + if self.config.quick_start \ + or not 'CWSource' in self.schema: # # 3.10 migration + self.system_source.init_creating() + return + with self.internal_cnx() as cnx: + # FIXME: sources should be ordered (add_entity priority) + for sourceent in cnx.execute( + 'Any S, SN, SA, SC WHERE S is_instance_of CWSource, ' + 'S name SN, S type SA, S config SC').entities(): + if sourceent.name == 'system': + self.system_source.eid = sourceent.eid + self.sources_by_eid[sourceent.eid] = self.system_source + self.system_source.init(True, sourceent) + continue + self.add_source(sourceent) + + def _clear_planning_caches(self): + clear_cache(self, 'source_defs') + + def add_source(self, sourceent): + try: + source = self.get_source(sourceent.type, sourceent.name, + sourceent.host_config, sourceent.eid) + except RuntimeError: + if self.config.repairing: + self.exception('cant setup source %s, skipped', sourceent.name) + return + raise + self.sources_by_eid[sourceent.eid] = source + self.sources_by_uri[sourceent.name] = source + if self.config.source_enabled(source): + # call source's init method to complete their initialisation if + # needed (for instance looking for persistent configuration using an + # internal session, which is not possible until connections sets have been + # initialized) + source.init(True, sourceent) + else: + source.init(False, sourceent) + self._clear_planning_caches() + + def remove_source(self, uri): + source = self.sources_by_uri.pop(uri) + del self.sources_by_eid[source.eid] + self._clear_planning_caches() + + def get_source(self, type, uri, source_config, eid=None): + # set uri and type in source config so it's available through + # source_defs() + source_config['uri'] = uri + source_config['type'] = type + return sources.get_source(type, source_config, self, eid) + + def set_schema(self, schema, resetvreg=True): + self.info('set schema %s %#x', schema.name, id(schema)) + if resetvreg: + # trigger full reload of all appobjects + self.vreg.set_schema(schema) + else: + self.vreg._set_schema(schema) + self.querier.set_schema(schema) + for source in self.sources_by_uri.values(): + source.set_schema(schema) + self.schema = schema + + def deserialize_schema(self): + """load schema from the database""" + from cubicweb.server.schemaserial import deserialize_schema + appschema = schema.CubicWebSchema(self.config.appid) + self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema)) + with self.internal_cnx() as cnx: + try: + deserialize_schema(appschema, cnx) + except BadSchemaDefinition: + raise + except Exception as ex: + import traceback + traceback.print_exc() + raise Exception('Is the database initialised ? (cause: %s)' % ex) + return appschema + + def _prepare_startup(self): + """Prepare "Repository as a server" for startup. + + * trigger server startup hook, + * register session clean up task. 
+ """ + if not (self.config.creating or self.config.repairing + or self.config.quick_start): + # call instance level initialisation hooks + self.hm.call_hooks('server_startup', repo=self) + # register a task to cleanup expired session + self.cleanup_session_time = self.config['cleanup-session-time'] or 60 * 60 * 24 + assert self.cleanup_session_time > 0 + cleanup_session_interval = min(60*60, self.cleanup_session_time / 3) + assert self._tasks_manager is not None, "This Repository is not intended to be used as a server" + self._tasks_manager.add_looping_task(cleanup_session_interval, + self.clean_sessions) + + def start_looping_tasks(self): + """Actual "Repository as a server" startup. + + * trigger server startup hook, + * register session clean up task, + * start all tasks. + + XXX Other startup related stuffs are done elsewhere. In Repository + XXX __init__ or in external codes (various server managers). + """ + self._prepare_startup() + assert self._tasks_manager is not None, "This Repository is not intended to be used as a server" + self._tasks_manager.start() + + def looping_task(self, interval, func, *args): + """register a function to be called every `interval` seconds. + + looping tasks can only be registered during repository initialization, + once done this method will fail. + """ + assert self._tasks_manager is not None, "This Repository is not intended to be used as a server" + self._tasks_manager.add_looping_task(interval, func, *args) + + def threaded_task(self, func): + """start function in a separated thread""" + utils.RepoThread(func, self._running_threads).start() + + #@locked + def _get_cnxset(self): + try: + return self._cnxsets_pool.get(True, timeout=5) + except queue.Empty: + raise Exception('no connections set available after 5 secs, probably either a ' + 'bug in code (too many uncommited/rolled back ' + 'connections) or too much load on the server (in ' + 'which case you can try to set a bigger ' + 'connections pool size)') + + def _free_cnxset(self, cnxset): + self._cnxsets_pool.put_nowait(cnxset) + + def shutdown(self): + """called on server stop event to properly close opened sessions and + connections + """ + assert not self.shutting_down, 'already shutting down' + if not (self.config.creating or self.config.repairing + or self.config.quick_start): + # then, the system source is still available + self.hm.call_hooks('before_server_shutdown', repo=self) + self.shutting_down = True + self.system_source.shutdown() + if self._tasks_manager is not None: + self._tasks_manager.stop() + if not (self.config.creating or self.config.repairing + or self.config.quick_start): + self.hm.call_hooks('server_shutdown', repo=self) + for thread in self._running_threads: + self.info('waiting thread %s...', thread.getName()) + thread.join() + self.info('thread %s finished', thread.getName()) + self.close_sessions() + while not self._cnxsets_pool.empty(): + cnxset = self._cnxsets_pool.get_nowait() + try: + cnxset.close(True) + except Exception: + self.exception('error while closing %s' % cnxset) + continue + hits, misses = self.querier.cache_hit, self.querier.cache_miss + try: + self.info('rql st cache hit/miss: %s/%s (%s%% hits)', hits, misses, + (hits * 100) / (hits + misses)) + hits, misses = self.system_source.cache_hit, self.system_source.cache_miss + self.info('sql cache hit/miss: %s/%s (%s%% hits)', hits, misses, + (hits * 100) / (hits + misses)) + nocache = self.system_source.no_cache + self.info('sql cache usage: %s/%s (%s%%)', hits+ misses, nocache, + ((hits + misses) * 100) 
/ (hits + misses + nocache)) + except ZeroDivisionError: + pass + + def check_auth_info(self, cnx, login, authinfo): + """validate authentication, raise AuthenticationError on failure, return + associated CWUser's eid on success. + """ + # iter on sources_by_uri then check enabled source since sources doesn't + # contain copy based sources + for source in self.sources_by_uri.values(): + if self.config.source_enabled(source) and source.support_entity('CWUser'): + try: + return source.authenticate(cnx, login, **authinfo) + except AuthenticationError: + continue + else: + raise AuthenticationError('authentication failed with all sources') + + def authenticate_user(self, cnx, login, **authinfo): + """validate login / password, raise AuthenticationError on failure + return associated CWUser instance on success + """ + eid = self.check_auth_info(cnx, login, authinfo) + cwuser = self._build_user(cnx, eid) + if self.config.consider_user_state and \ + not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES: + raise AuthenticationError('user is not in authenticable state') + return cwuser + + def _build_user(self, cnx, eid): + """return a CWUser entity for user with the given eid""" + cls = self.vreg['etypes'].etype_class('CWUser') + st = cls.fetch_rqlst(cnx.user, ordermethod=None) + st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute') + rset = cnx.execute(st.as_string(), {'x': eid}) + assert len(rset) == 1, rset + cwuser = rset.get_entity(0, 0) + # pylint: disable=W0104 + # prefetch / cache cwuser's groups and properties. This is especially + # useful for internal sessions to avoid security insertions + cwuser.groups + cwuser.properties + return cwuser + + # public (dbapi) interface ################################################ + + @deprecated("[3.19] use _cw.call_service('repo_stats')") + def stats(self): # XXX restrict to managers session? + """Return a dictionary containing some statistics about the repository + resources usage. + + This is a public method, not requiring a session id. + + This method is deprecated in favor of using _cw.call_service('repo_stats') + """ + with self.internal_cnx() as cnx: + return cnx.call_service('repo_stats') + + @deprecated("[3.19] use _cw.call_service('repo_gc_stats')") + def gc_stats(self, nmax=20): + """Return a dictionary containing some statistics about the repository + memory usage. + + This is a public method, not requiring a session id. + + nmax is the max number of (most) referenced object returned as + the 'referenced' result + """ + with self.internal_cnx() as cnx: + return cnx.call_service('repo_gc_stats', nmax=nmax) + + def get_schema(self): + """Return the instance schema. + + This is a public method, not requiring a session id. + """ + return self.schema + + def get_cubes(self): + """Return the list of cubes used by this instance. + + This is a public method, not requiring a session id. + """ + versions = self.get_versions(not (self.config.creating + or self.config.repairing + or self.config.quick_start + or self.config.mode == 'test')) + cubes = list(versions) + cubes.remove('cubicweb') + return cubes + + def get_option_value(self, option, foreid=None): + """Return the value for `option` in the configuration. + + This is a public method, not requiring a session id. + + `foreid` argument is deprecated and now useless (as of 3.19). 
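+
+        e.g. (illustrative) ``repo.get_option_value('connections-pool-size')``
+        returns the configured size of the repository connections pool.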
+ """ + if foreid is not None: + warn('[3.19] foreid argument is deprecated', DeprecationWarning, + stacklevel=2) + # XXX we may want to check we don't give sensible information + return self.config[option] + + @cached + def get_versions(self, checkversions=False): + """Return the a dictionary containing cubes used by this instance + as key with their version as value, including cubicweb version. + + This is a public method, not requiring a session id. + """ + from logilab.common.changelog import Version + vcconf = {} + with self.internal_cnx() as cnx: + for pk, version in cnx.execute( + 'Any K,V WHERE P is CWProperty, P value V, P pkey K, ' + 'P pkey ~="system.version.%"', build_descr=False): + cube = pk.split('.')[-1] + # XXX cubicweb migration + if cube in CW_MIGRATION_MAP: + cube = CW_MIGRATION_MAP[cube] + version = Version(version) + vcconf[cube] = version + if checkversions: + if cube != 'cubicweb': + fsversion = self.config.cube_version(cube) + else: + fsversion = self.config.cubicweb_version() + if version < fsversion: + msg = ('instance has %s version %s but %s ' + 'is installed. Run "cubicweb-ctl upgrade".') + raise ExecutionError(msg % (cube, version, fsversion)) + return vcconf + + @cached + def source_defs(self): + """Return the a dictionary containing source uris as value and a + dictionary describing each source as value. + + This is a public method, not requiring a session id. + """ + sources = {} + # remove sensitive information + for uri, source in self.sources_by_uri.items(): + sources[uri] = source.public_config + return sources + + def properties(self): + """Return a result set containing system wide properties. + + This is a public method, not requiring a session id. + """ + with self.internal_cnx() as cnx: + # don't use cnx.execute, we don't want rset.req set + return self.querier.execute(cnx, 'Any K,V WHERE P is CWProperty,' + 'P pkey K, P value V, NOT P for_user U', + build_descr=False) + + @deprecated("[3.19] Use session.call_service('register_user') instead'") + def register_user(self, login, password, email=None, **kwargs): + """check a user with the given login exists, if not create it with the + given password. This method is designed to be used for anonymous + registration on public web site. + """ + with self.internal_cnx() as cnx: + cnx.call_service('register_user', login=login, password=password, + email=email, **kwargs) + cnx.commit() + + def find_users(self, fetch_attrs, **query_attrs): + """yield user attributes for cwusers matching the given query_attrs + (the result set cannot survive this method call) + + This can be used by low-privileges account (anonymous comes to + mind). 
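+
+        e.g. (illustrative) ``repo.find_users(('login', 'firstname'), surname=u'Doe')``
+        returns rows of login/firstname values for users whose surname is 'Doe'.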
+ + `fetch_attrs`: tuple of attributes to be fetched + `query_attrs`: dict of attr/values to restrict the query + """ + assert query_attrs + if not hasattr(self, '_cwuser_attrs'): + cwuser = self.schema['CWUser'] + self._cwuser_attrs = set(str(rschema) + for rschema, _eschema in cwuser.attribute_definitions() + if not rschema.meta) + cwuserattrs = self._cwuser_attrs + for k in chain(fetch_attrs, query_attrs): + if k not in cwuserattrs: + raise Exception('bad input for find_user') + with self.internal_cnx() as cnx: + varmaker = rqlvar_maker() + vars = [(attr, next(varmaker)) for attr in fetch_attrs] + rql = 'Any %s WHERE X is CWUser, ' % ','.join(var[1] for var in vars) + rql += ','.join('X %s %s' % (var[0], var[1]) for var in vars) + ',' + rset = cnx.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr) + for attr in query_attrs), + query_attrs) + return rset.rows + + def new_session(self, login, **kwargs): + """open a new session for a given user + + raise `AuthenticationError` if the authentication failed + raise `ConnectionError` if we can't open a connection + """ + cnxprops = kwargs.pop('cnxprops', None) + # use an internal connection + with self.internal_cnx() as cnx: + # try to get a user object + user = self.authenticate_user(cnx, login, **kwargs) + session = Session(user, self, cnxprops) + user._cw = user.cw_rset.req = session + user.cw_clear_relation_cache() + self._sessions[session.sessionid] = session + self.info('opened session %s for user %s', session.sessionid, login) + with session.new_cnx() as cnx: + self.hm.call_hooks('session_open', cnx) + # commit connection at this point in case write operation has been + # done during `session_open` hooks + cnx.commit() + return session + + def connect(self, login, **kwargs): + """open a new session for a given user and return its sessionid """ + return self.new_session(login, **kwargs).sessionid + + def close(self, sessionid, txid=None, checkshuttingdown=True): + """close the session with the given id""" + session = self._get_session(sessionid, txid=txid, + checkshuttingdown=checkshuttingdown) + # operation uncommited before close are rolled back before hook is called + with session.new_cnx() as cnx: + self.hm.call_hooks('session_close', cnx) + # commit connection at this point in case write operation has been + # done during `session_close` hooks + cnx.commit() + session.close() + del self._sessions[sessionid] + self.info('closed session %s for user %s', sessionid, session.user.login) + + # session handling ######################################################## + + def close_sessions(self): + """close every opened sessions""" + for sessionid in list(self._sessions): + try: + self.close(sessionid, checkshuttingdown=False) + except Exception: # XXX BaseException? + self.exception('error while closing session %s' % sessionid) + + def clean_sessions(self): + """close sessions not used since an amount of time specified in the + configuration + """ + mintime = time() - self.cleanup_session_time + self.debug('cleaning session unused since %s', + strftime('%H:%M:%S', localtime(mintime))) + nbclosed = 0 + for session in self._sessions.values(): + if session.timestamp < mintime: + self.close(session.sessionid) + nbclosed += 1 + return nbclosed + + @contextmanager + def internal_cnx(self): + """Context manager returning a Connection using internal user which have + every access rights on the repository. + + Beware that unlike the older :meth:`internal_session`, internal + connections have all hooks beside security enabled. 
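+
+        Typical usage (illustrative)::
+
+            with repo.internal_cnx() as cnx:
+                rset = cnx.execute('Any COUNT(X) WHERE X is CWUser')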
+ """ + with Session(InternalManager(), self) as session: + with session.new_cnx() as cnx: + cnx.user._cw = cnx # XXX remove when "vreg = user._cw.vreg" + # hack in entity.py is gone + with cnx.security_enabled(read=False, write=False): + yield cnx + + def _get_session(self, sessionid, txid=None, checkshuttingdown=True): + """return the session associated with the given session identifier""" + if checkshuttingdown and self.shutting_down: + raise ShuttingDown('Repository is shutting down') + try: + session = self._sessions[sessionid] + except KeyError: + raise BadConnectionId('No such session %s' % sessionid) + return session + + # data sources handling ################################################### + # * correspondance between eid and (type, source) + # * correspondance between eid and local id (i.e. specific to a given source) + + def type_and_source_from_eid(self, eid, cnx): + """return a tuple `(type, extid, actual source uri)` for the entity of + the given `eid` + """ + try: + eid = int(eid) + except ValueError: + raise UnknownEid(eid) + try: + return self._type_source_cache[eid] + except KeyError: + etype, extid, auri = self.system_source.eid_type_source(cnx, eid) + self._type_source_cache[eid] = (etype, extid, auri) + return etype, extid, auri + + def clear_caches(self, eids): + etcache = self._type_source_cache + extidcache = self._extid_cache + rqlcache = self.querier._rql_cache + for eid in eids: + try: + etype, extid, auri = etcache.pop(int(eid)) # may be a string in some cases + rqlcache.pop( ('%s X WHERE X eid %s' % (etype, eid),), None) + extidcache.pop(extid, None) + except KeyError: + etype = None + rqlcache.pop( ('Any X WHERE X eid %s' % eid,), None) + self.system_source.clear_eid_cache(eid, etype) + + def type_from_eid(self, eid, cnx): + """return the type of the entity with id """ + return self.type_and_source_from_eid(eid, cnx)[0] + + def querier_cache_key(self, cnx, rql, args, eidkeys): + cachekey = [rql] + for key in sorted(eidkeys): + try: + etype = self.type_from_eid(args[key], cnx) + except KeyError: + raise QueryError('bad cache key %s (no value)' % key) + except TypeError: + raise QueryError('bad cache key %s (value: %r)' % ( + key, args[key])) + cachekey.append(etype) + # ensure eid is correctly typed in args + args[key] = int(args[key]) + return tuple(cachekey) + + @deprecated('[3.22] use the new store API') + def extid2eid(self, source, extid, etype, cnx, insert=True, + sourceparams=None): + """Return eid from a local id. If the eid is a negative integer, that + means the entity is known but has been copied back to the system source + hence should be ignored. + + If no record is found, ie the entity is not known yet: + + 1. an eid is attributed + + 2. the source's :meth:`before_entity_insertion` method is called to + build the entity instance + + 3. unless source's :attr:`should_call_hooks` tell otherwise, + 'before_add_entity' hooks are called + + 4. record is added into the system source + + 5. the source's :meth:`after_entity_insertion` method is called to + complete building of the entity instance + + 6. 
unless source's :attr:`should_call_hooks` tell otherwise, + 'before_add_entity' hooks are called + """ + try: + return self._extid_cache[extid] + except KeyError: + pass + eid = self.system_source.extid2eid(cnx, extid) + if eid is not None: + self._extid_cache[extid] = eid + self._type_source_cache[eid] = (etype, extid, source.uri) + return eid + if not insert: + return + # no link between extid and eid, create one + # write query, ensure connection's mode is 'write' so connections + # won't be released until commit/rollback + try: + eid = self.system_source.create_eid(cnx) + self._extid_cache[extid] = eid + self._type_source_cache[eid] = (etype, extid, source.uri) + entity = source.before_entity_insertion( + cnx, extid, etype, eid, sourceparams) + if source.should_call_hooks: + # get back a copy of operation for later restore if + # necessary, see below + pending_operations = cnx.pending_operations[:] + self.hm.call_hooks('before_add_entity', cnx, entity=entity) + self.add_info(cnx, entity, source, extid) + source.after_entity_insertion(cnx, extid, entity, sourceparams) + if source.should_call_hooks: + self.hm.call_hooks('after_add_entity', cnx, entity=entity) + return eid + except Exception: + # XXX do some cleanup manually so that the transaction has a + # chance to be commited, with simply this entity discarded + self._extid_cache.pop(extid, None) + self._type_source_cache.pop(eid, None) + if 'entity' in locals(): + hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid) + self.system_source.delete_info_multi(cnx, [entity]) + if source.should_call_hooks: + cnx.pending_operations = pending_operations + raise + + def add_info(self, cnx, entity, source, extid=None): + """add type and source info for an eid into the system table, + and index the entity with the full text index + """ + # begin by inserting eid/type/source/extid into the entities table + hook.CleanupNewEidsCacheOp.get_instance(cnx).add_data(entity.eid) + self.system_source.add_info(cnx, entity, source, extid) + + def _delete_cascade_multi(self, cnx, entities): + """same as _delete_cascade but accepts a list of entities with + the same etype and belonging to the same source. + """ + pendingrtypes = cnx.transaction_data.get('pendingrtypes', ()) + # delete remaining relations: if user can delete the entity, he can + # delete all its relations without security checking + with cnx.security_enabled(read=False, write=False): + in_eids = ','.join([str(_e.eid) for _e in entities]) + with cnx.running_hooks_ops(): + for rschema, _, role in entities[0].e_schema.relation_definitions(): + if rschema.rule: + continue # computed relation + rtype = rschema.type + if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes: + continue + if role == 'subject': + # don't skip inlined relation so they are regularly + # deleted and so hooks are correctly called + rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids) + else: + rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids) + try: + cnx.execute(rql, build_descr=False) + except ValidationError: + raise + except Unauthorized: + self.exception('Unauthorized exception while cascading delete for entity %s. ' + 'RQL: %s.\nThis should not happen since security is disabled here.', + entities, rql) + raise + except Exception: + if self.config.mode == 'test': + raise + self.exception('error while cascading delete for entity %s. 
RQL: %s', + entities, rql) + + def init_entity_caches(self, cnx, entity, source): + """add entity to connection entities cache and repo's extid cache. + Return entity's ext id if the source isn't the system source. + """ + cnx.set_entity_cache(entity) + if source.uri == 'system': + extid = None + else: + extid = source.get_extid(entity) + self._extid_cache[str(extid)] = entity.eid + self._type_source_cache[entity.eid] = (entity.cw_etype, extid, source.uri) + return extid + + def glob_add_entity(self, cnx, edited): + """add an entity to the repository + + the entity eid should originally be None and a unique eid is assigned to + the entity instance + """ + entity = edited.entity + entity._cw_is_saved = False # entity has an eid but is not yet saved + # init edited_attributes before calling before_add_entity hooks + entity.cw_edited = edited + source = self.system_source + # allocate an eid to the entity before calling hooks + entity.eid = self.system_source.create_eid(cnx) + # set caches asap + extid = self.init_entity_caches(cnx, entity, source) + if server.DEBUG & server.DBG_REPO: + print('ADD entity', self, entity.cw_etype, entity.eid, edited) + prefill_entity_caches(entity) + self.hm.call_hooks('before_add_entity', cnx, entity=entity) + relations = preprocess_inlined_relations(cnx, entity) + edited.set_defaults() + if cnx.is_hook_category_activated('integrity'): + edited.check(creation=True) + self.add_info(cnx, entity, source, extid) + try: + source.add_entity(cnx, entity) + except (UniqueTogetherError, ViolatedConstraint) as exc: + userhdlr = cnx.vreg['adapters'].select( + 'IUserFriendlyError', cnx, entity=entity, exc=exc) + userhdlr.raise_user_exception() + edited.saved = entity._cw_is_saved = True + # trigger after_add_entity after after_add_relation + self.hm.call_hooks('after_add_entity', cnx, entity=entity) + # call hooks for inlined relations + for attr, value in relations: + self.hm.call_hooks('before_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + self.hm.call_hooks('after_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + return entity.eid + + def glob_update_entity(self, cnx, edited): + """replace an entity in the repository + the type and the eid of an entity must not be changed + """ + entity = edited.entity + if server.DEBUG & server.DBG_REPO: + print('UPDATE entity', entity.cw_etype, entity.eid, + entity.cw_attr_cache, edited) + hm = self.hm + eschema = entity.e_schema + cnx.set_entity_cache(entity) + orig_edited = getattr(entity, 'cw_edited', None) + entity.cw_edited = edited + source = self.system_source + try: + only_inline_rels, need_fti_update = True, False + relations = [] + for attr in list(edited): + if attr == 'eid': + continue + rschema = eschema.subjrels[attr] + if rschema.final: + if getattr(eschema.rdef(attr), 'fulltextindexed', False): + need_fti_update = True + only_inline_rels = False + else: + # inlined relation + previous_value = entity.related(attr) or None + if previous_value is not None: + previous_value = previous_value[0][0] # got a result set + if previous_value == entity.cw_attr_cache[attr]: + previous_value = None + else: + hm.call_hooks('before_delete_relation', cnx, + eidfrom=entity.eid, rtype=attr, + eidto=previous_value) + relations.append((attr, edited[attr], previous_value)) + # call hooks for inlined relations + for attr, value, _t in relations: + hm.call_hooks('before_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + if not only_inline_rels: + hm.call_hooks('before_update_entity', 
cnx, entity=entity) + if cnx.is_hook_category_activated('integrity'): + edited.check() + try: + source.update_entity(cnx, entity) + edited.saved = True + except (UniqueTogetherError, ViolatedConstraint) as exc: + userhdlr = cnx.vreg['adapters'].select( + 'IUserFriendlyError', cnx, entity=entity, exc=exc) + userhdlr.raise_user_exception() + self.system_source.update_info(cnx, entity, need_fti_update) + if not only_inline_rels: + hm.call_hooks('after_update_entity', cnx, entity=entity) + for attr, value, prevvalue in relations: + # if the relation is already cached, update existant cache + relcache = entity.cw_relation_cached(attr, 'subject') + if prevvalue is not None: + hm.call_hooks('after_delete_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=prevvalue) + if relcache is not None: + cnx.update_rel_cache_del(entity.eid, attr, prevvalue) + del_existing_rel_if_needed(cnx, entity.eid, attr, value) + cnx.update_rel_cache_add(entity.eid, attr, value) + hm.call_hooks('after_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + finally: + if orig_edited is not None: + entity.cw_edited = orig_edited + + + def glob_delete_entities(self, cnx, eids): + """delete a list of entities and all related entities from the repository""" + # mark eids as being deleted in cnx info and setup cache update + # operation (register pending eids before actual deletion to avoid + # multiple call to glob_delete_entities) + op = hook.CleanupDeletedEidsCacheOp.get_instance(cnx) + if not isinstance(eids, (set, frozenset)): + warn('[3.13] eids should be given as a set', DeprecationWarning, + stacklevel=2) + eids = frozenset(eids) + eids = eids - op._container + op._container |= eids + data_by_etype = {} # values are [list of entities] + # + # WARNING: the way this dictionary is populated is heavily optimized + # and does not use setdefault on purpose. Unless a new release + # of the Python interpreter advertises large perf improvements + # in setdefault, this should not be changed without profiling. + for eid in eids: + etype = self.type_from_eid(eid, cnx) + # XXX should cache entity's cw_metainformation + entity = cnx.entity_from_eid(eid, etype) + try: + data_by_etype[etype].append(entity) + except KeyError: + data_by_etype[etype] = [entity] + source = self.system_source + for etype, entities in data_by_etype.items(): + if server.DEBUG & server.DBG_REPO: + print('DELETE entities', etype, [entity.eid for entity in entities]) + self.hm.call_hooks('before_delete_entity', cnx, entities=entities) + self._delete_cascade_multi(cnx, entities) + source.delete_entities(cnx, entities) + source.delete_info_multi(cnx, entities) + self.hm.call_hooks('after_delete_entity', cnx, entities=entities) + # don't clear cache here, it is done in a hook on commit + + def glob_add_relation(self, cnx, subject, rtype, object): + """add a relation to the repository""" + self.glob_add_relations(cnx, {rtype: [(subject, object)]}) + + def glob_add_relations(self, cnx, relations): + """add several relations to the repository + + relations is a dictionary rtype: [(subj_eid, obj_eid), ...] 
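+
+        e.g. (illustrative), with `ueid` and `geid` being eids of existing
+        CWUser and CWGroup entities::
+
+            repo.glob_add_relations(cnx, {'in_group': [(ueid, geid)]})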
+ """ + source = self.system_source + relations_by_rtype = {} + subjects_by_types = {} + objects_by_types = {} + activintegrity = cnx.is_hook_category_activated('activeintegrity') + for rtype, eids_subj_obj in relations.items(): + if server.DEBUG & server.DBG_REPO: + for subjeid, objeid in eids_subj_obj: + print('ADD relation', subjeid, rtype, objeid) + for subjeid, objeid in eids_subj_obj: + if rtype in relations_by_rtype: + relations_by_rtype[rtype].append((subjeid, objeid)) + else: + relations_by_rtype[rtype] = [(subjeid, objeid)] + if not activintegrity: + continue + # take care to relation of cardinality '?1', as all eids will + # be inserted later, we've remove duplicated eids since they + # won't be caught by `del_existing_rel_if_needed` + rdef = cnx.rtype_eids_rdef(rtype, subjeid, objeid) + card = rdef.cardinality + if card[0] in '?1': + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE X eid %%(x)s, ' + 'NOT Y eid %%(y)s' % rtype, + {'x': subjeid, 'y': objeid}) + subjects = subjects_by_types.setdefault(rdef, {}) + if subjeid in subjects: + del relations_by_rtype[rtype][subjects[subjeid]] + subjects[subjeid] = len(relations_by_rtype[rtype]) - 1 + continue + subjects[subjeid] = len(relations_by_rtype[rtype]) - 1 + if card[1] in '?1': + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s, ' + 'NOT X eid %%(x)s' % rtype, + {'x': subjeid, 'y': objeid}) + objects = objects_by_types.setdefault(rdef, {}) + if objeid in objects: + del relations_by_rtype[rtype][objects[objeid]] + objects[objeid] = len(relations_by_rtype[rtype]) + continue + objects[objeid] = len(relations_by_rtype[rtype]) + for rtype, source_relations in relations_by_rtype.items(): + self.hm.call_hooks('before_add_relation', cnx, + rtype=rtype, eids_from_to=source_relations) + for rtype, source_relations in relations_by_rtype.items(): + source.add_relations(cnx, rtype, source_relations) + rschema = self.schema.rschema(rtype) + for subjeid, objeid in source_relations: + cnx.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric) + for rtype, source_relations in relations_by_rtype.items(): + self.hm.call_hooks('after_add_relation', cnx, + rtype=rtype, eids_from_to=source_relations) + + def glob_delete_relation(self, cnx, subject, rtype, object): + """delete a relation from the repository""" + if server.DEBUG & server.DBG_REPO: + print('DELETE relation', subject, rtype, object) + source = self.system_source + self.hm.call_hooks('before_delete_relation', cnx, + eidfrom=subject, rtype=rtype, eidto=object) + source.delete_relation(cnx, subject, rtype, object) + rschema = self.schema.rschema(rtype) + cnx.update_rel_cache_del(subject, rtype, object, rschema.symmetric) + self.hm.call_hooks('after_delete_relation', cnx, + eidfrom=subject, rtype=rtype, eidto=object) + + + + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(Repository, getLogger('cubicweb.repository')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/rqlannotation.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/rqlannotation.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,413 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Functions to add additional annotations on a rql syntax tree to ease later +code generation. +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from rql import BadRQLQuery +from rql.nodes import Relation, VariableRef, Constant, Variable, Or, Exists +from rql.utils import common_parent + +def _annotate_select(annotator, rqlst): + has_text_query = False + for subquery in rqlst.with_: + if annotator._annotate_union(subquery.query): + has_text_query = True + #if server.DEBUG: + # print '-------- sql annotate', repr(rqlst) + getrschema = annotator.schema.rschema + for var in rqlst.defined_vars.values(): + stinfo = var.stinfo + if stinfo.get('ftirels'): + has_text_query = True + if stinfo['attrvar']: + stinfo['invariant'] = False + stinfo['principal'] = _select_main_var(stinfo['rhsrelations']) + continue + if not stinfo['relations'] and stinfo['typerel'] is None: + # Any X, Any MAX(X)... + # those particular queries should be executed using the system + # entities table unless there is some type restriction + stinfo['invariant'] = True + stinfo['principal'] = None + continue + if any(rel for rel in stinfo['relations'] if rel.r_type == 'eid' and rel.operator() != '=') and \ + not any(r for r in var.stinfo['relations'] - var.stinfo['rhsrelations'] + if r.r_type != 'eid' and (getrschema(r.r_type).inlined or getrschema(r.r_type).final)): + # Any X WHERE X eid > 2 + # those particular queries should be executed using the system entities table + stinfo['invariant'] = True + stinfo['principal'] = None + continue + if stinfo['selected'] and var.valuable_references() == 1+bool(stinfo['constnode']): + # "Any X", "Any X, Y WHERE X attr Y" + stinfo['invariant'] = False + continue + joins = set() + invariant = False + for ref in var.references(): + rel = ref.relation() + if rel is None or rel.is_types_restriction(): + continue + lhs, rhs = rel.get_parts() + onlhs = ref is lhs + role = 'subject' if onlhs else 'object' + if rel.r_type == 'eid': + if not (onlhs and len(stinfo['relations']) > 1): + break + if not stinfo['constnode']: + joins.add( (rel, role) ) + continue + elif rel.r_type == 'identity': + # identity can't be used as principal, so check other relation are used + # XXX explain rhs.operator == '=' + if rhs.operator != '=' or len(stinfo['relations']) <= 1: #(stinfo['constnode'] and rhs.operator == '='): + break + joins.add( (rel, role) ) + continue + rschema = getrschema(rel.r_type) + if rel.optional: + if rel in stinfo.get('optrelations', ()): + # optional variable can't be invariant if this is the lhs + # variable of an inlined relation + if not rel in stinfo['rhsrelations'] and rschema.inlined: + break + # variable used as main variable of an optional relation can't + # be invariant, unless we can use some other relation as + # reference for the outer join + elif not stinfo['constnode']: + break + elif len(stinfo['relations']) == 2: + if 
onlhs: + ostinfo = rhs.children[0].variable.stinfo + else: + ostinfo = lhs.variable.stinfo + if not (ostinfo.get('optcomparisons') or + any(orel for orel in ostinfo['relations'] + if orel.optional and orel is not rel)): + break + if rschema.final or (onlhs and rschema.inlined): + if rschema.type != 'has_text': + # need join anyway if the variable appears in a final or + # inlined relation + break + joins.add( (rel, role) ) + continue + if not stinfo['constnode']: + if rschema.inlined and rel.neged(strict=True): + # if relation is inlined, can't be invariant if that + # variable is used anywhere else. + # see 'Any P WHERE NOT N ecrit_par P, N eid 512': + # sql for 'NOT N ecrit_par P' is 'N.ecrit_par is NULL' so P + # can use N.ecrit_par as principal + if (stinfo['selected'] or len(stinfo['relations']) > 1): + break + joins.add( (rel, role) ) + else: + # if there is at least one ambigous relation and no other to + # restrict types, can't be invariant since we need to filter out + # other types + if not annotator.is_ambiguous(var): + invariant = True + stinfo['invariant'] = invariant + if invariant and joins: + # remember rqlst/solutions analyze information + # we have to select a kindof "main" relation which will "extrajoins" + # the other + # priority should be given to relation which are not in inner queries + # (eg exists) + try: + stinfo['principal'] = principal = _select_principal(var.scope, joins) + if getrschema(principal.r_type).inlined: + # the scope of the lhs variable must be equal or outer to the + # rhs variable's scope (since it's retrieved from lhs's table) + sstinfo = principal.children[0].variable.stinfo + sstinfo['scope'] = common_parent(sstinfo['scope'], stinfo['scope']).scope + except CantSelectPrincipal: + stinfo['invariant'] = False + # see unittest_rqlannotation. test_has_text_security_cache_bug + # XXX probably more to do, but yet that work without more... + for col_alias in rqlst.aliases.values(): + if col_alias.stinfo.get('ftirels'): + has_text_query = True + return has_text_query + + + +class CantSelectPrincipal(Exception): + """raised when no 'principal' variable can be found""" + +def _select_principal(scope, relations, _sort=lambda x:x): + """given a list of rqlst relations, select one which will be used to + represent an invariant variable (e.g. 
using on extremity of the relation + instead of the variable's type table + """ + # _sort argument is there for test + diffscope_rels = {} + ored_rels = set() + diffscope_rels = set() + for rel, role in _sort(relations): + # note: only eid and has_text among all final relations may be there + if rel.r_type in ('eid', 'identity'): + continue + if rel.optional is not None and len(relations) > 1: + if role == 'subject' and rel.optional == 'right': + continue + if role == 'object' and rel.optional == 'left': + continue + if rel.ored(traverse_scope=True): + ored_rels.add(rel) + elif rel.scope is scope: + return rel + elif not rel.neged(traverse_scope=True): + diffscope_rels.add(rel) + if len(ored_rels) > 1: + ored_rels_copy = tuple(ored_rels) + for rel1 in ored_rels_copy: + for rel2 in ored_rels_copy: + if rel1 is rel2: + continue + if isinstance(common_parent(rel1, rel2), Or): + ored_rels.discard(rel1) + ored_rels.discard(rel2) + for rel in _sort(ored_rels): + if rel.scope is scope: + return rel + diffscope_rels.add(rel) + # if DISTINCT query, can use variable from a different scope as principal + # since introduced duplicates will be removed + if scope.stmt.distinct and diffscope_rels: + return next(iter(_sort(diffscope_rels))) + # XXX could use a relation from a different scope if it can't generate + # duplicates, so we should have to check cardinality + raise CantSelectPrincipal() + +def _select_main_var(relations): + """given a list of rqlst relations, select one which will be used as main + relation for the rhs variable + """ + principal = None + others = [] + # sort for test predictability + for rel in sorted(relations, key=lambda x: (x.children[0].name, x.r_type)): + # only equality relation with a variable as rhs may be principal + if rel.operator() not in ('=', 'IS') \ + or not isinstance(rel.children[1].children[0], VariableRef) or rel.neged(strict=True): + continue + if rel.optional: + others.append(rel) + continue + if rel.scope is rel.stmt: + return rel + principal = rel + if principal is None: + if others: + return others[0] + raise BadRQLQuery('unable to find principal in %s' % ', '.join( + r.as_string() for r in relations)) + return principal + + +def set_qdata(getrschema, union, noinvariant): + """recursive function to set querier data on variables in the syntax tree + """ + for select in union.children: + for subquery in select.with_: + set_qdata(getrschema, subquery.query, noinvariant) + for var in select.defined_vars.values(): + if var.stinfo['invariant']: + if var in noinvariant and not var.stinfo['principal'].r_type == 'has_text': + var._q_invariant = False + else: + var._q_invariant = True + else: + var._q_invariant = False + + +class SQLGenAnnotator(object): + def __init__(self, schema): + self.schema = schema + self.nfdomain = frozenset(eschema.type for eschema in schema.entities() + if not eschema.final) + + def annotate(self, rqlst): + """add information to the rql syntax tree to help sources to do their + job (read sql generation) + + a variable is tagged as invariant if: + * it's a non final variable + * it's not used as lhs in any final or inlined relation + * there is no type restriction on this variable (either explicit in the + syntax tree or because a solution for this variable has been removed + due to security filtering) + """ + #assert rqlst.TYPE == 'select', rqlst + rqlst.has_text_query = self._annotate_union(rqlst) + + def _annotate_union(self, union): + has_text_query = False + for select in union.children: + if _annotate_select(self, select): + 
has_text_query = True + return has_text_query + + def is_ambiguous(self, var): + # ignore has_text relation when we know it will be used as principal. + # This is expected by the rql2sql generator which will use the `entities` + # table to filter out by type if necessary, This optimisation is very + # interesting in multi-sources cases, as it may avoid a costly query + # on sources to get all entities of a given type to achieve this, while + # we have all the necessary information. + root = var.stmt.root # Union node + # rel.scope -> Select or Exists node, so add .parent to get Union from + # Select node + rels = [rel for rel in var.stinfo['relations'] if rel.scope.parent is root] + if len(rels) == 1 and rels[0].r_type == 'has_text': + return False + try: + data = var.stmt._deamb_data + except AttributeError: + data = var.stmt._deamb_data = IsAmbData(self.schema, self.nfdomain) + data.compute(var.stmt) + return data.is_ambiguous(var) + + +class IsAmbData(object): + def __init__(self, schema, nfdomain): + self.schema = schema + # shortcuts + self.rschema = schema.rschema + self.eschema = schema.eschema + # domain for non final variables + self.nfdomain = nfdomain + # {var: possible solutions set} + self.varsols = {} + # set of ambiguous variables + self.ambiguousvars = set() + # remember if a variable has been deambiguified by another to avoid + # doing the opposite + self.deambification_map = {} + # not invariant variables (access to final.inlined relation) + self.not_invariants = set() + + def is_ambiguous(self, var): + return var in self.ambiguousvars + + def restrict(self, var, restricted_domain): + self.varsols[var] &= restricted_domain + if var in self.ambiguousvars and self.varsols[var] == var.stinfo['possibletypes']: + self.ambiguousvars.remove(var) + + def compute(self, rqlst): + # set domains for each variable + for varname, var in rqlst.defined_vars.items(): + if var.stinfo['uidrel'] is not None or \ + self.eschema(rqlst.solutions[0][varname]).final: + ptypes = var.stinfo['possibletypes'] + else: + ptypes = set(self.nfdomain) + self.ambiguousvars.add(var) + self.varsols[var] = ptypes + if not self.ambiguousvars: + return + # apply relation restriction + self.maydeambrels = maydeambrels = {} + for rel in rqlst.iget_nodes(Relation): + if rel.r_type == 'eid' or rel.is_types_restriction(): + continue + lhs, rhs = rel.get_variable_parts() + if isinstance(lhs, VariableRef) or isinstance(rhs, VariableRef): + rschema = self.rschema(rel.r_type) + if rschema.inlined or rschema.final: + self.not_invariants.add(lhs.variable) + self.set_rel_constraint(lhs, rel, rschema.subjects) + self.set_rel_constraint(rhs, rel, rschema.objects) + # try to deambiguify more variables by considering other variables'type + modified = True + while modified and self.ambiguousvars: + modified = False + for var in self.ambiguousvars.copy(): + try: + for rel in (var.stinfo['relations'] & maydeambrels[var]): + if self.deambiguifying_relation(var, rel): + modified = True + break + except KeyError: + # no relation to deambiguify + continue + + def _debug_print(self): + print('varsols', dict((x, sorted(str(v) for v in values)) + for x, values in self.varsols.items())) + print('ambiguous vars', sorted(self.ambiguousvars)) + + def set_rel_constraint(self, term, rel, etypes_func): + if isinstance(term, VariableRef) and self.is_ambiguous(term.variable): + var = term.variable + if len(var.stinfo['relations']) == 1 \ + or rel.scope is var.scope or rel.r_type == 'identity': + self.restrict(var, frozenset(etypes_func())) + 
try: + self.maydeambrels[var].add(rel) + except KeyError: + self.maydeambrels[var] = set((rel,)) + + def deambiguifying_relation(self, var, rel): + lhs, rhs = rel.get_variable_parts() + onlhs = var is getattr(lhs, 'variable', None) + other = onlhs and rhs or lhs + otheretypes = None + # XXX isinstance(other.variable, Variable) to skip column alias + if isinstance(other, VariableRef) and isinstance(other.variable, Variable): + deambiguifier = other.variable + if not var is self.deambification_map.get(deambiguifier): + if var.stinfo['typerel'] is None: + otheretypes = deambiguifier.stinfo['possibletypes'] + elif not self.is_ambiguous(deambiguifier): + otheretypes = self.varsols[deambiguifier] + elif deambiguifier in self.not_invariants: + # we know variable won't be invariant, try to use + # it to deambguify the current variable + otheretypes = self.varsols[deambiguifier] + if deambiguifier.stinfo['typerel'] is None: + # if deambiguifier has no type restriction using 'is', + # don't record it + deambiguifier = None + elif isinstance(other, Constant) and other.uidtype: + otheretypes = (other.uidtype,) + deambiguifier = None + if otheretypes is not None: + # to restrict, we must check that for all type in othertypes, + # possible types on the other end of the relation are matching + # variable's possible types + rschema = self.rschema(rel.r_type) + if onlhs: + rtypefunc = rschema.subjects + else: + rtypefunc = rschema.objects + for otheretype in otheretypes: + reltypes = frozenset(rtypefunc(otheretype)) + if var.stinfo['possibletypes'] != reltypes: + return False + self.restrict(var, var.stinfo['possibletypes']) + self.deambification_map[var] = deambiguifier + return True + return False diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/schema2sql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/schema2sql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,300 @@ +# copyright 2004-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of cubicweb. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +"""write a schema as sql""" + +__docformat__ = "restructuredtext en" + +from hashlib import md5 + +from six import string_types +from six.moves import range + +from yams.constraints import (SizeConstraint, UniqueConstraint, Attribute, + NOW, TODAY) + +# default are usually not handled at the sql level. 
If you want them, set +# SET_DEFAULT to True +SET_DEFAULT = False + +def rschema_has_table(rschema, skip_relations): + """Return True if the given schema should have a table in the database""" + return not (rschema.final or rschema.inlined or rschema.rule or rschema.type in skip_relations) + + +def schema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''): + """write to the output stream a SQL schema to store the objects + corresponding to the given schema + """ + output = [] + w = output.append + for etype in sorted(schema.entities()): + eschema = schema.eschema(etype) + if eschema.final or eschema.type in skip_entities: + continue + w(eschema2sql(dbhelper, eschema, skip_relations, prefix=prefix)) + for rtype in sorted(schema.relations()): + rschema = schema.rschema(rtype) + if rschema_has_table(rschema, skip_relations): + w(rschema2sql(rschema)) + return '\n'.join(output) + + +def dropschema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''): + """write to the output stream a SQL schema to store the objects + corresponding to the given schema + """ + output = [] + w = output.append + for etype in sorted(schema.entities()): + eschema = schema.eschema(etype) + if eschema.final or eschema.type in skip_entities: + continue + stmts = dropeschema2sql(dbhelper, eschema, skip_relations, prefix=prefix) + for stmt in stmts: + w(stmt) + for rtype in sorted(schema.relations()): + rschema = schema.rschema(rtype) + if rschema_has_table(rschema, skip_relations): + w(droprschema2sql(rschema)) + return '\n'.join(output) + + +def eschema_attrs(eschema, skip_relations): + attrs = [attrdef for attrdef in eschema.attribute_definitions() + if not attrdef[0].type in skip_relations] + attrs += [(rschema, None) + for rschema in eschema.subject_relations() + if not rschema.final and rschema.inlined] + return attrs + +def unique_index_name(eschema, columns): + return u'unique_%s' % md5((eschema.type + + ',' + + ','.join(sorted(columns))).encode('ascii')).hexdigest() + +def iter_unique_index_names(eschema): + for columns in eschema._unique_together or (): + yield columns, unique_index_name(eschema, columns) + +def dropeschema2sql(dbhelper, eschema, skip_relations=(), prefix=''): + """return sql to drop an entity type's table""" + # not necessary to drop indexes, that's implictly done when + # dropping the table, but we need to drop SQLServer views used to + # create multicol unique indices + statements = [] + tablename = prefix + eschema.type + if eschema._unique_together is not None: + for columns, index_name in iter_unique_index_names(eschema): + cols = ['%s%s' % (prefix, col) for col in columns] + sqls = dbhelper.sqls_drop_multicol_unique_index(tablename, cols, index_name) + statements += sqls + statements += ['DROP TABLE %s;' % (tablename)] + return statements + + +def eschema2sql(dbhelper, eschema, skip_relations=(), prefix=''): + """write an entity schema as SQL statements to stdout""" + output = [] + w = output.append + table = prefix + eschema.type + w('CREATE TABLE %s(' % (table)) + attrs = eschema_attrs(eschema, skip_relations) + # XXX handle objectinline physical mode + for i in range(len(attrs)): + rschema, attrschema = attrs[i] + if attrschema is not None: + sqltype = aschema2sql(dbhelper, eschema, rschema, attrschema, + indent=' ') + else: # inline relation + sqltype = 'integer REFERENCES entities (eid)' + if i == len(attrs) - 1: + w(' %s%s %s' % (prefix, rschema.type, sqltype)) + else: + w(' %s%s %s,' % (prefix, rschema.type, sqltype)) + for rschema, aschema in 
attrs: + if aschema is None: # inline relation + continue + attr = rschema.type + rdef = rschema.rdef(eschema.type, aschema.type) + for constraint in rdef.constraints: + cstrname, check = check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=prefix) + if cstrname is not None: + w(', CONSTRAINT %s CHECK(%s)' % (cstrname, check)) + w(');') + # create indexes + for i in range(len(attrs)): + rschema, attrschema = attrs[i] + if attrschema is None or eschema.rdef(rschema).indexed: + w(dbhelper.sql_create_index(table, prefix + rschema.type)) + for columns, index_name in iter_unique_index_names(eschema): + cols = ['%s%s' % (prefix, col) for col in columns] + sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, index_name) + for sql in sqls: + w(sql) + w('') + return '\n'.join(output) + +def as_sql(value, dbhelper, prefix): + if isinstance(value, Attribute): + return prefix + value.attr + elif isinstance(value, TODAY): + return dbhelper.sql_current_date() + elif isinstance(value, NOW): + return dbhelper.sql_current_timestamp() + else: + # XXX more quoting for literals? + return value + +def check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=''): + # XXX should find a better name + cstrname = 'cstr' + md5((eschema.type + attr + constraint.type() + + (constraint.serialize() or '')).encode('ascii')).hexdigest() + if constraint.type() == 'BoundaryConstraint': + value = as_sql(constraint.boundary, dbhelper, prefix) + return cstrname, '%s%s %s %s' % (prefix, attr, constraint.operator, value) + elif constraint.type() == 'IntervalBoundConstraint': + condition = [] + if constraint.minvalue is not None: + value = as_sql(constraint.minvalue, dbhelper, prefix) + condition.append('%s%s >= %s' % (prefix, attr, value)) + if constraint.maxvalue is not None: + value = as_sql(constraint.maxvalue, dbhelper, prefix) + condition.append('%s%s <= %s' % (prefix, attr, value)) + return cstrname, ' AND '.join(condition) + elif constraint.type() == 'StaticVocabularyConstraint': + sample = next(iter(constraint.vocabulary())) + if not isinstance(sample, string_types): + values = ', '.join(str(word) for word in constraint.vocabulary()) + else: + # XXX better quoting? 
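+            # e.g. (illustrative) a static vocabulary ('todo', 'done') on an
+            # attribute `state` yields the check expression
+            # "cw_state IN ('todo', 'done')", assuming the usual 'cw_' column
+            # prefix.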
+ values = ', '.join("'%s'" % word.replace("'", "''") for word in constraint.vocabulary()) + return cstrname, '%s%s IN (%s)' % (prefix, attr, values) + return None, None + +def aschema2sql(dbhelper, eschema, rschema, aschema, creating=True, indent=''): + """write an attribute schema as SQL statements to stdout""" + attr = rschema.type + rdef = rschema.rdef(eschema.type, aschema.type) + sqltype = type_from_rdef(dbhelper, rdef, creating) + if SET_DEFAULT: + default = eschema.default(attr) + if default is not None: + if aschema.type == 'Boolean': + sqltype += ' DEFAULT %s' % dbhelper.boolean_value(default) + elif aschema.type == 'String': + sqltype += ' DEFAULT %r' % str(default) + elif aschema.type in ('Int', 'BigInt', 'Float'): + sqltype += ' DEFAULT %s' % default + # XXX ignore default for other type + # this is expected for NOW / TODAY + if creating: + if rdef.uid: + sqltype += ' PRIMARY KEY REFERENCES entities (eid)' + elif rdef.cardinality[0] == '1': + # don't set NOT NULL if backend isn't able to change it later + if dbhelper.alter_column_support: + sqltype += ' NOT NULL' + # else we're getting sql type to alter a column, we don't want key / indexes + # / null modifiers + return sqltype + + +def type_from_rdef(dbhelper, rdef, creating=True): + """return a sql type string corresponding to the relation definition""" + constraints = list(rdef.constraints) + unique, sqltype = False, None + if rdef.object.type == 'String': + for constraint in constraints: + if isinstance(constraint, SizeConstraint): + if constraint.max is not None: + size_constrained_string = dbhelper.TYPE_MAPPING.get( + 'SizeConstrainedString', 'varchar(%s)') + sqltype = size_constrained_string % constraint.max + elif isinstance(constraint, UniqueConstraint): + unique = True + if sqltype is None: + sqltype = sql_type(dbhelper, rdef) + if creating and unique: + sqltype += ' UNIQUE' + return sqltype + + +def sql_type(dbhelper, rdef): + sqltype = dbhelper.TYPE_MAPPING[rdef.object] + if callable(sqltype): + sqltype = sqltype(rdef) + return sqltype + + +_SQL_SCHEMA = """ +CREATE TABLE %(table)s ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT %(table)s_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX %(table)s_from_idx ON %(table)s(eid_from); +CREATE INDEX %(table)s_to_idx ON %(table)s(eid_to);""" + + +def rschema2sql(rschema): + assert not rschema.rule + return _SQL_SCHEMA % {'table': '%s_relation' % rschema.type} + + +def droprschema2sql(rschema): + """return sql to drop a relation type's table""" + # not necessary to drop indexes, that's implictly done when dropping + # the table + return 'DROP TABLE %s_relation;' % rschema.type + + +def grant_schema(schema, user, set_owner=True, skip_entities=(), prefix=''): + """write to the output stream a SQL schema to store the objects + corresponding to the given schema + """ + output = [] + w = output.append + for etype in sorted(schema.entities()): + eschema = schema.eschema(etype) + if eschema.final or etype in skip_entities: + continue + w(grant_eschema(eschema, user, set_owner, prefix=prefix)) + for rtype in sorted(schema.relations()): + rschema = schema.rschema(rtype) + if rschema_has_table(rschema, skip_relations=()): # XXX skip_relations should be specified + w(grant_rschema(rschema, user, set_owner)) + return '\n'.join(output) + + +def grant_eschema(eschema, user, set_owner=True, prefix=''): + output = [] + w = output.append + etype = eschema.type + if set_owner: + w('ALTER TABLE %s%s OWNER TO 
%s;' % (prefix, etype, user)) + w('GRANT ALL ON %s%s TO %s;' % (prefix, etype, user)) + return '\n'.join(output) + + +def grant_rschema(rschema, user, set_owner=True): + output = [] + if set_owner: + output.append('ALTER TABLE %s_relation OWNER TO %s;' % (rschema.type, user)) + output.append('GRANT ALL ON %s_relation TO %s;' % (rschema.type, user)) + return '\n'.join(output) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/schemaserial.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/schemaserial.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,656 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""functions for schema / permissions (de)serialization using RQL""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import os +import json +import sys + +from six import PY2, text_type, string_types + +from logilab.common.shellutils import ProgressBar, DummyProgressBar + +from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo + +from cubicweb import Binary +from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP, + VIRTUAL_RTYPES) +from cubicweb.server import sqlutils, schema2sql as y2sql + + +def group_mapping(cnx, interactive=True): + """create a group mapping from an rql cursor + + A group mapping has standard group names as key (managers, owners at least) + and the actual CWGroup entity's eid as associated value. + In interactive mode (the default), missing groups'eid will be prompted + from the user. 
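+
+    A minimal illustration of the expected result (the eids are instance
+    specific, the values below are arbitrary examples):
+
+        group_mapping(cnx, interactive=False)
+        -> {'managers': 19, 'users': 20, 'guests': 21, 'owners': 22}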
+ """ + res = {} + for eid, name in cnx.execute('Any G, N WHERE G is CWGroup, G name N', + build_descr=False): + res[name] = eid + if not interactive: + return res + missing = [g for g in ('owners', 'managers', 'users', 'guests') if not g in res] + if missing: + print('some native groups are missing but the following groups have been found:') + print('\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items())) + print() + print('enter the eid of a to group to map to each missing native group') + print('or just type enter to skip permissions granted to a group') + for group in missing: + while True: + value = raw_input('eid for group %s: ' % group).strip() + if not value: + continue + try: + eid = int(value) + except ValueError: + print('eid should be an integer') + continue + for eid_ in res.values(): + if eid == eid_: + break + else: + print('eid is not a group eid') + continue + res[name] = eid + break + return res + +def cstrtype_mapping(cnx): + """cached constraint types mapping""" + map = dict(cnx.execute('Any T, X WHERE X is CWConstraintType, X name T')) + return map + +# schema / perms deserialization ############################################## + +def deserialize_schema(schema, cnx): + """return a schema according to information stored in an rql database + as CWRType and CWEType entities + """ + repo = cnx.repo + dbhelper = repo.system_source.dbhelper + + # Computed Rtype + with cnx.ensure_cnx_set: + tables = set(t.lower() for t in dbhelper.list_tables(cnx.cnxset.cu)) + has_computed_relations = 'cw_cwcomputedrtype' in tables + # computed attribute + try: + cnx.system_sql("SELECT cw_formula FROM cw_CWAttribute") + has_computed_attributes = True + except Exception: + cnx.rollback() + has_computed_attributes = False + + # XXX bw compat (3.6 migration) + sqlcu = cnx.system_sql("SELECT * FROM cw_CWRType WHERE cw_name='symetric'") + if sqlcu.fetchall(): + sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric', + dbhelper.TYPE_MAPPING['Boolean'], True) + sqlcu.execute(sql) + sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'") + cnx.commit() + ertidx = {} + copiedeids = set() + permsidx = deserialize_ertype_permissions(cnx) + schema.reading_from_database = True + # load every entity types + for eid, etype, desc in cnx.execute( + 'Any X, N, D WHERE X is CWEType, X name N, X description D', + build_descr=False): + # base types are already in the schema, skip them + if etype in schemamod.BASE_TYPES: + # just set the eid + eschema = schema.eschema(etype) + eschema.eid = eid + ertidx[eid] = etype + continue + if etype in ETYPE_NAME_MAP: + needcopy = False + netype = ETYPE_NAME_MAP[etype] + # can't use write rql queries at this point, use raw sql + sqlexec = cnx.system_sql + if sqlexec('SELECT 1 FROM %(p)sCWEType WHERE %(p)sname=%%(n)s' + % {'p': sqlutils.SQL_PREFIX}, {'n': netype}).fetchone(): + # the new type already exists, we should copy (eg make existing + # instances of the old type instances of the new type) + assert etype.lower() != netype.lower() + needcopy = True + else: + # the new type doesn't exist, we should rename + sqlexec('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' + % {'p': sqlutils.SQL_PREFIX}, {'x': eid, 'n': netype}) + if etype.lower() != netype.lower(): + alter_table_sql = dbhelper.sql_rename_table(sqlutils.SQL_PREFIX+etype, + sqlutils.SQL_PREFIX+netype) + sqlexec(alter_table_sql) + sqlexec('UPDATE entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) + cnx.commit(False) + tocleanup 
= [eid] + tocleanup += (eid for eid, cached in repo._type_source_cache.items() + if etype == cached[0]) + repo.clear_caches(tocleanup) + cnx.commit(False) + if needcopy: + ertidx[eid] = netype + copiedeids.add(eid) + # copy / CWEType entity removal expected to be done through + # rename_entity_type in a migration script + continue + etype = netype + ertidx[eid] = etype + eschema = schema.add_entity_type( + ybo.EntityType(name=etype, description=desc, eid=eid)) + set_perms(eschema, permsidx) + # load inheritance relations + for etype, stype in cnx.execute( + 'Any XN, ETN WHERE X is CWEType, X name XN, X specializes ET, ET name ETN', + build_descr=False): + etype = ETYPE_NAME_MAP.get(etype, etype) + stype = ETYPE_NAME_MAP.get(stype, stype) + schema.eschema(etype)._specialized_type = stype + schema.eschema(stype)._specialized_by.append(etype) + if has_computed_relations: + rset = cnx.execute( + 'Any X, N, R, D WHERE X is CWComputedRType, X name N, ' + 'X rule R, X description D') + for eid, rule_name, rule, description in rset.rows: + rtype = ybo.ComputedRelation(name=rule_name, rule=rule, eid=eid, + description=description) + rschema = schema.add_relation_type(rtype) + set_perms(rschema, permsidx) + # load every relation types + for eid, rtype, desc, sym, il, ftc in cnx.execute( + 'Any X,N,D,S,I,FTC WHERE X is CWRType, X name N, X description D, ' + 'X symmetric S, X inlined I, X fulltext_container FTC', build_descr=False): + ertidx[eid] = rtype + rschema = schema.add_relation_type( + ybo.RelationType(name=rtype, description=desc, + symmetric=bool(sym), inlined=bool(il), + fulltext_container=ftc, eid=eid)) + # remains to load every relation definitions (ie relations and attributes) + cstrsidx = deserialize_rdef_constraints(cnx) + pendingrdefs = [] + # closure to factorize common code of attribute/relation rdef addition + def _add_rdef(rdefeid, seid, reid, oeid, **kwargs): + rdef = ybo.RelationDefinition(ertidx[seid], ertidx[reid], ertidx[oeid], + constraints=cstrsidx.get(rdefeid, ()), + eid=rdefeid, **kwargs) + if seid in copiedeids or oeid in copiedeids: + # delay addition of this rdef. We'll insert them later if needed. We + # have to do this because: + # + # * on etype renaming, we want relation of the old entity type being + # redirected to the new type during migration + # + # * in the case of a copy, we've to take care that rdef already + # existing in the schema are not overwritten by a redirected one, + # since we want correct eid on them (redirected rdef will be + # removed in rename_entity_type) + pendingrdefs.append(rdef) + else: + # add_relation_def return a RelationDefinitionSchema if it has been + # actually added (can be None on duplicated relation definitions, + # e.g. if the relation type is marked as beeing symmetric) + rdefs = schema.add_relation_def(rdef) + if rdefs is not None: + ertidx[rdefeid] = rdefs + set_perms(rdefs, permsidx) + # Get the type parameters for additional base types. 
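+    # illustration only: extra_props maps a CWAttribute eid to a Binary whose
+    # payload is a JSON dict of non-standard rdef properties, as written by
+    # _rdef_values below, e.g. b'{"unit": "ms"}' for a hypothetical 'unit'
+    # property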
+ try: + extra_props = dict(cnx.execute('Any X, XTP WHERE X is CWAttribute, ' + 'X extra_props XTP')) + except Exception: + cnx.critical('Previous CRITICAL notification about extra_props is not ' + 'a problem if you are migrating to cubicweb 3.17') + extra_props = {} # not yet in the schema (introduced by 3.17 migration) + + # load attributes + rql = ('Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT%(fm)s ' + 'WHERE X is CWAttribute, X relation_type RT, X cardinality CARD,' + ' X ordernum ORD, X indexed IDX, X description DESC, ' + ' X internationalizable I18N, X defaultval DFLT,%(fmsnip)s' + ' X fulltextindexed FTIDX, X from_entity SE, X to_entity OE') + if has_computed_attributes: + rql = rql % {'fm': ',FM', 'fmsnip': 'X formula FM,'} + else: + rql = rql % {'fm': '', 'fmsnip': ''} + for values in cnx.execute(rql, build_descr=False): + attrs = dict(zip( + ('rdefeid', 'seid', 'reid', 'oeid', 'cardinality', + 'order', 'description', 'indexed', 'fulltextindexed', + 'internationalizable', 'default', 'formula'), values)) + typeparams = extra_props.get(attrs['rdefeid']) + attrs.update(json.loads(typeparams.getvalue().decode('ascii')) if typeparams else {}) + default = attrs['default'] + if default is not None: + if isinstance(default, Binary): + # while migrating from 3.17 to 3.18, we still have to + # handle String defaults + attrs['default'] = default.unzpickle() + _add_rdef(**attrs) + # load relations + for values in cnx.execute( + 'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,' + 'X cardinality CARD, X ordernum ORD, X description DESC, ' + 'X from_entity SE, X to_entity OE, X composite C', build_descr=False): + rdefeid, seid, reid, oeid, card, ord, desc, comp = values + _add_rdef(rdefeid, seid, reid, oeid, + cardinality=card, description=desc, order=ord, + composite=comp) + for rdef in pendingrdefs: + try: + rdefs = schema.add_relation_def(rdef) + except BadSchemaDefinition: + continue + if rdefs is not None: + set_perms(rdefs, permsidx) + unique_togethers = {} + rset = cnx.execute( + 'Any X,E,R WHERE ' + 'X is CWUniqueTogetherConstraint, ' + 'X constraint_of E, X relations R', build_descr=False) + for values in rset: + uniquecstreid, eeid, releid = values + eschema = schema.schema_by_eid(eeid) + relations = unique_togethers.setdefault(uniquecstreid, (eschema, [])) + rel = ertidx[releid] + if isinstance(rel, schemamod.RelationDefinitionSchema): + # not yet migrated 3.9 database ('relations' target type changed + # to CWRType in 3.10) + rtype = rel.rtype.type + else: + rtype = str(rel) + relations[1].append(rtype) + for eschema, unique_together in unique_togethers.values(): + eschema._unique_together.append(tuple(sorted(unique_together))) + schema.infer_specialization_rules() + cnx.commit() + schema.finalize() + schema.reading_from_database = False + + +def deserialize_ertype_permissions(cnx): + """return sect action:groups associations for the given + entity or relation schema with its eid, according to schema's + permissions stored in the database as [read|add|delete|update]_permission + relations between CWEType/CWRType and CWGroup entities + """ + res = {} + for action in ('read', 'add', 'update', 'delete'): + rql = 'Any E,N WHERE G is CWGroup, G name N, E %s_permission G' % action + for eid, gname in cnx.execute(rql, build_descr=False): + res.setdefault(eid, {}).setdefault(action, []).append(gname) + rql = ('Any E,X,EXPR,V WHERE X is RQLExpression, X expression EXPR, ' + 'E %s_permission X, X mainvars V' % action) + for eid, expreid, expr, mainvars in 
cnx.execute(rql, build_descr=False): + # we don't know yet if it's a rql expr for an entity or a relation, + # so append a tuple to differentiate from groups and so we'll be + # able to instantiate it later + res.setdefault(eid, {}).setdefault(action, []).append( (expr, mainvars, expreid) ) + return res + +def deserialize_rdef_constraints(cnx): + """return the list of relation definition's constraints as instances""" + res = {} + for rdefeid, ceid, ct, val in cnx.execute( + 'Any E, X,TN,V WHERE E constrained_by X, X is CWConstraint, ' + 'X cstrtype T, T name TN, X value V', build_descr=False): + cstr = CONSTRAINTS[ct].deserialize(val) + cstr.eid = ceid + res.setdefault(rdefeid, []).append(cstr) + return res + +def set_perms(erschema, permsidx): + """set permissions on the given erschema according to the permission + definition dictionary as built by deserialize_ertype_permissions for a + given erschema's eid + """ + # reset erschema permissions here to avoid getting yams default anyway + erschema.permissions = dict((action, ()) for action in erschema.ACTIONS) + try: + thispermsdict = permsidx[erschema.eid] + except KeyError: + return + for action, somethings in thispermsdict.items(): + erschema.permissions[action] = tuple( + isinstance(p, tuple) and erschema.rql_expression(*p) or p + for p in somethings) + + +# schema / perms serialization ################################################ + +def serialize_schema(cnx, schema): + """synchronize schema and permissions in the database according to + current schema + """ + _title = '-> storing the schema in the database ' + print(_title, end=' ') + execute = cnx.execute + eschemas = schema.entities() + pb_size = (len(eschemas + schema.relations()) + + len(CONSTRAINTS) + + len([x for x in eschemas if x.specializes()])) + if sys.stdout.isatty(): + pb = ProgressBar(pb_size, title=_title) + else: + pb = DummyProgressBar() + groupmap = group_mapping(cnx, interactive=False) + # serialize all entity types, assuring CWEType is serialized first for proper + # is / is_instance_of insertion + eschemas.remove(schema.eschema('CWEType')) + eschemas.insert(0, schema.eschema('CWEType')) + for eschema in eschemas: + execschemarql(execute, eschema, eschema2rql(eschema, groupmap)) + pb.update() + # serialize constraint types + cstrtypemap = {} + rql = 'INSERT CWConstraintType X: X name %(ct)s' + for cstrtype in CONSTRAINTS: + cstrtypemap[cstrtype] = execute(rql, {'ct': text_type(cstrtype)}, + build_descr=False)[0][0] + pb.update() + # serialize relations + for rschema in schema.relations(): + # skip virtual relations such as eid, has_text and identity + if rschema in VIRTUAL_RTYPES: + pb.update() + continue + if rschema.rule: + execschemarql(execute, rschema, crschema2rql(rschema, groupmap)) + pb.update() + continue + execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False)) + if rschema.symmetric: + rdefs = [rdef for k, rdef in rschema.rdefs.items() + if (rdef.subject, rdef.object) == k] + else: + rdefs = rschema.rdefs.values() + for rdef in rdefs: + execschemarql(execute, rdef, + rdef2rql(rdef, cstrtypemap, groupmap)) + pb.update() + # serialize unique_together constraints + for eschema in eschemas: + if eschema._unique_together: + execschemarql(execute, eschema, uniquetogether2rqls(eschema)) + # serialize yams inheritance relationships + for rql, kwargs in specialize2rql(schema): + execute(rql, kwargs, build_descr=False) + pb.update() + print() + + +# high level serialization functions + +def execschemarql(execute, schema, rqls): + for rql, kwargs in 
rqls: + kwargs['x'] = schema.eid + rset = execute(rql, kwargs, build_descr=False) + if schema.eid is None: + schema.eid = rset[0][0] + else: + assert rset + +def erschema2rql(erschema, groupmap): + if isinstance(erschema, schemamod.EntitySchema): + return eschema2rql(erschema, groupmap=groupmap) + return rschema2rql(erschema, groupmap=groupmap) + +def specialize2rql(schema): + for eschema in schema.entities(): + if eschema.final: + continue + for rql, kwargs in eschemaspecialize2rql(eschema): + yield rql, kwargs + +# etype serialization + +def eschema2rql(eschema, groupmap=None): + """return a list of rql insert statements to enter an entity schema + in the database as an CWEType entity + """ + relations, values = eschema_relations_values(eschema) + # NOTE: 'specializes' relation can't be inserted here since there's no + # way to make sure the parent type is inserted before the child type + yield 'INSERT CWEType X: %s' % ','.join(relations) , values + # entity permissions + if groupmap is not None: + for rql, args in _erperms2rql(eschema, groupmap): + yield rql, args + +def eschema_relations_values(eschema): + values = _ervalues(eschema) + relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] + return relations, values + +def eschemaspecialize2rql(eschema): + specialized_type = eschema.specializes() + if specialized_type: + values = {'x': eschema.eid, 'et': specialized_type.eid} + yield 'SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', values + +def uniquetogether2rqls(eschema): + rql_args = [] + # robustness against duplicated CWUniqueTogetherConstraint (pre 3.18) + columnset = set() + for columns in eschema._unique_together: + if columns in columnset: + print('schemaserial: skipping duplicate unique together %r %r' % + (eschema.type, columns)) + continue + columnset.add(columns) + rql, args = _uniquetogether2rql(eschema, columns) + args['name'] = y2sql.unique_index_name(eschema, columns) + rql_args.append((rql, args)) + return rql_args + +def _uniquetogether2rql(eschema, unique_together): + relations = [] + restrictions = [] + substs = {} + for i, name in enumerate(unique_together): + rschema = eschema.schema.rschema(name) + rtype = 'T%d' % i + substs[rtype] = text_type(rschema.type) + relations.append('C relations %s' % rtype) + restrictions.append('%(rtype)s name %%(%(rtype)s)s' % {'rtype': rtype}) + relations = ', '.join(relations) + restrictions = ', '.join(restrictions) + rql = ('INSERT CWUniqueTogetherConstraint C: C name %%(name)s, C constraint_of X, %s ' + 'WHERE X eid %%(x)s, %s') + return rql % (relations, restrictions), substs + + +def _ervalues(erschema): + try: + type_ = text_type(erschema.type) + except UnicodeDecodeError as e: + raise Exception("can't decode %s [was %s]" % (erschema.type, e)) + try: + desc = text_type(erschema.description) or u'' + except UnicodeDecodeError as e: + raise Exception("can't decode %s [was %s]" % (erschema.description, e)) + return { + 'name': type_, + 'final': erschema.final, + 'description': desc, + } + +# rtype serialization + +def rschema2rql(rschema, cstrtypemap=None, addrdef=True, groupmap=None): + """generate rql insert statements to enter a relation schema + in the database as an CWRType entity + """ + if rschema.type == 'has_text': + return + relations, values = rschema_relations_values(rschema) + yield 'INSERT CWRType X: %s' % ','.join(relations), values + if addrdef: + assert cstrtypemap + # sort for testing purpose + for rdef in sorted(rschema.rdefs.values(), + key=lambda x: (x.subject, x.object)): + for 
rql, values in rdef2rql(rdef, cstrtypemap, groupmap): + yield rql, values + +def rschema_relations_values(rschema): + values = _ervalues(rschema) + values['final'] = rschema.final + values['symmetric'] = rschema.symmetric + values['inlined'] = rschema.inlined + if PY2 and isinstance(rschema.fulltext_container, str): + values['fulltext_container'] = unicode(rschema.fulltext_container) + else: + values['fulltext_container'] = rschema.fulltext_container + relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] + return relations, values + +def crschema2rql(crschema, groupmap): + relations, values = crschema_relations_values(crschema) + yield 'INSERT CWComputedRType X: %s' % ','.join(relations), values + if groupmap: + for rql, args in _erperms2rql(crschema, groupmap): + yield rql, args + +def crschema_relations_values(crschema): + values = _ervalues(crschema) + values['rule'] = text_type(crschema.rule) + # XXX why oh why? + del values['final'] + relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] + return relations, values + +# rdef serialization + +def rdef2rql(rdef, cstrtypemap, groupmap=None): + # don't serialize inferred relations + if rdef.infered: + return + relations, values = _rdef_values(rdef) + relations.append('X relation_type ER,X from_entity SE,X to_entity OE') + values.update({'se': rdef.subject.eid, 'rt': rdef.rtype.eid, 'oe': rdef.object.eid}) + if rdef.final: + etype = 'CWAttribute' + else: + etype = 'CWRelation' + yield 'INSERT %s X: %s WHERE SE eid %%(se)s,ER eid %%(rt)s,OE eid %%(oe)s' % ( + etype, ','.join(relations), ), values + for rql, values in constraints2rql(cstrtypemap, rdef.constraints): + yield rql, values + # no groupmap means "no security insertion" + if groupmap: + for rql, args in _erperms2rql(rdef, groupmap): + yield rql, args + +_IGNORED_PROPS = ['eid', 'constraints', 'uid', 'infered', 'permissions'] + +def _rdef_values(rdef): + amap = {'order': 'ordernum', 'default': 'defaultval'} + values = {} + extra = {} + for prop in rdef.rproperty_defs(rdef.object): + if prop in _IGNORED_PROPS: + continue + value = getattr(rdef, prop) + if prop not in KNOWN_RPROPERTIES: + extra[prop] = value + continue + # XXX type cast really necessary? 
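+        # for instance (hypothetical values): indexed=1 is stored as True and a
+        # String default u'todo' ends up below as defaultval = Binary.zpickle(u'todo')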
+ if prop in ('indexed', 'fulltextindexed', 'internationalizable'): + value = bool(value) + elif prop == 'ordernum': + value = int(value) + elif PY2 and isinstance(value, str): + value = unicode(value) + if value is not None and prop == 'default': + value = Binary.zpickle(value) + values[amap.get(prop, prop)] = value + if extra: + values['extra_props'] = Binary(json.dumps(extra).encode('ascii')) + relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] + return relations, values + +def constraints2rql(cstrtypemap, constraints, rdefeid=None): + for constraint in constraints: + values = {'ct': cstrtypemap[constraint.type()], + 'value': text_type(constraint.serialize()), + 'x': rdefeid} # when not specified, will have to be set by the caller + yield 'INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \ +CT eid %(ct)s, EDEF eid %(x)s', values + + +def _erperms2rql(erschema, groupmap): + """return rql insert statements to enter the entity or relation + schema's permissions in the database as + [read|add|delete|update]_permission relations between CWEType/CWRType + and CWGroup entities + """ + for action in erschema.ACTIONS: + try: + grantedto = erschema.action_permissions(action) + except KeyError: + # may occurs when modifying persistent schema + continue + for group_or_rqlexpr in grantedto: + if isinstance(group_or_rqlexpr, string_types): + # group + try: + yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action, + {'g': groupmap[group_or_rqlexpr]}) + except KeyError: + print("WARNING: group %s used in permissions for %s was ignored because it doesn't exist." + " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema)) + continue + else: + # rqlexpr + rqlexpr = group_or_rqlexpr + yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, ' + 'E mainvars %%(v)s, X %s_permission E WHERE X eid %%(x)s' % action, + {'e': text_type(rqlexpr.expression), + 'v': text_type(','.join(sorted(rqlexpr.mainvars))), + 't': text_type(rqlexpr.__class__.__name__)}) + +# update functions + +def updateeschema2rql(eschema, eid): + relations, values = eschema_relations_values(eschema) + values['x'] = eid + yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values + +def updaterschema2rql(rschema, eid): + if rschema.rule: + yield ('SET X rule %(r)s WHERE X eid %(x)s', + {'x': eid, 'r': text_type(rschema.rule)}) + else: + relations, values = rschema_relations_values(rschema) + values['x'] = eid + yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values + +def updaterdef2rql(rdef, eid): + relations, values = _rdef_values(rdef) + values['x'] = eid + yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/serverconfig.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/serverconfig.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,350 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""server.serverconfig definition""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +from os.path import join, exists + +from six.moves import StringIO + +import logilab.common.configuration as lgconfig +from logilab.common.decorators import cached + +from cubicweb.toolsutils import read_config, restrict_perms_to_user +from cubicweb.cwconfig import CONFIGURATIONS, CubicWebConfiguration +from cubicweb.server import SOURCE_TYPES + + +USER_OPTIONS = ( + ('login', {'type' : 'string', + 'default': 'admin', + 'help': "cubicweb manager account's login " + '(this user will be created)', + 'level': 0, + }), + ('password', {'type' : 'password', + 'default': lgconfig.REQUIRED, + 'help': "cubicweb manager account's password", + 'level': 0, + }), + ) + +class SourceConfiguration(lgconfig.Configuration): + def __init__(self, appconfig, options): + self.appconfig = appconfig # has to be done before super call + super(SourceConfiguration, self).__init__(options=options) + + # make Method('default_instance_id') usable in db option defs (in native.py) + def default_instance_id(self): + return self.appconfig.appid + + def input_option(self, option, optdict, inputlevel): + try: + dbdriver = self['db-driver'] + except lgconfig.OptionError: + pass + else: + if dbdriver == 'sqlite': + if option in ('db-user', 'db-password'): + return + if option == 'db-name': + optdict = optdict.copy() + optdict['help'] = 'path to the sqlite database' + optdict['default'] = join(self.appconfig.appdatahome, + self.appconfig.appid + '.sqlite') + super(SourceConfiguration, self).input_option(option, optdict, inputlevel) + + + +def ask_source_config(appconfig, type, inputlevel=0): + options = SOURCE_TYPES[type].options + sconfig = SourceConfiguration(appconfig, options=options) + sconfig.input_config(inputlevel=inputlevel) + return sconfig + +def generate_source_config(sconfig, encoding=sys.stdin.encoding): + """serialize a repository source configuration as text""" + stream = StringIO() + optsbysect = list(sconfig.options_by_section()) + assert len(optsbysect) == 1, ( + 'all options for a source should be in the same group, got %s' + % [x[0] for x in optsbysect]) + lgconfig.ini_format(stream, optsbysect[0][1], encoding) + return stream.getvalue() + + +class ServerConfiguration(CubicWebConfiguration): + """standalone RQL server""" + name = 'repository' + + cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(['sobjects', 'hooks']) + cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['sobjects', 'hooks']) + + options = lgconfig.merge_options(( + # ctl configuration + ('host', + {'type' : 'string', + 'default': None, + 'help': 'host name if not correctly detectable through gethostname', + 'group': 'main', 'level': 1, + }), + ('pid-file', + {'type' : 'string', + 'default': lgconfig.Method('default_pid_file'), + 'help': 'repository\'s pid file', + 'group': 'main', 'level': 2, + }), + ('uid', + {'type' : 'string', + 'default': None, + 'help': 'if this option is set, use the specified user to start \ +the repository rather than the user running the command', + 'group': 'main', 'level': 
(CubicWebConfiguration.mode == 'installed') and 0 or 1, + }), + ('cleanup-session-time', + {'type' : 'time', + 'default': '24h', + 'help': 'duration of inactivity after which a session ' + 'will be closed, to limit memory consumption (avoid sessions that ' + 'never expire and cause memory leaks when http-session-time is 0, or ' + 'because of bad clients that never close their connection). ' + 'So notice that even if http-session-time is 0 and the user doesn\'t ' + 'close his browser, he will have to reauthenticate after this time ' + 'of inactivity. Defaults to 24h.', + 'group': 'main', 'level': 3, + }), + ('connections-pool-size', + {'type' : 'int', + 'default': 4, + 'help': 'size of the connections pool. Each source supporting multiple \ +connections will have this number of opened connections.', + 'group': 'main', 'level': 3, + }), + ('rql-cache-size', + {'type' : 'int', + 'default': 3000, + 'help': 'size of the parsed rql cache.', + 'group': 'main', 'level': 3, + }), + ('undo-enabled', + {'type' : 'yn', 'default': False, + 'help': 'enable undo support', + 'group': 'main', 'level': 3, + }), + ('keep-transaction-lifetime', + {'type' : 'int', 'default': 7, + 'help': 'number of days during which transaction records should be \ +kept (hence undoable).', + 'group': 'main', 'level': 3, + }), + ('multi-sources-etypes', + {'type' : 'csv', 'default': (), + 'help': 'defines which entity types from this repository are used \ +by some other instances. You should set this properly for these instances to \ +detect updates / deletions.', + 'group': 'main', 'level': 3, + }), + + ('delay-full-text-indexation', + {'type' : 'yn', 'default': False, + 'help': 'When full text indexation of entities is too costly' + ' to be done when entities are added/modified by users, activate this ' + 'option and set up a job using cubicweb-ctl db-rebuild-fti on your ' + 'system (using cron for instance).', + 'group': 'main', 'level': 3, + }), + + # email configuration + ('default-recipients-mode', + {'type' : 'choice', + 'choices' : ('default-dest-addrs', 'users', 'none'), + 'default': 'default-dest-addrs', + 'help': 'when a notification should be sent with no specific rules \ +to find recipients, recipients will be found according to this mode. Available \ +modes are "default-dest-addrs" (emails specified in the configuration \ +variable with the same name), "users" (every user with an activated \ +account and an email set), "none" (no notification).', + 'group': 'email', 'level': 2, + }), + ('default-dest-addrs', + {'type' : 'csv', + 'default': (), + 'help': 'comma separated list of email addresses that will be used \ +as default recipients when an email is sent and the notification has no \ +specific recipient rules.', + 'group': 'email', 'level': 2, + }), + ('supervising-addrs', + {'type' : 'csv', + 'default': (), + 'help': 'comma separated list of email addresses that will be \ +notified of every change.', + 'group': 'email', 'level': 2, + }), + ('zmq-address-sub', + {'type' : 'csv', + 'default' : (), + 'help': ('List of ZMQ addresses to subscribe to (requires pyzmq) ' + '(of the form `tcp://<ipaddr>:<port>`)'), + 'group': 'zmq', 'level': 1, + }), + ('zmq-address-pub', + {'type' : 'string', + 'default' : None, + 'help': ('ZMQ address to use for publishing (requires pyzmq) ' + '(of the form `tcp://<ipaddr>:<port>`)'), + 'group': 'zmq', 'level': 1, + }), + ) + CubicWebConfiguration.options) + + # should we init the connections pool (eg connect to sources). This is + # usually necessary...
+ init_cnxset_pool = True + + # read the schema from the database + read_instance_schema = True + # set this to true to get a minimal repository, for instance to get cubes + # information on commands such as i18ninstance, db-restore, etc... + quick_start = False + # check user's state at login time + consider_user_state = True + + # should some hooks be deactivated during [pre|post]create script execution + free_wheel = False + + # list of enabled sources when sources restriction is necessary + # (eg repository initialization at least) + enabled_sources = None + + def bootstrap_cubes(self): + from logilab.common.textutils import splitstrip + with open(join(self.apphome, 'bootstrap_cubes')) as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + continue + self.init_cubes(self.expand_cubes(splitstrip(line))) + break + else: + # no cubes + self.init_cubes(()) + + def write_bootstrap_cubes_file(self, cubes): + stream = open(join(self.apphome, 'bootstrap_cubes'), 'w') + stream.write('# this is a generated file only used for bootstrapping\n') + stream.write('# you should not have to edit this\n') + stream.write('%s\n' % ','.join(cubes)) + stream.close() + + def sources_file(self): + return join(self.apphome, 'sources') + + # this method has to be cached since when the server is running using a + # restricted user, this user usually doesn't have access to the sources + # configuration file (#16102) + @cached + def read_sources_file(self): + """return a dictionary of values found in the sources file""" + return read_config(self.sources_file(), raise_if_unreadable=True) + + @property + def system_source_config(self): + return self.read_sources_file()['system'] + + @property + def default_admin_config(self): + return self.read_sources_file()['admin'] + + def source_enabled(self, source): + if self.sources_mode is not None: + if 'migration' in self.sources_mode: + assert len(self.sources_mode) == 1 + if source.connect_for_migration: + return True + print('not connecting to source', source.uri, 'during migration') + return False + if 'all' in self.sources_mode: + assert len(self.sources_mode) == 1 + return True + return source.uri in self.sources_mode + if self.quick_start: + return source.uri == 'system' + return (not source.disabled and ( + not self.enabled_sources or source.uri in self.enabled_sources)) + + def write_sources_file(self, sourcescfg): + """serialize the repository's sources configuration into an INI-like file""" + sourcesfile = self.sources_file() + if exists(sourcesfile): + import shutil + shutil.copy(sourcesfile, sourcesfile + '.bak') + stream = open(sourcesfile, 'w') + for section in ('admin', 'system'): + sconfig = sourcescfg[section] + if isinstance(sconfig, dict): + # get a Configuration object + assert section == 'system', '%r is not system' % section + _sconfig = SourceConfiguration( + self, options=SOURCE_TYPES['native'].options) + for attr, val in sconfig.items(): + try: + _sconfig.set_option(attr, val) + except lgconfig.OptionError: + # skip adapter, may be present on pre 3.10 instances + if attr != 'adapter': + self.error('skip unknown option %s in sources file' % attr) + sconfig = _sconfig + stream.write('[%s]\n%s\n' % (section, generate_source_config(sconfig))) + restrict_perms_to_user(sourcesfile) + + def load_schema(self, expand_cubes=False, **kwargs): + from cubicweb.schema import CubicWebSchemaLoader + if expand_cubes: + # in case some new dependencies have been introduced, we have to + # reinitialize cubes so the full filesystem schema is read +
origcubes = self.cubes() + self._cubes = None + self.init_cubes(self.expand_cubes(origcubes)) + schema = CubicWebSchemaLoader().load(self, **kwargs) + if expand_cubes: + # restore original value + self._cubes = origcubes + return schema + + def load_bootstrap_schema(self): + from cubicweb.schema import BootstrapSchemaLoader + schema = BootstrapSchemaLoader().load(self) + schema.name = 'bootstrap' + return schema + + sources_mode = None + def set_sources_mode(self, sources): + self.sources_mode = sources + + def migration_handler(self, schema=None, interactive=True, + cnx=None, repo=None, connect=True, verbosity=None): + """return a migration handler instance""" + from cubicweb.server.migractions import ServerMigrationHelper + if verbosity is None: + verbosity = getattr(self, 'verbosity', 0) + return ServerMigrationHelper(self, schema, interactive=interactive, + cnx=cnx, repo=repo, connect=connect, + verbosity=verbosity) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/serverctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/serverctl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1100 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-ctl commands and command handlers specific to the repository""" +from __future__ import print_function + +__docformat__ = 'restructuredtext en' + +# *ctl module should limit the number of import to be imported as quickly as +# possible (for cubicweb-ctl reactivity, necessary for instance for usable bash +# completion). So import locally in command helpers. 
+import sys +import os +from contextlib import contextmanager +import logging +import subprocess + +from six import string_types +from six.moves import input + +from logilab.common import nullobject +from logilab.common.configuration import Configuration, merge_options +from logilab.common.shellutils import ASK, generate_password + +from logilab.database import get_db_helper, get_connection + +from cubicweb import AuthenticationError, ExecutionError, ConfigurationError +from cubicweb.toolsutils import Command, CommandHandler, underline_title +from cubicweb.cwctl import CWCTL, check_options_consistency, ConfigureInstanceCommand +from cubicweb.server import SOURCE_TYPES +from cubicweb.server.serverconfig import ( + USER_OPTIONS, ServerConfiguration, SourceConfiguration, + ask_source_config, generate_source_config) + +# utility functions ########################################################### + +def source_cnx(source, dbname=None, special_privs=False, interactive=True): + """open and return a connection to the system database defined in the + given server.serverconfig + """ + from getpass import getpass + dbhost = source.get('db-host') + if dbname is None: + dbname = source['db-name'] + driver = source['db-driver'] + dbhelper = get_db_helper(driver) + if interactive: + print('-> connecting to %s database' % driver, end=' ') + if dbhost: + print('%s@%s' % (dbname, dbhost), end=' ') + else: + print(dbname, end=' ') + if dbhelper.users_support: + if not interactive or (not special_privs and source.get('db-user')): + user = source.get('db-user', os.environ.get('USER', '')) + if interactive: + print('as', user) + password = source.get('db-password') + else: + print() + if special_privs: + print('WARNING') + print ('the user will need the following special access rights ' + 'on the database:') + print(special_privs) + print() + default_user = source.get('db-user', os.environ.get('USER', '')) + user = input('Connect as user ? [%r]: ' % default_user) + user = user.strip() or default_user + if user == source.get('db-user'): + password = source.get('db-password') + else: + password = getpass('password: ') + else: + user = password = None + extra_args = source.get('db-extra-arguments') + extra = extra_args and {'extra_args': extra_args} or {} + cnx = get_connection(driver, dbhost, dbname, user, password=password, + port=source.get('db-port'), + schema=source.get('db-namespace'), + **extra) + try: + cnx.logged_user = user + except AttributeError: + # C object, __slots__ + from logilab.database import _SimpleConnectionWrapper + cnx = _SimpleConnectionWrapper(cnx) + cnx.logged_user = user + return cnx + +def system_source_cnx(source, dbms_system_base=False, + special_privs='CREATE/DROP DATABASE', interactive=True): + """shortcut to get a connextion to the instance system database + defined in the given config. 
If is True, + connect to the dbms system database instead (for task such as + create/drop the instance database) + """ + if dbms_system_base: + system_db = get_db_helper(source['db-driver']).system_database() + return source_cnx(source, system_db, special_privs=special_privs, + interactive=interactive) + return source_cnx(source, special_privs=special_privs, + interactive=interactive) + +def _db_sys_cnx(source, special_privs, interactive=True): + """return a connection on the RDMS system table (to create/drop a user or a + database) + """ + import logilab.common as lgp + lgp.USE_MX_DATETIME = False + driver = source['db-driver'] + helper = get_db_helper(driver) + # connect on the dbms system base to create our base + cnx = system_source_cnx(source, True, special_privs=special_privs, + interactive=interactive) + # disable autocommit (isolation_level(1)) because DROP and + # CREATE DATABASE can't be executed in a transaction + set_isolation_level = getattr(cnx, 'set_isolation_level', None) + if set_isolation_level is not None: + # set_isolation_level() is psycopg specific + set_isolation_level(0) + return cnx + +def repo_cnx(config): + """return a in-memory repository and a repoapi connection to it""" + from cubicweb import repoapi + from cubicweb.server.utils import manager_userpasswd + try: + login = config.default_admin_config['login'] + pwd = config.default_admin_config['password'] + except KeyError: + login, pwd = manager_userpasswd() + while True: + try: + repo = repoapi.get_repository(config=config) + cnx = repoapi.connect(repo, login, password=pwd) + return repo, cnx + except AuthenticationError: + print('-> Error: wrong user/password.') + # reset cubes else we'll have an assertion error on next retry + config._cubes = None + login, pwd = manager_userpasswd() + + +# repository specific command handlers ######################################## + +class RepositoryCreateHandler(CommandHandler): + cmdname = 'create' + cfgname = 'repository' + + def bootstrap(self, cubes, automatic=False, inputlevel=0): + """create an instance by copying files from the given cube and by asking + information necessary to build required configuration files + """ + config = self.config + if not automatic: + print(underline_title('Configuring the repository')) + config.input_config('email', inputlevel) + print('\n'+underline_title('Configuring the sources')) + sourcesfile = config.sources_file() + # hack to make Method('default_instance_id') usable in db option defs + # (in native.py) + sconfig = SourceConfiguration(config, + options=SOURCE_TYPES['native'].options) + if not automatic: + sconfig.input_config(inputlevel=inputlevel) + print() + sourcescfg = {'system': sconfig} + if automatic: + # XXX modify a copy + password = generate_password() + print('-> set administrator account to admin / %s' % password) + USER_OPTIONS[1][1]['default'] = password + sconfig = Configuration(options=USER_OPTIONS) + else: + sconfig = Configuration(options=USER_OPTIONS) + sconfig.input_config(inputlevel=inputlevel) + sourcescfg['admin'] = sconfig + config.write_sources_file(sourcescfg) + # remember selected cubes for later initialization of the database + config.write_bootstrap_cubes_file(cubes) + + def postcreate(self, automatic=False, inputlevel=0): + if automatic: + CWCTL.run(['db-create', '--automatic', self.config.appid]) + elif ASK.confirm('Run db-create to create the system database ?'): + CWCTL.run(['db-create', '--config-level', str(inputlevel), + self.config.appid]) + else: + print('-> nevermind, you can do it later 
with ' + '"cubicweb-ctl db-create %s".' % self.config.appid) + + +@contextmanager +def db_transaction(source, privilege): + """Open a transaction to the instance database""" + cnx = system_source_cnx(source, special_privs=privilege) + cursor = cnx.cursor() + try: + yield cursor + except: + cnx.rollback() + cnx.close() + raise + else: + cnx.commit() + cnx.close() + + +@contextmanager +def db_sys_transaction(source, privilege): + """Open a transaction to the system database""" + cnx = _db_sys_cnx(source, privilege) + cursor = cnx.cursor() + try: + yield cursor + except: + cnx.rollback() + cnx.close() + raise + else: + cnx.commit() + cnx.close() + + +class RepositoryDeleteHandler(CommandHandler): + cmdname = 'delete' + cfgname = 'repository' + + def _drop_namespace(self, source): + db_namespace = source.get('db-namespace') + with db_transaction(source, privilege='DROP SCHEMA') as cursor: + helper = get_db_helper(source['db-driver']) + helper.drop_schema(cursor, db_namespace) + print('-> database schema %s dropped' % db_namespace) + + def _drop_database(self, source): + dbname = source['db-name'] + if source['db-driver'] == 'sqlite': + print('deleting database file %(db-name)s' % source) + os.unlink(source['db-name']) + print('-> database %(db-name)s dropped.' % source) + else: + helper = get_db_helper(source['db-driver']) + with db_sys_transaction(source, privilege='DROP DATABASE') as cursor: + print('dropping database %(db-name)s' % source) + cursor.execute('DROP DATABASE "%(db-name)s"' % source) + print('-> database %(db-name)s dropped.' % source) + + def _drop_user(self, source): + user = source['db-user'] or None + if user is not None: + with db_sys_transaction(source, privilege='DROP USER') as cursor: + print('dropping user %s' % user) + cursor.execute('DROP USER %s' % user) + + def _cleanup_steps(self, source): + # 1/ delete namespace if used + db_namespace = source.get('db-namespace') + if db_namespace: + yield ('Delete database namespace "%s"' % db_namespace, + self._drop_namespace, True) + # 2/ delete database + yield ('Delete database "%(db-name)s"' % source, + self._drop_database, True) + # 3/ delete user + helper = get_db_helper(source['db-driver']) + if source['db-user'] and helper.users_support: + # XXX should check we are not connected as user + yield ('Delete user "%(db-user)s"' % source, + self._drop_user, False) + + def cleanup(self): + """remove instance's configuration and database""" + source = self.config.system_source_config + for msg, step, default in self._cleanup_steps(source): + if ASK.confirm(msg, default_is_yes=default): + try: + step(source) + except Exception as exc: + print('ERROR', exc) + if ASK.confirm('An error occurred. Continue anyway?', + default_is_yes=False): + continue + raise ExecutionError(str(exc)) + + +# repository specific commands ################################################ + +def createdb(helper, source, dbcnx, cursor, **kwargs): + if dbcnx.logged_user != source['db-user']: + helper.create_database(cursor, source['db-name'], source['db-user'], + source['db-encoding'], **kwargs) + else: + helper.create_database(cursor, source['db-name'], + dbencoding=source['db-encoding'], **kwargs) + + +class CreateInstanceDBCommand(Command): + """Create the system database of an instance (run after 'create'). + + You will be prompted for a login / password to use to connect to + the system database. The given user should have almost all rights + on the database (ie a super user on the DBMS allowed to create + database, users, languages...). 
+ + + the identifier of the instance to initialize. + """ + name = 'db-create' + arguments = '' + min_args = max_args = 1 + options = ( + ('automatic', + {'short': 'a', 'action' : 'store_true', + 'default': False, + 'help': 'automatic mode: never ask and use default answer to every ' + 'question. this may require that your login match a database super ' + 'user (allowed to create database & all).', + }), + ('config-level', + {'short': 'l', 'type' : 'int', 'metavar': '', + 'default': 0, + 'help': 'configuration level (0..2): 0 will ask for essential ' + 'configuration parameters only while 2 will ask for all parameters', + }), + ('create-db', + {'short': 'c', 'type': 'yn', 'metavar': '', + 'default': True, + 'help': 'create the database (yes by default)' + }), + ) + + def run(self, args): + """run the command with its specific arguments""" + check_options_consistency(self.config) + automatic = self.get('automatic') + appid = args.pop() + config = ServerConfiguration.config_for(appid) + source = config.system_source_config + dbname = source['db-name'] + driver = source['db-driver'] + helper = get_db_helper(driver) + if driver == 'sqlite': + if os.path.exists(dbname) and ( + automatic or + ASK.confirm('Database %s already exists. Drop it?' % dbname)): + os.unlink(dbname) + elif self.config.create_db: + print('\n'+underline_title('Creating the system database')) + # connect on the dbms system base to create our base + dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER', + interactive=not automatic) + cursor = dbcnx.cursor() + try: + if helper.users_support: + user = source['db-user'] + if not helper.user_exists(cursor, user) and (automatic or \ + ASK.confirm('Create db user %s ?' % user, default_is_yes=False)): + helper.create_user(source['db-user'], source.get('db-password')) + print('-> user %s created.' % user) + if dbname in helper.list_databases(cursor): + if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname): + cursor.execute('DROP DATABASE "%s"' % dbname) + else: + print('you may want to run "cubicweb-ctl db-init ' + '--drop %s" manually to continue.' % config.appid) + return + createdb(helper, source, dbcnx, cursor) + dbcnx.commit() + print('-> database %s created.' % dbname) + except BaseException: + dbcnx.rollback() + raise + cnx = system_source_cnx(source, special_privs='CREATE LANGUAGE/SCHEMA', + interactive=not automatic) + cursor = cnx.cursor() + helper.init_fti_extensions(cursor) + namespace = source.get('db-namespace') + if namespace and ASK.confirm('Create schema %s in database %s ?' + % (namespace, dbname)): + helper.create_schema(cursor, namespace) + cnx.commit() + # postgres specific stuff + if driver == 'postgres': + # install plpythonu/plpgsql languages + langs = ('plpythonu', 'plpgsql') + for extlang in langs: + if automatic or ASK.confirm('Create language %s ?' % extlang): + try: + helper.create_language(cursor, extlang) + except Exception as exc: + print('-> ERROR:', exc) + print('-> could not create language %s, some stored procedures might be unusable' % extlang) + cnx.rollback() + else: + cnx.commit() + print('-> database for instance %s created and necessary extensions installed.' 
% appid) + print() + if automatic: + CWCTL.run(['db-init', '--automatic', '--config-level', '0', + config.appid]) + elif ASK.confirm('Run db-init to initialize the system database ?'): + CWCTL.run(['db-init', '--config-level', + str(self.config.config_level), config.appid]) + else: + print('-> nevermind, you can do it later with ' + '"cubicweb-ctl db-init %s".' % config.appid) + + +class InitInstanceCommand(Command): + """Initialize the system database of an instance (run after 'db-create'). + + Notice this will be done using user specified in the sources files, so this + user should have the create tables grant permissions on the database. + + + the identifier of the instance to initialize. + """ + name = 'db-init' + arguments = '' + min_args = max_args = 1 + options = ( + ('automatic', + {'short': 'a', 'action' : 'store_true', + 'default': False, + 'help': 'automatic mode: never ask and use default answer to every ' + 'question.', + }), + ('config-level', + {'short': 'l', 'type': 'int', 'default': 0, + 'help': 'level threshold for questions asked when configuring ' + 'another source' + }), + ('drop', + {'short': 'd', 'action': 'store_true', + 'default': False, + 'help': 'insert drop statements to remove previously existant ' + 'tables, indexes... (no by default)' + }), + ) + + def run(self, args): + check_options_consistency(self.config) + print('\n'+underline_title('Initializing the system database')) + from cubicweb.server import init_repository + appid = args[0] + config = ServerConfiguration.config_for(appid) + try: + system = config.system_source_config + extra_args = system.get('db-extra-arguments') + extra = extra_args and {'extra_args': extra_args} or {} + get_connection( + system['db-driver'], database=system['db-name'], + host=system.get('db-host'), port=system.get('db-port'), + user=system.get('db-user') or '', password=system.get('db-password') or '', + schema=system.get('db-namespace'), **extra) + except Exception as ex: + raise ConfigurationError( + 'You seem to have provided wrong connection information in '\ + 'the %s file. Resolve this first (error: %s).' + % (config.sources_file(), str(ex).strip())) + init_repository(config, drop=self.config.drop) + if not self.config.automatic: + while ASK.confirm('Enter another source ?', default_is_yes=False): + CWCTL.run(['source-add', '--config-level', + str(self.config.config_level), config.appid]) + + +class AddSourceCommand(Command): + """Add a data source to an instance. + + + the identifier of the instance to initialize. 
+ """ + name = 'source-add' + arguments = '' + min_args = max_args = 1 + options = ( + ('config-level', + {'short': 'l', 'type': 'int', 'default': 1, + 'help': 'level threshold for questions asked when configuring another source' + }), + ) + + def run(self, args): + appid = args[0] + config = ServerConfiguration.config_for(appid) + repo, cnx = repo_cnx(config) + repo.hm.call_hooks('server_maintenance', repo=repo) + try: + with cnx: + used = set(n for n, in cnx.execute('Any SN WHERE S is CWSource, S name SN')) + cubes = repo.get_cubes() + while True: + type = input('source type (%s): ' + % ', '.join(sorted(SOURCE_TYPES))) + if type not in SOURCE_TYPES: + print('-> unknown source type, use one of the available types.') + continue + sourcemodule = SOURCE_TYPES[type].module + if not sourcemodule.startswith('cubicweb.'): + # module names look like cubes.mycube.themodule + sourcecube = SOURCE_TYPES[type].module.split('.', 2)[1] + # if the source adapter is coming from an external component, + # ensure it's specified in used cubes + if not sourcecube in cubes: + print ('-> this source type require the %s cube which is ' + 'not used by the instance.') + continue + break + while True: + parser = input('parser type (%s): ' + % ', '.join(sorted(repo.vreg['parsers']))) + if parser in repo.vreg['parsers']: + break + print('-> unknown parser identifier, use one of the available types.') + while True: + sourceuri = input('source identifier (a unique name used to ' + 'tell sources apart): ').strip() + if not sourceuri: + print('-> mandatory.') + else: + sourceuri = unicode(sourceuri, sys.stdin.encoding) + if sourceuri in used: + print('-> uri already used, choose another one.') + else: + break + url = input('source URL (leave empty for none): ').strip() + url = unicode(url) if url else None + # XXX configurable inputlevel + sconfig = ask_source_config(config, type, inputlevel=self.config.config_level) + cfgstr = unicode(generate_source_config(sconfig), sys.stdin.encoding) + cnx.create_entity('CWSource', name=sourceuri, type=unicode(type), + config=cfgstr, parser=unicode(parser), url=unicode(url)) + cnx.commit() + finally: + repo.hm.call_hooks('server_shutdown') + + +class GrantUserOnInstanceCommand(Command): + """Grant a database user on a repository system database. + + + the identifier of the instance + + the database's user requiring grant access + """ + name = 'db-grant-user' + arguments = ' ' + min_args = max_args = 2 + options = ( + ('set-owner', + {'short': 'o', 'type' : 'yn', 'metavar' : '', + 'default' : False, + 'help': 'Set the user as tables owner if yes (no by default).'} + ), + ) + def run(self, args): + """run the command with its specific arguments""" + from cubicweb.server.sqlutils import sqlexec, sqlgrants + appid, user = args + config = ServerConfiguration.config_for(appid) + source = config.system_source_config + set_owner = self.config.set_owner + cnx = system_source_cnx(source, special_privs='GRANT') + cursor = cnx.cursor() + schema = config.load_schema() + try: + sqlexec(sqlgrants(schema, source['db-driver'], user, + set_owner=set_owner), cursor) + except Exception as ex: + cnx.rollback() + import traceback + traceback.print_exc() + print('-> an error occurred:', ex) + else: + cnx.commit() + print('-> rights granted to %s on instance %s.' % (appid, user)) + + +class ResetAdminPasswordCommand(Command): + """Reset the administrator password. 
+ + + the identifier of the instance + """ + name = 'reset-admin-pwd' + arguments = '' + min_args = max_args = 1 + options = ( + ('password', + {'short': 'p', 'type' : 'string', 'metavar' : '', + 'default' : None, + 'help': 'Use this password instead of prompt for one.\n' + '/!\ THIS IS AN INSECURE PRACTICE /!\ \n' + 'the password will appear in shell history'} + ), + ) + + def run(self, args): + """run the command with its specific arguments""" + from cubicweb.server.utils import crypt_password, manager_userpasswd + appid = args[0] + config = ServerConfiguration.config_for(appid) + sourcescfg = config.read_sources_file() + try: + adminlogin = sourcescfg['admin']['login'] + except KeyError: + print('-> Error: could not get cubicweb administrator login.') + sys.exit(1) + cnx = source_cnx(sourcescfg['system']) + driver = sourcescfg['system']['db-driver'] + dbhelper = get_db_helper(driver) + cursor = cnx.cursor() + # check admin exists + cursor.execute("SELECT * FROM cw_CWUser WHERE cw_login=%(l)s", + {'l': adminlogin}) + if not cursor.fetchall(): + print("-> error: admin user %r specified in sources doesn't exist " + "in the database" % adminlogin) + print(" fix your sources file before running this command") + cnx.close() + sys.exit(1) + if self.config.password is None: + # ask for a new password + msg = 'new password for %s' % adminlogin + _, pwd = manager_userpasswd(adminlogin, confirm=True, passwdmsg=msg) + else: + pwd = self.config.password + try: + cursor.execute("UPDATE cw_CWUser SET cw_upassword=%(p)s WHERE cw_login=%(l)s", + {'p': dbhelper.binary_value(crypt_password(pwd)), 'l': adminlogin}) + sconfig = Configuration(options=USER_OPTIONS) + sconfig['login'] = adminlogin + sconfig['password'] = pwd + sourcescfg['admin'] = sconfig + config.write_sources_file(sourcescfg) + except Exception as ex: + cnx.rollback() + import traceback + traceback.print_exc() + print('-> an error occurred:', ex) + else: + cnx.commit() + print('-> password reset, sources file regenerated.') + cnx.close() + + + +def _remote_dump(host, appid, output, sudo=False): + # XXX generate unique/portable file name + from datetime import date + filename = '%s-%s.tgz' % (appid, date.today().strftime('%Y-%m-%d')) + dmpcmd = 'cubicweb-ctl db-dump -o /tmp/%s %s' % (filename, appid) + if sudo: + dmpcmd = 'sudo %s' % (dmpcmd) + dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd) + print(dmpcmd) + if os.system(dmpcmd): + raise ExecutionError('Error while dumping the database') + if output is None: + output = filename + cmd = 'scp %s:/tmp/%s %s' % (host, filename, output) + print(cmd) + if os.system(cmd): + raise ExecutionError('Error while retrieving the dump at /tmp/%s' % filename) + rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename) + print(rmcmd) + if os.system(rmcmd) and not ASK.confirm( + 'An error occurred while deleting remote dump at /tmp/%s. ' + 'Continue anyway?' 
% filename): + raise ExecutionError('Error while deleting remote dump at /tmp/%s' % filename) + + +def _local_dump(appid, output, format='native'): + config = ServerConfiguration.config_for(appid) + config.quick_start = True + mih = config.migration_handler(verbosity=1) + mih.backup_database(output, askconfirm=False, format=format) + mih.shutdown() + +def _local_restore(appid, backupfile, drop, format='native'): + config = ServerConfiguration.config_for(appid) + config.verbosity = 1 # else we won't be asked for confirmation on problems + config.quick_start = True + mih = config.migration_handler(connect=False, verbosity=1) + mih.restore_database(backupfile, drop, askconfirm=False, format=format) + repo = mih.repo + # version of the database + dbversions = repo.get_versions() + mih.shutdown() + if not dbversions: + print("bad or missing version information in the database, don't upgrade file system") + return + # version of installed software + eversion = dbversions['cubicweb'] + status = instance_status(config, eversion, dbversions) + # * database version > installed software + if status == 'needsoftupgrade': + print("** The database of %s is more recent than the installed software!" % config.appid) + print("** Upgrade your software, then migrate the database by running the command") + print("** 'cubicweb-ctl upgrade %s'" % config.appid) + return + # * database version < installed software, an upgrade will be necessary + # anyway, just rewrite vc.conf and warn user he has to upgrade + elif status == 'needapplupgrade': + print("** The database of %s is older than the installed software." % config.appid) + print("** Migrate the database by running the command") + print("** 'cubicweb-ctl upgrade %s'" % config.appid) + return + # * database version = installed software, database version = instance fs version + # ok! + +def instance_status(config, cubicwebapplversion, vcconf): + cubicwebversion = config.cubicweb_version() + if cubicwebapplversion > cubicwebversion: + return 'needsoftupgrade' + if cubicwebapplversion < cubicwebversion: + return 'needapplupgrade' + for cube in config.cubes(): + try: + softversion = config.cube_version(cube) + except ConfigurationError: + print('-> Error: no cube version information for %s, please check that the cube is installed.' % cube) + continue + try: + applversion = vcconf[cube] + except KeyError: + print('-> Error: no cube version information for %s in version configuration.' % cube) + continue + if softversion == applversion: + continue + if softversion > applversion: + return 'needsoftupgrade' + elif softversion < applversion: + return 'needapplupgrade' + return None + + +class DBDumpCommand(Command): + """Backup the system database of an instance. + + + the identifier of the instance to backup + format [[user@]host:]appname + """ + name = 'db-dump' + arguments = '' + min_args = max_args = 1 + options = ( + ('output', + {'short': 'o', 'type' : 'string', 'metavar' : '', + 'default' : None, + 'help': 'Specify the backup file where the backup will be stored.'} + ), + ('sudo', + {'short': 's', 'action' : 'store_true', + 'default' : False, + 'help': 'Use sudo on the remote host.'} + ), + ('format', + {'short': 'f', 'default': 'native', 'type': 'choice', + 'choices': ('native', 'portable'), + 'help': '"native" format uses db backend utilities to dump the database. 
' + '"portable" format uses a database independent format'} + ), + ) + + def run(self, args): + appid = args[0] + if ':' in appid: + host, appid = appid.split(':') + _remote_dump(host, appid, self.config.output, self.config.sudo) + else: + _local_dump(appid, self.config.output, format=self.config.format) + + + + +class DBRestoreCommand(Command): + """Restore the system database of an instance. + + + the identifier of the instance to restore + """ + name = 'db-restore' + arguments = ' ' + min_args = max_args = 2 + + options = ( + ('no-drop', + {'short': 'n', 'action' : 'store_true', 'default' : False, + 'help': 'for some reason the database doesn\'t exist and so ' + 'should not be dropped.'} + ), + ('format', + {'short': 'f', 'default': 'native', 'type': 'choice', + 'choices': ('native', 'portable'), + 'help': 'the format used when dumping the database'}), + ) + + def run(self, args): + appid, backupfile = args + if self.config.format == 'portable': + # we need to ensure a DB exist before restoring from portable format + if not self.config.no_drop: + try: + CWCTL.run(['db-create', '--automatic', appid]) + except SystemExit as exc: + # continue if the command exited with status 0 (success) + if exc.code: + raise + _local_restore(appid, backupfile, + drop=not self.config.no_drop, + format=self.config.format) + if self.config.format == 'portable': + try: + CWCTL.run(['db-rebuild-fti', appid]) + except SystemExit as exc: + if exc.code: + raise + + +class DBCopyCommand(Command): + """Copy the system database of an instance (backup and restore). + + + the identifier of the instance to backup + format [[user@]host:]appname + + + the identifier of the instance to restore + """ + name = 'db-copy' + arguments = ' ' + min_args = max_args = 2 + options = ( + ('no-drop', + {'short': 'n', 'action' : 'store_true', + 'default' : False, + 'help': 'For some reason the database doesn\'t exist and so ' + 'should not be dropped.'} + ), + ('keep-dump', + {'short': 'k', 'action' : 'store_true', + 'default' : False, + 'help': 'Specify that the dump file should not be automatically removed.'} + ), + ('sudo', + {'short': 's', 'action' : 'store_true', + 'default' : False, + 'help': 'Use sudo on the remote host.'} + ), + ('format', + {'short': 'f', 'default': 'native', 'type': 'choice', + 'choices': ('native', 'portable'), + 'help': '"native" format uses db backend utilities to dump the database. ' + '"portable" format uses a database independent format'} + ), + ) + + def run(self, args): + import tempfile + srcappid, destappid = args + fd, output = tempfile.mkstemp() + os.close(fd) + if ':' in srcappid: + host, srcappid = srcappid.split(':') + _remote_dump(host, srcappid, output, self.config.sudo) + else: + _local_dump(srcappid, output, format=self.config.format) + _local_restore(destappid, output, not self.config.no_drop, + self.config.format) + if self.config.keep_dump: + print('-> you can get the dump file at', output) + else: + os.remove(output) + + +class CheckRepositoryCommand(Command): + """Check integrity of the system database of an instance. + + + the identifier of the instance to check + """ + name = 'db-check' + arguments = '' + min_args = max_args = 1 + options = ( + ('checks', + {'short': 'c', 'type' : 'csv', 'metavar' : '', + 'default' : ('entities', 'relations', + 'mandatory_relations', 'mandatory_attributes', + 'metadata', 'schema', 'text_index'), + 'help': 'Comma separated list of check to run. By default run all \ +checks, i.e. 
entities, relations, mandatory_relations, mandatory_attributes, \ +metadata, text_index and schema.'} + ), + + ('autofix', + {'short': 'a', 'type' : 'yn', 'metavar' : '', + 'default' : False, + 'help': 'Automatically correct integrity problems if this option \ +is set to "y" or "yes", else only display them'} + ), + ('reindex', + {'short': 'r', 'type' : 'yn', 'metavar' : '', + 'default' : False, + 'help': 're-indexes the database for full text search if this \ +option is set to "y" or "yes" (may be long for large database).'} + ), + ('force', + {'short': 'f', 'action' : 'store_true', + 'default' : False, + 'help': 'don\'t check instance is up to date.'} + ), + + ) + + def run(self, args): + from cubicweb.server.checkintegrity import check + appid = args[0] + config = ServerConfiguration.config_for(appid) + config.repairing = self.config.force + repo, _cnx = repo_cnx(config) + with repo.internal_cnx() as cnx: + check(repo, cnx, + self.config.checks, + self.config.reindex, + self.config.autofix) + + +class RebuildFTICommand(Command): + """Rebuild the full-text index of the system database of an instance. + + [etype(s)] + the identifier of the instance to rebuild + + If no etype is specified, cubicweb will reindex everything, otherwise + only specified etypes will be considered. + """ + name = 'db-rebuild-fti' + arguments = '' + min_args = 1 + + def run(self, args): + from cubicweb.server.checkintegrity import reindex_entities + appid = args.pop(0) + etypes = args or None + config = ServerConfiguration.config_for(appid) + repo, cnx = repo_cnx(config) + with cnx: + reindex_entities(repo.schema, cnx, etypes=etypes) + cnx.commit() + + +class SynchronizeSourceCommand(Command): + """Force a source synchronization. + + + the identifier of the instance + + the name of the source to synchronize. 
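RebuildFTICommand above restricts reindexing to the entity types given on the command line. A short sketch of the same operation done directly with reindex_entities, assuming the repo_cnx helper used by these commands is importable from this module and that an instance "myapp" with an entity type "BlogEntry" exists (both names are illustrative):

    # Sketch: selective full-text reindexing, mirroring RebuildFTICommand.run.
    from cubicweb.server.checkintegrity import reindex_entities
    from cubicweb.server.serverconfig import ServerConfiguration
    from cubicweb.server.serverctl import repo_cnx

    config = ServerConfiguration.config_for('myapp')
    repo, cnx = repo_cnx(config)
    with cnx:
        # etypes=None would reindex every full-text indexable entity type
        reindex_entities(repo.schema, cnx, etypes=['BlogEntry'])
        cnx.commit()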
+ """ + name = 'source-sync' + arguments = ' ' + min_args = max_args = 2 + options = ( + ('loglevel', + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': 'info', 'choices': ('debug', 'info', 'warning', 'error'), + }), + ) + + def run(self, args): + from cubicweb import repoapi + from cubicweb.cwctl import init_cmdline_log_threshold + config = ServerConfiguration.config_for(args[0]) + config.global_set_option('log-file', None) + config.log_format = '%(levelname)s %(name)s: %(message)s' + init_cmdline_log_threshold(config, self['loglevel']) + repo = repoapi.get_repository(config=config) + repo.hm.call_hooks('server_maintenance', repo=repo) + try: + try: + source = repo.sources_by_uri[args[1]] + except KeyError: + raise ExecutionError('no source named %r' % args[1]) + with repo.internal_cnx() as cnx: + stats = source.pull_data(cnx, force=True, raise_on_error=True) + finally: + repo.shutdown() + for key, val in stats.items(): + if val: + print(key, ':', val) + + + +def permissionshandler(relation, perms): + from yams.schema import RelationDefinitionSchema + from yams.buildobjs import DEFAULT_ATTRPERMS + from cubicweb.schema import (PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, + PUB_SYSTEM_ATTR_PERMS, RO_REL_PERMS, RO_ATTR_PERMS) + defaultrelperms = (DEFAULT_ATTRPERMS, PUB_SYSTEM_REL_PERMS, + PUB_SYSTEM_ATTR_PERMS, RO_REL_PERMS, RO_ATTR_PERMS) + defaulteperms = (PUB_SYSTEM_ENTITY_PERMS,) + # canonicalize vs str/unicode + for p in ('read', 'add', 'update', 'delete'): + rule = perms.get(p) + if rule: + perms[p] = tuple(str(x) if isinstance(x, string_types) else x + for x in rule) + return perms, perms in defaultrelperms or perms in defaulteperms + + +class SchemaDiffCommand(Command): + """Generate a diff between schema and fsschema description. + + + the identifier of the instance + + the name of the diff tool to compare the two generated files. + """ + name = 'schema-diff' + arguments = ' ' + min_args = max_args = 2 + + def run(self, args): + from yams.diff import schema_diff + from cubicweb import repoapi + appid = args.pop(0) + diff_tool = args.pop(0) + config = ServerConfiguration.config_for(appid) + repo = repoapi.get_repository(config=config) + fsschema = config.load_schema(expand_cubes=True) + schema_diff(fsschema, repo.schema, permissionshandler, diff_tool, ignore=('eid',)) + + +for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand, + GrantUserOnInstanceCommand, ResetAdminPasswordCommand, + DBDumpCommand, DBRestoreCommand, DBCopyCommand, + AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand, + SynchronizeSourceCommand, SchemaDiffCommand, + ): + CWCTL.register(cmdclass) + +# extend configure command to set options in sources config file ############### + +db_options = ( + ('db', + {'short': 'd', 'type' : 'named', 'metavar' : '[section1.]key1:value1,[section2.]key2:value2', + 'default': None, + 'help': '''set in
to in "source" configuration file. If
is not specified, it defaults to "system". + +Beware that changing admin.login or admin.password using this command +will NOT update the database with new admin credentials. Use the +reset-admin-pwd command instead. +''', + }), + ) + +ConfigureInstanceCommand.options = merge_options( + ConfigureInstanceCommand.options + db_options) + +configure_instance = ConfigureInstanceCommand.configure_instance +def configure_instance2(self, appid): + configure_instance(self, appid) + if self.config.db is not None: + appcfg = ServerConfiguration.config_for(appid) + srccfg = appcfg.read_sources_file() + for key, value in self.config.db.items(): + if '.' in key: + section, key = key.split('.', 1) + else: + section = 'system' + try: + srccfg[section][key] = value + except KeyError: + raise ConfigurationError('unknown configuration key "%s" in section "%s" for source' % (key, section)) + admcfg = Configuration(options=USER_OPTIONS) + admcfg['login'] = srccfg['admin']['login'] + admcfg['password'] = srccfg['admin']['password'] + srccfg['admin'] = admcfg + appcfg.write_sources_file(srccfg) +ConfigureInstanceCommand.configure_instance = configure_instance2 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/session.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/session.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1141 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Repository users' and internal' sessions.""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +from time import time +from uuid import uuid4 +from warnings import warn +import functools +from contextlib import contextmanager + +from six import text_type + +from logilab.common.deprecation import deprecated +from logilab.common.textutils import unormalize +from logilab.common.registry import objectify_predicate + +from cubicweb import QueryError, schema, server, ProgrammingError +from cubicweb.req import RequestSessionBase +from cubicweb.utils import make_uid +from cubicweb.rqlrewrite import RQLRewriter +from cubicweb.server.edition import EditedEntity + + +NO_UNDO_TYPES = schema.SCHEMA_TYPES.copy() +NO_UNDO_TYPES.add('CWCache') +# is / is_instance_of are usually added by sql hooks except when using +# dataimport.NoHookRQLObjectStore, and we don't want to record them +# anyway in the later case +NO_UNDO_TYPES.add('is') +NO_UNDO_TYPES.add('is_instance_of') +NO_UNDO_TYPES.add('cw_source') +# XXX rememberme,forgotpwd,apycot,vcsfile + +@objectify_predicate +def is_user_session(cls, req, **kwargs): + """return 1 when session is not internal. + + This predicate can only be used repository side only. 
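These predicates are meant to be combined into repository-side selectors. A hedged sketch of a server hook restricted to regular user connections; the registry id and the notification logic are made up, only the predicate usage is the point:

    # Sketch only: assumes the standard cubicweb.server.hook API.
    from cubicweb.server import hook
    from cubicweb.server.session import is_user_session

    class NotifyOnUpdate(hook.Hook):
        __regid__ = 'myapp.notify-on-update'
        __select__ = hook.Hook.__select__ & is_user_session()
        events = ('after_update_entity',)

        def __call__(self):
            # self._cw is the Connection whose query triggered the hook
            self.info('eid %s updated through a user session', self.entity.eid)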
""" + return not req.is_internal_session + +@objectify_predicate +def is_internal_session(cls, req, **kwargs): + """return 1 when session is not internal. + + This predicate can only be used repository side only. """ + return req.is_internal_session + +@objectify_predicate +def repairing(cls, req, **kwargs): + """return 1 when repository is running in repair mode""" + return req.vreg.config.repairing + + +@deprecated('[3.17] use .allow/deny_all_hooks_but instead') +def hooks_control(obj, mode, *categories): + assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL) + if mode == HOOKS_ALLOW_ALL: + return obj.allow_all_hooks_but(*categories) + elif mode == HOOKS_DENY_ALL: + return obj.deny_all_hooks_but(*categories) + + +class _hooks_control(object): + """context manager to control activated hooks categories. + + If mode is `HOOKS_DENY_ALL`, given hooks categories will + be enabled. + + If mode is `HOOKS_ALLOW_ALL`, given hooks categories will + be disabled. + + .. sourcecode:: python + + with _hooks_control(cnx, HOOKS_ALLOW_ALL, 'integrity'): + # ... do stuff with all but 'integrity' hooks activated + + with _hooks_control(cnx, HOOKS_DENY_ALL, 'integrity'): + # ... do stuff with none but 'integrity' hooks activated + + This is an internal API, you should rather use + :meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` or + :meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` + Connection methods. + """ + def __init__(self, cnx, mode, *categories): + assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL) + self.cnx = cnx + self.mode = mode + self.categories = categories + self.oldmode = None + self.changes = () + + def __enter__(self): + self.oldmode = self.cnx.hooks_mode + self.cnx.hooks_mode = self.mode + if self.mode is HOOKS_DENY_ALL: + self.changes = self.cnx.enable_hook_categories(*self.categories) + else: + self.changes = self.cnx.disable_hook_categories(*self.categories) + self.cnx.ctx_count += 1 + + def __exit__(self, exctype, exc, traceback): + self.cnx.ctx_count -= 1 + try: + if self.categories: + if self.mode is HOOKS_DENY_ALL: + self.cnx.disable_hook_categories(*self.categories) + else: + self.cnx.enable_hook_categories(*self.categories) + finally: + self.cnx.hooks_mode = self.oldmode + + +@deprecated('[3.17] use .security_enabled instead') +def security_enabled(obj, *args, **kwargs): + return obj.security_enabled(*args, **kwargs) + +class _security_enabled(object): + """context manager to control security w/ session.execute, + + By default security is disabled on queries executed on the repository + side. 
+ """ + def __init__(self, cnx, read=None, write=None): + self.cnx = cnx + self.read = read + self.write = write + self.oldread = None + self.oldwrite = None + + def __enter__(self): + if self.read is None: + self.oldread = None + else: + self.oldread = self.cnx.read_security + self.cnx.read_security = self.read + if self.write is None: + self.oldwrite = None + else: + self.oldwrite = self.cnx.write_security + self.cnx.write_security = self.write + self.cnx.ctx_count += 1 + + def __exit__(self, exctype, exc, traceback): + self.cnx.ctx_count -= 1 + if self.oldread is not None: + self.cnx.read_security = self.oldread + if self.oldwrite is not None: + self.cnx.write_security = self.oldwrite + +HOOKS_ALLOW_ALL = object() +HOOKS_DENY_ALL = object() +DEFAULT_SECURITY = object() # evaluated to true by design + +class SessionClosedError(RuntimeError): + pass + + +def _open_only(func): + """decorator for Connection method that check it is open""" + @functools.wraps(func) + def check_open(cnx, *args, **kwargs): + if not cnx._open: + raise ProgrammingError('Closed Connection: %s' + % cnx.connectionid) + return func(cnx, *args, **kwargs) + return check_open + + +class Connection(RequestSessionBase): + """Repository Connection + + Holds all connection related data + + Database connection resources: + + :attr:`hooks_in_progress`, boolean flag telling if the executing + query is coming from a repoapi connection or is a query from + within the repository (e.g. started by hooks) + + :attr:`cnxset`, the connections set to use to execute queries on sources. + If the transaction is read only, the connection set may be freed between + actual queries. This allows multiple connections with a reasonably low + connection set pool size. Control mechanism is detailed below. + + .. automethod:: cubicweb.server.session.Connection.set_cnxset + .. automethod:: cubicweb.server.session.Connection.free_cnxset + + :attr:`mode`, string telling the connections set handling mode, may be one + of 'read' (connections set may be freed), 'write' (some write was done in + the connections set, it can't be freed before end of the transaction), + 'transaction' (we want to keep the connections set during all the + transaction, with or without writing) + + Shared data: + + :attr:`data` is a dictionary bound to the underlying session, + who will be present for the life time of the session. This may + be useful for web clients that rely on the server for managing + bits of session-scoped data. + + :attr:`transaction_data` is a dictionary cleared at the end of + the transaction. Hooks and operations may put arbitrary data in + there. + + Internal state: + + :attr:`pending_operations`, ordered list of operations to be processed on + commit/rollback + + :attr:`commit_state`, describing the transaction commit state, may be one + of None (not yet committing), 'precommit' (calling precommit event on + operations), 'postcommit' (calling postcommit event on operations), + 'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error + has been raised during the transaction and so it must be rolled back). + + Hooks controls: + + :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`. + + :attr:`enabled_hook_cats`, when :attr:`hooks_mode` is + `HOOKS_DENY_ALL`, this set contains hooks categories that are enabled. + + :attr:`disabled_hook_cats`, when :attr:`hooks_mode` is + `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled. 
+ + Security level Management: + + :attr:`read_security` and :attr:`write_security`, boolean flags telling if + read/write security is currently activated. + + """ + is_request = False + hooks_in_progress = False + is_repo_in_memory = True # bw compat + + def __init__(self, session): + # using super(Connection, self) confuse some test hack + RequestSessionBase.__init__(self, session.vreg) + #: connection unique id + self._open = None + self.connectionid = '%s-%s' % (session.sessionid, uuid4().hex) + self.session = session + self.sessionid = session.sessionid + #: reentrance handling + self.ctx_count = 0 + + #: server.Repository object + self.repo = session.repo + self.vreg = self.repo.vreg + self._execute = self.repo.querier.execute + + # other session utility + self._session_timestamp = session._timestamp + + # internal (root) session + self.is_internal_session = isinstance(session.user, InternalManager) + + #: dict containing arbitrary data cleared at the end of the transaction + self.transaction_data = {} + self._session_data = session.data + #: ordered list of operations to be processed on commit/rollback + self.pending_operations = [] + #: (None, 'precommit', 'postcommit', 'uncommitable') + self.commit_state = None + + ### hook control attribute + self.hooks_mode = HOOKS_ALLOW_ALL + self.disabled_hook_cats = set() + self.enabled_hook_cats = set() + self.pruned_hooks_cache = {} + + + ### security control attributes + self._read_security = DEFAULT_SECURITY # handled by a property + self.write_security = DEFAULT_SECURITY + + # undo control + config = session.repo.config + if config.creating or config.repairing or self.is_internal_session: + self.undo_actions = False + else: + self.undo_actions = config['undo-enabled'] + + # RQLRewriter are not thread safe + self._rewriter = RQLRewriter(self) + + # other session utility + if session.user.login == '__internal_manager__': + self.user = session.user + self.set_language(self.user.prefered_language()) + else: + self._set_user(session.user) + + @_open_only + def source_defs(self): + """Return the definition of sources used by the repository.""" + return self.session.repo.source_defs() + + @_open_only + def get_schema(self): + """Return the schema currently used by the repository.""" + return self.session.repo.source_defs() + + @_open_only + def get_option_value(self, option): + """Return the value for `option` in the configuration.""" + return self.session.repo.get_option_value(option) + + # transaction api + + @_open_only + def undoable_transactions(self, ueid=None, **actionfilters): + """Return a list of undoable transaction objects by the connection's + user, ordered by descendant transaction time. + + Managers may filter according to user (eid) who has done the transaction + using the `ueid` argument. Others will only see their own transactions. + + Additional filtering capabilities is provided by using the following + named arguments: + + * `etype` to get only transactions creating/updating/deleting entities + of the given type + + * `eid` to get only transactions applied to entity of the given eid + + * `action` to get only transactions doing the given action (action in + 'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or + 'D'. + + * `public`: when additional filtering is provided, they are by default + only searched in 'public' actions, unless a `public` argument is given + and set to false. 
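A short sketch of the filtering described above, assuming undo is enabled for the instance, cnx is an open Connection and transaction objects expose their identifier as .uuid:

    # Sketch: inspect and undo the most recent CWUser creation.
    txs = cnx.undoable_transactions(etype='CWUser', action='C')
    if txs:
        latest = txs[0]                      # ordered by descending time
        for action in cnx.transaction_actions(latest.uuid):
            print(action)
        errors = cnx.undo_transaction(latest.uuid)
        if not errors:
            cnx.commit()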
+ """ + return self.repo.system_source.undoable_transactions(self, ueid, + **actionfilters) + + @_open_only + def transaction_info(self, txuuid): + """Return transaction object for the given uid. + + raise `NoSuchTransaction` if not found or if session's user is + not allowed (eg not in managers group and the transaction + doesn't belong to him). + """ + return self.repo.system_source.tx_info(self, txuuid) + + @_open_only + def transaction_actions(self, txuuid, public=True): + """Return an ordered list of actions effectued during that transaction. + + If public is true, return only 'public' actions, i.e. not ones + triggered under the cover by hooks, else return all actions. + + raise `NoSuchTransaction` if the transaction is not found or + if the user is not allowed (eg not in managers group). + """ + return self.repo.system_source.tx_actions(self, txuuid, public) + + @_open_only + def undo_transaction(self, txuuid): + """Undo the given transaction. Return potential restoration errors. + + raise `NoSuchTransaction` if not found or if user is not + allowed (eg not in managers group). + """ + return self.repo.system_source.undo_transaction(self, txuuid) + + # life cycle handling #################################################### + + def __enter__(self): + assert self._open is None # first opening + self._open = True + self.cnxset = self.repo._get_cnxset() + return self + + def __exit__(self, exctype=None, excvalue=None, tb=None): + assert self._open # actually already open + self.rollback() + self._open = False + self.cnxset.cnxset_freed() + self.repo._free_cnxset(self.cnxset) + self.cnxset = None + + @contextmanager + def running_hooks_ops(self): + """this context manager should be called whenever hooks or operations + are about to be run (but after hook selection) + + It will help the undo logic record pertinent metadata or some + hooks to run (or not) depending on who/what issued the query. + """ + prevmode = self.hooks_in_progress + self.hooks_in_progress = True + yield + self.hooks_in_progress = prevmode + + # shared data handling ################################################### + + @property + def data(self): + return self._session_data + + @property + def rql_rewriter(self): + return self._rewriter + + @_open_only + @deprecated('[3.19] use session or transaction data', stacklevel=3) + def get_shared_data(self, key, default=None, pop=False, txdata=False): + """return value associated to `key` in session data""" + if txdata: + data = self.transaction_data + else: + data = self._session_data + if pop: + return data.pop(key, default) + else: + return data.get(key, default) + + @_open_only + @deprecated('[3.19] use session or transaction data', stacklevel=3) + def set_shared_data(self, key, value, txdata=False): + """set value associated to `key` in session data""" + if txdata: + self.transaction_data[key] = value + else: + self._session_data[key] = value + + def clear(self): + """reset internal data""" + self.transaction_data = {} + #: ordered list of operations to be processed on commit/rollback + self.pending_operations = [] + #: (None, 'precommit', 'postcommit', 'uncommitable') + self.commit_state = None + self.pruned_hooks_cache = {} + self.local_perm_cache.clear() + self.rewriter = RQLRewriter(self) + + @deprecated('[3.19] cnxset are automatically managed now.' + ' stop using explicit set and free.') + def set_cnxset(self): + pass + + @deprecated('[3.19] cnxset are automatically managed now.' 
+ ' stop using explicit set and free.') + def free_cnxset(self, ignoremode=False): + pass + + @property + @contextmanager + @_open_only + @deprecated('[3.21] a cnxset is automatically set on __enter__ call now.' + ' stop using .ensure_cnx_set') + def ensure_cnx_set(self): + yield + + @property + def anonymous_connection(self): + return self.session.anonymous_session + + # Entity cache management ################################################# + # + # The connection entity cache as held in cnx.transaction_data is removed at the + # end of the connection (commit and rollback) + # + # XXX connection level caching may be a pb with multiple repository + # instances, but 1. this is probably not the only one :$ and 2. it may be + # an acceptable risk. Anyway we could activate it or not according to a + # configuration option + + def set_entity_cache(self, entity): + """Add `entity` to the connection entity cache""" + # XXX not using _open_only because before at creation time. _set_user + # call this function to cache the Connection user. + if entity.cw_etype != 'CWUser' and not self._open: + raise ProgrammingError('Closed Connection: %s' + % self.connectionid) + ecache = self.transaction_data.setdefault('ecache', {}) + ecache.setdefault(entity.eid, entity) + + @_open_only + def entity_cache(self, eid): + """get cache entity for `eid`""" + return self.transaction_data['ecache'][eid] + + @_open_only + def cached_entities(self): + """return the whole entity cache""" + return self.transaction_data.get('ecache', {}).values() + + @_open_only + def drop_entity_cache(self, eid=None): + """drop entity from the cache + + If eid is None, the whole cache is dropped""" + if eid is None: + self.transaction_data.pop('ecache', None) + else: + del self.transaction_data['ecache'][eid] + + # relations handling ####################################################### + + @_open_only + def add_relation(self, fromeid, rtype, toeid): + """provide direct access to the repository method to add a relation. + + This is equivalent to the following rql query: + + SET X rtype Y WHERE X eid fromeid, T eid toeid + + without read security check but also all the burden of rql execution. + You may use this in hooks when you know both eids of the relation you + want to add. + """ + self.add_relations([(rtype, [(fromeid, toeid)])]) + + @_open_only + def add_relations(self, relations): + '''set many relation using a shortcut similar to the one in add_relation + + relations is a list of 2-uples, the first element of each + 2-uple is the rtype, and the second is a list of (fromeid, + toeid) tuples + ''' + edited_entities = {} + relations_dict = {} + with self.security_enabled(False, False): + for rtype, eids in relations: + if self.vreg.schema[rtype].inlined: + for fromeid, toeid in eids: + if fromeid not in edited_entities: + entity = self.entity_from_eid(fromeid) + edited = EditedEntity(entity) + edited_entities[fromeid] = edited + else: + edited = edited_entities[fromeid] + edited.edited_attribute(rtype, toeid) + else: + relations_dict[rtype] = eids + self.repo.glob_add_relations(self, relations_dict) + for edited in edited_entities.values(): + self.repo.glob_update_entity(self, edited) + + + @_open_only + def delete_relation(self, fromeid, rtype, toeid): + """provide direct access to the repository method to delete a relation. + + This is equivalent to the following rql query: + + DELETE X rtype Y WHERE X eid fromeid, T eid toeid + + without read security check but also all the burden of rql execution. 
+ You may use this in hooks when you know both eids of the relation you + want to delete. + """ + with self.security_enabled(False, False): + if self.vreg.schema[rtype].inlined: + entity = self.entity_from_eid(fromeid) + entity.cw_attr_cache[rtype] = None + self.repo.glob_update_entity(self, entity, set((rtype,))) + else: + self.repo.glob_delete_relation(self, fromeid, rtype, toeid) + + # relations cache handling ################################################# + + @_open_only + def update_rel_cache_add(self, subject, rtype, object, symmetric=False): + self._update_entity_rel_cache_add(subject, rtype, 'subject', object) + if symmetric: + self._update_entity_rel_cache_add(object, rtype, 'subject', subject) + else: + self._update_entity_rel_cache_add(object, rtype, 'object', subject) + + @_open_only + def update_rel_cache_del(self, subject, rtype, object, symmetric=False): + self._update_entity_rel_cache_del(subject, rtype, 'subject', object) + if symmetric: + self._update_entity_rel_cache_del(object, rtype, 'object', object) + else: + self._update_entity_rel_cache_del(object, rtype, 'object', subject) + + @_open_only + def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid): + try: + entity = self.entity_cache(eid) + except KeyError: + return + rcache = entity.cw_relation_cached(rtype, role) + if rcache is not None: + rset, entities = rcache + rset = rset.copy() + entities = list(entities) + rset.rows.append([targeteid]) + if not isinstance(rset.description, list): # else description not set + rset.description = list(rset.description) + rset.description.append([self.entity_metas(targeteid)['type']]) + targetentity = self.entity_from_eid(targeteid) + if targetentity.cw_rset is None: + targetentity.cw_rset = rset + targetentity.cw_row = rset.rowcount + targetentity.cw_col = 0 + rset.rowcount += 1 + entities.append(targetentity) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) + + @_open_only + def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid): + try: + entity = self.entity_cache(eid) + except KeyError: + return + rcache = entity.cw_relation_cached(rtype, role) + if rcache is not None: + rset, entities = rcache + for idx, row in enumerate(rset.rows): + if row[0] == targeteid: + break + else: + # this may occurs if the cache has been filed by a hook + # after the database update + self.debug('cache inconsistency for %s %s %s %s', eid, rtype, + role, targeteid) + return + rset = rset.copy() + entities = list(entities) + del rset.rows[idx] + if isinstance(rset.description, list): # else description not set + del rset.description[idx] + del entities[idx] + rset.rowcount -= 1 + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) + + # Tracking of entities added of removed in the transaction ################## + + @_open_only + def deleted_in_transaction(self, eid): + """return True if the entity of the given eid is being deleted in the + current transaction + """ + return eid in self.transaction_data.get('pendingeids', ()) + + @_open_only + def added_in_transaction(self, eid): + """return True if the entity of the given eid is being created in the + current transaction + """ + return eid in self.transaction_data.get('neweids', ()) + + # Operation management #################################################### + + @_open_only + def add_operation(self, operation, index=None): + """add an operation to be executed at the end of the transaction""" + if index is None: + self.pending_operations.append(operation) + 
else: + self.pending_operations.insert(index, operation) + + # Hooks control ########################################################### + + @_open_only + def allow_all_hooks_but(self, *categories): + return _hooks_control(self, HOOKS_ALLOW_ALL, *categories) + + @_open_only + def deny_all_hooks_but(self, *categories): + return _hooks_control(self, HOOKS_DENY_ALL, *categories) + + @_open_only + def disable_hook_categories(self, *categories): + """disable the given hook categories: + + - on HOOKS_DENY_ALL mode, ensure those categories are not enabled + - on HOOKS_ALLOW_ALL mode, ensure those categories are disabled + """ + changes = set() + self.pruned_hooks_cache.clear() + categories = set(categories) + if self.hooks_mode is HOOKS_DENY_ALL: + enabledcats = self.enabled_hook_cats + changes = enabledcats & categories + enabledcats -= changes # changes is small hence faster + else: + disabledcats = self.disabled_hook_cats + changes = categories - disabledcats + disabledcats |= changes # changes is small hence faster + return tuple(changes) + + @_open_only + def enable_hook_categories(self, *categories): + """enable the given hook categories: + + - on HOOKS_DENY_ALL mode, ensure those categories are enabled + - on HOOKS_ALLOW_ALL mode, ensure those categories are not disabled + """ + changes = set() + self.pruned_hooks_cache.clear() + categories = set(categories) + if self.hooks_mode is HOOKS_DENY_ALL: + enabledcats = self.enabled_hook_cats + changes = categories - enabledcats + enabledcats |= changes # changes is small hence faster + else: + disabledcats = self.disabled_hook_cats + changes = disabledcats & categories + disabledcats -= changes # changes is small hence faster + return tuple(changes) + + @_open_only + def is_hook_category_activated(self, category): + """return a boolean telling if the given category is currently activated + or not + """ + if self.hooks_mode is HOOKS_DENY_ALL: + return category in self.enabled_hook_cats + return category not in self.disabled_hook_cats + + @_open_only + def is_hook_activated(self, hook): + """return a boolean telling if the given hook class is currently + activated or not + """ + return self.is_hook_category_activated(hook.category) + + # Security management ##################################################### + + @_open_only + def security_enabled(self, read=None, write=None): + return _security_enabled(self, read=read, write=write) + + @property + @_open_only + def read_security(self): + return self._read_security + + @read_security.setter + @_open_only + def read_security(self, activated): + self._read_security = activated + + # undo support ############################################################ + + @_open_only + def ertype_supports_undo(self, ertype): + return self.undo_actions and ertype not in NO_UNDO_TYPES + + @_open_only + def transaction_uuid(self, set=True): + uuid = self.transaction_data.get('tx_uuid') + if set and uuid is None: + self.transaction_data['tx_uuid'] = uuid = text_type(uuid4().hex) + self.repo.system_source.start_undoable_transaction(self, uuid) + return uuid + + @_open_only + def transaction_inc_action_counter(self): + num = self.transaction_data.setdefault('tx_action_count', 0) + 1 + self.transaction_data['tx_action_count'] = num + return num + + # db-api like interface ################################################### + + @_open_only + def source_defs(self): + return self.repo.source_defs() + + @deprecated('[3.19] use .entity_metas(eid) instead') + @_open_only + def describe(self, eid, asdict=False): + """return 
a tuple (type, sourceuri, extid) for the entity with id """ + etype, extid, source = self.repo.type_and_source_from_eid(eid, self) + metas = {'type': etype, 'source': source, 'extid': extid} + if asdict: + metas['asource'] = metas['source'] # XXX pre 3.19 client compat + return metas + return etype, source, extid + + @_open_only + def entity_metas(self, eid): + """return a tuple (type, sourceuri, extid) for the entity with id """ + etype, extid, source = self.repo.type_and_source_from_eid(eid, self) + return {'type': etype, 'source': source, 'extid': extid} + + # core method ############################################################# + + @_open_only + def execute(self, rql, kwargs=None, build_descr=True): + """db-api like method directly linked to the querier execute method. + + See :meth:`cubicweb.dbapi.Cursor.execute` documentation. + """ + self._session_timestamp.touch() + rset = self._execute(self, rql, kwargs, build_descr) + rset.req = self + self._session_timestamp.touch() + return rset + + @_open_only + def rollback(self, free_cnxset=None, reset_pool=None): + """rollback the current transaction""" + if free_cnxset is not None: + warn('[3.21] free_cnxset is now unneeded', + DeprecationWarning, stacklevel=2) + if reset_pool is not None: + warn('[3.13] reset_pool is now unneeded', + DeprecationWarning, stacklevel=2) + cnxset = self.cnxset + assert cnxset is not None + try: + # by default, operations are executed with security turned off + with self.security_enabled(False, False): + while self.pending_operations: + try: + operation = self.pending_operations.pop(0) + operation.handle_event('rollback_event') + except BaseException: + self.critical('rollback error', exc_info=sys.exc_info()) + continue + cnxset.rollback() + self.debug('rollback for transaction %s done', self.connectionid) + finally: + self._session_timestamp.touch() + self.clear() + + @_open_only + def commit(self, free_cnxset=None, reset_pool=None): + """commit the current session's transaction""" + if free_cnxset is not None: + warn('[3.21] free_cnxset is now unneeded', + DeprecationWarning, stacklevel=2) + if reset_pool is not None: + warn('[3.13] reset_pool is now unneeded', + DeprecationWarning, stacklevel=2) + assert self.cnxset is not None + cstate = self.commit_state + if cstate == 'uncommitable': + raise QueryError('transaction must be rolled back') + if cstate == 'precommit': + self.warn('calling commit in precommit makes no sense; ignoring commit') + return + if cstate == 'postcommit': + self.critical('postcommit phase is not allowed to write to the db; ignoring commit') + return + assert cstate is None + # on rollback, an operation should have the following state + # information: + # - processed by the precommit/commit event or not + # - if processed, is it the failed operation + debug = server.DEBUG & server.DBG_OPS + try: + # by default, operations are executed with security turned off + with self.security_enabled(False, False): + processed = [] + self.commit_state = 'precommit' + if debug: + print(self.commit_state, '*' * 20) + try: + with self.running_hooks_ops(): + while self.pending_operations: + operation = self.pending_operations.pop(0) + operation.processed = 'precommit' + processed.append(operation) + if debug: + print(operation) + operation.handle_event('precommit_event') + self.pending_operations[:] = processed + self.debug('precommit transaction %s done', self.connectionid) + except BaseException: + # if error on [pre]commit: + # + # * set .failed = True on the operation causing the failure + # * 
call revert_event on processed operations + # * call rollback_event on *all* operations + # + # that seems more natural than not calling rollback_event + # for processed operations, and allow generic rollback + # instead of having to implements rollback, revertprecommit + # and revertcommit, that will be enough in mont case. + operation.failed = True + if debug: + print(self.commit_state, '*' * 20) + with self.running_hooks_ops(): + for operation in reversed(processed): + if debug: + print(operation) + try: + operation.handle_event('revertprecommit_event') + except BaseException: + self.critical('error while reverting precommit', + exc_info=True) + # XXX use slice notation since self.pending_operations is a + # read-only property. + self.pending_operations[:] = processed + self.pending_operations + self.rollback() + raise + self.cnxset.commit() + self.commit_state = 'postcommit' + if debug: + print(self.commit_state, '*' * 20) + with self.running_hooks_ops(): + while self.pending_operations: + operation = self.pending_operations.pop(0) + if debug: + print(operation) + operation.processed = 'postcommit' + try: + operation.handle_event('postcommit_event') + except BaseException: + self.critical('error while postcommit', + exc_info=sys.exc_info()) + self.debug('postcommit transaction %s done', self.connectionid) + return self.transaction_uuid(set=False) + finally: + self._session_timestamp.touch() + self.clear() + + # resource accessors ###################################################### + + @_open_only + def call_service(self, regid, **kwargs): + self.debug('calling service %s', regid) + service = self.vreg['services'].select(regid, self, **kwargs) + return service.call(**kwargs) + + @_open_only + def system_sql(self, sql, args=None, rollback_on_failure=True): + """return a sql cursor on the system database""" + source = self.repo.system_source + try: + return source.doexec(self, sql, args, rollback=rollback_on_failure) + except (source.OperationalError, source.InterfaceError): + if not rollback_on_failure: + raise + source.warning("trying to reconnect") + self.cnxset.reconnect() + return source.doexec(self, sql, args, rollback=rollback_on_failure) + + @_open_only + def rtype_eids_rdef(self, rtype, eidfrom, eidto): + # use type_and_source_from_eid instead of type_from_eid for optimization + # (avoid two extra methods call) + subjtype = self.repo.type_and_source_from_eid(eidfrom, self)[0] + objtype = self.repo.type_and_source_from_eid(eidto, self)[0] + return self.vreg.schema.rschema(rtype).rdefs[(subjtype, objtype)] + + +def cnx_attr(attr_name, writable=False): + """return a property to forward attribute access to connection. + + This is to be used by session""" + args = {} + @deprecated('[3.19] use a Connection object instead') + def attr_from_cnx(session): + return getattr(session._cnx, attr_name) + args['fget'] = attr_from_cnx + if writable: + @deprecated('[3.19] use a Connection object instead') + def write_attr(session, value): + return setattr(session._cnx, attr_name, value) + args['fset'] = write_attr + return property(**args) + + +class Timestamp(object): + + def __init__(self): + self.value = time() + + def touch(self): + self.value = time() + + def __float__(self): + return float(self.value) + + +class Session(object): + """Repository user session + + This ties all together: + * session id, + * user, + * other session data. 
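Putting Session and Connection together: a minimal repository-side sketch, assuming repo is a Repository instance and user a CWUser entity obtained elsewhere:

    # Sketch: new_cnx() hands out a Connection which must be used as a
    # context manager so that it gets a cnxset.
    session = Session(user, repo)
    with session.new_cnx() as cnx:
        rset = cnx.execute('Any X WHERE X is CWGroup')
        print(rset.rowcount)
        cnx.commit()   # __exit__ only rolls back whatever is left uncommitted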
+ """ + + def __init__(self, user, repo, cnxprops=None, _id=None): + self.sessionid = _id or make_uid(unormalize(user.login)) + self.user = user # XXX repoapi: deprecated and store only a login. + self.repo = repo + self.vreg = repo.vreg + self._timestamp = Timestamp() + self.data = {} + self.closed = False + + def close(self): + self.closed = True + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def __unicode__(self): + return '' % ( + unicode(self.user.login), self.sessionid, id(self)) + + @property + def timestamp(self): + return float(self._timestamp) + + @property + @deprecated('[3.19] session.id is deprecated, use session.sessionid') + def id(self): + return self.sessionid + + @property + def login(self): + return self.user.login + + def new_cnx(self): + """Return a new Connection object linked to the session + + The returned Connection will *not* be managed by the Session. + """ + return Connection(self) + + @deprecated('[3.19] use a Connection object instead') + def get_option_value(self, option, foreid=None): + if foreid is not None: + warn('[3.19] foreid argument is deprecated', DeprecationWarning, + stacklevel=2) + return self.repo.get_option_value(option) + + def _touch(self): + """update latest session usage timestamp and reset mode to read""" + self._timestamp.touch() + + local_perm_cache = cnx_attr('local_perm_cache') + @local_perm_cache.setter + def local_perm_cache(self, value): + #base class assign an empty dict:-( + assert value == {} + pass + + # deprecated ############################################################### + + @property + def anonymous_session(self): + # XXX for now, anonymous_user only exists in webconfig (and testconfig). + # It will only be present inside all-in-one instance. + # there is plan to move it down to global config. 
+ if not hasattr(self.repo.config, 'anonymous_user'): + # not a web or test config, no anonymous user + return False + return self.user.login == self.repo.config.anonymous_user()[0] + + @deprecated('[3.13] use getattr(session.rtype_eids_rdef(rtype, eidfrom, eidto), prop)') + def schema_rproperty(self, rtype, eidfrom, eidto, rprop): + return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + + + +class InternalManager(object): + """a manager user with all access rights used internally for task such as + bootstrapping the repository or creating regular users according to + repository content + """ + + def __init__(self, lang='en'): + self.eid = -1 + self.login = u'__internal_manager__' + self.properties = {} + self.groups = set(['managers']) + self.lang = lang + + def matching_groups(self, groups): + return 1 + + def is_in_group(self, group): + return True + + def owns(self, eid): + return True + + def property_value(self, key): + if key == 'ui.language': + return self.lang + return None + + def prefered_language(self, language=None): + # mock CWUser.prefered_language, mainly for testing purpose + return self.property_value('ui.language') + + # CWUser compat for notification ########################################### + + def name(self): + return 'cubicweb' + + class _IEmailable: + @staticmethod + def get_email(): + return '' + + def cw_adapt_to(self, iface): + if iface == 'IEmailable': + return self._IEmailable + return None + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(Session, getLogger('cubicweb.session')) +set_log_methods(Connection, getLogger('cubicweb.session')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sources/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sources/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,474 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
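InternalManager above mimics just enough of the CWUser API for bootstrap and maintenance code; a quick illustration of that contract:

    # Illustration of the InternalManager stub defined above.
    from cubicweb.server.session import InternalManager

    manager = InternalManager(lang='fr')
    assert manager.matching_groups(['users']) == 1
    assert manager.is_in_group('managers')
    assert manager.property_value('ui.language') == 'fr'
    assert manager.cw_adapt_to('IEmailable').get_email() == ''
    assert manager.cw_adapt_to('IWorkflowable') is None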
+"""cubicweb server sources support""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from time import time +from logging import getLogger +from base64 import b64decode + +from six import text_type + +from logilab.common import configuration +from logilab.common.textutils import unormalize +from logilab.common.deprecation import deprecated + +from yams.schema import role_name + +from cubicweb import ValidationError, set_log_methods, server +from cubicweb.server import SOURCE_TYPES +from cubicweb.server.edition import EditedEntity + + +def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'): + if server.DEBUG & server.DBG_RQL: + global t + print(' %s %s source: %s' % (prefix, uri, repr(union.as_string()))) + t = time() + if varmap: + print(' using varmap', varmap) + if server.DEBUG & server.DBG_MORE: + print(' args', repr(args)) + print(' cache key', cachekey) + print(' solutions', ','.join(str(s.solutions) + for s in union.children)) + # return true so it can be used as assertion (and so be killed by python -O) + return True + +def dbg_results(results): + if server.DEBUG & server.DBG_RQL: + if len(results) > 10: + print(' -->', results[:10], '...', len(results), end=' ') + else: + print(' -->', results, end=' ') + print('time: ', time() - t) + # return true so it can be used as assertion (and so be killed by python -O) + return True + + +class AbstractSource(object): + """an abstract class for sources""" + + # boolean telling if modification hooks should be called when something is + # modified in this source + should_call_hooks = True + # boolean telling if the repository should connect to this source during + # migration + connect_for_migration = True + + # mappings telling which entities and relations are available in the source + # keys are supported entity/relation types and values are boolean indicating + # wether the support is read-only (False) or read-write (True) + support_entities = {} + support_relations = {} + # a global identifier for this source, which has to be set by the source + # instance + uri = None + # a reference to the system information helper + repo = None + # a reference to the instance'schema (may differs from the source'schema) + schema = None + + # force deactivation (configuration error for instance) + disabled = False + + # boolean telling if cwuri of entities from this source is the url that + # should be used as entity's absolute url + use_cwuri_as_url = False + + # source configuration options + options = () + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + + def __init__(self, repo, source_config, eid=None): + self.repo = repo + self.set_schema(repo.schema) + self.support_relations['identity'] = False + self.eid = eid + self.public_config = source_config.copy() + self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url + self.remove_sensitive_information(self.public_config) + self.uri = source_config.pop('uri') + # unormalize to avoid non-ascii characters in logger's name, this will cause decoding error + # on logging + set_log_methods(self, getLogger('cubicweb.sources.' 
+ unormalize(text_type(self.uri)))) + source_config.pop('type') + self.update_config(None, self.check_conf_dict(eid, source_config, + fail_if_unknown=False)) + + def __repr__(self): + return '<%s %s source %s @%#x>' % (self.uri, self.__class__.__name__, + self.eid, id(self)) + + def __lt__(self, other): + """simple comparison function to get predictable source order, with the + system source at last + """ + if self.uri == other.uri: + return False + if self.uri == 'system': + return False + if other.uri == 'system': + return True + return self.uri < other.uri + + def __eq__(self, other): + return self.uri == other.uri + + def __ne__(self, other): + return not (self == other) + + def backup(self, backupfile, confirm, format='native'): + """method called to create a backup of source's data""" + pass + + def restore(self, backupfile, confirm, drop, format='native'): + """method called to restore a backup of source's data""" + pass + + @classmethod + def check_conf_dict(cls, eid, confdict, _=text_type, fail_if_unknown=True): + """check configuration of source entity. Return config dict properly + typed with defaults set. + """ + processed = {} + for optname, optdict in cls.options: + value = confdict.pop(optname, optdict.get('default')) + if value is configuration.REQUIRED: + if not fail_if_unknown: + continue + msg = _('specifying %s is mandatory' % optname) + raise ValidationError(eid, {role_name('config', 'subject'): msg}) + elif value is not None: + # type check + try: + value = configuration._validate(value, optdict, optname) + except Exception as ex: + msg = text_type(ex) # XXX internationalization + raise ValidationError(eid, {role_name('config', 'subject'): msg}) + processed[optname] = value + # cw < 3.10 bw compat + try: + processed['adapter'] = confdict['adapter'] + except KeyError: + pass + # check for unknown options + if confdict and tuple(confdict) != ('adapter',): + if fail_if_unknown: + msg = _('unknown options %s') % ', '.join(confdict) + raise ValidationError(eid, {role_name('config', 'subject'): msg}) + else: + logger = getLogger('cubicweb.sources') + logger.warning('unknown options %s', ', '.join(confdict)) + # add options to processed, they may be necessary during migration + processed.update(confdict) + return processed + + @classmethod + def check_config(cls, source_entity): + """check configuration of source entity""" + return cls.check_conf_dict(source_entity.eid, source_entity.host_config, + _=source_entity._cw._) + + def update_config(self, source_entity, typedconfig): + """update configuration from source entity. `typedconfig` is config + properly typed with defaults set + """ + if source_entity is not None: + self._entity_update(source_entity) + self.config = typedconfig + + def _entity_update(self, source_entity): + source_entity.complete() + if source_entity.url: + self.urls = [url.strip() for url in source_entity.url.splitlines() + if url.strip()] + else: + self.urls = [] + + @staticmethod + def decode_extid(extid): + if extid is None: + return extid + return b64decode(extid) + + # source initialization / finalization ##################################### + + def set_schema(self, schema): + """set the instance'schema""" + self.schema = schema + + def init_creating(self): + """method called by the repository once ready to create a new instance""" + pass + + def init(self, activated, source_entity): + """method called by the repository once ready to handle request. + `activated` is a boolean flag telling if the source is activated or not. 
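check_conf_dict above is what turns the raw strings stored in a CWSource configuration into typed values with defaults filled in. A hedged sketch of a source declaring two options and validating a config dict; the class name, option names and the 'time' conversion are illustrative:

    # Sketch only: "MyFeedSource" and its options are made up; the point is
    # the (name, optdict) layout consumed by check_conf_dict.
    from logilab.common import configuration
    from cubicweb.server.sources import AbstractSource

    class MyFeedSource(AbstractSource):
        options = (
            ('feed-url',
             {'type': 'string', 'default': configuration.REQUIRED,
              'help': 'URL of the remote feed to mirror'}),
            ('synchronization-interval',
             {'type': 'time', 'default': '5min',
              'help': 'interval between two synchronizations'}),
        )

    typed = MyFeedSource.check_conf_dict(
        None, {'feed-url': 'http://example.org/feed'})
    # typed now holds both options: the missing one taken from its default
    # and converted by the 'time' option type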
+ """ + if activated: + self._entity_update(source_entity) + + PUBLIC_KEYS = ('type', 'uri', 'use-cwuri-as-url') + def remove_sensitive_information(self, sourcedef): + """remove sensitive information such as login / password from source + definition + """ + for key in list(sourcedef): + if not key in self.PUBLIC_KEYS: + sourcedef.pop(key) + + # connections handling ##################################################### + + def get_connection(self): + """open and return a connection to the source""" + raise NotImplementedError(self) + + def close_source_connections(self): + for cnxset in self.repo.cnxsets: + cnxset.cu = None + cnxset.cnx.close() + + def open_source_connections(self): + for cnxset in self.repo.cnxsets: + cnxset.cnx = self.get_connection() + cnxset.cu = cnxset.cnx.cursor() + + # cache handling ########################################################### + + def reset_caches(self): + """method called during test to reset potential source caches""" + pass + + def clear_eid_cache(self, eid, etype): + """clear potential caches for the given eid""" + pass + + # external source api ###################################################### + + def support_entity(self, etype, write=False): + """return true if the given entity's type is handled by this adapter + if write is true, return true only if it's a RW support + """ + try: + wsupport = self.support_entities[etype] + except KeyError: + return False + if write: + return wsupport + return True + + def support_relation(self, rtype, write=False): + """return true if the given relation's type is handled by this adapter + if write is true, return true only if it's a RW support + + current implementation return true if the relation is defined into + `support_relations` or if it is a final relation of a supported entity + type + """ + try: + wsupport = self.support_relations[rtype] + except KeyError: + rschema = self.schema.rschema(rtype) + if not rschema.final or rschema.type == 'has_text': + return False + for etype in rschema.subjects(): + try: + wsupport = self.support_entities[etype] + break + except KeyError: + continue + else: + return False + if write: + return wsupport + return True + + def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams): + """called by the repository when an eid has been attributed for an + entity stored here but the entity has not been inserted in the system + table yet. + + This method must return the an Entity instance representation of this + entity. + """ + entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) + entity.eid = eid + entity.cw_edited = EditedEntity(entity) + return entity + + def after_entity_insertion(self, cnx, lid, entity, sourceparams): + """called by the repository after an entity stored here has been + inserted in the system table. 
+ """ + pass + + def _load_mapping(self, cnx, **kwargs): + if not 'CWSourceSchemaConfig' in self.schema: + self.warning('instance is not mapping ready') + return + for schemacfg in cnx.execute( + 'Any CFG,CFGO,S WHERE ' + 'CFG options CFGO, CFG cw_schema S, ' + 'CFG cw_for_source X, X eid %(x)s', {'x': self.eid}).entities(): + self.add_schema_config(schemacfg, **kwargs) + + def add_schema_config(self, schemacfg, checkonly=False): + """added CWSourceSchemaConfig, modify mapping accordingly""" + msg = schemacfg._cw._("this source doesn't use a mapping") + raise ValidationError(schemacfg.eid, {None: msg}) + + def del_schema_config(self, schemacfg, checkonly=False): + """deleted CWSourceSchemaConfig, modify mapping accordingly""" + msg = schemacfg._cw._("this source doesn't use a mapping") + raise ValidationError(schemacfg.eid, {None: msg}) + + def update_schema_config(self, schemacfg, checkonly=False): + """updated CWSourceSchemaConfig, modify mapping accordingly""" + self.del_schema_config(schemacfg, checkonly) + self.add_schema_config(schemacfg, checkonly) + + # user authentication api ################################################## + + def authenticate(self, cnx, login, **kwargs): + """if the source support CWUser entity type, it should implement + this method which should return CWUser eid for the given login/password + if this account is defined in this source and valid login / password is + given. Else raise `AuthenticationError` + """ + raise NotImplementedError(self) + + # RQL query api ############################################################ + + def syntax_tree_search(self, cnx, union, + args=None, cachekey=None, varmap=None, debug=0): + """return result from this source for a rql query (actually from a rql + syntax tree and a solution dictionary mapping each used variable to a + possible type). If cachekey is given, the query necessary to fetch the + results (but not the results themselves) may be cached using this key. 
+ """ + raise NotImplementedError(self) + + # write modification api ################################################### + # read-only sources don't have to implement methods below + + def get_extid(self, entity): + """return the external id for the given newly inserted entity""" + raise NotImplementedError(self) + + def add_entity(self, cnx, entity): + """add a new entity to the source""" + raise NotImplementedError(self) + + def update_entity(self, cnx, entity): + """update an entity in the source""" + raise NotImplementedError(self) + + def delete_entities(self, cnx, entities): + """delete several entities from the source""" + for entity in entities: + self.delete_entity(cnx, entity) + + def delete_entity(self, cnx, entity): + """delete an entity from the source""" + raise NotImplementedError(self) + + def add_relation(self, cnx, subject, rtype, object): + """add a relation to the source""" + raise NotImplementedError(self) + + def add_relations(self, cnx, rtype, subj_obj_list): + """add a relations to the source""" + # override in derived classes if you feel you can + # optimize + for subject, object in subj_obj_list: + self.add_relation(cnx, subject, rtype, object) + + def delete_relation(self, session, subject, rtype, object): + """delete a relation from the source""" + raise NotImplementedError(self) + + # system source interface ################################################# + + def eid_type_source(self, cnx, eid): + """return a tuple (type, extid, source) for the entity with id """ + raise NotImplementedError(self) + + def create_eid(self, cnx): + raise NotImplementedError(self) + + def add_info(self, cnx, entity, source, extid): + """add type and source info for an eid into the system table""" + raise NotImplementedError(self) + + def update_info(self, cnx, entity, need_fti_update): + """mark entity as being modified, fulltext reindex if needed""" + raise NotImplementedError(self) + + def index_entity(self, cnx, entity): + """create an operation to [re]index textual content of the given entity + on commit + """ + raise NotImplementedError(self) + + def fti_unindex_entities(self, cnx, entities): + """remove text content for entities from the full text index + """ + raise NotImplementedError(self) + + def fti_index_entities(self, cnx, entities): + """add text content of created/modified entities to the full text index + """ + raise NotImplementedError(self) + + # sql system source interface ############################################# + + def sqlexec(self, cnx, sql, args=None): + """execute the query and return its result""" + raise NotImplementedError(self) + + def create_index(self, cnx, table, column, unique=False): + raise NotImplementedError(self) + + def drop_index(self, cnx, table, column, unique=False): + raise NotImplementedError(self) + + + @deprecated('[3.13] use extid2eid(source, value, etype, cnx, **kwargs)') + def extid2eid(self, value, etype, cnx, **kwargs): + return self.repo.extid2eid(self, value, etype, cnx, **kwargs) + + + + +def source_adapter(source_type): + try: + return SOURCE_TYPES[source_type] + except KeyError: + raise RuntimeError('Unknown source type %r' % source_type) + +def get_source(type, source_config, repo, eid): + """return a source adapter according to the adapter field in the source's + configuration + """ + return source_adapter(type)(repo, source_config, eid) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sources/datafeed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sources/datafeed.py Sat Jan 16 13:48:51 
2016 +0100 @@ -0,0 +1,578 @@ +# copyright 2010-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""datafeed sources: copy data from an external data stream into the system +database +""" + +from io import BytesIO +from os.path import exists +from datetime import datetime, timedelta + +from six import text_type +from six.moves.urllib.parse import urlparse +from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor +from six.moves.urllib.error import HTTPError +from six.moves.http_cookiejar import CookieJar + +from pytz import utc +from lxml import etree + +from logilab.common.deprecation import deprecated + +from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid +from cubicweb.server.repository import preprocess_inlined_relations +from cubicweb.server.sources import AbstractSource +from cubicweb.appobject import AppObject + + +class DataFeedSource(AbstractSource): + use_cwuri_as_url = True + + options = ( + ('synchronize', + {'type' : 'yn', + 'default': True, + 'help': ('Is the repository responsible to automatically import ' + 'content from this source? ' + 'You should say yes unless you don\'t want this behaviour ' + 'or if you use a multiple repositories setup, in which ' + 'case you should say yes on one repository, no on others.'), + 'group': 'datafeed-source', 'level': 2, + }), + ('synchronization-interval', + {'type' : 'time', + 'default': '5min', + 'help': ('Interval in seconds between synchronization with the ' + 'external source (default to 5 minutes, must be >= 1 min).'), + 'group': 'datafeed-source', 'level': 2, + }), + ('max-lock-lifetime', + {'type' : 'time', + 'default': '1h', + 'help': ('Maximum time allowed for a synchronization to be run. ' + 'Exceeded that time, the synchronization will be considered ' + 'as having failed and not properly released the lock, hence ' + 'it won\'t be considered'), + 'group': 'datafeed-source', 'level': 2, + }), + ('delete-entities', + {'type' : 'yn', + 'default': False, + 'help': ('Should already imported entities not found anymore on the ' + 'external source be deleted?'), + 'group': 'datafeed-source', 'level': 2, + }), + ('logs-lifetime', + {'type': 'time', + 'default': '10d', + 'help': ('Time before logs from datafeed imports are deleted.'), + 'group': 'datafeed-source', 'level': 2, + }), + ('http-timeout', + {'type': 'time', + 'default': '1min', + 'help': ('Timeout of HTTP GET requests, when synchronizing a source.'), + 'group': 'datafeed-source', 'level': 2, + }), + ('use-cwuri-as-url', + {'type': 'yn', + 'default': None, # explicitly unset + 'help': ('Use cwuri (i.e. 
external URL) for link to the entity ' + 'instead of its local URL.'), + 'group': 'datafeed-source', 'level': 1, + }), + ) + + def check_config(self, source_entity): + """check configuration of source entity""" + typed_config = super(DataFeedSource, self).check_config(source_entity) + if typed_config['synchronization-interval'] < 60: + _ = source_entity._cw._ + msg = _('synchronization-interval must be greater than 1 minute') + raise ValidationError(source_entity.eid, {'config': msg}) + return typed_config + + def _entity_update(self, source_entity): + super(DataFeedSource, self)._entity_update(source_entity) + self.parser_id = source_entity.parser + self.latest_retrieval = source_entity.latest_retrieval + + def update_config(self, source_entity, typed_config): + """update configuration from source entity. `typed_config` is config + properly typed with defaults set + """ + super(DataFeedSource, self).update_config(source_entity, typed_config) + self.synchro_interval = timedelta(seconds=typed_config['synchronization-interval']) + self.max_lock_lifetime = timedelta(seconds=typed_config['max-lock-lifetime']) + self.http_timeout = typed_config['http-timeout'] + # if typed_config['use-cwuri-as-url'] is set, we have to update + # use_cwuri_as_url attribute and public configuration dictionary + # accordingly + if typed_config['use-cwuri-as-url'] is not None: + self.use_cwuri_as_url = typed_config['use-cwuri-as-url'] + self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url + + def init(self, activated, source_entity): + super(DataFeedSource, self).init(activated, source_entity) + self.parser_id = source_entity.parser + self.load_mapping(source_entity._cw) + + def _get_parser(self, cnx, **kwargs): + if self.parser_id is None: + self.warning('No parser defined on source %r', self) + raise ObjectNotFound() + return self.repo.vreg['parsers'].select( + self.parser_id, cnx, source=self, **kwargs) + + def load_mapping(self, cnx): + self.mapping = {} + self.mapping_idx = {} + try: + parser = self._get_parser(cnx) + except (RegistryNotFound, ObjectNotFound): + return # no parser yet, don't go further + self._load_mapping(cnx, parser=parser) + + def add_schema_config(self, schemacfg, checkonly=False, parser=None): + """added CWSourceSchemaConfig, modify mapping accordingly""" + if parser is None: + parser = self._get_parser(schemacfg._cw) + parser.add_schema_config(schemacfg, checkonly) + + def del_schema_config(self, schemacfg, checkonly=False, parser=None): + """deleted CWSourceSchemaConfig, modify mapping accordingly""" + if parser is None: + parser = self._get_parser(schemacfg._cw) + parser.del_schema_config(schemacfg, checkonly) + + def fresh(self): + if self.latest_retrieval is None: + return False + return datetime.now(tz=utc) < (self.latest_retrieval + self.synchro_interval) + + def update_latest_retrieval(self, cnx): + self.latest_retrieval = datetime.now(tz=utc) + cnx.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s', + {'x': self.eid, 'date': self.latest_retrieval}) + cnx.commit() + + def acquire_synchronization_lock(self, cnx): + # XXX race condition until WHERE of SET queries is executed using + # 'SELECT FOR UPDATE' + now = datetime.now(tz=utc) + if not cnx.execute( + 'SET X in_synchronization %(now)s WHERE X eid %(x)s, ' + 'X in_synchronization NULL OR X in_synchronization < %(maxdt)s', + {'x': self.eid, 'now': now, 'maxdt': now - self.max_lock_lifetime}): + self.error('concurrent synchronization detected, skip pull') + cnx.commit() + return False + cnx.commit() + return 
True + + def release_synchronization_lock(self, cnx): + cnx.execute('SET X in_synchronization NULL WHERE X eid %(x)s', + {'x': self.eid}) + cnx.commit() + + def pull_data(self, cnx, force=False, raise_on_error=False): + """Launch synchronization of the source if needed. + + This method is responsible to handle commit/rollback on the given + connection. + """ + if not force and self.fresh(): + return {} + if not self.acquire_synchronization_lock(cnx): + return {} + try: + return self._pull_data(cnx, force, raise_on_error) + finally: + cnx.rollback() # rollback first in case there is some dirty + # transaction remaining + self.release_synchronization_lock(cnx) + + def _pull_data(self, cnx, force=False, raise_on_error=False): + importlog = self.init_import_log(cnx) + myuris = self.source_cwuris(cnx) + try: + parser = self._get_parser(cnx, sourceuris=myuris, import_log=importlog) + except ObjectNotFound: + return {} + if self.process_urls(parser, self.urls, raise_on_error): + self.warning("some error occurred, don't attempt to delete entities") + else: + parser.handle_deletion(self.config, cnx, myuris) + self.update_latest_retrieval(cnx) + stats = parser.stats + if stats.get('created'): + importlog.record_info('added %s entities' % len(stats['created'])) + if stats.get('updated'): + importlog.record_info('updated %s entities' % len(stats['updated'])) + importlog.write_log(cnx, end_timestamp=self.latest_retrieval) + cnx.commit() + return stats + + def process_urls(self, parser, urls, raise_on_error=False): + error = False + for url in urls: + self.info('pulling data from %s', url) + try: + if parser.process(url, raise_on_error): + error = True + except IOError as exc: + if raise_on_error: + raise + parser.import_log.record_error( + 'could not pull data while processing %s: %s' + % (url, exc)) + error = True + except Exception as exc: + if raise_on_error: + raise + self.exception('error while processing %s: %s', + url, exc) + error = True + return error + + @deprecated('[3.21] use the new store API') + def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams): + """called by the repository when an eid has been attributed for an + entity stored here but the entity has not been inserted in the system + table yet. + + This method must return the an Entity instance representation of this + entity. + """ + entity = super(DataFeedSource, self).before_entity_insertion( + cnx, lid, etype, eid, sourceparams) + entity.cw_edited['cwuri'] = lid.decode('utf-8') + entity.cw_edited.set_defaults() + sourceparams['parser'].before_entity_copy(entity, sourceparams) + return entity + + @deprecated('[3.21] use the new store API') + def after_entity_insertion(self, cnx, lid, entity, sourceparams): + """called by the repository after an entity stored here has been + inserted in the system table. 
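# Illustrative sketch (hypothetical, not from this changeset): pull_data()
# above is the one-shot synchronization entry point; it honours the
# synchronization interval unless force=True, takes the synchronization lock
# and handles commit/rollback itself. Assuming an existing repository
# connection `cnx` and a datafeed source named 'mydatafeed':
source = cnx.repo.sources_by_uri['mydatafeed']
stats = source.pull_data(cnx, force=True, raise_on_error=False)
print('created %s, updated %s' % (len(stats.get('created', ())),
                                  len(stats.get('updated', ()))))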
+ """ + relations = preprocess_inlined_relations(cnx, entity) + if cnx.is_hook_category_activated('integrity'): + entity.cw_edited.check(creation=True) + self.repo.system_source.add_entity(cnx, entity) + entity.cw_edited.saved = entity._cw_is_saved = True + sourceparams['parser'].after_entity_copy(entity, sourceparams) + # call hooks for inlined relations + call_hooks = self.repo.hm.call_hooks + if self.should_call_hooks: + for attr, value in relations: + call_hooks('before_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + call_hooks('after_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + + def source_cwuris(self, cnx): + sql = ('SELECT extid, eid, type FROM entities, cw_source_relation ' + 'WHERE entities.eid=cw_source_relation.eid_from ' + 'AND cw_source_relation.eid_to=%s' % self.eid) + return dict((self.decode_extid(uri), (eid, type)) + for uri, eid, type in cnx.system_sql(sql).fetchall()) + + def init_import_log(self, cnx, **kwargs): + dataimport = cnx.create_entity('CWDataImport', cw_import_of=self, + start_timestamp=datetime.now(tz=utc), + **kwargs) + dataimport.init() + return dataimport + + +class DataFeedParser(AppObject): + __registry__ = 'parsers' + + def __init__(self, cnx, source, sourceuris=None, import_log=None, **kwargs): + super(DataFeedParser, self).__init__(cnx, **kwargs) + self.source = source + self.sourceuris = sourceuris + self.import_log = import_log + self.stats = {'created': set(), 'updated': set(), 'checked': set()} + + def normalize_url(self, url): + """Normalize an url by looking if there is a replacement for it in + `cubicweb.sobjects.URL_MAPPING`. + + This dictionary allow to redirect from one host to another, which may be + useful for example in case of test instance using production data, while + you don't want to load the external source nor to hack your `/etc/hosts` + file. + """ + # local import mandatory, it's available after registration + from cubicweb.sobjects import URL_MAPPING + for mappedurl in URL_MAPPING: + if url.startswith(mappedurl): + return url.replace(mappedurl, URL_MAPPING[mappedurl], 1) + return url + + def retrieve_url(self, url): + """Return stream linked by the given url: + * HTTP urls will be normalized (see :meth:`normalize_url`) + * handle file:// URL + * other will be considered as plain content, useful for testing purpose + + For http URLs, it will try to find a cwclientlib config entry + (if available) and use it as requester. 
+ """ + purl = urlparse(url) + if purl.scheme == 'file': + return URLLibResponseAdapter(open(url[7:]), url) + + url = self.normalize_url(url) + + # first, try to use cwclientlib if it's available and if the + # url matches a configuration entry in ~/.config/cwclientlibrc + try: + from cwclientlib import cwproxy_for + # parse url again since it has been normalized + cnx = cwproxy_for(url) + cnx.timeout = self.source.http_timeout + self.source.info('Using cwclientlib for %s' % url) + resp = cnx.get(url) + resp.raise_for_status() + return URLLibResponseAdapter(BytesIO(resp.text), url) + except (ImportError, ValueError, EnvironmentError) as exc: + # ImportError: not available + # ValueError: no config entry found + # EnvironmentError: no cwclientlib config file found + self.source.debug(str(exc)) + + # no chance with cwclientlib, fall back to former implementation + if purl.scheme in ('http', 'https'): + self.source.info('GET %s', url) + req = Request(url) + return _OPENER.open(req, timeout=self.source.http_timeout) + + # url is probably plain content + return URLLibResponseAdapter(BytesIO(url.encode('ascii')), url) + + def add_schema_config(self, schemacfg, checkonly=False): + """added CWSourceSchemaConfig, modify mapping accordingly""" + msg = schemacfg._cw._("this parser doesn't use a mapping") + raise ValidationError(schemacfg.eid, {None: msg}) + + def del_schema_config(self, schemacfg, checkonly=False): + """deleted CWSourceSchemaConfig, modify mapping accordingly""" + msg = schemacfg._cw._("this parser doesn't use a mapping") + raise ValidationError(schemacfg.eid, {None: msg}) + + @deprecated('[3.21] use the new store API') + def extid2entity(self, uri, etype, **sourceparams): + """Return an entity for the given uri. May return None if it should be + skipped. + + If a `raise_on_error` keyword parameter is passed, a ValidationError + exception may be raised. 
+ """ + raise_on_error = sourceparams.pop('raise_on_error', False) + cnx = self._cw + # if cwsource is specified and repository has a source with the same + # name, call extid2eid on that source so entity will be properly seen as + # coming from this source + source_uri = sourceparams.pop('cwsource', None) + if source_uri is not None and source_uri != 'system': + source = cnx.repo.sources_by_uri.get(source_uri, self.source) + else: + source = self.source + sourceparams['parser'] = self + if isinstance(uri, text_type): + uri = uri.encode('utf-8') + try: + eid = cnx.repo.extid2eid(source, uri, etype, cnx, + sourceparams=sourceparams) + except ValidationError as ex: + if raise_on_error: + raise + self.source.critical('error while creating %s: %s', etype, ex) + self.import_log.record_error('error while creating %s: %s' + % (etype, ex)) + return None + if eid < 0: + # entity has been moved away from its original source + # + # Don't give etype to entity_from_eid so we get UnknownEid if the + # entity has been removed + try: + entity = cnx.entity_from_eid(-eid) + except UnknownEid: + return None + self.notify_updated(entity) # avoid later update from the source's data + return entity + if self.sourceuris is not None: + self.sourceuris.pop(str(uri), None) + return cnx.entity_from_eid(eid, etype) + + def process(self, url, raise_on_error=False): + """main callback: process the url""" + raise NotImplementedError + + @deprecated('[3.21] use the new store API') + def before_entity_copy(self, entity, sourceparams): + raise NotImplementedError + + @deprecated('[3.21] use the new store API') + def after_entity_copy(self, entity, sourceparams): + self.stats['created'].add(entity.eid) + + def created_during_pull(self, entity): + return entity.eid in self.stats['created'] + + def updated_during_pull(self, entity): + return entity.eid in self.stats['updated'] + + def notify_updated(self, entity): + return self.stats['updated'].add(entity.eid) + + def notify_checked(self, entity): + return self.stats['checked'].add(entity.eid) + + def is_deleted(self, extid, etype, eid): + """return True if the entity of given external id, entity type and eid + is actually deleted. Always return True by default, put more sensible + stuff in sub-classes. 
+ """ + return True + + def handle_deletion(self, config, cnx, myuris): + if config['delete-entities'] and myuris: + byetype = {} + for extid, (eid, etype) in myuris.items(): + if self.is_deleted(extid, etype, eid): + byetype.setdefault(etype, []).append(str(eid)) + for etype, eids in byetype.items(): + self.warning('delete %s %s entities', len(eids), etype) + cnx.execute('DELETE %s X WHERE X eid IN (%s)' + % (etype, ','.join(eids))) + cnx.commit() + + def update_if_necessary(self, entity, attrs): + entity.complete(tuple(attrs)) + # check modification date and compare attribute values to only update + # what's actually needed + self.notify_checked(entity) + mdate = attrs.get('modification_date') + if not mdate or mdate > entity.modification_date: + attrs = dict( (k, v) for k, v in attrs.items() + if v != getattr(entity, k)) + if attrs: + entity.cw_set(**attrs) + self.notify_updated(entity) + + +class DataFeedXMLParser(DataFeedParser): + + @deprecated() + def process(self, url, raise_on_error=False): + """IDataFeedParser main entry point""" + try: + parsed = self.parse(url) + except Exception as ex: + if raise_on_error: + raise + self.import_log.record_error(str(ex)) + return True + error = False + commit = self._cw.commit + rollback = self._cw.rollback + for args in parsed: + try: + self.process_item(*args, raise_on_error=raise_on_error) + # commit+set_cnxset instead of commit(free_cnxset=False) to let + # other a chance to get our connections set + commit() + except ValidationError as exc: + if raise_on_error: + raise + self.source.error('Skipping %s because of validation error %s' + % (args, exc)) + rollback() + error = True + return error + + def parse(self, url): + stream = self.retrieve_url(url) + return self.parse_etree(etree.parse(stream).getroot()) + + def parse_etree(self, document): + return [(document,)] + + def process_item(self, *args, **kwargs): + raise NotImplementedError + + def is_deleted(self, extid, etype, eid): + if extid.startswith('file://'): + return exists(extid[7:]) + + url = self.normalize_url(extid) + # first, try to use cwclientlib if it's available and if the + # url matches a configuration entry in ~/.config/cwclientlibrc + try: + from cwclientlib import cwproxy_for + # parse url again since it has been normalized + cnx = cwproxy_for(url) + cnx.timeout = self.source.http_timeout + self.source.info('Using cwclientlib for checking %s' % url) + return cnx.get(url).status_code == 404 + except (ImportError, ValueError, EnvironmentError) as exc: + # ImportError: not available + # ValueError: no config entry found + # EnvironmentError: no cwclientlib config file found + self.source.debug(str(exc)) + + # no chance with cwclientlib, fall back to former implementation + if urlparse(url).scheme in ('http', 'https'): + try: + _OPENER.open(url, timeout=self.source.http_timeout) + except HTTPError as ex: + if ex.code == 404: + return True + return False + + +class URLLibResponseAdapter(object): + """Thin wrapper to be used to fake a value returned by urllib2.urlopen""" + def __init__(self, stream, url, code=200): + self._stream = stream + self._url = url + self.code = code + + def read(self, *args): + return self._stream.read(*args) + + def geturl(self): + return self._url + + def getcode(self): + return self.code + + +# use a cookie enabled opener to use session cookie if any +_OPENER = build_opener() +try: + from logilab.common import urllib2ext + _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler()) +except ImportError: # python-kerberos not available + pass 
+_OPENER.add_handler(HTTPCookieProcessor(CookieJar())) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sources/ldapfeed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sources/ldapfeed.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,358 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb ldap feed source""" + +from __future__ import division # XXX why? + +from datetime import datetime + +from six import PY2, string_types + +import ldap3 + +from logilab.common.configuration import merge_options + +from cubicweb import ValidationError, AuthenticationError, Binary +from cubicweb.server import utils +from cubicweb.server.sources import datafeed + +from cubicweb import _ + +# search scopes +BASE = ldap3.SEARCH_SCOPE_BASE_OBJECT +ONELEVEL = ldap3.SEARCH_SCOPE_SINGLE_LEVEL +SUBTREE = ldap3.SEARCH_SCOPE_WHOLE_SUBTREE +LDAP_SCOPES = {'BASE': BASE, + 'ONELEVEL': ONELEVEL, + 'SUBTREE': SUBTREE} + +# map ldap protocol to their standard port +PROTO_PORT = {'ldap': 389, + 'ldaps': 636, + 'ldapi': None, + } + + +def replace_filter(s): + s = s.replace('*', '\\2A') + s = s.replace('(', '\\28') + s = s.replace(')', '\\29') + s = s.replace('\\', '\\5c') + s = s.replace('\0', '\\00') + return s + + +class LDAPFeedSource(datafeed.DataFeedSource): + """LDAP feed source: unlike ldapuser source, this source is copy based and + will import ldap content (beside passwords for authentication) into the + system source. + """ + support_entities = {'CWUser': False} + use_cwuri_as_url = False + + options = ( + ('auth-mode', + {'type' : 'choice', + 'default': 'simple', + 'choices': ('simple', 'digest_md5', 'gssapi'), + 'help': 'authentication mode used to authenticate user to the ldap.', + 'group': 'ldap-source', 'level': 3, + }), + ('auth-realm', + {'type' : 'string', + 'default': None, + 'help': 'realm to use when using gssapi/kerberos authentication.', + 'group': 'ldap-source', 'level': 3, + }), + + ('data-cnx-dn', + {'type' : 'string', + 'default': '', + 'help': 'user dn to use to open data connection to the ldap (eg used \ +to respond to rql queries). Leave empty for anonymous bind', + 'group': 'ldap-source', 'level': 1, + }), + ('data-cnx-password', + {'type' : 'string', + 'default': '', + 'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries). 
Leave empty for anonymous bind.', + 'group': 'ldap-source', 'level': 1, + }), + + ('user-base-dn', + {'type' : 'string', + 'default': '', + 'help': 'base DN to lookup for users; disable user importation mechanism if unset', + 'group': 'ldap-source', 'level': 1, + }), + ('user-scope', + {'type' : 'choice', + 'default': 'ONELEVEL', + 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), + 'help': 'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")', + 'group': 'ldap-source', 'level': 1, + }), + ('user-classes', + {'type' : 'csv', + 'default': ('top', 'posixAccount'), + 'help': 'classes of user (with Active Directory, you want to say "user" here)', + 'group': 'ldap-source', 'level': 1, + }), + ('user-filter', + {'type': 'string', + 'default': '', + 'help': 'additional filters to be set in the ldap query to find valid users', + 'group': 'ldap-source', 'level': 2, + }), + ('user-login-attr', + {'type' : 'string', + 'default': 'uid', + 'help': 'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)', + 'group': 'ldap-source', 'level': 1, + }), + ('user-default-group', + {'type' : 'csv', + 'default': ('users',), + 'help': 'name of a group in which ldap users will be by default. \ +You can set multiple groups by separating them by a comma.', + 'group': 'ldap-source', 'level': 1, + }), + ('user-attrs-map', + {'type' : 'named', + 'default': {'uid': 'login'}, + 'help': 'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)', + 'group': 'ldap-source', 'level': 1, + }), + ('group-base-dn', + {'type' : 'string', + 'default': '', + 'help': 'base DN to lookup for groups; disable group importation mechanism if unset', + 'group': 'ldap-source', 'level': 1, + }), + ('group-scope', + {'type' : 'choice', + 'default': 'ONELEVEL', + 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), + 'help': 'group search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")', + 'group': 'ldap-source', 'level': 1, + }), + ('group-classes', + {'type' : 'csv', + 'default': ('top', 'posixGroup'), + 'help': 'classes of group', + 'group': 'ldap-source', 'level': 1, + }), + ('group-filter', + {'type': 'string', + 'default': '', + 'help': 'additional filters to be set in the ldap query to find valid groups', + 'group': 'ldap-source', 'level': 2, + }), + ('group-attrs-map', + {'type' : 'named', + 'default': {'cn': 'name', 'memberUid': 'member'}, + 'help': 'map from ldap group attributes to cubicweb attributes', + 'group': 'ldap-source', 'level': 1, + }), + ) + + options = merge_options(datafeed.DataFeedSource.options + options, + optgroup='ldap-source',) + + _conn = None + + def update_config(self, source_entity, typedconfig): + """update configuration from source entity. 
`typedconfig` is config + properly typed with defaults set + """ + super(LDAPFeedSource, self).update_config(source_entity, typedconfig) + self.authmode = typedconfig['auth-mode'] + self._authenticate = getattr(self, '_auth_%s' % self.authmode) + self.cnx_dn = typedconfig['data-cnx-dn'] + self.cnx_pwd = typedconfig['data-cnx-password'] + self.user_base_dn = str(typedconfig['user-base-dn']) + self.user_base_scope = globals()[typedconfig['user-scope']] + self.user_login_attr = typedconfig['user-login-attr'] + self.user_default_groups = typedconfig['user-default-group'] + self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} + self.user_attrs.update(typedconfig['user-attrs-map']) + self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.items()) + self.base_filters = ['(objectclass=%s)' % replace_filter(o) + for o in typedconfig['user-classes']] + if typedconfig['user-filter']: + self.base_filters.append(typedconfig['user-filter']) + self.group_base_dn = str(typedconfig['group-base-dn']) + self.group_base_scope = LDAP_SCOPES[typedconfig['group-scope']] + self.group_attrs = typedconfig['group-attrs-map'] + self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} + self.group_attrs.update(typedconfig['group-attrs-map']) + self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.items()) + self.group_base_filters = ['(objectClass=%s)' % replace_filter(o) + for o in typedconfig['group-classes']] + if typedconfig['group-filter']: + self.group_base_filters.append(typedconfig['group-filter']) + self._conn = None + + def _entity_update(self, source_entity): + super(LDAPFeedSource, self)._entity_update(source_entity) + if self.urls: + if len(self.urls) > 1: + raise ValidationError(source_entity.eid, {'url': _('can only have one url')}) + try: + protocol, hostport = self.urls[0].split('://') + except ValueError: + raise ValidationError(source_entity.eid, {'url': _('badly formatted url')}) + if protocol not in PROTO_PORT: + raise ValidationError(source_entity.eid, {'url': _('unsupported protocol')}) + + def connection_info(self): + assert len(self.urls) == 1, self.urls + protocol, hostport = self.urls[0].split('://') + if protocol != 'ldapi' and ':' in hostport: + host, port = hostport.rsplit(':', 1) + else: + host, port = hostport, PROTO_PORT[protocol] + return protocol, host, port + + def authenticate(self, cnx, login, password=None, **kwargs): + """return CWUser eid for the given login/password if this account is + defined in this source, else raise `AuthenticationError` + + two queries are needed since passwords are stored crypted, so we have + to fetch the salt first + """ + self.info('ldap authenticate %s', login) + if not password: + # On Windows + ADAM this would have succeeded (!!!) + # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'. 
+ # we really really don't want that + raise AuthenticationError() + searchfilter = ['(%s=%s)' % (replace_filter(self.user_login_attr), replace_filter(login))] + searchfilter.extend(self.base_filters) + searchstr = '(&%s)' % ''.join(searchfilter) + # first search the user + try: + user = self._search(cnx, self.user_base_dn, + self.user_base_scope, searchstr)[0] + except IndexError: + # no such user + raise AuthenticationError() + # check password by establishing a (unused) connection + try: + self._connect(user, password) + except ldap3.LDAPException as ex: + # Something went wrong, most likely bad credentials + self.info('while trying to authenticate %s: %s', user, ex) + raise AuthenticationError() + except Exception: + self.error('while trying to authenticate %s', user, exc_info=True) + raise AuthenticationError() + eid = self.repo.system_source.extid2eid(cnx, user['dn'].encode('ascii')) + if eid is None or eid < 0: + # user is not known or has been moved away from this source + raise AuthenticationError() + return eid + + def _connect(self, user=None, userpwd=None): + protocol, host, port = self.connection_info() + self.info('connecting %s://%s:%s as %s', protocol, host, port, + user and user['dn'] or 'anonymous') + server = ldap3.Server(host, port=int(port)) + conn = ldap3.Connection(server, user=user and user['dn'], client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE, auto_referrals=False) + # Now bind with the credentials given. Let exceptions propagate out. + if user is None: + # XXX always use simple bind for data connection + if not self.cnx_dn: + conn.bind() + else: + self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd) + else: + # user specified, we want to check user/password, no need to return + # the connection which will be thrown out + self._authenticate(conn, user, userpwd) + return conn + + def _auth_simple(self, conn, user, userpwd): + conn.authentication = ldap3.AUTH_SIMPLE + conn.user = user['dn'] + conn.password = userpwd + conn.bind() + + def _auth_digest_md5(self, conn, user, userpwd): + conn.authentication = ldap3.AUTH_SASL + conn.sasl_mechanism = 'DIGEST-MD5' + # realm, user, password, authz-id + conn.sasl_credentials = (None, user['dn'], userpwd, None) + conn.bind() + + def _auth_gssapi(self, conn, user, userpwd): + conn.authentication = ldap3.AUTH_SASL + conn.sasl_mechanism = 'GSSAPI' + conn.bind() + + def _search(self, cnx, base, scope, + searchstr='(objectClass=*)', attrs=()): + """make an ldap query""" + self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, + searchstr, list(attrs)) + if self._conn is None: + self._conn = self._connect() + ldapcnx = self._conn + if not ldapcnx.search(base, searchstr, search_scope=scope, attributes=attrs): + return [] + result = [] + for rec in ldapcnx.response: + if rec['type'] != 'searchResEntry': + continue + items = rec['attributes'].items() + itemdict = self._process_ldap_item(rec['dn'], items) + result.append(itemdict) + self.debug('ldap built results %s', len(result)) + return result + + def _process_ldap_item(self, dn, iterator): + """Turn an ldap received item into a proper dict.""" + itemdict = {'dn': dn} + for key, value in iterator: + if self.user_attrs.get(key) == 'upassword': # XXx better password detection + value = value[0].encode('utf-8') + # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py + if not value.startswith(b'{SSHA}'): + value = utils.crypt_password(value) + itemdict[key] = Binary(value) + elif self.user_attrs.get(key) == 'modification_date': + itemdict[key] = 
datetime.strptime(value[0], '%Y%m%d%H%M%SZ') + else: + if PY2 and value and isinstance(value[0], str): + value = [unicode(val, 'utf-8', 'replace') for val in value] + if len(value) == 1: + itemdict[key] = value = value[0] + else: + itemdict[key] = value + # we expect memberUid to be a list of user ids, make sure of it + member = self.group_rev_attrs['member'] + if isinstance(itemdict.get(member), string_types): + itemdict[member] = [itemdict[member]] + return itemdict + + def _process_no_such_object(self, cnx, dn): + """Some search return NO_SUCH_OBJECT error, handle this (usually because + an object whose dn is no more existent in ldap as been encountered). + + Do nothing by default, let sub-classes handle that. + """ diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sources/native.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sources/native.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1813 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Adapters for native cubicweb sources. + +Notes: +* extid (aka external id, the primary key of an entity in the external source + from which it comes from) are stored in a varchar column encoded as a base64 + string. This is because it should actually be Bytes but we want an index on + it for fast querying. 
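# Illustrative sketch (hypothetical, not from this changeset): as the note
# above says, external ids are stored base64-encoded so that they fit in an
# indexable varchar column. The round trip is therefore plain base64 (the
# extid value is made up):
from base64 import b64encode, b64decode

extid = b'ldap://directory/uid=jdoe,ou=people'
stored = b64encode(extid)            # what ends up in the varchar column
assert b64decode(stored) == extid    # what AbstractSource.decode_extid() returns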
+""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +from threading import Lock +from datetime import datetime +from base64 import b64encode +from contextlib import contextmanager +from os.path import basename +import re +import itertools +import zipfile +import logging +import sys + +from six import PY2, text_type, binary_type, string_types +from six.moves import range, cPickle as pickle + +from logilab.common.decorators import cached, clear_cache +from logilab.common.configuration import Method +from logilab.common.shellutils import getlogin +from logilab.database import get_db_helper, sqlgen + +from yams.schema import role_name + +from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary, + UniqueTogetherError, UndoTransactionException, ViolatedConstraint) +from cubicweb import transaction as tx, server, neg_role +from cubicweb.utils import QueryCache +from cubicweb.schema import VIRTUAL_RTYPES +from cubicweb.cwconfig import CubicWebNoAppConfiguration +from cubicweb.server import hook +from cubicweb.server import schema2sql as y2sql +from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update +from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn +from cubicweb.server.rqlannotation import set_qdata +from cubicweb.server.hook import CleanupDeletedEidsCacheOp +from cubicweb.server.edition import EditedEntity +from cubicweb.server.sources import AbstractSource, dbg_st_search, dbg_results +from cubicweb.server.sources.rql2sql import SQLGenerator +from cubicweb.statsd_logger import statsd_timeit + + +ATTR_MAP = {} +NONSYSTEM_ETYPES = set() +NONSYSTEM_RELATIONS = set() + +class LogCursor(object): + def __init__(self, cursor): + self.cu = cursor + + def execute(self, query, args=None): + """Execute a query. + it's a function just so that it shows up in profiling + """ + if server.DEBUG & server.DBG_SQL: + print('exec', query, args) + try: + self.cu.execute(str(query), args) + except Exception as ex: + print("sql: %r\n args: %s\ndbms message: %r" % ( + query, args, ex.args[0])) + raise + + def fetchall(self): + return self.cu.fetchall() + + def fetchone(self): + return self.cu.fetchone() + + +def sql_or_clauses(sql, clauses): + select, restr = sql.split(' WHERE ', 1) + restrclauses = restr.split(' AND ') + for clause in clauses: + restrclauses.remove(clause) + if restrclauses: + restr = '%s AND (%s)' % (' AND '.join(restrclauses), + ' OR '.join(clauses)) + else: + restr = '(%s)' % ' OR '.join(clauses) + return '%s WHERE %s' % (select, restr) + + +def rdef_table_column(rdef): + """return table and column used to store the given relation definition in + the database + """ + return (SQL_PREFIX + str(rdef.subject), + SQL_PREFIX + str(rdef.rtype)) + + +def rdef_physical_info(dbhelper, rdef): + """return backend type and a boolean flag if NULL values should be allowed + for a given relation definition + """ + if not rdef.object.final: + return dbhelper.TYPE_MAPPING['Int'] + coltype = y2sql.type_from_rdef(dbhelper, rdef, creating=False) + allownull = rdef.cardinality[0] != '1' + return coltype, allownull + + +class _UndoException(Exception): + """something went wrong during undoing""" + + def __unicode__(self): + """Called by the unicode builtin; should return a Unicode object + + Type of _UndoException message must be `unicode` by design in CubicWeb. 
+ """ + assert isinstance(self.args[0], text_type) + return self.args[0] + + +def _undo_check_relation_target(tentity, rdef, role): + """check linked entity has not been redirected for this relation""" + card = rdef.role_cardinality(role) + if card in '?1' and tentity.related(rdef.rtype, role): + raise _UndoException(tentity._cw._( + "Can't restore %(role)s relation %(rtype)s to entity %(eid)s which " + "is already linked using this relation.") + % {'role': neg_role(role), + 'rtype': rdef.rtype, + 'eid': tentity.eid}) + +def _undo_rel_info(cnx, subj, rtype, obj): + entities = [] + for role, eid in (('subject', subj), ('object', obj)): + try: + entities.append(cnx.entity_from_eid(eid)) + except UnknownEid: + raise _UndoException(cnx._( + "Can't restore relation %(rtype)s, %(role)s entity %(eid)s" + " doesn't exist anymore.") + % {'role': cnx._(role), + 'rtype': cnx._(rtype), + 'eid': eid}) + sentity, oentity = entities + try: + rschema = cnx.vreg.schema.rschema(rtype) + rdef = rschema.rdefs[(sentity.cw_etype, oentity.cw_etype)] + except KeyError: + raise _UndoException(cnx._( + "Can't restore relation %(rtype)s between %(subj)s and " + "%(obj)s, that relation does not exists anymore in the " + "schema.") + % {'rtype': cnx._(rtype), + 'subj': subj, + 'obj': obj}) + return sentity, oentity, rdef + +def _undo_has_later_transaction(cnx, eid): + return cnx.system_sql('''\ +SELECT T.tx_uuid FROM transactions AS TREF, transactions AS T +WHERE TREF.tx_uuid='%(txuuid)s' AND T.tx_uuid!='%(txuuid)s' +AND T.tx_time>=TREF.tx_time +AND (EXISTS(SELECT 1 FROM tx_entity_actions AS TEA + WHERE TEA.tx_uuid=T.tx_uuid AND TEA.eid=%(eid)s) + OR EXISTS(SELECT 1 FROM tx_relation_actions as TRA + WHERE TRA.tx_uuid=T.tx_uuid AND ( + TRA.eid_from=%(eid)s OR TRA.eid_to=%(eid)s)) + )''' % {'txuuid': cnx.transaction_data['undoing_uuid'], + 'eid': eid}).fetchone() + + +class DefaultEidGenerator(object): + __slots__ = ('source', 'cnx', 'lock') + + def __init__(self, source): + self.source = source + self.cnx = None + self.lock = Lock() + + def close(self): + if self.cnx: + self.cnx.close() + self.cnx = None + + def create_eid(self, _cnx, count=1): + # lock needed to prevent 'Connection is busy with results for another + # command (0)' errors with SQLServer + assert count > 0 + with self.lock: + return self._create_eid(count) + + def _create_eid(self, count): + # internal function doing the eid creation without locking. + # needed for the recursive handling of disconnections (otherwise we + # deadlock on self._eid_cnx_lock + source = self.source + if self.cnx is None: + self.cnx = source.get_connection() + cnx = self.cnx + try: + cursor = cnx.cursor() + for sql in source.dbhelper.sqls_increment_numrange('entities_id_seq', count): + cursor.execute(sql) + eid = cursor.fetchone()[0] + except (source.OperationalError, source.InterfaceError): + # FIXME: better detection of deconnection pb + source.warning("trying to reconnect create eid connection") + self.cnx = None + return self._create_eid(count) + except source.DbapiError as exc: + # We get this one with pyodbc and SQL Server when connection was reset + if exc.args[0] == '08S01': + source.warning("trying to reconnect create eid connection") + self.cnx = None + return self._create_eid(count) + else: + raise + except Exception: # WTF? 
+ cnx.rollback() + self.cnx = None + source.exception('create eid failed in an unforeseen way on SQL statement %s', sql) + raise + else: + cnx.commit() + return eid + + +class SQLITEEidGenerator(object): + __slots__ = ('source', 'lock') + + def __init__(self, source): + self.source = source + self.lock = Lock() + + def close(self): + pass + + def create_eid(self, cnx, count=1): + assert count > 0 + source = self.source + with self.lock: + for sql in source.dbhelper.sqls_increment_numrange('entities_id_seq', count): + cursor = source.doexec(cnx, sql) + return cursor.fetchone()[0] + + +class NativeSQLSource(SQLAdapterMixIn, AbstractSource): + """adapter for source using the native cubicweb schema (see below) + """ + sqlgen_class = SQLGenerator + options = ( + ('db-driver', + {'type' : 'string', + 'default': 'postgres', + # XXX use choice type + 'help': 'database driver (postgres, sqlite, sqlserver2005)', + 'group': 'native-source', 'level': 0, + }), + ('db-host', + {'type' : 'string', + 'default': '', + 'help': 'database host', + 'group': 'native-source', 'level': 1, + }), + ('db-port', + {'type' : 'string', + 'default': '', + 'help': 'database port', + 'group': 'native-source', 'level': 1, + }), + ('db-name', + {'type' : 'string', + 'default': Method('default_instance_id'), + 'help': 'database name', + 'group': 'native-source', 'level': 0, + }), + ('db-namespace', + {'type' : 'string', + 'default': '', + 'help': 'database namespace (schema) name', + 'group': 'native-source', 'level': 1, + }), + ('db-user', + {'type' : 'string', + 'default': CubicWebNoAppConfiguration.mode == 'user' and getlogin() or 'cubicweb', + 'help': 'database user', + 'group': 'native-source', 'level': 0, + }), + ('db-password', + {'type' : 'password', + 'default': '', + 'help': 'database password', + 'group': 'native-source', 'level': 0, + }), + ('db-encoding', + {'type' : 'string', + 'default': 'utf8', + 'help': 'database encoding', + 'group': 'native-source', 'level': 1, + }), + ('db-extra-arguments', + {'type' : 'string', + 'default': '', + 'help': 'set to "Trusted_Connection" if you are using SQLServer and ' + 'want trusted authentication for the database connection', + 'group': 'native-source', 'level': 2, + }), + ('db-statement-timeout', + {'type': 'int', + 'default': 0, + 'help': 'sql statement timeout, in milliseconds (postgres only)', + 'group': 'native-source', 'level': 2, + }), + ) + + def __init__(self, repo, source_config, *args, **kwargs): + SQLAdapterMixIn.__init__(self, source_config, repairing=repo.config.repairing) + self.authentifiers = [LoginPasswordAuthentifier(self)] + if repo.config['allow-email-login']: + self.authentifiers.insert(0, EmailPasswordAuthentifier(self)) + AbstractSource.__init__(self, repo, source_config, *args, **kwargs) + # sql generator + self._rql_sqlgen = self.sqlgen_class(self.schema, self.dbhelper, + ATTR_MAP.copy()) + # full text index helper + self.do_fti = not repo.config['delay-full-text-indexation'] + # sql queries cache + self._cache = QueryCache(repo.config['rql-cache-size']) + # (etype, attr) / storage mapping + self._storages = {} + self.binary_to_str = self.dbhelper.dbapi_module.binary_to_str + if self.dbdriver == 'sqlite': + self.eid_generator = SQLITEEidGenerator(self) + else: + self.eid_generator = DefaultEidGenerator(self) + self.create_eid = self.eid_generator.create_eid + + def check_config(self, source_entity): + """check configuration of source entity""" + if source_entity.host_config: + msg = source_entity._cw._('the system source has its configuration ' + 
'stored on the file-system') + raise ValidationError(source_entity.eid, {role_name('config', 'subject'): msg}) + + def add_authentifier(self, authentifier): + self.authentifiers.append(authentifier) + authentifier.source = self + authentifier.set_schema(self.schema) + + def reset_caches(self): + """method called during test to reset potential source caches""" + self._cache = QueryCache(self.repo.config['rql-cache-size']) + + def clear_eid_cache(self, eid, etype): + """clear potential caches for the given eid""" + self._cache.pop('Any X WHERE X eid %s, X is %s' % (eid, etype), None) + self._cache.pop('Any X WHERE X eid %s' % eid, None) + self._cache.pop('Any %s' % eid, None) + + @statsd_timeit + def sqlexec(self, cnx, sql, args=None): + """execute the query and return its result""" + return self.process_result(self.doexec(cnx, sql, args)) + + def init_creating(self, cnxset=None): + # check full text index availibility + if self.do_fti: + if cnxset is None: + _cnxset = self.repo._get_cnxset() + else: + _cnxset = cnxset + if not self.dbhelper.has_fti_table(_cnxset.cu): + if not self.repo.config.creating: + self.critical('no text index table') + self.do_fti = False + if cnxset is None: + _cnxset.cnxset_freed() + self.repo._free_cnxset(_cnxset) + + def backup(self, backupfile, confirm, format='native'): + """method called to create a backup of the source's data""" + if format == 'portable': + # ensure the schema is the one stored in the database: if repository + # started in quick_start mode, the file system's one has been loaded + # so force reload + if self.repo.config.quick_start: + self.repo.set_schema(self.repo.deserialize_schema(), + resetvreg=False) + helper = DatabaseIndependentBackupRestore(self) + self.close_source_connections() + try: + helper.backup(backupfile) + finally: + self.open_source_connections() + elif format == 'native': + self.close_source_connections() + try: + self.backup_to_file(backupfile, confirm) + finally: + self.open_source_connections() + else: + raise ValueError('Unknown format %r' % format) + + + def restore(self, backupfile, confirm, drop, format='native'): + """method called to restore a backup of source's data""" + if self.repo.config.init_cnxset_pool: + self.close_source_connections() + try: + if format == 'portable': + helper = DatabaseIndependentBackupRestore(self) + helper.restore(backupfile) + elif format == 'native': + self.restore_from_file(backupfile, confirm, drop=drop) + else: + raise ValueError('Unknown format %r' % format) + finally: + if self.repo.config.init_cnxset_pool: + self.open_source_connections() + + + def init(self, activated, source_entity): + try: + # test if 'asource' column exists + query = self.dbhelper.sql_add_limit_offset('SELECT asource FROM entities', 1) + source_entity._cw.system_sql(query) + except Exception as ex: + self.eid_type_source = self.eid_type_source_pre_131 + super(NativeSQLSource, self).init(activated, source_entity) + self.init_creating(source_entity._cw.cnxset) + + def shutdown(self): + self.eid_generator.close() + + # XXX deprecates [un]map_attribute? 
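# Illustrative sketch (hypothetical, not from this changeset): sqlexec() above
# is the native source's "run this SQL and give me the rows" helper, using the
# same pyformat parameter style as the rest of this file (`source` and `cnx`
# are assumed to be a configured system source and an open connection):
rows = source.sqlexec(cnx, 'SELECT eid FROM entities WHERE type=%(t)s',
                      {'t': 'CWUser'})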
+ def map_attribute(self, etype, attr, cb, sourcedb=True): + self._rql_sqlgen.attr_map[u'%s.%s' % (etype, attr)] = (cb, sourcedb) + + def unmap_attribute(self, etype, attr): + self._rql_sqlgen.attr_map.pop(u'%s.%s' % (etype, attr), None) + + def set_storage(self, etype, attr, storage): + storage_dict = self._storages.setdefault(etype, {}) + storage_dict[attr] = storage + self.map_attribute(etype, attr, + storage.callback, storage.is_source_callback) + + def unset_storage(self, etype, attr): + self._storages[etype].pop(attr) + # if etype has no storage left, remove the entry + if not self._storages[etype]: + del self._storages[etype] + self.unmap_attribute(etype, attr) + + def storage(self, etype, attr): + """return the storage for the given entity type / attribute + """ + try: + return self._storages[etype][attr] + except KeyError: + raise Exception('no custom storage set for %s.%s' % (etype, attr)) + + # ISource interface ####################################################### + + @statsd_timeit + def compile_rql(self, rql, sols): + rqlst = self.repo.vreg.rqlhelper.parse(rql) + rqlst.restricted_vars = () + rqlst.children[0].solutions = sols + self.repo.querier.sqlgen_annotate(rqlst) + set_qdata(self.schema.rschema, rqlst, ()) + return rqlst + + def set_schema(self, schema): + """set the instance'schema""" + self._cache = QueryCache(self.repo.config['rql-cache-size']) + self.cache_hit, self.cache_miss, self.no_cache = 0, 0, 0 + self.schema = schema + try: + self._rql_sqlgen.schema = schema + except AttributeError: + pass # __init__ + for authentifier in self.authentifiers: + authentifier.set_schema(self.schema) + clear_cache(self, 'need_fti_indexation') + + def support_entity(self, etype, write=False): + """return true if the given entity's type is handled by this adapter + if write is true, return true only if it's a RW support + """ + return not etype in NONSYSTEM_ETYPES + + def support_relation(self, rtype, write=False): + """return true if the given relation's type is handled by this adapter + if write is true, return true only if it's a RW support + """ + if write: + return not rtype in NONSYSTEM_RELATIONS + # due to current multi-sources implementation, the system source + # can't claim not supporting a relation + return True #not rtype == 'content_for' + + @statsd_timeit + def authenticate(self, cnx, login, **kwargs): + """return CWUser eid for the given login and other authentication + information found in kwargs, else raise `AuthenticationError` + """ + for authentifier in self.authentifiers: + try: + return authentifier.authenticate(cnx, login, **kwargs) + except AuthenticationError: + continue + raise AuthenticationError() + + def syntax_tree_search(self, cnx, union, args=None, cachekey=None, + varmap=None): + """return result from this source for a rql query (actually from + a rql syntax tree and a solution dictionary mapping each used + variable to a possible type). If cachekey is given, the query + necessary to fetch the results (but not the results themselves) + may be cached using this key. + """ + assert dbg_st_search(self.uri, union, varmap, args, cachekey) + # remember number of actually selected term (sql generation may append some) + if cachekey is None: + self.no_cache += 1 + # generate sql query if we are able to do so (not supported types...) 
+ sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap) + else: + # sql may be cached + try: + sql, qargs, cbs = self._cache[cachekey] + self.cache_hit += 1 + except KeyError: + self.cache_miss += 1 + sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap) + self._cache[cachekey] = sql, qargs, cbs + args = self.merge_args(args, qargs) + assert isinstance(sql, string_types), repr(sql) + cursor = self.doexec(cnx, sql, args) + results = self.process_result(cursor, cnx, cbs) + assert dbg_results(results) + return results + + @contextmanager + def _fixup_cw(self, cnx, entity): + _cw = entity._cw + entity._cw = cnx + try: + yield + finally: + entity._cw = _cw + + @contextmanager + def _storage_handler(self, cnx, entity, event): + # 1/ memorize values as they are before the storage is called. + # For instance, the BFSStorage will replace the `data` + # binary value with a Binary containing the destination path + # on the filesystem. To make the entity.data usage absolutely + # transparent, we'll have to reset entity.data to its binary + # value once the SQL query will be executed + restore_values = [] + if isinstance(entity, list): + entities = entity + else: + entities = [entity] + etype = entities[0].__regid__ + for attr, storage in self._storages.get(etype, {}).items(): + for entity in entities: + with self._fixup_cw(cnx, entity): + if event == 'deleted': + storage.entity_deleted(entity, attr) + else: + edited = entity.cw_edited + if attr in edited: + handler = getattr(storage, 'entity_%s' % event) + to_restore = handler(entity, attr) + restore_values.append((entity, attr, to_restore)) + try: + yield # 2/ execute the source's instructions + finally: + # 3/ restore original values + for entity, attr, value in restore_values: + entity.cw_edited.edited_attribute(attr, value) + + def add_entity(self, cnx, entity): + """add a new entity to the source""" + with self._storage_handler(cnx, entity, 'added'): + attrs = self.preprocess_entity(entity) + sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs) + self.doexec(cnx, sql, attrs) + if cnx.ertype_supports_undo(entity.cw_etype): + self._record_tx_action(cnx, 'tx_entity_actions', u'C', + etype=text_type(entity.cw_etype), eid=entity.eid) + + def update_entity(self, cnx, entity): + """replace an entity in the source""" + with self._storage_handler(cnx, entity, 'updated'): + attrs = self.preprocess_entity(entity) + if cnx.ertype_supports_undo(entity.cw_etype): + changes = self._save_attrs(cnx, entity, attrs) + self._record_tx_action(cnx, 'tx_entity_actions', u'U', + etype=text_type(entity.cw_etype), eid=entity.eid, + changes=self._binary(pickle.dumps(changes))) + sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs, + ['cw_eid']) + self.doexec(cnx, sql, attrs) + + def delete_entity(self, cnx, entity): + """delete an entity from the source""" + with self._storage_handler(cnx, entity, 'deleted'): + if cnx.ertype_supports_undo(entity.cw_etype): + attrs = [SQL_PREFIX + r.type + for r in entity.e_schema.subject_relations() + if (r.final or r.inlined) and not r in VIRTUAL_RTYPES] + changes = self._save_attrs(cnx, entity, attrs) + self._record_tx_action(cnx, 'tx_entity_actions', u'D', + etype=text_type(entity.cw_etype), eid=entity.eid, + changes=self._binary(pickle.dumps(changes))) + attrs = {'cw_eid': entity.eid} + sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) + self.doexec(cnx, sql, attrs) + + def add_relation(self, cnx, subject, rtype, object, inlined=False): + """add a relation to the source""" + 
self._add_relations(cnx, rtype, [(subject, object)], inlined) + if cnx.ertype_supports_undo(rtype): + self._record_tx_action(cnx, 'tx_relation_actions', u'A', + eid_from=subject, rtype=text_type(rtype), eid_to=object) + + def add_relations(self, cnx, rtype, subj_obj_list, inlined=False): + """add a relations to the source""" + self._add_relations(cnx, rtype, subj_obj_list, inlined) + if cnx.ertype_supports_undo(rtype): + for subject, object in subj_obj_list: + self._record_tx_action(cnx, 'tx_relation_actions', u'A', + eid_from=subject, rtype=text_type(rtype), eid_to=object) + + def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False): + """add a relation to the source""" + sql = [] + if inlined is False: + attrs = [{'eid_from': subject, 'eid_to': object} + for subject, object in subj_obj_list] + sql.append((self.sqlgen.insert('%s_relation' % rtype, attrs[0]), attrs)) + else: # used by data import + etypes = {} + for subject, object in subj_obj_list: + etype = cnx.entity_metas(subject)['type'] + if etype in etypes: + etypes[etype].append((subject, object)) + else: + etypes[etype] = [(subject, object)] + for subj_etype, subj_obj_list in etypes.items(): + attrs = [{'cw_eid': subject, SQL_PREFIX + rtype: object} + for subject, object in subj_obj_list] + sql.append((self.sqlgen.update(SQL_PREFIX + etype, attrs[0], + ['cw_eid']), + attrs)) + for statement, attrs in sql: + self.doexecmany(cnx, statement, attrs) + + def delete_relation(self, cnx, subject, rtype, object): + """delete a relation from the source""" + rschema = self.schema.rschema(rtype) + self._delete_relation(cnx, subject, rtype, object, rschema.inlined) + if cnx.ertype_supports_undo(rtype): + self._record_tx_action(cnx, 'tx_relation_actions', u'R', + eid_from=subject, rtype=text_type(rtype), eid_to=object) + + def _delete_relation(self, cnx, subject, rtype, object, inlined=False): + """delete a relation from the source""" + if inlined: + table = SQL_PREFIX + cnx.entity_metas(subject)['type'] + column = SQL_PREFIX + rtype + sql = 'UPDATE %s SET %s=NULL WHERE %seid=%%(eid)s' % (table, column, + SQL_PREFIX) + attrs = {'eid' : subject} + else: + attrs = {'eid_from': subject, 'eid_to': object} + sql = self.sqlgen.delete('%s_relation' % rtype, attrs) + self.doexec(cnx, sql, attrs) + + @statsd_timeit + def doexec(self, cnx, query, args=None, rollback=True): + """Execute a query. + it's a function just so that it shows up in profiling + """ + cursor = cnx.cnxset.cu + if server.DEBUG & server.DBG_SQL: + print('exec', query, args, cnx.cnxset.cnx) + try: + # str(query) to avoid error if it's a unicode string + cursor.execute(str(query), args) + except Exception as ex: + if self.repo.config.mode != 'test': + # during test we get those message when trying to alter sqlite + # db schema + self.info("sql: %r\n args: %s\ndbms message: %r", + query, args, ex.args[0]) + if rollback: + try: + cnx.cnxset.rollback() + if self.repo.config.mode != 'test': + self.debug('transaction has been rolled back') + except Exception as ex: + pass + if ex.__class__.__name__ == 'IntegrityError': + # need string comparison because of various backends + for arg in ex.args: + # postgres, sqlserver + mo = re.search("unique_[a-z0-9]{32}", arg) + if mo is not None: + raise UniqueTogetherError(cnx, cstrname=mo.group(0)) + # old sqlite + mo = re.search('columns? 
(.*) (?:is|are) not unique', arg) + if mo is not None: # sqlite in use + # we left chop the 'cw_' prefix of attribute names + rtypes = [c.strip()[3:] + for c in mo.group(1).split(',')] + raise UniqueTogetherError(cnx, rtypes=rtypes) + # sqlite after http://www.sqlite.org/cgi/src/info/c80e229dd9c1230a + if arg.startswith('UNIQUE constraint failed:'): + # message looks like: "UNIQUE constraint failed: foo.cw_bar, foo.cw_baz" + # so drop the prefix, split on comma, drop the tablenames, and drop "cw_" + columns = arg.split(':', 1)[1].split(',') + rtypes = [c.split('.', 1)[1].strip()[3:] for c in columns] + raise UniqueTogetherError(cnx, rtypes=rtypes) + + mo = re.search('"cstr[a-f0-9]{32}"', arg) + if mo is not None: + # postgresql + raise ViolatedConstraint(cnx, cstrname=mo.group(0)[1:-1]) + if arg.startswith('CHECK constraint failed:'): + # sqlite3 (new) + raise ViolatedConstraint(cnx, cstrname=arg.split(':', 1)[1].strip()) + mo = re.match('^constraint (cstr.*) failed$', arg) + if mo is not None: + # sqlite3 (old) + raise ViolatedConstraint(cnx, cstrname=mo.group(1)) + raise + return cursor + + @statsd_timeit + def doexecmany(self, cnx, query, args): + """Execute a query. + it's a function just so that it shows up in profiling + """ + if server.DEBUG & server.DBG_SQL: + print('execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx) + cursor = cnx.cnxset.cu + try: + # str(query) to avoid error if it's a unicode string + cursor.executemany(str(query), args) + except Exception as ex: + if self.repo.config.mode != 'test': + # during test we get those message when trying to alter sqlite + # db schema + self.critical("sql many: %r\n args: %s\ndbms message: %r", + query, args, ex.args[0]) + try: + cnx.cnxset.rollback() + if self.repo.config.mode != 'test': + self.critical('transaction has been rolled back') + except Exception: + pass + raise + + # short cut to method requiring advanced db helper usage ################## + + def update_rdef_column(self, cnx, rdef): + """update physical column for a relation definition (final or inlined) + """ + table, column = rdef_table_column(rdef) + coltype, allownull = rdef_physical_info(self.dbhelper, rdef) + if not self.dbhelper.alter_column_support: + self.error("backend can't alter %s.%s to %s%s", table, column, coltype, + not allownull and 'NOT NULL' or '') + return + self.dbhelper.change_col_type(LogCursor(cnx.cnxset.cu), + table, column, coltype, allownull) + self.info('altered %s.%s: now %s%s', table, column, coltype, + not allownull and 'NOT NULL' or '') + + def update_rdef_null_allowed(self, cnx, rdef): + """update NULL / NOT NULL of physical column for a relation definition + (final or inlined) + """ + if not self.dbhelper.alter_column_support: + # not supported (and NOT NULL not set by yams in that case, so no + # worry) + return + table, column = rdef_table_column(rdef) + coltype, allownull = rdef_physical_info(self.dbhelper, rdef) + self.dbhelper.set_null_allowed(LogCursor(cnx.cnxset.cu), + table, column, coltype, allownull) + + def update_rdef_indexed(self, cnx, rdef): + table, column = rdef_table_column(rdef) + if rdef.indexed: + self.create_index(cnx, table, column) + else: + self.drop_index(cnx, table, column) + + def update_rdef_unique(self, cnx, rdef): + table, column = rdef_table_column(rdef) + if rdef.constraint_by_type('UniqueConstraint'): + self.create_index(cnx, table, column, unique=True) + else: + self.drop_index(cnx, table, column, unique=True) + + def create_index(self, cnx, table, column, unique=False): + cursor = 
LogCursor(cnx.cnxset.cu) + self.dbhelper.create_index(cursor, table, column, unique) + + def drop_index(self, cnx, table, column, unique=False): + cursor = LogCursor(cnx.cnxset.cu) + self.dbhelper.drop_index(cursor, table, column, unique) + + # system source interface ################################################# + + def _eid_type_source(self, cnx, eid, sql): + try: + res = self.doexec(cnx, sql).fetchone() + if res is not None: + return res + except Exception: + self.exception('failed to query entities table for eid %s', eid) + raise UnknownEid(eid) + + def eid_type_source(self, cnx, eid): # pylint: disable=E0202 + """return a tuple (type, extid, source) for the entity with id """ + sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid + res = self._eid_type_source(cnx, eid, sql) + if not isinstance(res, list): + res = list(res) + res[-2] = self.decode_extid(res[-2]) + return res + + def eid_type_source_pre_131(self, cnx, eid): + """return a tuple (type, extid, source) for the entity with id """ + sql = 'SELECT type, extid FROM entities WHERE eid=%s' % eid + res = self._eid_type_source(cnx, eid, sql) + if not isinstance(res, list): + res = list(res) + res[-1] = self.decode_extid(res[-1]) + res.append("system") + return res + + def extid2eid(self, cnx, extid): + """get eid from an external id. Return None if no record found.""" + assert isinstance(extid, binary_type) + args = {'x': b64encode(extid).decode('ascii')} + cursor = self.doexec(cnx, + 'SELECT eid FROM entities WHERE extid=%(x)s', + args) + # XXX testing rowcount cause strange bug with sqlite, results are there + # but rowcount is 0 + #if cursor.rowcount > 0: + try: + result = cursor.fetchone() + if result: + return result[0] + except Exception: + pass + cursor = self.doexec(cnx, + 'SELECT eid FROM moved_entities WHERE extid=%(x)s', + args) + try: + result = cursor.fetchone() + if result: + # entity was moved to the system source, return negative + # number to tell the external source to ignore it + return -result[0] + except Exception: + pass + return None + + def _handle_is_relation_sql(self, cnx, sql, attrs): + """ Handler for specific is_relation sql that may be + overwritten in some stores""" + self.doexec(cnx, sql % attrs) + + _handle_insert_entity_sql = doexec + _handle_is_instance_of_sql = _handle_source_relation_sql = _handle_is_relation_sql + + def add_info(self, cnx, entity, source, extid): + """add type and source info for an eid into the system table""" + assert cnx.cnxset is not None + # begin by inserting eid/type/source/extid into the entities table + if extid is not None: + assert isinstance(extid, binary_type) + extid = b64encode(extid).decode('ascii') + attrs = {'type': text_type(entity.cw_etype), 'eid': entity.eid, 'extid': extid, + 'asource': text_type(source.uri)} + self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs) + # insert core relations: is, is_instance_of and cw_source + try: + self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema_eid(cnx, entity.e_schema))) + except IndexError: + # during schema serialization, skip + pass + else: + for eschema in entity.e_schema.ancestors() + [entity.e_schema]: + self._handle_is_relation_sql(cnx, + 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema_eid(cnx, eschema))) + if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 + self._handle_is_relation_sql(cnx, 'INSERT INTO 
cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, source.eid)) + # now we can update the full text index + if self.need_fti_indexation(entity.cw_etype): + self.index_entity(cnx, entity=entity) + + def update_info(self, cnx, entity, need_fti_update): + """mark entity as being modified, fulltext reindex if needed""" + if need_fti_update: + # reindex the entity only if this query is updating at least + # one indexable attribute + self.index_entity(cnx, entity=entity) + + def delete_info_multi(self, cnx, entities): + """delete system information on deletion of a list of entities with the + same etype and belinging to the same source + + * update the fti + * remove record from the `entities` table + """ + self.fti_unindex_entities(cnx, entities) + attrs = {'eid': '(%s)' % ','.join([str(_e.eid) for _e in entities])} + self.doexec(cnx, self.sqlgen.delete_many('entities', attrs), attrs) + + # undo support ############################################################# + + def undoable_transactions(self, cnx, ueid=None, **actionfilters): + """See :class:`cubicweb.repoapi.Connection.undoable_transactions`""" + # force filtering to connection's user if not a manager + if not cnx.user.is_in_group('managers'): + ueid = cnx.user.eid + restr = {} + if ueid is not None: + restr['tx_user'] = ueid + sql = self.sqlgen.select('transactions', restr, ('tx_uuid', 'tx_time', 'tx_user')) + if actionfilters: + # we will need subqueries to filter transactions according to + # actions done + tearestr = {} # filters on the tx_entity_actions table + trarestr = {} # filters on the tx_relation_actions table + genrestr = {} # generic filters, appliyable to both table + # unless public explicitly set to false, we only consider public + # actions + if actionfilters.pop('public', True): + genrestr['txa_public'] = True + # put additional filters in trarestr and/or tearestr + for key, val in actionfilters.items(): + if key == 'etype': + # filtering on etype implies filtering on entity actions + # only, and with no eid specified + assert actionfilters.get('action', 'C') in 'CUD' + assert not 'eid' in actionfilters + tearestr['etype'] = text_type(val) + elif key == 'eid': + # eid filter may apply to 'eid' of tx_entity_actions or to + # 'eid_from' OR 'eid_to' of tx_relation_actions + if actionfilters.get('action', 'C') in 'CUD': + tearestr['eid'] = val + if actionfilters.get('action', 'A') in 'AR': + trarestr['eid_from'] = val + trarestr['eid_to'] = val + elif key == 'action': + if val in 'CUD': + tearestr['txa_action'] = text_type(val) + else: + assert val in 'AR' + trarestr['txa_action'] = text_type(val) + else: + raise AssertionError('unknow filter %s' % key) + assert trarestr or tearestr, "can't only filter on 'public'" + subqsqls = [] + # append subqueries to the original query, using EXISTS() + if trarestr or (genrestr and not tearestr): + trarestr.update(genrestr) + trasql = self.sqlgen.select('tx_relation_actions', trarestr, ('1',)) + if 'eid_from' in trarestr: + # replace AND by OR between eid_from/eid_to restriction + trasql = sql_or_clauses(trasql, ['eid_from = %(eid_from)s', + 'eid_to = %(eid_to)s']) + trasql += ' AND transactions.tx_uuid=tx_relation_actions.tx_uuid' + subqsqls.append('EXISTS(%s)' % trasql) + if tearestr or (genrestr and not trarestr): + tearestr.update(genrestr) + teasql = self.sqlgen.select('tx_entity_actions', tearestr, ('1',)) + teasql += ' AND transactions.tx_uuid=tx_entity_actions.tx_uuid' + subqsqls.append('EXISTS(%s)' % teasql) + if restr: + sql += ' AND %s' % ' OR 
'.join(subqsqls) + else: + sql += ' WHERE %s' % ' OR '.join(subqsqls) + restr.update(trarestr) + restr.update(tearestr) + # we want results ordered by transaction's time descendant + sql += ' ORDER BY tx_time DESC' + cu = self.doexec(cnx, sql, restr) + # turn results into transaction objects + return [tx.Transaction(cnx, *args) for args in cu.fetchall()] + + def tx_info(self, cnx, txuuid): + """See :class:`cubicweb.repoapi.Connection.transaction_info`""" + return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, text_type(txuuid))) + + def tx_actions(self, cnx, txuuid, public): + """See :class:`cubicweb.repoapi.Connection.transaction_actions`""" + txuuid = text_type(txuuid) + self._tx_info(cnx, txuuid) + restr = {'tx_uuid': txuuid} + if public: + restr['txa_public'] = True + # XXX use generator to avoid loading everything in memory? + sql = self.sqlgen.select('tx_entity_actions', restr, + ('txa_action', 'txa_public', 'txa_order', + 'etype', 'eid', 'changes')) + with cnx.ensure_cnx_set: + cu = self.doexec(cnx, sql, restr) + actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c))) + for a,p,o,et,e,c in cu.fetchall()] + sql = self.sqlgen.select('tx_relation_actions', restr, + ('txa_action', 'txa_public', 'txa_order', + 'rtype', 'eid_from', 'eid_to')) + with cnx.ensure_cnx_set: + cu = self.doexec(cnx, sql, restr) + actions += [tx.RelationAction(*args) for args in cu.fetchall()] + return sorted(actions, key=lambda x: x.order) + + def undo_transaction(self, cnx, txuuid): + """See :class:`cubicweb.repoapi.Connection.undo_transaction` + + important note: while undoing of a transaction, only hooks in the + 'integrity', 'activeintegrity' and 'undo' categories are called. + """ + errors = [] + cnx.transaction_data['undoing_uuid'] = txuuid + with cnx.deny_all_hooks_but('integrity', 'activeintegrity', 'undo'): + with cnx.security_enabled(read=False): + for action in reversed(self.tx_actions(cnx, txuuid, False)): + undomethod = getattr(self, '_undo_%s' % action.action.lower()) + errors += undomethod(cnx, action) + # remove the transactions record + self.doexec(cnx, + "DELETE FROM transactions WHERE tx_uuid='%s'" % txuuid) + if errors: + raise UndoTransactionException(txuuid, errors) + else: + return + + def start_undoable_transaction(self, cnx, uuid): + """connection callback to insert a transaction record in the transactions + table when some undoable transaction is started + """ + ueid = cnx.user.eid + attrs = {'tx_uuid': uuid, 'tx_user': ueid, 'tx_time': datetime.utcnow()} + self.doexec(cnx, self.sqlgen.insert('transactions', attrs), attrs) + + def _save_attrs(self, cnx, entity, attrs): + """return a pickleable dictionary containing current values for given + attributes of the entity + """ + restr = {'cw_eid': entity.eid} + sql = self.sqlgen.select(SQL_PREFIX + entity.cw_etype, restr, attrs) + cu = self.doexec(cnx, sql, restr) + values = dict(zip(attrs, cu.fetchone())) + # ensure backend specific binary are converted back to string + eschema = entity.e_schema + for column in attrs: + # [3:] remove 'cw_' prefix + attr = column[3:] + if not eschema.subjrels[attr].final: + continue + if eschema.destination(attr) in ('Password', 'Bytes'): + value = values[column] + if value is not None: + values[column] = self.binary_to_str(value) + return values + + def _record_tx_action(self, cnx, table, action, **kwargs): + """record a transaction action in the given table (either + 'tx_entity_actions' or 'tx_relation_action') + """ + kwargs['tx_uuid'] = cnx.transaction_uuid() + 
kwargs['txa_action'] = action + kwargs['txa_order'] = cnx.transaction_inc_action_counter() + kwargs['txa_public'] = not cnx.hooks_in_progress + self.doexec(cnx, self.sqlgen.insert(table, kwargs), kwargs) + + def _tx_info(self, cnx, txuuid): + """return transaction's time and user of the transaction with the given uuid. + + raise `NoSuchTransaction` if there is no such transaction of if the + connection's user isn't allowed to see it. + """ + restr = {'tx_uuid': txuuid} + sql = self.sqlgen.select('transactions', restr, + ('tx_time', 'tx_user')) + cu = self.doexec(cnx, sql, restr) + try: + time, ueid = cu.fetchone() + except TypeError: + raise tx.NoSuchTransaction(txuuid) + if not (cnx.user.is_in_group('managers') + or cnx.user.eid == ueid): + raise tx.NoSuchTransaction(txuuid) + return time, ueid + + def _reedit_entity(self, entity, changes, err): + cnx = entity._cw + eid = entity.eid + entity.cw_edited = edited = EditedEntity(entity) + # check for schema changes, entities linked through inlined relation + # still exists, rewrap binary values + eschema = entity.e_schema + getrschema = eschema.subjrels + for column, value in changes.items(): + rtype = column[len(SQL_PREFIX):] + if rtype == "eid": + continue # XXX should even `eid` be stored in action changes? + try: + rschema = getrschema[rtype] + except KeyError: + err(cnx._("can't restore relation %(rtype)s of entity %(eid)s, " + "this relation does not exist in the schema anymore.") + % {'rtype': rtype, 'eid': eid}) + if not rschema.final: + if not rschema.inlined: + assert value is None + # rschema is an inlined relation + elif value is not None: + # not a deletion: we must put something in edited + try: + entity._cw.entity_from_eid(value) # check target exists + edited[rtype] = value + except UnknownEid: + err(cnx._("can't restore entity %(eid)s of type %(eschema)s, " + "target of %(rtype)s (eid %(value)s) does not exist any longer") + % locals()) + changes[column] = None + elif eschema.destination(rtype) in ('Bytes', 'Password'): + changes[column] = self._binary(value) + edited[rtype] = Binary(value) + elif PY2 and isinstance(value, str): + edited[rtype] = text_type(value, cnx.encoding, 'replace') + else: + edited[rtype] = value + # This must only be done after init_entitiy_caches : defered in calling functions + # edited.check() + + def _undo_d(self, cnx, action): + """undo an entity deletion""" + errors = [] + err = errors.append + eid = action.eid + etype = action.etype + _ = cnx._ + # get an entity instance + try: + entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) + except Exception: + err("can't restore entity %s of type %s, type no more supported" + % (eid, etype)) + return errors + self._reedit_entity(entity, action.changes, err) + entity.eid = eid + cnx.repo.init_entity_caches(cnx, entity, self) + entity.cw_edited.check() + self.repo.hm.call_hooks('before_add_entity', cnx, entity=entity) + # restore the entity + action.changes['cw_eid'] = eid + # restore record in entities (will update fti if needed) + self.add_info(cnx, entity, self, None) + sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes) + self.doexec(cnx, sql, action.changes) + self.repo.hm.call_hooks('after_add_entity', cnx, entity=entity) + return errors + + def _undo_r(self, cnx, action): + """undo a relation removal""" + errors = [] + subj, rtype, obj = action.eid_from, action.rtype, action.eid_to + try: + sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) + except _UndoException as ex: + errors.append(text_type(ex)) + else: + for role, 
entity in (('subject', sentity), + ('object', oentity)): + try: + _undo_check_relation_target(entity, rdef, role) + except _UndoException as ex: + errors.append(text_type(ex)) + continue + if not errors: + self.repo.hm.call_hooks('before_add_relation', cnx, + eidfrom=subj, rtype=rtype, eidto=obj) + # add relation in the database + self._add_relations(cnx, rtype, [(subj, obj)], rdef.rtype.inlined) + # set related cache + cnx.update_rel_cache_add(subj, rtype, obj, rdef.rtype.symmetric) + self.repo.hm.call_hooks('after_add_relation', cnx, + eidfrom=subj, rtype=rtype, eidto=obj) + return errors + + def _undo_c(self, cnx, action): + """undo an entity creation""" + eid = action.eid + # XXX done to avoid fetching all remaining relation for the entity + # we should find an efficient way to do this (keeping current veolidf + # massive deletion performance) + if _undo_has_later_transaction(cnx, eid): + msg = cnx._('some later transaction(s) touch entity, undo them ' + 'first') + raise ValidationError(eid, {None: msg}) + etype = action.etype + # get an entity instance + try: + entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) + except Exception: + return [cnx._( + "Can't undo creation of entity %(eid)s of type %(etype)s, type " + "no more supported" % {'eid': eid, 'etype': etype})] + entity.eid = eid + # for proper eid/type cache update + CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(eid) + self.repo.hm.call_hooks('before_delete_entity', cnx, entity=entity) + # remove is / is_instance_of which are added using sql by hooks, hence + # unvisible as transaction action + self.doexec(cnx, 'DELETE FROM is_relation WHERE eid_from=%s' % eid) + self.doexec(cnx, 'DELETE FROM is_instance_of_relation WHERE eid_from=%s' % eid) + self.doexec(cnx, 'DELETE FROM cw_source_relation WHERE eid_from=%s' % eid) + # XXX check removal of inlined relation? 
+ # delete the entity + attrs = {'cw_eid': eid} + sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) + self.doexec(cnx, sql, attrs) + # remove record from entities (will update fti if needed) + self.delete_info_multi(cnx, [entity]) + self.repo.hm.call_hooks('after_delete_entity', cnx, entity=entity) + return () + + def _undo_u(self, cnx, action): + """undo an entity update""" + errors = [] + err = errors.append + try: + entity = cnx.entity_from_eid(action.eid) + except UnknownEid: + err(cnx._("can't restore state of entity %s, it has been " + "deleted inbetween") % action.eid) + return errors + self._reedit_entity(entity, action.changes, err) + entity.cw_edited.check() + self.repo.hm.call_hooks('before_update_entity', cnx, entity=entity) + sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, action.changes, + ['cw_eid']) + self.doexec(cnx, sql, action.changes) + self.repo.hm.call_hooks('after_update_entity', cnx, entity=entity) + return errors + + def _undo_a(self, cnx, action): + """undo a relation addition""" + errors = [] + subj, rtype, obj = action.eid_from, action.rtype, action.eid_to + try: + sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) + except _UndoException as ex: + errors.append(text_type(ex)) + else: + rschema = rdef.rtype + if rschema.inlined: + sql = 'SELECT 1 FROM cw_%s WHERE cw_eid=%s and cw_%s=%s'\ + % (sentity.cw_etype, subj, rtype, obj) + else: + sql = 'SELECT 1 FROM %s_relation WHERE eid_from=%s and eid_to=%s'\ + % (rtype, subj, obj) + cu = self.doexec(cnx, sql) + if cu.fetchone() is None: + errors.append(cnx._( + "Can't undo addition of relation %(rtype)s from %(subj)s to" + " %(obj)s, doesn't exist anymore" % locals())) + if not errors: + self.repo.hm.call_hooks('before_delete_relation', cnx, + eidfrom=subj, rtype=rtype, eidto=obj) + # delete relation from the database + self._delete_relation(cnx, subj, rtype, obj, rschema.inlined) + # set related cache + cnx.update_rel_cache_del(subj, rtype, obj, rschema.symmetric) + self.repo.hm.call_hooks('after_delete_relation', cnx, + eidfrom=subj, rtype=rtype, eidto=obj) + return errors + + # full text index handling ################################################# + + @cached + def need_fti_indexation(self, etype): + eschema = self.schema.eschema(etype) + if any(eschema.indexable_attributes()): + return True + if any(eschema.fulltext_containers()): + return True + return False + + def index_entity(self, cnx, entity): + """create an operation to [re]index textual content of the given entity + on commit + """ + if self.do_fti: + FTIndexEntityOp.get_instance(cnx).add_data(entity.eid) + + def fti_unindex_entities(self, cnx, entities): + """remove text content for entities from the full text index + """ + cursor = cnx.cnxset.cu + cursor_unindex_object = self.dbhelper.cursor_unindex_object + try: + for entity in entities: + cursor_unindex_object(entity.eid, cursor) + except Exception: # let KeyboardInterrupt / SystemExit propagate + self.exception('error while unindexing %s', entity) + + + def fti_index_entities(self, cnx, entities): + """add text content of created/modified entities to the full text index + """ + cursor_index_object = self.dbhelper.cursor_index_object + cursor = cnx.cnxset.cu + try: + # use cursor_index_object, not cursor_reindex_object since + # unindexing done in the FTIndexEntityOp + for entity in entities: + cursor_index_object(entity.eid, + entity.cw_adapt_to('IFTIndexable'), + cursor) + except Exception: # let KeyboardInterrupt / SystemExit propagate + self.exception('error while 
indexing %s', entity) + + +class FTIndexEntityOp(hook.DataOperationMixIn, hook.LateOperation): + """operation to delay entity full text indexation to commit + + since fti indexing may trigger discovery of other entities, it should be + triggered on precommit, not commit, and this should be done after other + precommit operation which may add relations to the entity + """ + + def precommit_event(self): + cnx = self.cnx + source = cnx.repo.system_source + pendingeids = cnx.transaction_data.get('pendingeids', ()) + done = cnx.transaction_data.setdefault('indexedeids', set()) + to_reindex = set() + for eid in self.get_data(): + if eid in pendingeids or eid in done: + # entity added and deleted in the same transaction or already + # processed + continue + done.add(eid) + iftindexable = cnx.entity_from_eid(eid).cw_adapt_to('IFTIndexable') + to_reindex |= set(iftindexable.fti_containers()) + source.fti_unindex_entities(cnx, to_reindex) + source.fti_index_entities(cnx, to_reindex) + +def sql_schema(driver): + helper = get_db_helper(driver) + typemap = helper.TYPE_MAPPING + schema = """ +/* Create the repository's system database */ + +%s + +CREATE TABLE entities ( + eid INTEGER PRIMARY KEY NOT NULL, + type VARCHAR(64) NOT NULL, + asource VARCHAR(128) NOT NULL, + extid VARCHAR(256) +);; +CREATE INDEX entities_type_idx ON entities(type);; +CREATE TABLE moved_entities ( + eid INTEGER PRIMARY KEY NOT NULL, + extid VARCHAR(256) UNIQUE NOT NULL +);; + +CREATE TABLE transactions ( + tx_uuid CHAR(32) PRIMARY KEY NOT NULL, + tx_user INTEGER NOT NULL, + tx_time %s NOT NULL +);; +CREATE INDEX transactions_tx_user_idx ON transactions(tx_user);; +CREATE INDEX transactions_tx_time_idx ON transactions(tx_time);; + +CREATE TABLE tx_entity_actions ( + tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE, + txa_action CHAR(1) NOT NULL, + txa_public %s NOT NULL, + txa_order INTEGER, + eid INTEGER NOT NULL, + etype VARCHAR(64) NOT NULL, + changes %s +);; +CREATE INDEX tx_entity_actions_txa_action_idx ON tx_entity_actions(txa_action);; +CREATE INDEX tx_entity_actions_txa_public_idx ON tx_entity_actions(txa_public);; +CREATE INDEX tx_entity_actions_eid_idx ON tx_entity_actions(eid);; +CREATE INDEX tx_entity_actions_etype_idx ON tx_entity_actions(etype);; +CREATE INDEX tx_entity_actions_tx_uuid_idx ON tx_entity_actions(tx_uuid);; + +CREATE TABLE tx_relation_actions ( + tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE, + txa_action CHAR(1) NOT NULL, + txa_public %s NOT NULL, + txa_order INTEGER, + eid_from INTEGER NOT NULL, + eid_to INTEGER NOT NULL, + rtype VARCHAR(256) NOT NULL +);; +CREATE INDEX tx_relation_actions_txa_action_idx ON tx_relation_actions(txa_action);; +CREATE INDEX tx_relation_actions_txa_public_idx ON tx_relation_actions(txa_public);; +CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);; +CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to);; +CREATE INDEX tx_relation_actions_tx_uuid_idx ON tx_relation_actions(tx_uuid);; +""" % (helper.sql_create_numrange('entities_id_seq').replace(';', ';;'), + typemap['Datetime'], + typemap['Boolean'], typemap['Bytes'], typemap['Boolean']) + if helper.backend_name == 'sqlite': + # sqlite support the ON DELETE CASCADE syntax but do nothing + schema += ''' +CREATE TRIGGER fkd_transactions +BEFORE DELETE ON transactions +FOR EACH ROW BEGIN + DELETE FROM tx_entity_actions WHERE tx_uuid=OLD.tx_uuid; + DELETE FROM tx_relation_actions WHERE tx_uuid=OLD.tx_uuid; +END;; +''' + schema += 
';;'.join(helper.sqls_create_multicol_unique_index('entities', ['extid'])) + schema += ';;\n' + return schema + + +def sql_drop_schema(driver): + helper = get_db_helper(driver) + return """ +%s; +%s +DROP TABLE entities; +DROP TABLE tx_entity_actions; +DROP TABLE tx_relation_actions; +DROP TABLE transactions; +""" % (';'.join(helper.sqls_drop_multicol_unique_index('entities', ['extid'])), + helper.sql_drop_numrange('entities_id_seq')) + + +def grant_schema(user, set_owner=True): + result = '' + for table in ('entities', 'entities_id_seq', + 'transactions', 'tx_entity_actions', 'tx_relation_actions'): + if set_owner: + result = 'ALTER TABLE %s OWNER TO %s;\n' % (table, user) + result += 'GRANT ALL ON %s TO %s;\n' % (table, user) + return result + + +class BaseAuthentifier(object): + + def __init__(self, source=None): + self.source = source + + def set_schema(self, schema): + """set the instance'schema""" + pass + +class LoginPasswordAuthentifier(BaseAuthentifier): + passwd_rql = 'Any P WHERE X is CWUser, X login %(login)s, X upassword P' + auth_rql = (u'Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s, ' + 'X cw_source S, S name "system"') + _sols = ({'X': 'CWUser', 'P': 'Password', 'S': 'CWSource'},) + + def set_schema(self, schema): + """set the instance'schema""" + if 'CWUser' in schema: # probably an empty schema if not true... + # rql syntax trees used to authenticate users + self._passwd_rqlst = self.source.compile_rql(self.passwd_rql, self._sols) + self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols) + + def authenticate(self, cnx, login, password=None, **kwargs): + """return CWUser eid for the given login/password if this account is + defined in this source, else raise `AuthenticationError` + + two queries are needed since passwords are stored crypted, so we have + to fetch the salt first + """ + args = {'login': login, 'pwd' : None} + if password is not None: + rset = self.source.syntax_tree_search(cnx, self._passwd_rqlst, args) + try: + pwd = rset[0][0] + except IndexError: + raise AuthenticationError('bad login') + if pwd is None: + # if pwd is None but a password is provided, something is wrong + raise AuthenticationError('bad password') + # passwords are stored using the Bytes type, so we get a StringIO + args['pwd'] = Binary(crypt_password(password, pwd.getvalue())) + # get eid from login and (crypted) password + rset = self.source.syntax_tree_search(cnx, self._auth_rqlst, args) + pwd = args['pwd'] + try: + user = rset[0][0] + # If the stored hash uses a deprecated scheme (e.g. DES or MD5 used + # before 3.14.7), update with a fresh one + if pwd is not None and pwd.getvalue(): + verify, newhash = verify_and_update(password, pwd.getvalue()) + if not verify: # should not happen, but... 
+ raise AuthenticationError('bad password') + if newhash: + cnx.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s" % ( + SQL_PREFIX + 'CWUser', + SQL_PREFIX + 'upassword', + SQL_PREFIX + 'login'), + {'newhash': self.source._binary(newhash.encode('ascii')), + 'login': login}) + cnx.commit() + return user + except IndexError: + raise AuthenticationError('bad password') + + +class EmailPasswordAuthentifier(BaseAuthentifier): + def authenticate(self, cnx, login, **authinfo): + # email_auth flag prevent from infinite recursion (call to + # repo.check_auth_info at the end of this method may lead us here again) + if not '@' in login or authinfo.pop('email_auth', None): + raise AuthenticationError('not an email') + rset = cnx.execute('Any L WHERE U login L, U primary_email M, ' + 'M address %(login)s', {'login': login}, + build_descr=False) + if rset.rowcount != 1: + raise AuthenticationError('unexisting email') + login = rset.rows[0][0] + authinfo['email_auth'] = True + return self.source.repo.check_auth_info(cnx, login, authinfo) + + +class DatabaseIndependentBackupRestore(object): + """Helper class to perform db backend agnostic backup and restore + + The backup and restore methods are used to dump / restore the + system database in a database independent format. The file is a + Zip archive containing the following files: + + * format.txt: the format of the archive. Currently '1.1' + * tables.txt: list of filenames in the archive tables/ directory + * sequences.txt: list of filenames in the archive sequences/ directory + * numranges.txt: list of filenames in the archive numrange/ directory + * versions.txt: the list of cube versions from CWProperty + * tables/.: pickled data + * sequences/: pickled data + + The pickled data format for tables, numranges and sequences is a tuple of 3 elements: + * the table name + * a tuple of column names + * a list of rows (as tuples with one element per column) + + Tables are saved in chunks in different files in order to prevent + a too high memory consumption. 
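
To make the archive layout documented here concrete, a short illustrative reader (a sketch under the stated format, not part of the patch; describe_backup and the 'dump.zip' path are hypothetical names):

    import pickle
    import zipfile

    def describe_backup(path):
        """Summarize a 'portable' backup archive using the layout described above."""
        with zipfile.ZipFile(path, 'r') as archive:
            # format.txt holds the archive format version, currently '1.1'
            print('format:', archive.read('format.txt').decode().strip())
            for table in archive.read('tables.txt').decode().splitlines():
                # each table is split into chunk files tables/<name>.<nnnn>,
                # every chunk being a pickled (name, columns, rows) tuple
                chunks = sorted(n for n in archive.namelist()
                                if n.startswith('tables/%s.' % table))
                rows = 0
                for chunk in chunks:
                    _name, _columns, chunk_rows = pickle.loads(archive.read(chunk))
                    rows += len(chunk_rows)
                print('%s: %d rows in %d chunk(s)' % (table, rows, len(chunks)))

    # usage sketch: describe_backup('dump.zip')
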
+ """ + blocksize = 100 + + def __init__(self, source): + """ + :param: source an instance of the system source + """ + self._source = source + self.logger = logging.getLogger('cubicweb.ctl') + self.logger.setLevel(logging.INFO) + self.logger.addHandler(logging.StreamHandler(sys.stdout)) + self.schema = self._source.schema + self.dbhelper = self._source.dbhelper + self.cnx = None + self.cursor = None + self.sql_generator = sqlgen.SQLGenerator() + + def get_connection(self): + return self._source.get_connection() + + def backup(self, backupfile): + archive = zipfile.ZipFile(backupfile, 'w', allowZip64=True) + self.cnx = self.get_connection() + try: + self.cursor = self.cnx.cursor() + self.cursor.arraysize = 100 + self.logger.info('writing metadata') + self.write_metadata(archive) + for seq in self.get_sequences(): + self.logger.info('processing sequence %s', seq) + self.write_sequence(archive, seq) + for numrange in self.get_numranges(): + self.logger.info('processing numrange %s', numrange) + self.write_numrange(archive, numrange) + for table in self.get_tables(): + self.logger.info('processing table %s', table) + self.write_table(archive, table) + finally: + archive.close() + self.cnx.close() + self.logger.info('done') + + def get_tables(self): + non_entity_tables = ['entities', + 'transactions', + 'tx_entity_actions', + 'tx_relation_actions', + ] + etype_tables = [] + relation_tables = [] + prefix = 'cw_' + for etype in self.schema.entities(): + eschema = self.schema.eschema(etype) + if eschema.final: + continue + etype_tables.append('%s%s'%(prefix, etype)) + for rtype in self.schema.relations(): + rschema = self.schema.rschema(rtype) + if rschema.final or rschema.inlined or rschema in VIRTUAL_RTYPES: + continue + relation_tables.append('%s_relation' % rtype) + return non_entity_tables + etype_tables + relation_tables + + def get_sequences(self): + return [] + + def get_numranges(self): + return ['entities_id_seq'] + + def write_metadata(self, archive): + archive.writestr('format.txt', '1.1') + archive.writestr('tables.txt', '\n'.join(self.get_tables())) + archive.writestr('sequences.txt', '\n'.join(self.get_sequences())) + archive.writestr('numranges.txt', '\n'.join(self.get_numranges())) + versions = self._get_versions() + versions_str = '\n'.join('%s %s' % (k, v) + for k, v in versions) + archive.writestr('versions.txt', versions_str) + + def write_sequence(self, archive, seq): + sql = self.dbhelper.sql_sequence_current_state(seq) + columns, rows_iterator = self._get_cols_and_rows(sql) + rows = list(rows_iterator) + serialized = self._serialize(seq, columns, rows) + archive.writestr('sequences/%s' % seq, serialized) + + def write_numrange(self, archive, numrange): + sql = self.dbhelper.sql_numrange_current_state(numrange) + columns, rows_iterator = self._get_cols_and_rows(sql) + rows = list(rows_iterator) + serialized = self._serialize(numrange, columns, rows) + archive.writestr('numrange/%s' % numrange, serialized) + + def write_table(self, archive, table): + nb_lines_sql = 'SELECT COUNT(*) FROM %s' % table + self.cursor.execute(nb_lines_sql) + rowcount = self.cursor.fetchone()[0] + sql = 'SELECT * FROM %s' % table + columns, rows_iterator = self._get_cols_and_rows(sql) + self.logger.info('number of rows: %d', rowcount) + blocksize = self.blocksize + if rowcount > 0: + for i, start in enumerate(range(0, rowcount, blocksize)): + rows = list(itertools.islice(rows_iterator, blocksize)) + serialized = self._serialize(table, columns, rows) + archive.writestr('tables/%s.%04d' % (table, i), 
serialized) + self.logger.debug('wrote rows %d to %d (out of %d) to %s.%04d', + start, start+len(rows)-1, + rowcount, + table, i) + else: + rows = [] + serialized = self._serialize(table, columns, rows) + archive.writestr('tables/%s.%04d' % (table, 0), serialized) + + def _get_cols_and_rows(self, sql): + process_result = self._source.iter_process_result + self.cursor.execute(sql) + columns = (d[0] for d in self.cursor.description) + rows = process_result(self.cursor) + return tuple(columns), rows + + def _serialize(self, name, columns, rows): + return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL) + + def restore(self, backupfile): + archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True) + self.cnx = self.get_connection() + self.cursor = self.cnx.cursor() + sequences, numranges, tables, table_chunks = self.read_metadata(archive, backupfile) + for seq in sequences: + self.logger.info('restoring sequence %s', seq) + self.read_sequence(archive, seq) + for numrange in numranges: + self.logger.info('restoring numrange %s', numrange) + self.read_numrange(archive, numrange) + for table in tables: + self.logger.info('restoring table %s', table) + self.read_table(archive, table, sorted(table_chunks[table])) + self.cnx.close() + archive.close() + self.logger.info('done') + + def read_metadata(self, archive, backupfile): + formatinfo = archive.read('format.txt') + self.logger.info('checking metadata') + if formatinfo.strip() != "1.1": + self.logger.critical('Unsupported format in archive: %s', formatinfo) + raise ValueError('Unknown format in %s: %s' % (backupfile, formatinfo)) + tables = archive.read('tables.txt').splitlines() + sequences = archive.read('sequences.txt').splitlines() + numranges = archive.read('numranges.txt').splitlines() + file_versions = self._parse_versions(archive.read('versions.txt')) + versions = set(self._get_versions()) + if file_versions != versions: + self.logger.critical('Unable to restore : versions do not match') + self.logger.critical('Expected:\n%s', '\n'.join('%s : %s' % (cube, ver) + for cube, ver in sorted(versions))) + self.logger.critical('Found:\n%s', '\n'.join('%s : %s' % (cube, ver) + for cube, ver in sorted(file_versions))) + raise ValueError('Unable to restore : versions do not match') + table_chunks = {} + for name in archive.namelist(): + if not name.startswith('tables/'): + continue + filename = basename(name) + tablename, _ext = filename.rsplit('.', 1) + table_chunks.setdefault(tablename, []).append(name) + return sequences, numranges, tables, table_chunks + + def read_sequence(self, archive, seq): + seqname, columns, rows = pickle.loads(archive.read('sequences/%s' % seq)) + assert seqname == seq + assert len(rows) == 1 + assert len(rows[0]) == 1 + value = rows[0][0] + sql = self.dbhelper.sql_restart_sequence(seq, value) + self.cursor.execute(sql) + self.cnx.commit() + + def read_numrange(self, archive, numrange): + rangename, columns, rows = pickle.loads(archive.read('numrange/%s' % numrange)) + assert rangename == numrange + assert len(rows) == 1 + assert len(rows[0]) == 1 + value = rows[0][0] + sql = self.dbhelper.sql_restart_numrange(numrange, value) + self.cursor.execute(sql) + self.cnx.commit() + + def read_table(self, archive, table, filenames): + merge_args = self._source.merge_args + self.cursor.execute('DELETE FROM %s' % table) + self.cnx.commit() + row_count = 0 + for filename in filenames: + tablename, columns, rows = pickle.loads(archive.read(filename)) + assert tablename == table + if not rows: + continue + insert = 
self.sql_generator.insert(table, + dict(zip(columns, rows[0]))) + for row in rows: + self.cursor.execute(insert, merge_args(dict(zip(columns, row)), {})) + row_count += len(rows) + self.cnx.commit() + self.logger.info('inserted %d rows', row_count) + + + def _parse_versions(self, version_str): + versions = set() + for line in version_str.splitlines(): + versions.add(tuple(line.split())) + return versions + + def _get_versions(self): + version_sql = 'SELECT cw_pkey, cw_value FROM cw_CWProperty' + versions = [] + self.cursor.execute(version_sql) + for pkey, value in self.cursor.fetchall(): + if pkey.startswith(u'system.version'): + versions.append((pkey, value)) + return versions diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sources/rql2sql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sources/rql2sql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1703 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""RQL to SQL generator for native sources. + + +SQL queries optimization +~~~~~~~~~~~~~~~~~~~~~~~~ +1. CWUser X WHERE X in_group G, G name 'users': + + CWUser is the only subject entity type for the in_group relation, + which allow us to do :: + + SELECT eid_from FROM in_group, CWGroup + WHERE in_group.eid_to = CWGroup.eid_from + AND CWGroup.name = 'users' + + +2. Any X WHERE X nonfinal1 Y, Y nonfinal2 Z + + -> direct join between nonfinal1 and nonfinal2, whatever X,Y, Z (unless + inlined...) + + NOT IMPLEMENTED (and quite hard to implement) + +Potential optimization information is collected by the querier, sql generation +is done according to this information + +cross RDMS note : read `Comparison of different SQL implementations`_ +by Troels Arvin. Features SQL ISO Standard, PG, mysql, Oracle, MS SQL, DB2 +and Informix. + +.. 
_Comparison of different SQL implementations: http://www.troels.arvin.dk/db/rdbms +""" + +__docformat__ = "restructuredtext en" + +import threading + +from six import PY2 +from six.moves import range + +from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY + +from rql import BadRQLQuery, CoercionError +from rql.utils import common_parent +from rql.stmts import Union, Select +from rql.nodes import (VariableRef, Constant, Function, Variable, Or, + Not, Comparison, ColumnAlias, Relation, SubQuery) + +from cubicweb import QueryError +from cubicweb.rqlrewrite import cleanup_solutions +from cubicweb.server.sqlutils import SQL_PREFIX + +ColumnAlias._q_invariant = False # avoid to check for ColumnAlias / Variable + +FunctionDescr.source_execute = None + +def default_update_cb_stack(self, stack): + stack.append(self.source_execute) +FunctionDescr.update_cb_stack = default_update_cb_stack + +get_func_descr = SQL_FUNCTIONS_REGISTRY.get_function + +LENGTH = get_func_descr('LENGTH') +def length_source_execute(source, session, value): + return len(value.getvalue()) +LENGTH.source_execute = length_source_execute + +def _new_var(select, varname): + newvar = select.get_variable(varname) + if not 'relations' in newvar.stinfo: + # not yet initialized + newvar.prepare_annotation() + newvar.stinfo['scope'] = select + newvar._q_invariant = False + select.selection.append(VariableRef(newvar)) + return newvar + +def _fill_to_wrap_rel(var, newselect, towrap, schema): + for rel in var.stinfo['relations'] - var.stinfo['rhsrelations']: + rschema = schema.rschema(rel.r_type) + if rschema.inlined: + towrap.add( (var, rel) ) + for vref in rel.children[1].iget_nodes(VariableRef): + newivar = _new_var(newselect, vref.name) + _fill_to_wrap_rel(vref.variable, newselect, towrap, schema) + elif rschema.final: + towrap.add( (var, rel) ) + for vref in rel.children[1].iget_nodes(VariableRef): + newivar = _new_var(newselect, vref.name) + newivar.stinfo['attrvar'] = (var, rel.r_type) + +def rewrite_unstable_outer_join(select, solutions, unstable, schema): + """if some optional variables are unstable, they should be selected in a + subquery. This function check this and rewrite the rql syntax tree if + necessary (in place). Return a boolean telling if the tree has been modified + """ + modified = False + for varname in tuple(unstable): + var = select.defined_vars[varname] + if not var.stinfo.get('optrelations'): + continue + unstable.remove(varname) + newselect = Select() + myunion = Union() + myunion.append(newselect) + # extract aliases / selection + newvar = _new_var(newselect, var.name) + newselect.selection = [VariableRef(newvar)] + towrap_rels = set() + _fill_to_wrap_rel(var, newselect, towrap_rels, schema) + # extract relations + for var, rel in towrap_rels: + newrel = rel.copy(newselect) + newselect.add_restriction(newrel) + select.remove_node(rel) + var.stinfo['relations'].remove(rel) + newvar.stinfo['relations'].add(newrel) + if rel.optional in ('left', 'both'): + newvar.add_optional_relation(newrel) + for vref in newrel.children[1].iget_nodes(VariableRef): + var = vref.variable + var.stinfo['relations'].add(newrel) + var.stinfo['rhsrelations'].add(newrel) + if rel.optional in ('right', 'both'): + var.add_optional_relation(newrel) + if not select.where and not modified: + # oops, generated the same thing as the original select.... 
+ # restore original query, else we'll indefinitly loop + for var, rel in towrap_rels: + select.add_restriction(rel) + continue + modified = True + # extract subquery solutions + mysolutions = [sol.copy() for sol in solutions] + cleanup_solutions(newselect, mysolutions) + newselect.set_possible_types(mysolutions) + # full sub-query + aliases = [VariableRef(select.get_variable(avar.name, i)) + for i, avar in enumerate(newselect.selection)] + select.add_subquery(SubQuery(aliases, myunion), check=False) + return modified + +def _new_solutions(rqlst, solutions): + """first filter out subqueries variables from solutions""" + newsolutions = [] + for origsol in solutions: + asol = {} + for vname in rqlst.defined_vars: + asol[vname] = origsol[vname] + if not asol in newsolutions: + newsolutions.append(asol) + return newsolutions + +def remove_unused_solutions(rqlst, solutions, varmap, schema): + """cleanup solutions: remove solutions where invariant variables are taking + different types + """ + newsols = _new_solutions(rqlst, solutions) + existssols = {} + unstable = set() + invariants = {} + for vname, var in rqlst.defined_vars.items(): + vtype = newsols[0][vname] + if var._q_invariant or vname in varmap: + # remove invariant variable from solutions to remove duplicates + # later, then reinserting a type for the variable even later + for sol in newsols: + invariants.setdefault(id(sol), {})[vname] = sol.pop(vname) + elif var.scope is not rqlst: + # move apart variables which are in a EXISTS scope and are variating + try: + thisexistssols, thisexistsvars = existssols[var.scope] + except KeyError: + thisexistssols = [newsols[0]] + thisexistsvars = set() + existssols[var.scope] = thisexistssols, thisexistsvars + for i in range(len(newsols)-1, 0, -1): + if vtype != newsols[i][vname]: + thisexistssols.append(newsols.pop(i)) + thisexistsvars.add(vname) + else: + # remember unstable variables + for i in range(1, len(newsols)): + if vtype != newsols[i][vname]: + unstable.add(vname) + if invariants: + # filter out duplicates + newsols_ = [] + for sol in newsols: + if not sol in newsols_: + newsols_.append(sol) + newsols = newsols_ + # reinsert solutions for invariants + for sol in newsols: + for invvar, vartype in invariants[id(sol)].items(): + sol[invvar] = vartype + for sol in existssols: + try: + for invvar, vartype in invariants[id(sol)].items(): + sol[invvar] = vartype + except KeyError: + continue + if len(newsols) > 1: + if rewrite_unstable_outer_join(rqlst, newsols, unstable, schema): + # remove variables extracted to subqueries from solutions + newsols = _new_solutions(rqlst, newsols) + return newsols, existssols, unstable + +def relation_info(relation): + lhs, rhs = relation.get_variable_parts() + try: + lhs = lhs.variable + lhsconst = lhs.stinfo['constnode'] + except AttributeError: + lhsconst = lhs + lhs = None + except KeyError: + lhsconst = None # ColumnAlias + try: + rhs = rhs.variable + rhsconst = rhs.stinfo['constnode'] + except AttributeError: + rhsconst = rhs + rhs = None + except KeyError: + rhsconst = None # ColumnAlias + return lhs, lhsconst, rhs, rhsconst + +def sort_term_selection(sorts, rqlst, groups): + # XXX beurk + if isinstance(rqlst, list): + def append(term): + rqlst.append(term) + selectionidx = set(str(term) for term in rqlst) + else: + def append(term): + rqlst.selection.append(term.copy(rqlst)) + selectionidx = set(str(term) for term in rqlst.selection) + + for sortterm in sorts: + term = sortterm.term + if not isinstance(term, Constant) and not str(term) in 
selectionidx: + selectionidx.add(str(term)) + append(term) + if groups: + for vref in term.iget_nodes(VariableRef): + if not any(vref.is_equivalent(g) for g in groups): + groups.append(vref) + +def fix_selection_and_group(rqlst, needwrap, selectsortterms, + sorts, groups, having): + if selectsortterms and sorts: + sort_term_selection(sorts, rqlst, not needwrap and groups) + groupvrefs = [vref for term in groups for vref in term.iget_nodes(VariableRef)] + if sorts and groups: + # when a query is grouped, ensure sort terms are grouped as well + for sortterm in sorts: + term = sortterm.term + if not (isinstance(term, Constant) or \ + (isinstance(term, Function) and + get_func_descr(term.name).aggregat)): + for vref in term.iget_nodes(VariableRef): + if not any(vref.is_equivalent(group) for group in groupvrefs): + groups.append(vref) + groupvrefs.append(vref) + if needwrap and (groups or having): + selectedidx = set(vref.name for term in rqlst.selection + for vref in term.get_nodes(VariableRef)) + if groups: + for vref in groupvrefs: + if vref.name not in selectedidx: + selectedidx.add(vref.name) + rqlst.selection.append(vref) + if having: + for term in having: + for vref in term.iget_nodes(VariableRef): + if vref.name not in selectedidx: + selectedidx.add(vref.name) + rqlst.selection.append(vref) + +def iter_mapped_var_sels(stmt, variable): + # variable is a Variable or ColumnAlias node mapped to a source side + # callback + if not (len(variable.stinfo['rhsrelations']) <= 1 and # < 1 on column alias + variable.stinfo['selected']): + raise QueryError("can't use %s as a restriction variable" + % variable.name) + for selectidx in variable.stinfo['selected']: + vrefs = stmt.selection[selectidx].get_nodes(VariableRef) + if len(vrefs) != 1: + raise QueryError() + yield selectidx, vrefs[0] + +def update_source_cb_stack(state, stmt, node, stack): + while True: + node = node.parent + if node is stmt: + break + if not isinstance(node, Function): + raise QueryError() + funcd = get_func_descr(node.name) + if funcd.source_execute is None: + raise QueryError('%s can not be called on mapped attribute' + % node.name) + state.source_cb_funcs.add(node) + funcd.update_cb_stack(stack) + + +# IGenerator implementation for RQL->SQL ####################################### + +class StateInfo(object): + """this class stores data accumulated during the RQL syntax tree visit + for later SQL generation. + + Attributes related to OUTER JOIN handling + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * `outer_chains`, list of list of strings. Each list represent a tables + that have to be outer joined together. 
+ + * `outer_tables`, dictionary used as index of tables used in outer join :: + + 'table alias': (outertype, [conditions], [chain]) + + where: + + * `outertype` is one of None, 'LEFT', 'RIGHT', 'FULL' + * `conditions` is a list of join conditions (string) + * `chain` is a list of table alias (the *outer chain*) in which the key + alias appears + + * `outer_pending` is a dictionary containing some conditions that will have + to be added to the outer join when the table will be turned into an + outerjoin :: + + 'table alias': [conditions] + """ + def __init__(self, select, existssols, unstablevars): + self.existssols = existssols + self.unstablevars = unstablevars + self.subtables = {} + self.needs_source_cb = None + self.subquery_source_cb = None + self.source_cb_funcs = set() + self.scopes = {select: 0} + self.scope_nodes = [] + + def reset(self, solution): + """reset some visit variables""" + self.solution = solution + self.count = 0 + self.done = set() + self.tables = self.subtables.copy() + self.actual_tables = [[]] + for _, tsql in self.tables.values(): + self.actual_tables[-1].append(tsql) + self.outer_chains = [] + self.outer_tables = {} + self.outer_pending = {} + self.duplicate_switches = [] + self.aliases = {} + self.restrictions = [] + self._restr_stack = [] + self.ignore_varmap = False + self._needs_source_cb = {} + + def merge_source_cbs(self, needs_source_cb): + if self.needs_source_cb is None: + self.needs_source_cb = needs_source_cb + elif needs_source_cb != self.needs_source_cb: + raise QueryError('query fetch some source mapped attribute, some not') + + def finalize_source_cbs(self): + if self.subquery_source_cb is not None: + self.needs_source_cb.update(self.subquery_source_cb) + + def add_restriction(self, restr): + if restr: + self.restrictions.append(restr) + + def iter_exists_sols(self, exists): + if not exists in self.existssols: + yield 1 + return + thisexistssols, thisexistsvars = self.existssols[exists] + notdone_outside_vars = set() + # when iterating other solutions inner to an EXISTS subquery, we should + # reset variables which have this exists node as scope at each iteration + for var in exists.stmt.defined_vars.values(): + if var.scope is exists: + thisexistsvars.add(var.name) + elif var.name not in self.done: + notdone_outside_vars.add(var) + origsol = self.solution + origtables = self.tables + done = self.done + for thisexistssol in thisexistssols: + for vname in self.unstablevars: + if thisexistssol[vname] != origsol[vname] and vname in thisexistsvars: + break + else: + self.tables = origtables.copy() + self.solution = thisexistssol + yield 1 + # cleanup self.done from stuff specific to exists + for var in thisexistsvars: + if var in done: + done.remove(var) + for var in list(notdone_outside_vars): + if var.name in done and var._q_sqltable in self.tables: + origtables[var._q_sqltable] = self.tables[var._q_sqltable] + notdone_outside_vars.remove(var) + for rel in exists.iget_nodes(Relation): + if rel in done: + done.remove(rel) + self.solution = origsol + self.tables = origtables + + def push_scope(self, scope_node): + self.scope_nodes.append(scope_node) + self.scopes[scope_node] = len(self.actual_tables) + self.actual_tables.append([]) + self._restr_stack.append(self.restrictions) + self.restrictions = [] + + def pop_scope(self): + del self.scopes[self.scope_nodes[-1]] + self.scope_nodes.pop() + restrictions = self.restrictions + self.restrictions = self._restr_stack.pop() + scope = len(self.actual_tables) - 1 + # check if we have some outer chain for 
this scope + matching_chains = [] + for chain in self.outer_chains: + for tablealias in chain: + if self.tables[tablealias][0] < scope: + # chain belongs to outer scope + break + else: + # chain match current scope + matching_chains.append(chain) + # call to `tables_sql` will pop actual_tables + tables = self.tables_sql(matching_chains) + # cleanup outer join related structure for tables in matching chains + for chain in matching_chains: + self.outer_chains.remove(chain) + for alias in chain: + del self.outer_tables[alias] + return restrictions, tables + + # tables handling ######################################################### + + def add_table(self, table, key=None, scope=-1): + if key is None: + key = table + if key in self.tables: + return + if scope < 0: + scope = len(self.actual_tables) + scope + self.tables[key] = (scope, table) + self.actual_tables[scope].append(table) + + def alias_and_add_table(self, tablename, scope=-1): + alias = '%s%s' % (tablename, self.count) + self.count += 1 + self.add_table('%s AS %s' % (tablename, alias), alias, scope) + return alias + + def relation_table(self, relation): + """return the table alias used by the given relation""" + if relation in self.done: + return relation._q_sqltable + rid = 'rel_%s%s' % (relation.r_type, self.count) + # relation's table is belonging to the root scope if it is the principal + # table of one of its variable and that variable belong's to parent + # scope + for varref in relation.iget_nodes(VariableRef): + var = varref.variable + # XXX may have a principal without being invariant for this generation, + # not sure this is a pb or not + if var.stinfo.get('principal') is relation and var.scope is var.stmt: + scope = 0 + break + else: + scope = -1 + self.count += 1 + self.add_table('%s_relation AS %s' % (relation.r_type, rid), rid, scope=scope) + relation._q_sqltable = rid + self.done.add(relation) + return rid + + def fti_table(self, relation, fti_table): + """return the table alias used by the given has_text relation, + `fti_table` being the table name for the plain text index + """ + if relation in self.done: + try: + return relation._q_sqltable + except AttributeError: + pass + self.done.add(relation) + scope = self.scopes[relation.scope] + alias = self.alias_and_add_table(fti_table, scope=scope) + relation._q_sqltable = alias + return alias + + # outer join handling ###################################################### + + def mark_as_used_in_outer_join(self, tablealias, addpending=True): + """Mark table of given alias as used in outer join. This must be called + after `outer_tables[tablealias]` has been initialized. 
+ """ + # remove a table from actual_table because it's used in an outer join + # chain + scope, tabledef = self.tables[tablealias] + self.actual_tables[scope].remove(tabledef) + # check if there are some pending outer join condition for this table + if addpending: + try: + pending_conditions = self.outer_pending.pop(tablealias) + except KeyError: + pass + else: + self.outer_tables[tablealias][1].extend(pending_conditions) + else: + assert not tablealias in self.outer_pending + + def add_outer_join_condition(self, tablealias, condition): + try: + outer, conditions, chain = self.outer_tables[tablealias] + conditions.append(condition) + except KeyError: + self.outer_pending.setdefault(tablealias, []).append(condition) + + def replace_tables_by_outer_join(self, leftalias, rightalias, + outertype, condition): + """tell we need JOIN ON + """ + assert leftalias != rightalias, leftalias + outer_tables = self.outer_tables + louter, lconditions, lchain = outer_tables.get(leftalias, + (None, None, None)) + router, rconditions, rchain = outer_tables.get(rightalias, + (None, None, None)) + if lchain is None and rchain is None: + # create a new outer chaine + chain = [leftalias, rightalias] + outer_tables[leftalias] = (None, [], chain) + outer_tables[rightalias] = (outertype, [condition], chain) + self.outer_chains.append(chain) + self.mark_as_used_in_outer_join(leftalias, addpending=False) + self.mark_as_used_in_outer_join(rightalias) + elif lchain is None: + # [A > B > C] + [D > A] -> [D > A > B > C] + if rightalias == rchain[0]: + outer_tables[leftalias] = (None, [], rchain) + conditions = outer_tables[rightalias][1] + [condition] + outer_tables[rightalias] = (outertype, conditions, rchain) + rchain.insert(0, leftalias) + else: + # [A > B > C] + [D > B] -> [A > B > C < D] + if outertype == 'LEFT': + outertype = 'RIGHT' + outer_tables[leftalias] = (outertype, [condition], rchain) + rchain.append(leftalias) + self.mark_as_used_in_outer_join(leftalias) + elif rchain is None: + # [A > B > C] + [B > D] -> [A > B > C > D] + outer_tables[rightalias] = (outertype, [condition], lchain) + lchain.append(rightalias) + self.mark_as_used_in_outer_join(rightalias) + elif lchain is rchain: + # already in the same chain, simply check compatibility and append + # the condition if it's ok + lidx = lchain.index(leftalias) + ridx = lchain.index(rightalias) + if (outertype == 'FULL' and router != 'FULL') \ + or (lidx < ridx and router != 'LEFT') \ + or (ridx < lidx and louter != 'RIGHT'): + raise BadRQLQuery() + # merge conditions + if lidx < ridx: + rconditions.append(condition) + else: + lconditions.append(condition) + elif louter is None: + # merge chains + self.outer_chains.remove(lchain) + rchain += lchain + self.mark_as_used_in_outer_join(leftalias) + for alias, (aouter, aconditions, achain) in outer_tables.items(): + if achain is lchain: + outer_tables[alias] = (aouter, aconditions, rchain) + else: + raise BadRQLQuery() + + # sql generation helpers ################################################### + + def tables_sql(self, outer_chains=None): + """generate SQL for FROM clause""" + # sort for test predictability + tables = sorted(self.actual_tables.pop()) + # process outer joins + if outer_chains is None: + assert not self.actual_tables, self.actual_tables + assert not self.outer_pending + outer_chains = self.outer_chains + for chain in sorted(outer_chains): + tablealias = chain[0] + outertype, conditions, _ = self.outer_tables[tablealias] + assert _ is chain, (chain, _) + assert outertype is None, (chain, 
self.outer_chains) + assert not conditions, (chain, self.outer_chains) + assert len(chain) > 1 + tabledef = self.tables[tablealias][1] + outerjoin = [tabledef] + for tablealias in chain[1:]: + outertype, conditions, _ = self.outer_tables[tablealias] + assert _ is chain, (chain, self.outer_chains) + assert outertype in ('LEFT', 'RIGHT', 'FULL'), ( + tablealias, outertype, conditions) + assert isinstance(conditions, (list)), ( + tablealias, outertype, conditions) + tabledef = self.tables[tablealias][1] + outerjoin.append('%s OUTER JOIN %s ON (%s)' % ( + outertype, tabledef, ' AND '.join(conditions))) + tables.append(' '.join(outerjoin)) + return ', '.join(tables) + + +def extract_fake_having_terms(having): + """RQL's HAVING may be used to contains stuff that should go in the WHERE + clause of the SQL query, due to RQL grammar limitation. Split them... + + Return a list nodes that can be ANDed with query's WHERE clause. Having + subtrees updated in place. + """ + fakehaving = [] + for subtree in having: + ors, tocheck = set(), [] + for compnode in subtree.get_nodes(Comparison): + for fnode in compnode.get_nodes(Function): + if fnode.descr().aggregat: + p = compnode.parent + oor = None + while not isinstance(p, Select): + if isinstance(p, (Or, Not)): + oor = p + p = p.parent + if oor is not None: + ors.add(oor) + break + else: + tocheck.append(compnode) + # tocheck hold a set of comparison not implying an aggregat function + # put them in fakehaving if they don't share an Or node as ancestor + # with another comparison containing an aggregat function + for compnode in tocheck: + parents = set() + p = compnode.parent + oor = None + while not isinstance(p, Select): + if p in ors or p is None: # p is None for nodes already in fakehaving + break + if isinstance(p, (Or, Not)): + oor = p + p = p.parent + else: + node = oor or compnode + fakehaving.append(node) + node.parent.remove(node) + return fakehaving + + +class SQLGenerator(object): + """ + generation of SQL from the fully expanded RQL syntax tree + SQL is designed to be used with a CubicWeb SQL schema + + Groups and sort are not handled here since they should not be handled at + this level (see cubicweb.server.querier) + + we should not have errors here! 
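For illustration, the splitting rule implemented by extract_fake_having_terms() above can be sketched outside the RQL syntax tree, with plain strings standing in for comparison nodes: a term is moved back to the WHERE clause when it involves no aggregate function call::

    AGGREGATE_MARKERS = ('COUNT(', 'SUM(', 'MIN(', 'MAX(', 'AVG(')

    def split_fake_having(having_terms):
        """return (real_having, where_restrictions) for textual comparison
        terms; terms without an aggregate call belong in WHERE"""
        real_having, where_restrictions = [], []
        for term in having_terms:
            if any(marker in term.upper() for marker in AGGREGATE_MARKERS):
                real_having.append(term)
            else:
                where_restrictions.append(term)
        return real_having, where_restrictions

    assert split_fake_having(['COUNT(X) > 3', 'UPPER(N) = "BABAR"']) == (
        ['COUNT(X) > 3'], ['UPPER(N) = "BABAR"'])

This is only a sketch of the idea: the actual function works on syntax-tree nodes and is more careful, keeping a comparison in HAVING whenever it shares an Or or Not ancestor with a comparison that does contain an aggregate function.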
+ + WARNING: a CubicWebSQLGenerator instance is not thread safe, but generate is + protected by a lock + """ + + def __init__(self, schema, dbhelper, attrmap=None): + self.schema = schema + self.dbhelper = dbhelper + self.dbencoding = dbhelper.dbencoding + self.keyword_map = {'NOW' : self.dbhelper.sql_current_timestamp, + 'TODAY': self.dbhelper.sql_current_date, + } + if not self.dbhelper.union_parentheses_support: + self.union_sql = self.noparen_union_sql + self._lock = threading.Lock() + if attrmap is None: + attrmap = {} + self.attr_map = attrmap + + def generate(self, union, args=None, varmap=None): + """return SQL queries and a variable dictionary from a RQL syntax tree + + :partrqls: a list of couple (rqlst, solutions) + :args: optional dictionary with values of substitutions used in the query + :varmap: optional dictionary mapping variable name to a special table + name, in case the query as to fetch data from temporary tables + + return an sql string and a dictionary with substitutions values + """ + if args is None: + args = {} + if varmap is None: + varmap = {} + self._lock.acquire() + self._args = args + self._varmap = varmap + self._query_attrs = {} + self._state = None + # self._not_scope_offset = 0 + try: + # union query for each rqlst / solution + sql = self.union_sql(union) + # we are done + return sql, self._query_attrs, self._state.needs_source_cb + finally: + self._lock.release() + + def union_sql(self, union, needalias=False): # pylint: disable=E0202 + if len(union.children) == 1: + return self.select_sql(union.children[0], needalias) + sqls = ('(%s)' % self.select_sql(select, needalias) + for select in union.children) + return '\nUNION ALL\n'.join(sqls) + + def noparen_union_sql(self, union, needalias=False): + # needed for sqlite backend which doesn't like parentheses around union + # query. This may cause bug in some condition (sort in one of the + # subquery) but will work in most case + # + # see http://www.sqlite.org/cvstrac/tktview?tn=3074 + sqls = (self.select_sql(select, needalias) + for i, select in enumerate(union.children)) + return '\nUNION ALL\n'.join(sqls) + + def select_sql(self, select, needalias=False): + """return SQL queries and a variable dictionary from a RQL syntax tree + + :select: a selection statement of the syntax tree (`rql.stmts.Select`) + :solution: a dictionary containing variables binding. 
+ A solution's dictionary has variable's names as key and variable's + types as values + :needwrap: boolean telling if the query will be wrapped in an outer + query (to deal with aggregat and/or grouping) + """ + if select.distinct: + distinct = True + elif self.dbhelper.fti_need_distinct: + distinct = getattr(select.parent, 'has_text_query', False) + else: + distinct = False + sorts = select.orderby + groups = select.groupby + having = select.having + for restr in extract_fake_having_terms(having): + scope = None + for vref in restr.get_nodes(VariableRef): + vscope = vref.variable.scope + if vscope is select: + continue # ignore select scope, so restriction is added to + # the inner most scope possible + if scope is None: + scope = vscope + elif vscope is not scope: + scope = common_parent(scope, vscope).scope + if scope is None: + scope = select + scope.add_restriction(restr) + # remember selection, it may be changed and have to be restored + origselection = select.selection[:] + # check if the query will have union subquery, if it need sort term + # selection (union or distinct query) and wrapping (union with groups) + needwrap = False + sols = select.solutions + selectsortterms = distinct + if len(sols) > 1: + # remove invariant from solutions + sols, existssols, unstable = remove_unused_solutions( + select, sols, self._varmap, self.schema) + if len(sols) > 1: + # if there is still more than one solution, a UNION will be + # generated and so sort terms have to be selected + selectsortterms = True + # and if select is using group by or aggregat, a wrapping + # query will be necessary + if groups or select.has_aggregat: + select.select_only_variables() + needwrap = True + else: + existssols, unstable = {}, () + state = StateInfo(select, existssols, unstable) + if self._state is not None: + # state from a previous unioned select + state.merge_source_cbs(self._state.needs_source_cb) + # treat subqueries + self._subqueries_sql(select, state) + # generate sql for this select node + if needwrap: + outerselection = origselection[:] + if sorts and selectsortterms: + if distinct: + sort_term_selection(sorts, outerselection, groups) + fix_selection_and_group(select, needwrap, selectsortterms, + sorts, groups, having) + if needwrap: + fneedwrap = len(outerselection) != len(origselection) + else: + fneedwrap = len(select.selection) != len(origselection) + if fneedwrap: + needalias = True + self._in_wrapping_query = False + self._state = state + try: + sql = self._solutions_sql(select, sols, distinct, + needalias or needwrap) + # generate groups / having before wrapping query selection to get + # correct column aliases + self._in_wrapping_query = needwrap + if groups: + # no constant should be inserted in GROUP BY else the backend + # will interpret it as a positional index in the selection + groups = ','.join(vref.accept(self) for vref in groups + if not isinstance(vref, Constant)) + if having: + # filter out constants as for GROUP BY + having = ' AND '.join(term.accept(self) for term in having + if not isinstance(term, Constant)) + if needwrap: + sql = '%s FROM (%s) AS T1' % ( + self._selection_sql(outerselection, distinct,needalias), + sql) + if groups: + sql += '\nGROUP BY %s' % groups + if having: + sql += '\nHAVING %s' % having + # sort + if sorts: + sqlsortterms = [] + if needwrap: + selectidx = [str(term) for term in outerselection] + else: + selectidx = [str(term) for term in select.selection] + for sortterm in sorts: + _term = self._sortterm_sql(sortterm, selectidx) + if _term is not None: 
+ sqlsortterms.append(_term) + if sqlsortterms: + sql = self.dbhelper.sql_add_order_by( + sql, sqlsortterms, origselection, fneedwrap, + select.limit or select.offset) + else: + sqlsortterms = None + state.finalize_source_cbs() + finally: + select.selection = origselection + # limit / offset + sql = self.dbhelper.sql_add_limit_offset(sql, + select.limit, + select.offset, + sqlsortterms) + return sql + + def _subqueries_sql(self, select, state): + for i, subquery in enumerate(select.with_): + sql = self.union_sql(subquery.query, needalias=True) + tablealias = '_T%s' % i # XXX nested subqueries + sql = '(%s) AS %s' % (sql, tablealias) + state.subtables[tablealias] = (0, sql) + latest_state = self._state + for vref in subquery.aliases: + alias = vref.variable + alias._q_sqltable = tablealias + alias._q_sql = '%s.C%s' % (tablealias, alias.colnum) + try: + stack = latest_state.needs_source_cb[alias.colnum] + if state.subquery_source_cb is None: + state.subquery_source_cb = {} + for selectidx, vref in iter_mapped_var_sels(select, alias): + stack = stack[:] + update_source_cb_stack(state, select, vref, stack) + state.subquery_source_cb[selectidx] = stack + except KeyError: + continue + + def _solutions_sql(self, select, solutions, distinct, needalias): + sqls = [] + for solution in solutions: + self._state.reset(solution) + # visit restriction subtree + if select.where is not None: + self._state.add_restriction(select.where.accept(self)) + sql = [self._selection_sql(select.selection, distinct, needalias)] + if self._state.restrictions: + sql.append('WHERE %s' % ' AND '.join(self._state.restrictions)) + self._state.merge_source_cbs(self._state._needs_source_cb) + # add required tables + assert len(self._state.actual_tables) == 1, self._state.actual_tables + tables = self._state.tables_sql() + if tables: + sql.insert(1, 'FROM %s' % tables) + elif self._state.restrictions and self.dbhelper.needs_from_clause: + sql.insert(1, 'FROM (SELECT 1) AS _T') + sqls.append('\n'.join(sql)) + if distinct: + return '\nUNION\n'.join(sqls) + else: + return '\nUNION ALL\n'.join(sqls) + + def _selection_sql(self, selected, distinct, needaliasing=False): + clause = [] + for term in selected: + sql = term.accept(self) + if needaliasing: + colalias = 'C%s' % len(clause) + clause.append('%s AS %s' % (sql, colalias)) + if isinstance(term, VariableRef): + self._state.aliases[term.name] = colalias + else: + clause.append(sql) + if distinct: + return 'SELECT DISTINCT %s' % ', '.join(clause) + return 'SELECT %s' % ', '.join(clause) + + def _sortterm_sql(self, sortterm, selectidx): + term = sortterm.term + try: + sqlterm = selectidx.index(str(term)) + 1 + except ValueError: + # Constant node or non selected term + sqlterm = term.accept(self) + if sqlterm is None: + return None + if sortterm.asc: + return str(sqlterm) + else: + return '%s DESC' % sqlterm + + def visit_and(self, et): + """generate SQL for a AND subtree""" + res = [] + for c in et.children: + part = c.accept(self) + if part: + res.append(part) + return ' AND '.join(res) + + def visit_or(self, ou): + """generate SQL for a OR subtree""" + res = [] + for c in ou.children: + part = c.accept(self) + if part: + res.append('(%s)' % part) + if res: + if len(res) > 1: + return '(%s)' % ' OR '.join(res) + return res[0] + return '' + + def visit_not(self, node): + csql = node.children[0].accept(self) + if node in self._state.done or not csql: + # already processed or no sql generated by children + return csql + return 'NOT (%s)' % csql + + def visit_exists(self, exists): + 
"""generate SQL name for a exists subquery""" + sqls = [] + for dummy in self._state.iter_exists_sols(exists): + sql = self._visit_exists(exists) + if sql: + sqls.append(sql) + if not sqls: + return '' + return 'EXISTS(%s)' % ' UNION '.join(sqls) + + def _visit_exists(self, exists): + self._state.push_scope(exists) + restriction = exists.children[0].accept(self) + restrictions, tables = self._state.pop_scope() + if restriction: + restrictions.append(restriction) + restriction = ' AND '.join(restrictions) + if not restriction: + if tables: + return 'SELECT 1 FROM %s' % tables + return '' + if not tables: + # XXX could leave surrounding EXISTS() in this case no? + sql = 'SELECT 1 WHERE %s' % restriction + else: + sql = 'SELECT 1 FROM %s WHERE %s' % (tables, restriction) + return sql + + + def visit_relation(self, relation): + """generate SQL for a relation""" + rtype = relation.r_type + # don't care of type constraint statement (i.e. relation_type = 'is') + if relation.is_types_restriction(): + return '' + lhs, rhs = relation.get_parts() + rschema = self.schema.rschema(rtype) + if rschema.final: + if rtype == 'eid' and lhs.variable._q_invariant and \ + lhs.variable.stinfo['constnode']: + # special case where this restriction is already generated by + # some other relation + return '' + # attribute relation + if rtype == 'has_text': + sql = self._visit_has_text_relation(relation) + else: + rhs_vars = rhs.get_nodes(VariableRef) + if rhs_vars: + # if variable(s) in the RHS + sql = self._visit_var_attr_relation(relation, rhs_vars) + else: + # no variables in the RHS + sql = self._visit_attribute_relation(relation) + elif (rtype == 'is' and isinstance(rhs.children[0], Constant) + and rhs.children[0].eval(self._args) is None): + # special case "C is NULL" + if lhs.name in self._varmap: + lhssql = self._varmap[lhs.name] + else: + lhssql = lhs.accept(self) + return '%s%s' % (lhssql, rhs.accept(self)) + elif '%s.%s' % (lhs, relation.r_type) in self._varmap: + # relation has already been processed by a previous step + return '' + elif relation.optional: + # OPTIONAL relation, generate a left|right outer join + if rtype == 'identity' or rschema.inlined: + sql = self._visit_outer_join_inlined_relation(relation, rschema) + else: + sql = self._visit_outer_join_relation(relation, rschema) + elif rschema.inlined: + sql = self._visit_inlined_relation(relation) + else: + # regular (non final) relation + sql = self._visit_relation(relation, rschema) + return sql + + def _visit_inlined_relation(self, relation): + lhsvar, _, rhsvar, rhsconst = relation_info(relation) + # we are sure lhsvar is not None + lhssql = self._inlined_var_sql(lhsvar, relation.r_type) + if rhsvar is None: + moresql = None + else: + moresql = self._extra_join_sql(relation, lhssql, rhsvar) + if isinstance(relation.parent, Not): + self._state.done.add(relation.parent) + if rhsvar is not None and rhsvar._q_invariant: + sql = '%s IS NULL' % lhssql + else: + # column != 1234 may not get back rows where column is NULL... 
+ sql = '(%s IS NULL OR %s!=%s)' % ( + lhssql, lhssql, (rhsvar or rhsconst).accept(self)) + elif rhsconst is not None: + sql = '%s=%s' % (lhssql, rhsconst.accept(self)) + elif isinstance(rhsvar, Variable) and rhsvar._q_invariant and \ + not rhsvar.name in self._varmap: + # if the rhs variable is only linked to this relation, this mean we + # only want the relation to exists, eg NOT NULL in case of inlined + # relation + if moresql is not None: + return moresql + return '%s IS NOT NULL' % lhssql + else: + sql = '%s=%s' % (lhssql, rhsvar.accept(self)) + if moresql is None: + return sql + return '%s AND %s' % (sql, moresql) + + def _process_relation_term(self, relation, rid, termvar, termconst, relfield): + if termconst or not termvar._q_invariant: + termsql = termconst and termconst.accept(self) or termvar.accept(self) + yield '%s.%s=%s' % (rid, relfield, termsql) + elif termvar._q_invariant: + # if the variable is mapped, generate restriction anyway + if termvar.name in self._varmap: + termsql = termvar.accept(self) + yield '%s.%s=%s' % (rid, relfield, termsql) + extrajoin = self._extra_join_sql(relation, '%s.%s' % (rid, relfield), termvar) + if extrajoin is not None: + yield extrajoin + + def _visit_relation(self, relation, rschema): + """generate SQL for a relation + + implements optimization 1. + """ + if relation.r_type == 'identity': + # special case "X identity Y" + lhs, rhs = relation.get_parts() + return '%s%s' % (lhs.accept(self), rhs.accept(self)) + lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation) + rid = self._state.relation_table(relation) + sqls = [] + sqls += self._process_relation_term(relation, rid, lhsvar, lhsconst, 'eid_from') + sqls += self._process_relation_term(relation, rid, rhsvar, rhsconst, 'eid_to') + sql = ' AND '.join(sqls) + return sql + + def _visit_outer_join_relation(self, relation, rschema): + """ + left outer join syntax (optional=='right'): + X relation Y? + + right outer join syntax (optional=='left'): + X? relation Y + + full outer join syntaxes (optional=='both'): + X? relation Y? 
+ + if relation is inlined: + if it's a left outer join: + -> X LEFT OUTER JOIN Y ON (X.relation=Y.eid) + elif it's a right outer join: + -> Y LEFT OUTER JOIN X ON (X.relation=Y.eid) + elif it's a full outer join: + -> X FULL OUTER JOIN Y ON (X.relation=Y.eid) + else: + if it's a left outer join: + -> X LEFT OUTER JOIN relation ON (relation.eid_from=X.eid) + LEFT OUTER JOIN Y ON (relation.eid_to=Y.eid) + elif it's a right outer join: + -> Y LEFT OUTER JOIN relation ON (relation.eid_to=Y.eid) + LEFT OUTER JOIN X ON (relation.eid_from=X.eid) + elif it's a full outer join: + -> X FULL OUTER JOIN Y ON (X.relation=Y.eid) + """ + leftvar, leftconst, rightvar, rightconst = relation_info(relation) + assert not (leftconst and rightconst), "doesn't make sense" + if relation.optional == 'left': + leftvar, rightvar = rightvar, leftvar + leftconst, rightconst = rightconst, leftconst + joinattr, restrattr = 'eid_to', 'eid_from' + else: + joinattr, restrattr = 'eid_from', 'eid_to' + # search table for this variable, to use as left table of the outer join + leftalias = None + if leftvar: + # take care, may return None for invariant variable + leftalias = self._var_table(leftvar) + if leftalias is None: + if leftvar.stinfo['principal'] is not relation: + # use variable's principal relation + leftalias = leftvar.stinfo['principal']._q_sqltable + else: + # search for relation on which we should join + for orelation in leftvar.stinfo['relations']: + if (orelation is not relation and + not self.schema.rschema(orelation.r_type).final): + break + else: + for orelation in rightvar.stinfo['relations']: + if (orelation is not relation and + not self.schema.rschema(orelation.r_type).final + and orelation.optional): + break + else: + # unexpected + assert False, leftvar + leftalias = self._state.relation_table(orelation) + # right table of the outer join + rightalias = self._state.relation_table(relation) + # compute join condition + if not leftconst or (leftvar and not leftvar._q_invariant): + leftsql = leftvar.accept(self) + else: + leftsql = leftconst.accept(self) + condition = '%s.%s=%s' % (rightalias, joinattr, leftsql) + if rightconst: + condition += ' AND %s.%s=%s' % (rightalias, restrattr, rightconst.accept(self)) + # record outer join + outertype = 'FULL' if relation.optional == 'both' else 'LEFT' + self._state.replace_tables_by_outer_join(leftalias, rightalias, + outertype, condition) + # need another join? + if rightconst is None: + # we need another outer join for the other side of the relation (e.g. + # for "X relation Y?" 
in RQL, we treated earlier the (cw_X.eid / + # relation.eid_from) join, now we've to do (relation.eid_to / + # cw_Y.eid) + leftalias = rightalias + rightsql = rightvar.accept(self) # accept before using var_table + rightalias = self._var_table(rightvar) + if rightalias is None: + if rightvar.stinfo['principal'] is not relation: + self._state.replace_tables_by_outer_join( + leftalias, rightvar.stinfo['principal']._q_sqltable, + outertype, '%s.%s=%s' % (leftalias, restrattr, rightvar.accept(self))) + else: + self._state.replace_tables_by_outer_join( + leftalias, rightalias, outertype, + '%s.%s=%s' % (leftalias, restrattr, rightvar.accept(self))) + # this relation will hence be expressed in FROM clause, return nothing + # here + return '' + + + def _visit_outer_join_inlined_relation(self, relation, rschema): + lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation) + assert not (lhsconst and rhsconst), "doesn't make sense" + attr = 'eid' if relation.r_type == 'identity' else relation.r_type + lhsalias = self._var_table(lhsvar) + rhsalias = rhsvar and self._var_table(rhsvar) + try: + lhssql = self._varmap['%s.%s' % (lhsvar.name, attr)] + except KeyError: + if lhsalias is None: + lhssql = lhsconst.accept(self) + elif attr == 'eid': + lhssql = lhsvar.accept(self) + else: + lhssql = '%s.%s%s' % (lhsalias, SQL_PREFIX, attr) + condition = '%s=%s' % (lhssql, (rhsconst or rhsvar).accept(self)) + # this is not a typo, rhs optional variable means lhs outer join and vice-versa + if relation.optional == 'left': + lhsvar, rhsvar = rhsvar, lhsvar + lhsconst, rhsconst = rhsconst, lhsconst + lhsalias, rhsalias = rhsalias, lhsalias + outertype = 'LEFT' + elif relation.optional == 'both': + outertype = 'FULL' + else: + outertype = 'LEFT' + if rhsalias is None: + if rhsconst is not None: + # inlined relation with invariant as rhs + if relation.r_type != 'identity': + condition = '(%s OR %s IS NULL)' % (condition, lhssql) + if not lhsvar.stinfo.get('optrelations'): + return condition + self._state.add_outer_join_condition(lhsalias, condition) + return + if lhsalias is None: + if lhsconst is not None and not rhsvar.stinfo.get('optrelations'): + return condition + lhsalias = lhsvar._q_sql.split('.', 1)[0] + if lhsalias == rhsalias: + self._state.add_outer_join_condition(lhsalias, condition) + else: + self._state.replace_tables_by_outer_join( + lhsalias, rhsalias, outertype, condition) + return '' + + def _visit_var_attr_relation(self, relation, rhs_vars): + """visit an attribute relation with variable(s) in the RHS + + attribute variables are used either in the selection or for unification + (eg X attr1 A, Y attr2 A). In case of selection, nothing to do here. + """ + ored = relation.ored() + for vref in rhs_vars: + var = vref.variable + if var.name in self._varmap: + # ensure table is added + self._var_info(var) + if isinstance(var, ColumnAlias): + # force sql generation whatever the computed principal + principal = 1 + else: + principal = var.stinfo.get('principal') + # we've to return some sql if: + # 1. visited relation is ored + # 2. variable's principal is not this relation and not 1. 
+ if ored or (principal is not None and principal is not relation + and not getattr(principal, 'ored', lambda : 0)()): + # we have to generate unification expression + if principal is relation: + # take care if ored case and principal is the relation to + # use the right relation in the unification term + _rel = [rel for rel in var.stinfo['rhsrelations'] + if not rel is principal][0] + else: + _rel = relation + lhssql = self._inlined_var_sql(_rel.children[0].variable, + _rel.r_type) + try: + self._state.ignore_varmap = True + sql = lhssql + relation.children[1].accept(self) + finally: + self._state.ignore_varmap = False + if relation.optional == 'right': + leftalias = self._var_table(principal.children[0].variable) + rightalias = self._var_table(relation.children[0].variable) + self._state.replace_tables_by_outer_join( + leftalias, rightalias, 'LEFT', sql) + return '' + return sql + return '' + + def _visit_attribute_relation(self, rel): + """generate SQL for an attribute relation""" + lhs, rhs = rel.get_parts() + rhssql = rhs.accept(self) + table = self._var_table(lhs.variable) + if table is None: + assert rel.r_type == 'eid' + lhssql = lhs.accept(self) + else: + try: + lhssql = self._varmap['%s.%s' % (lhs.name, rel.r_type)] + except KeyError: + mapkey = '%s.%s' % (self._state.solution[lhs.name], rel.r_type) + if mapkey in self.attr_map: + cb, sourcecb = self.attr_map[mapkey] + if sourcecb: + # callback is a source callback, we can't use this + # attribute in restriction + raise QueryError("can't use %s (%s) in restriction" + % (mapkey, rel.as_string())) + lhssql = cb(self, lhs.variable, rel) + elif rel.r_type == 'eid': + lhssql = lhs.variable._q_sql + else: + lhssql = '%s.%s%s' % (table, SQL_PREFIX, rel.r_type) + try: + if rel._q_needcast == 'TODAY': + sql = 'DATE(%s)%s' % (lhssql, rhssql) + # XXX which cast function should be used + #elif rel._q_needcast == 'NOW': + # sql = 'TIMESTAMP(%s)%s' % (lhssql, rhssql) + else: + sql = '%s%s' % (lhssql, rhssql) + except AttributeError: + sql = '%s%s' % (lhssql, rhssql) + if lhs.variable.stinfo.get('optrelations'): + self._state.add_outer_join_condition(table, sql) + else: + return sql + + def _visit_has_text_relation(self, rel): + """generate SQL for a has_text relation""" + lhs, rhs = rel.get_parts() + const = rhs.children[0] + alias = self._state.fti_table(rel, self.dbhelper.fti_table) + jointo = lhs.accept(self) + restriction = '' + lhsvar = lhs.variable + me_is_principal = lhsvar.stinfo.get('principal') is rel + if me_is_principal: + if lhsvar.stinfo['typerel'] is None: + # the variable is using the fti table, no join needed + jointo = None + elif not lhsvar.name in self._varmap: + # join on entities instead of etype's table to get result for + # external entities on multisources configurations + ealias = lhsvar._q_sqltable = '_' + lhsvar.name + jointo = lhsvar._q_sql = '%s.eid' % ealias + self._state.add_table('entities AS %s' % ealias, ealias) + if not lhsvar._q_invariant or len(lhsvar.stinfo['possibletypes']) == 1: + restriction = " AND %s.type='%s'" % (ealias, self._state.solution[lhs.name]) + else: + etypes = ','.join("'%s'" % etype for etype in lhsvar.stinfo['possibletypes']) + restriction = " AND %s.type IN (%s)" % (ealias, etypes) + if isinstance(rel.parent, Not): + self._state.done.add(rel.parent) + not_ = True + else: + not_ = False + query = const.eval(self._args) + return self.dbhelper.fti_restriction_sql(alias, query, + jointo, not_) + restriction + + def visit_comparison(self, cmp): + """generate SQL for a comparison""" + 
optional = getattr(cmp, 'optional', None) # rql < 0.30 + if len(cmp.children) == 2: + # simplified expression from HAVING clause + lhs, rhs = cmp.children + else: + lhs = None + rhs = cmp.children[0] + assert not optional + sql = None + operator = cmp.operator + if operator in ('LIKE', 'ILIKE'): + if operator == 'ILIKE' and not self.dbhelper.ilike_support: + operator = ' LIKE ' + else: + operator = ' %s ' % operator + elif operator == 'REGEXP': + sql = ' %s' % self.dbhelper.sql_regexp_match_expression(rhs.accept(self)) + elif (operator == '=' and isinstance(rhs, Constant) + and rhs.eval(self._args) is None): + if lhs is None: + sql = ' IS NULL' + else: + sql = '%s IS NULL' % lhs.accept(self) + elif isinstance(rhs, Function) and rhs.name == 'IN': + assert operator == '=' + operator = ' ' + if sql is None: + if lhs is None: + sql = '%s%s'% (operator, rhs.accept(self)) + else: + sql = '%s%s%s'% (lhs.accept(self), operator, rhs.accept(self)) + if optional is None: + return sql + leftvars = cmp.children[0].get_nodes(VariableRef) + assert len(leftvars) == 1 + if leftvars[0].variable.stinfo['attrvar'] is None: + assert isinstance(leftvars[0].variable, ColumnAlias) + leftalias = leftvars[0].variable._q_sqltable + else: + leftalias = self._var_table(leftvars[0].variable.stinfo['attrvar']) + rightvars = cmp.children[1].get_nodes(VariableRef) + assert len(rightvars) == 1 + if rightvars[0].variable.stinfo['attrvar'] is None: + assert isinstance(rightvars[0].variable, ColumnAlias) + rightalias = rightvars[0].variable._q_sqltable + else: + rightalias = self._var_table(rightvars[0].variable.stinfo['attrvar']) + if optional == 'right': + self._state.replace_tables_by_outer_join( + leftalias, rightalias, 'LEFT', sql) + elif optional == 'left': + self._state.replace_tables_by_outer_join( + rightalias, leftalias, 'LEFT', sql) + else: + self._state.replace_tables_by_outer_join( + leftalias, rightalias, 'FULL', sql) + return '' + + def visit_mathexpression(self, mexpr): + """generate SQL for a mathematic expression""" + lhs, rhs = mexpr.get_parts() + # check for string concatenation + operator = mexpr.operator + if operator == '%': + operator = '%%' + try: + if mexpr.operator == '+' and mexpr.get_type(self._state.solution, self._args) == 'String': + return '(%s)' % self.dbhelper.sql_concat_string(lhs.accept(self), + rhs.accept(self)) + except CoercionError: + pass + return '(%s %s %s)'% (lhs.accept(self), operator, rhs.accept(self)) + + def visit_unaryexpression(self, uexpr): + """generate SQL for a unary expression""" + return '%s%s'% (uexpr.operator, uexpr.children[0].accept(self)) + + def visit_function(self, func): + """generate SQL name for a function""" + if func.name == 'FTIRANK': + try: + rel = next(iter(func.children[0].variable.stinfo['ftirels'])) + except KeyError: + raise BadRQLQuery("can't use FTIRANK on variable not used in an" + " 'has_text' relation (eg full-text search)") + const = rel.get_parts()[1].children[0] + return self.dbhelper.fti_rank_order( + self._state.fti_table(rel, self.dbhelper.fti_table), + const.eval(self._args)) + args = [c.accept(self) for c in func.children] + if func in self._state.source_cb_funcs: + # function executed as a callback on the source + assert len(args) == 1 + return args[0] + # func_as_sql will check function is supported by the backend + return self.dbhelper.func_as_sql(func.name, args) + + def visit_constant(self, constant): + """generate SQL name for a constant""" + if constant.type is None: + return 'NULL' + value = constant.value + if constant.type == 
'etype': + return value + # don't substitute int, causes pb when used as sorting column number + if constant.type == 'Int': + return str(value) + if constant.type in ('Date', 'Datetime'): + rel = constant.relation() + if rel is not None: + rel._q_needcast = value + return self.keyword_map[value]() + if constant.type == 'Substitute': + try: + # we may found constant from simplified var in varmap + return self._mapped_term(constant, '%%(%s)s' % value)[0] + except KeyError: + _id = value + if PY2 and isinstance(_id, unicode): + _id = _id.encode() + else: + _id = str(id(constant)).replace('-', '', 1) + self._query_attrs[_id] = value + return '%%(%s)s' % _id + + def visit_variableref(self, variableref): + """get the sql name for a variable reference""" + # use accept, .variable may be a variable or a columnalias + return variableref.variable.accept(self) + + def visit_columnalias(self, colalias): + """get the sql name for a subquery column alias""" + if colalias.name in self._varmap: + sql = self._varmap[colalias.name] + table = sql.split('.', 1)[0] + colalias._q_sqltable = table + colalias._q_sql = sql + self._state.add_table(table) + return sql + return colalias._q_sql + + def visit_variable(self, variable): + """get the table name and sql string for a variable""" + #if contextrels is None and variable.name in self._state.done: + if variable.name in self._state.done: + if self._in_wrapping_query: + return 'T1.%s' % self._state.aliases[variable.name] + return variable._q_sql + self._state.done.add(variable.name) + vtablename = None + if not self._state.ignore_varmap and variable.name in self._varmap: + sql, vtablename = self._var_info(variable) + elif variable.stinfo['attrvar']: + # attribute variable (systematically used in rhs of final + # relation(s)), get table name and sql from any rhs relation + sql = self._linked_var_sql(variable) + elif variable._q_invariant: + # since variable is invariant, we know we won't found final relation + principal = variable.stinfo['principal'] + if principal is None: + vtablename = '_' + variable.name + self._state.add_table('entities AS %s' % vtablename, vtablename) + sql = '%s.eid' % vtablename + if variable.stinfo['typerel'] is not None: + # add additional restriction on entities.type column + pts = variable.stinfo['possibletypes'] + if len(pts) == 1: + etype = next(iter(variable.stinfo['possibletypes'])) + restr = "%s.type='%s'" % (vtablename, etype) + else: + etypes = ','.join("'%s'" % et for et in pts) + restr = '%s.type IN (%s)' % (vtablename, etypes) + self._state.add_restriction(restr) + elif principal.r_type == 'has_text': + sql = '%s.%s' % (self._state.fti_table(principal, + self.dbhelper.fti_table), + self.dbhelper.fti_uid_attr) + elif principal in variable.stinfo['rhsrelations']: + if self.schema.rschema(principal.r_type).inlined: + sql = self._linked_var_sql(variable) + else: + sql = '%s.eid_to' % self._state.relation_table(principal) + else: + sql = '%s.eid_from' % self._state.relation_table(principal) + else: + # standard variable: get table name according to etype and use .eid + # attribute + sql, vtablename = self._var_info(variable) + variable._q_sqltable = vtablename + variable._q_sql = sql + return sql + + # various utilities ####################################################### + + def _extra_join_sql(self, relation, sql, var): + # if rhs var is invariant, and this relation is not its principal, + # generate extra join + try: + if not var.stinfo['principal'] is relation: + op = relation.operator() + if op == '=': + # need a predicable 
result for tests + args = sorted( (sql, var.accept(self)) ) + args.insert(1, op) + else: + args = (sql, op, var.accept(self)) + return '%s%s%s' % tuple(args) + except KeyError: + # no principal defined, relation is necessarily the principal and + # so nothing to return here + pass + return None + + def _temp_table_scope(self, select, table): + scope = 9999 + for var, sql in self._varmap.items(): + # skip "attribute variable" in varmap (such 'T.login') + if not '.' in var and table == sql.split('.', 1)[0]: + try: + scope = min(scope, self._state.scopes[select.defined_vars[var].scope]) + except KeyError: + scope = 0 # XXX + if scope == 0: + break + return scope + + def _mapped_term(self, term, key): + """return sql and table alias to the `term`, mapped as `key` or raise + KeyError when the key is not found in the varmap + """ + sql = self._varmap[key] + tablealias = sql.split('.', 1)[0] + scope = self._temp_table_scope(term.stmt, tablealias) + self._state.add_table(tablealias, scope=scope) + return sql, tablealias + + def _var_info(self, var): + try: + return self._mapped_term(var, var.name) + except KeyError: + scope = self._state.scopes[var.scope] + etype = self._state.solution[var.name] + # XXX this check should be moved in rql.stcheck + if self.schema.eschema(etype).final: + raise BadRQLQuery(var.stmt.root) + tablealias = '_' + var.name + sql = '%s.%seid' % (tablealias, SQL_PREFIX) + self._state.add_table('%s%s AS %s' % (SQL_PREFIX, etype, tablealias), + tablealias, scope=scope) + return sql, tablealias + + def _inlined_var_sql(self, var, rtype): + try: + sql = self._varmap['%s.%s' % (var.name, rtype)] + scope = self._state.scopes[var.scope] + self._state.add_table(sql.split('.', 1)[0], scope=scope) + except KeyError: + # rtype may be an attribute relation when called from + # _visit_var_attr_relation. 
take care about 'eid' rtype, since in + # some case we may use the `entities` table, so in that case we've + # to properly use variable'sql + if rtype == 'eid': + sql = var.accept(self) + else: + sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype) + return sql + + def _linked_var_sql(self, variable): + if not self._state.ignore_varmap: + try: + return self._varmap[variable.name] + except KeyError: + pass + rel = (variable.stinfo.get('principal') or + next(iter(variable.stinfo['rhsrelations']))) + linkedvar = rel.children[0].variable + if rel.r_type == 'eid': + return linkedvar.accept(self) + if isinstance(linkedvar, ColumnAlias): + raise BadRQLQuery('variable %s should be selected by the subquery' + % variable.name) + try: + sql = self._varmap['%s.%s' % (linkedvar.name, rel.r_type)] + except KeyError: + mapkey = '%s.%s' % (self._state.solution[linkedvar.name], rel.r_type) + if mapkey in self.attr_map: + cb, sourcecb = self.attr_map[mapkey] + if not sourcecb: + return cb(self, linkedvar, rel) + # attribute mapped at the source level (bfss for instance) + stmt = rel.stmt + for selectidx, vref in iter_mapped_var_sels(stmt, variable): + stack = [cb] + update_source_cb_stack(self._state, stmt, vref, stack) + self._state._needs_source_cb[selectidx] = stack + linkedvar.accept(self) + sql = '%s.%s%s' % (linkedvar._q_sqltable, SQL_PREFIX, rel.r_type) + return sql + + # tables handling ######################################################### + + def _var_table(self, var): + var.accept(self)#.visit_variable(var) + return var._q_sqltable diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sources/storages.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sources/storages.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,288 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""custom storages for the system source""" + +import os +import sys +from os import unlink, path as osp +from contextlib import contextmanager +import tempfile + +from six import PY2, PY3, text_type, binary_type + +from logilab.common import nullobject + +from yams.schema import role_name + +from cubicweb import Binary, ValidationError +from cubicweb.server import hook +from cubicweb.server.edition import EditedEntity + + +def set_attribute_storage(repo, etype, attr, storage): + repo.system_source.set_storage(etype, attr, storage) + +def unset_attribute_storage(repo, etype, attr): + repo.system_source.unset_storage(etype, attr) + + +class Storage(object): + """abstract storage + + * If `source_callback` is true (by default), the callback will be run during + query result process of fetched attribute's value and should have the + following prototype:: + + callback(self, source, cnx, value) + + where `value` is the value actually stored in the backend. 
None values + will be skipped (eg callback won't be called). + + * if `source_callback` is false, the callback will be run during sql + generation when some attribute with a custom storage is accessed and + should have the following prototype:: + + callback(self, generator, relation, linkedvar) + + where `generator` is the sql generator, `relation` the current rql syntax + tree relation and linkedvar the principal syntax tree variable holding the + attribute. + """ + is_source_callback = True + + def callback(self, *args): + """see docstring for prototype, which vary according to is_source_callback + """ + raise NotImplementedError() + + def entity_added(self, entity, attr): + """an entity using this storage for attr has been added""" + raise NotImplementedError() + def entity_updated(self, entity, attr): + """an entity using this storage for attr has been updatded""" + raise NotImplementedError() + def entity_deleted(self, entity, attr): + """an entity using this storage for attr has been deleted""" + raise NotImplementedError() + def migrate_entity(self, entity, attribute): + """migrate an entity attribute to the storage""" + raise NotImplementedError() + +# TODO +# * make it configurable without code +# * better file path attribution +# * handle backup/restore + +def uniquify_path(dirpath, basename): + """return a file descriptor and unique file name for `basename` in `dirpath` + """ + path = basename.replace(osp.sep, '-') + base, ext = osp.splitext(path) + return tempfile.mkstemp(prefix=base, suffix=ext, dir=dirpath) + +@contextmanager +def fsimport(cnx): + present = 'fs_importing' in cnx.transaction_data + old_value = cnx.transaction_data.get('fs_importing') + cnx.transaction_data['fs_importing'] = True + yield + if present: + cnx.transaction_data['fs_importing'] = old_value + else: + del cnx.transaction_data['fs_importing'] + + +_marker = nullobject() + + +class BytesFileSystemStorage(Storage): + """store Bytes attribute value on the file system""" + def __init__(self, defaultdir, fsencoding=_marker, wmode=0o444): + if PY3: + if not isinstance(defaultdir, text_type): + raise TypeError('defaultdir must be a unicode object in python 3') + if fsencoding is not _marker: + raise ValueError('fsencoding is no longer supported in python 3') + else: + self.fsencoding = fsencoding or 'utf-8' + if isinstance(defaultdir, text_type): + defaultdir = defaultdir.encode(fsencoding) + self.default_directory = defaultdir + # extra umask to use when creating file + # 0444 as in "only allow read bit in permission" + self._wmode = wmode + + def _writecontent(self, fd, binary): + """write the content of a binary in readonly file + + As the bfss never alters an existing file it does not prevent it from + working as intended. This is a better safe than sorry approach. 
+ """ + os.fchmod(fd, self._wmode) + fileobj = os.fdopen(fd, 'wb') + binary.to_file(fileobj) + fileobj.close() + + + def callback(self, source, cnx, value): + """sql generator callback when some attribute with a custom storage is + accessed + """ + fpath = source.binary_to_str(value) + try: + return Binary.from_file(fpath) + except EnvironmentError as ex: + source.critical("can't open %s: %s", value, ex) + return None + + def entity_added(self, entity, attr): + """an entity using this storage for attr has been added""" + if entity._cw.transaction_data.get('fs_importing'): + binary = Binary.from_file(entity.cw_edited[attr].getvalue()) + entity._cw_dont_cache_attribute(attr, repo_side=True) + else: + binary = entity.cw_edited.pop(attr) + fd, fpath = self.new_fs_path(entity, attr) + # bytes storage used to store file's path + binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8')) + entity.cw_edited.edited_attribute(attr, binary_obj) + self._writecontent(fd, binary) + AddFileOp.get_instance(entity._cw).add_data(fpath) + return binary + + def entity_updated(self, entity, attr): + """an entity using this storage for attr has been updated""" + # get the name of the previous file containing the value + oldpath = self.current_fs_path(entity, attr) + if entity._cw.transaction_data.get('fs_importing'): + # If we are importing from the filesystem, the file already exists. + # We do not need to create it but we need to fetch the content of + # the file as the actual content of the attribute + fpath = entity.cw_edited[attr].getvalue() + entity._cw_dont_cache_attribute(attr, repo_side=True) + assert fpath is not None + binary = Binary.from_file(fpath) + else: + # We must store the content of the attributes + # into a file to stay consistent with the behaviour of entity_add. + # Moreover, the BytesFileSystemStorage expects to be able to + # retrieve the current value of the attribute at anytime by reading + # the file on disk. To be able to rollback things, use a new file + # and keep the old one that will be removed on commit if everything + # went ok. + # + # fetch the current attribute value in memory + binary = entity.cw_edited.pop(attr) + if binary is None: + fpath = None + else: + # Get filename for it + fd, fpath = self.new_fs_path(entity, attr) + # write attribute value on disk + self._writecontent(fd, binary) + # Mark the new file as added during the transaction. + # The file will be removed on rollback + AddFileOp.get_instance(entity._cw).add_data(fpath) + # reinstall poped value + if fpath is None: + entity.cw_edited.edited_attribute(attr, None) + else: + # register the new location for the file. + binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8')) + entity.cw_edited.edited_attribute(attr, binary_obj) + if oldpath is not None and oldpath != fpath: + # Mark the old file as useless so the file will be removed at + # commit. + DeleteFileOp.get_instance(entity._cw).add_data(oldpath) + return binary + + def entity_deleted(self, entity, attr): + """an entity using this storage for attr has been deleted""" + fpath = self.current_fs_path(entity, attr) + if fpath is not None: + DeleteFileOp.get_instance(entity._cw).add_data(fpath) + + def new_fs_path(self, entity, attr): + # We try to get some hint about how to name the file using attribute's + # name metadata, so we use the real file name and extension when + # available. 
Keeping the extension is useful for example in the case of + # PIL processing that use filename extension to detect content-type, as + # well as providing more understandable file names on the fs. + if PY2: + attr = attr.encode('ascii') + basename = [str(entity.eid), attr] + name = entity.cw_attr_metadata(attr, 'name') + if name is not None: + basename.append(name.encode(self.fsencoding) if PY2 else name) + fd, fspath = uniquify_path(self.default_directory, + '_'.join(basename)) + if fspath is None: + msg = entity._cw._('failed to uniquify path (%s, %s)') % ( + self.default_directory, '_'.join(basename)) + raise ValidationError(entity.eid, {role_name(attr, 'subject'): msg}) + assert isinstance(fspath, str) # bytes on py2, unicode on py3 + return fd, fspath + + def current_fs_path(self, entity, attr): + """return the current fs_path of the attribute, or None is the attr is + not stored yet. + """ + sysource = entity._cw.repo.system_source + cu = sysource.doexec(entity._cw, + 'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % ( + attr, entity.cw_etype, entity.eid)) + rawvalue = cu.fetchone()[0] + if rawvalue is None: # no previous value + return None + fspath = sysource._process_value(rawvalue, cu.description[0], + binarywrap=binary_type) + if PY3: + fspath = fspath.decode('utf-8') + assert isinstance(fspath, str) # bytes on py2, unicode on py3 + return fspath + + def migrate_entity(self, entity, attribute): + """migrate an entity attribute to the storage""" + entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) + self.entity_added(entity, attribute) + cnx = entity._cw + source = cnx.repo.system_source + attrs = source.preprocess_entity(entity) + sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs, + ['cw_eid']) + source.doexec(cnx, sql, attrs) + entity.cw_edited = None + + +class AddFileOp(hook.DataOperationMixIn, hook.Operation): + def rollback_event(self): + for filepath in self.get_data(): + assert isinstance(filepath, str) # bytes on py2, unicode on py3 + try: + unlink(filepath) + except Exception as ex: + self.error("can't remove %s: %s" % (filepath, ex)) + +class DeleteFileOp(hook.DataOperationMixIn, hook.Operation): + def postcommit_event(self): + for filepath in self.get_data(): + assert isinstance(filepath, str) # bytes on py2, unicode on py3 + try: + unlink(filepath) + except Exception as ex: + self.error("can't remove %s: %s" % (filepath, ex)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/sqlutils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/sqlutils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,591 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
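For context on the storage API defined above in cubicweb/server/sources/storages.py, here is a hypothetical usage sketch. It assumes a repository object `repo`, a server-side connection `cnx`, and a `File` entity type with `data` and `data_name` attributes (as provided, for instance, by the file cube); none of these names are introduced by this changeset::

    from cubicweb import Binary
    from cubicweb.server.sources.storages import (
        BytesFileSystemStorage, set_attribute_storage, fsimport)

    # typically done in a server-startup hook: store File.data on disk
    storage = BytesFileSystemStorage(u'/var/lib/myapp/bfss')
    set_attribute_storage(repo, 'File', 'data', storage)

    # later, import an entity whose content already lives on disk; with
    # 'fs_importing' set by fsimport(), entity_added() reads the attribute
    # value as a file path instead of as the file content itself
    with fsimport(cnx):
        cnx.create_entity('File', data_name=u'logo.png',
                          data=Binary(b'/srv/import/logo.png'))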
+"""SQL utilities functions and classes.""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import re +import subprocess +from os.path import abspath +from logging import getLogger +from datetime import time, datetime, timedelta + +from six import string_types, text_type +from six.moves import filter + +from pytz import utc + +from logilab import database as db, common as lgc +from logilab.common.shellutils import ProgressBar, DummyProgressBar +from logilab.common.deprecation import deprecated +from logilab.common.logging_ext import set_log_methods +from logilab.common.date import utctime, utcdatetime, strptime +from logilab.database.sqlgen import SQLGenerator + +from cubicweb import Binary, ConfigurationError +from cubicweb.uilib import remove_html_tags +from cubicweb.schema import PURE_VIRTUAL_RTYPES +from cubicweb.server import SQL_CONNECT_HOOKS +from cubicweb.server.utils import crypt_password + +lgc.USE_MX_DATETIME = False +SQL_PREFIX = 'cw_' + + +def _run_command(cmd): + if isinstance(cmd, string_types): + print(cmd) + return subprocess.call(cmd, shell=True) + else: + print(' '.join(cmd)) + return subprocess.call(cmd) + + +def sqlexec(sqlstmts, cursor_or_execute, withpb=True, + pbtitle='', delimiter=';', cnx=None): + """execute sql statements ignoring DROP/ CREATE GROUP or USER statements + error. + + :sqlstmts_as_string: a string or a list of sql statements. + :cursor_or_execute: sql cursor or a callback used to execute statements + :cnx: if given, commit/rollback at each statement. + + :withpb: if True, display a progresse bar + :pbtitle: a string displayed as the progress bar title (if `withpb=True`) + + :delimiter: a string used to split sqlstmts (if it is a string) + + Return the failed statements (same type as sqlstmts) + """ + if hasattr(cursor_or_execute, 'execute'): + execute = cursor_or_execute.execute + else: + execute = cursor_or_execute + sqlstmts_as_string = False + if isinstance(sqlstmts, string_types): + sqlstmts_as_string = True + sqlstmts = sqlstmts.split(delimiter) + if withpb: + if sys.stdout.isatty(): + pb = ProgressBar(len(sqlstmts), title=pbtitle) + else: + pb = DummyProgressBar() + failed = [] + for sql in sqlstmts: + sql = sql.strip() + if withpb: + pb.update() + if not sql: + continue + try: + # some dbapi modules doesn't accept unicode for sql string + execute(str(sql)) + except Exception: + if cnx: + cnx.rollback() + failed.append(sql) + else: + if cnx: + cnx.commit() + if withpb: + print() + if sqlstmts_as_string: + failed = delimiter.join(failed) + return failed + + +def sqlgrants(schema, driver, user, + text_index=True, set_owner=True, + skip_relations=(), skip_entities=()): + """return sql to give all access privileges to the given user on the system + schema + """ + from cubicweb.server.schema2sql import grant_schema + from cubicweb.server.sources import native + output = [] + w = output.append + w(native.grant_schema(user, set_owner)) + w('') + if text_index: + dbhelper = db.get_db_helper(driver) + w(dbhelper.sql_grant_user_on_fti(user)) + w('') + w(grant_schema(schema, user, set_owner, skip_entities=skip_entities, prefix=SQL_PREFIX)) + return '\n'.join(output) + + +def sqlschema(schema, driver, text_index=True, + user=None, set_owner=False, + skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()): + """return the system sql schema, according to the given parameters""" + from cubicweb.server.schema2sql import schema2sql + from cubicweb.server.sources import native + if set_owner: + assert user, 'user is 
argument required when set_owner is true' + output = [] + w = output.append + w(native.sql_schema(driver)) + w('') + dbhelper = db.get_db_helper(driver) + if text_index: + w(dbhelper.sql_init_fti().replace(';', ';;')) + w('') + w(schema2sql(dbhelper, schema, prefix=SQL_PREFIX, + skip_entities=skip_entities, + skip_relations=skip_relations).replace(';', ';;')) + if dbhelper.users_support and user: + w('') + w(sqlgrants(schema, driver, user, text_index, set_owner, + skip_relations, skip_entities).replace(';', ';;')) + return '\n'.join(output) + + +def sqldropschema(schema, driver, text_index=True, + skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()): + """return the sql to drop the schema, according to the given parameters""" + from cubicweb.server.schema2sql import dropschema2sql + from cubicweb.server.sources import native + output = [] + w = output.append + if text_index: + dbhelper = db.get_db_helper(driver) + w(dbhelper.sql_drop_fti()) + w('') + w(dropschema2sql(dbhelper, schema, prefix=SQL_PREFIX, + skip_entities=skip_entities, + skip_relations=skip_relations)) + w('') + w(native.sql_drop_schema(driver)) + return '\n'.join(output) + + +_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION = re.compile('^(?!(sql|pg)_)').match +def sql_drop_all_user_tables(driver_or_helper, sqlcursor): + """Return ths sql to drop all tables found in the database system.""" + if not getattr(driver_or_helper, 'list_tables', None): + dbhelper = db.get_db_helper(driver_or_helper) + else: + dbhelper = driver_or_helper + + cmds = [dbhelper.sql_drop_sequence('entities_id_seq')] + # for mssql, we need to drop views before tables + if hasattr(dbhelper, 'list_views'): + cmds += ['DROP VIEW %s;' % name + for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))] + cmds += ['DROP TABLE %s;' % name + for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))] + return '\n'.join(cmds) + + +class ConnectionWrapper(object): + """handle connection to the system source, at some point associated to a + :class:`Session` + """ + + # since 3.19, we only have to manage the system source connection + def __init__(self, system_source): + # dictionary of (source, connection), indexed by sources'uri + self._source = system_source + self.cnx = system_source.get_connection() + self.cu = self.cnx.cursor() + + def commit(self): + """commit the current transaction for this user""" + # let exception propagates + self.cnx.commit() + + def rollback(self): + """rollback the current transaction for this user""" + # catch exceptions, rollback other sources anyway + try: + self.cnx.rollback() + except Exception: + self._source.critical('rollback error', exc_info=sys.exc_info()) + # error on rollback, the connection is much probably in a really + # bad state. Replace it by a new one. 
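sqlschema and sqldropschema above double every ';' so that their output can later be split on ';;' without cutting statements that themselves contain a semicolon; the matching consumer is sqlexec with delimiter=';;'. A hedged usage sketch (schema, an open cursor and a postgres setup are assumed to exist):

from cubicweb.server.sqlutils import sqlexec, sqlschema

sql = sqlschema(schema, driver='postgres', user='cubicweb', set_owner=True)
failed = sqlexec(sql, cursor.execute, withpb=False, delimiter=';;')
if failed:
    print('statements that could not be executed:\n' + failed)

Since the input was given as a single string, the failed statements come back as a single ';;'-joined string as well.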
+ self.reconnect() + + def close(self, i_know_what_i_do=False): + """close all connections in the set""" + if i_know_what_i_do is not True: # unexpected closing safety belt + raise RuntimeError('connections set shouldn\'t be closed') + try: + self.cu.close() + self.cu = None + except Exception: + pass + try: + self.cnx.close() + self.cnx = None + except Exception: + pass + + # internals ############################################################### + + def cnxset_freed(self): + """connections set is being freed from a session""" + pass # no nothing by default + + def reconnect(self): + """reopen a connection for this source or all sources if none specified + """ + try: + # properly close existing connection if any + self.cnx.close() + except Exception: + pass + self._source.info('trying to reconnect') + self.cnx = self._source.get_connection() + self.cu = self.cnx.cursor() + + @deprecated('[3.19] use .cu instead') + def __getitem__(self, uri): + assert uri == 'system' + return self.cu + + @deprecated('[3.19] use repo.system_source instead') + def source(self, uid): + assert uid == 'system' + return self._source + + @deprecated('[3.19] use .cnx instead') + def connection(self, uid): + assert uid == 'system' + return self.cnx + + +class SqliteConnectionWrapper(ConnectionWrapper): + """Sqlite specific connection wrapper: close the connection each time it's + freed (and reopen it later when needed) + """ + def __init__(self, system_source): + # don't call parent's __init__, we don't want to initiate the connection + self._source = system_source + + _cnx = None + + def cnxset_freed(self): + self.cu.close() + self.cnx.close() + self.cnx = self.cu = None + + @property + def cnx(self): + if self._cnx is None: + self._cnx = self._source.get_connection() + self._cu = self._cnx.cursor() + return self._cnx + @cnx.setter + def cnx(self, value): + self._cnx = value + + @property + def cu(self): + if self._cnx is None: + self._cnx = self._source.get_connection() + self._cu = self._cnx.cursor() + return self._cu + @cu.setter + def cu(self, value): + self._cu = value + + +class SQLAdapterMixIn(object): + """Mixin for SQL data sources, getting a connection from a configuration + dictionary and handling connection locking + """ + cnx_wrap = ConnectionWrapper + + def __init__(self, source_config, repairing=False): + try: + self.dbdriver = source_config['db-driver'].lower() + dbname = source_config['db-name'] + except KeyError: + raise ConfigurationError('missing some expected entries in sources file') + dbhost = source_config.get('db-host') + port = source_config.get('db-port') + dbport = port and int(port) or None + dbuser = source_config.get('db-user') + dbpassword = source_config.get('db-password') + dbencoding = source_config.get('db-encoding', 'UTF-8') + dbextraargs = source_config.get('db-extra-arguments') + dbnamespace = source_config.get('db-namespace') + self.dbhelper = db.get_db_helper(self.dbdriver) + self.dbhelper.record_connection_info(dbname, dbhost, dbport, dbuser, + dbpassword, dbextraargs, + dbencoding, dbnamespace) + self.sqlgen = SQLGenerator() + # copy back some commonly accessed attributes + dbapi_module = self.dbhelper.dbapi_module + self.OperationalError = dbapi_module.OperationalError + self.InterfaceError = dbapi_module.InterfaceError + self.DbapiError = dbapi_module.Error + self._binary = self.dbhelper.binary_value + self._process_value = dbapi_module.process_value + self._dbencoding = dbencoding + if self.dbdriver == 'sqlite': + self.cnx_wrap = SqliteConnectionWrapper + 
self.dbhelper.dbname = abspath(self.dbhelper.dbname) + if not repairing: + statement_timeout = int(source_config.get('db-statement-timeout', 0)) + if statement_timeout > 0: + def set_postgres_timeout(cnx): + cnx.cursor().execute('SET statement_timeout to %d' % statement_timeout) + cnx.commit() + postgres_hooks = SQL_CONNECT_HOOKS['postgres'] + postgres_hooks.append(set_postgres_timeout) + + def wrapped_connection(self): + """open and return a connection to the database, wrapped into a class + handling reconnection and all + """ + return self.cnx_wrap(self) + + def get_connection(self): + """open and return a connection to the database""" + return self.dbhelper.get_connection() + + def backup_to_file(self, backupfile, confirm): + for cmd in self.dbhelper.backup_commands(backupfile, + keepownership=False): + if _run_command(cmd): + if not confirm(' [Failed] Continue anyway?', default='n'): + raise Exception('Failed command: %s' % cmd) + + def restore_from_file(self, backupfile, confirm, drop=True): + for cmd in self.dbhelper.restore_commands(backupfile, + keepownership=False, + drop=drop): + if _run_command(cmd): + if not confirm(' [Failed] Continue anyway?', default='n'): + raise Exception('Failed command: %s' % cmd) + + def merge_args(self, args, query_args): + if args is not None: + newargs = {} + for key, val in args.items(): + # convert cubicweb binary into db binary + if isinstance(val, Binary): + val = self._binary(val.getvalue()) + # convert timestamp to utc. + # expect SET TiME ZONE to UTC at connection opening time. + # This shouldn't change anything for datetime without TZ. + elif isinstance(val, datetime) and val.tzinfo is not None: + val = utcdatetime(val) + elif isinstance(val, time) and val.tzinfo is not None: + val = utctime(val) + newargs[key] = val + # should not collide + assert not (frozenset(newargs) & frozenset(query_args)), \ + 'unexpected collision: %s' % (frozenset(newargs) & frozenset(query_args)) + newargs.update(query_args) + return newargs + return query_args + + def process_result(self, cursor, cnx=None, column_callbacks=None): + """return a list of CubicWeb compliant values from data in the given cursor + """ + return list(self.iter_process_result(cursor, cnx, column_callbacks)) + + def iter_process_result(self, cursor, cnx, column_callbacks=None): + """return a iterator on tuples of CubicWeb compliant values from data + in the given cursor + """ + # use two different implementations to avoid paying the price of + # callback lookup for each *cell* in results when there is nothing to + # lookup + if not column_callbacks: + return self.dbhelper.dbapi_module.process_cursor(cursor, self._dbencoding, + Binary) + assert cnx + return self._cb_process_result(cursor, column_callbacks, cnx) + + def _cb_process_result(self, cursor, column_callbacks, cnx): + # begin bind to locals for optimization + descr = cursor.description + encoding = self._dbencoding + process_value = self._process_value + binary = Binary + # /end + cursor.arraysize = 100 + while True: + results = cursor.fetchmany() + if not results: + break + for line in results: + result = [] + for col, value in enumerate(line): + if value is None: + result.append(value) + continue + cbstack = column_callbacks.get(col, None) + if cbstack is None: + value = process_value(value, descr[col], encoding, binary) + else: + for cb in cbstack: + value = cb(self, cnx, value) + result.append(value) + yield result + + def preprocess_entity(self, entity): + """return a dictionary to use as extra argument to cursor.execute + to 
insert/update an entity into a SQL database + """ + attrs = {} + eschema = entity.e_schema + converters = getattr(self.dbhelper, 'TYPE_CONVERTERS', {}) + for attr, value in entity.cw_edited.items(): + if value is not None and eschema.subjrels[attr].final: + atype = str(entity.e_schema.destination(attr)) + if atype in converters: + # It is easier to modify preprocess_entity rather + # than add_entity (native) as this behavior + # may also be used for update. + value = converters[atype](value) + elif atype == 'Password': # XXX could be done using a TYPE_CONVERTERS callback + # if value is a Binary instance, this mean we got it + # from a query result and so it is already encrypted + if isinstance(value, Binary): + value = value.getvalue() + else: + value = crypt_password(value) + value = self._binary(value) + elif isinstance(value, Binary): + value = self._binary(value.getvalue()) + attrs[SQL_PREFIX+str(attr)] = value + attrs[SQL_PREFIX+'eid'] = entity.eid + return attrs + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter')) + + +# connection initialization functions ########################################## + +def _install_sqlite_querier_patch(): + """This monkey-patch hotfixes a bug sqlite causing some dates to be returned as strings rather than + date objects (http://www.sqlite.org/cvstrac/tktview?tn=1327,33) + """ + from cubicweb.server.querier import QuerierHelper + + if hasattr(QuerierHelper, '_sqlite_patched'): + return # already monkey patched + + def wrap_execute(base_execute): + def new_execute(*args, **kwargs): + rset = base_execute(*args, **kwargs) + if rset.description: + found_date = False + for row, rowdesc in zip(rset, rset.description): + for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)): + if vtype in ('TZDatetime', 'Date', 'Datetime') \ + and isinstance(value, text_type): + found_date = True + value = value.rsplit('.', 1)[0] + try: + row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') + except Exception: + row[cellindex] = strptime(value, '%Y-%m-%d') + if vtype == 'TZDatetime': + row[cellindex] = row[cellindex].replace(tzinfo=utc) + if vtype == 'Time' and isinstance(value, text_type): + found_date = True + try: + row[cellindex] = strptime(value, '%H:%M:%S') + except Exception: + # DateTime used as Time? + row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') + if vtype == 'Interval' and isinstance(value, int): + found_date = True + # XXX value is in number of seconds? + row[cellindex] = timedelta(0, value, 0) + if not found_date: + break + return rset + return new_execute + + QuerierHelper.execute = wrap_execute(QuerierHelper.execute) + QuerierHelper._sqlite_patched = True + + +def _init_sqlite_connection(cnx): + """Internal function that will be called to init a sqlite connection""" + _install_sqlite_querier_patch() + + class group_concat(object): + def __init__(self): + self.values = set() + def step(self, value): + if value is not None: + self.values.add(value) + def finalize(self): + return ', '.join(text_type(v) for v in self.values) + + cnx.create_aggregate("GROUP_CONCAT", 1, group_concat) + + def _limit_size(text, maxsize, format='text/plain'): + if len(text) < maxsize: + return text + if format in ('text/html', 'text/xhtml', 'text/xml'): + text = remove_html_tags(text) + if len(text) > maxsize: + text = text[:maxsize] + '...' 
+ return text + + def limit_size3(text, format, maxsize): + return _limit_size(text, maxsize, format) + cnx.create_function("LIMIT_SIZE", 3, limit_size3) + + def limit_size2(text, maxsize): + return _limit_size(text, maxsize) + cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2) + + from logilab.common.date import strptime + def weekday(ustr): + try: + dt = strptime(ustr, '%Y-%m-%d %H:%M:%S') + except: + dt = strptime(ustr, '%Y-%m-%d') + # expect sunday to be 1, saturday 7 while weekday method return 0 for + # monday + return (dt.weekday() + 1) % 7 + cnx.create_function("WEEKDAY", 1, weekday) + + cnx.cursor().execute("pragma foreign_keys = on") + + import yams.constraints + yams.constraints.patch_sqlite_decimal() + +sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', []) +sqlite_hooks.append(_init_sqlite_connection) + + +def _init_postgres_connection(cnx): + """Internal function that will be called to init a postgresql connection""" + cnx.cursor().execute('SET TIME ZONE UTC') + # commit is needed, else setting are lost if the connection is first + # rolled back + cnx.commit() + +postgres_hooks = SQL_CONNECT_HOOKS.setdefault('postgres', []) +postgres_hooks.append(_init_postgres_connection) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/ssplanner.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/ssplanner.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,543 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
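Both connection hooks just above are registered through the same SQL_CONNECT_HOOKS registry: a per-driver list of callables, each receiving the freshly opened DB-API connection. Third-party code can hook in the same way; a hedged sketch adding an extra SQL function for sqlite (REVERSE_TEXT and _init_my_sqlite_connection are made up for the example):

from cubicweb.server import SQL_CONNECT_HOOKS

def _init_my_sqlite_connection(cnx):
    # expose an extra scalar function callable from generated SQL
    def reverse_text(text):
        return text[::-1] if text is not None else None
    cnx.create_function("REVERSE_TEXT", 1, reverse_text)

SQL_CONNECT_HOOKS.setdefault('sqlite', []).append(_init_my_sqlite_connection)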
+"""plan execution of rql queries on a single source""" + +__docformat__ = "restructuredtext en" + +from six import text_type + +from rql.stmts import Union, Select +from rql.nodes import Constant, Relation + +from cubicweb import QueryError +from cubicweb.schema import VIRTUAL_RTYPES +from cubicweb.rqlrewrite import add_types_restriction +from cubicweb.server.edition import EditedEntity + +READ_ONLY_RTYPES = set(('eid', 'has_text', 'is', 'is_instance_of', 'identity')) + +_CONSTANT = object() +_FROM_SUBSTEP = object() + +def _extract_const_attributes(plan, rqlst, to_build): + """add constant values to entity def, mark variables to be selected + """ + to_select = {} + for relation in rqlst.main_relations: + lhs, rhs = relation.get_variable_parts() + rtype = relation.r_type + if rtype in READ_ONLY_RTYPES: + raise QueryError("can't assign to %s" % rtype) + try: + edef = to_build[str(lhs)] + except KeyError: + # lhs var is not to build, should be selected and added as an + # object relation + edef = to_build[str(rhs)] + to_select.setdefault(edef, []).append((rtype, lhs, 1)) + else: + if isinstance(rhs, Constant) and not rhs.uid: + # add constant values to entity def + value = rhs.eval(plan.args) + eschema = edef.entity.e_schema + attrtype = eschema.subjrels[rtype].objects(eschema)[0] + if attrtype == 'Password' and isinstance(value, text_type): + value = value.encode('UTF8') + edef.edited_attribute(rtype, value) + elif str(rhs) in to_build: + # create a relation between two newly created variables + plan.add_relation_def((edef, rtype, to_build[rhs.name])) + else: + to_select.setdefault(edef, []).append( (rtype, rhs, 0) ) + return to_select + +def _extract_eid_consts(plan, rqlst): + """return a dict mapping rqlst variable object to their eid if specified in + the syntax tree + """ + cnx = plan.cnx + if rqlst.where is None: + return {} + eidconsts = {} + neweids = cnx.transaction_data.get('neweids', ()) + checkread = cnx.read_security + eschema = cnx.vreg.schema.eschema + for rel in rqlst.where.get_nodes(Relation): + # only care for 'eid' relations ... + if (rel.r_type == 'eid' + # ... that are not part of a NOT clause ... + and not rel.neged(strict=True) + # ... and where eid is specified by '=' operator. + and rel.children[1].operator == '='): + lhs, rhs = rel.get_variable_parts() + if isinstance(rhs, Constant): + eid = int(rhs.eval(plan.args)) + # check read permission here since it may not be done by + # the generated select substep if not emited (eg nothing + # to be selected) + if checkread and eid not in neweids: + with cnx.security_enabled(read=False): + eschema(cnx.entity_metas(eid)['type']).check_perm( + cnx, 'read', eid=eid) + eidconsts[lhs.variable] = eid + return eidconsts + +def _build_substep_query(select, origrqlst): + """Finalize substep select query that should be executed to get proper + selection of stuff to insert/update. + + Return None when no query actually needed, else the given select node that + will be used as substep query. 
+ """ + if origrqlst.where is not None and not select.selection: + # no selection, append one randomly by searching for a relation which is + # not neged neither a type restriction (is/is_instance_of) + for rel in origrqlst.where.iget_nodes(Relation): + if not (rel.neged(traverse_scope=True) or rel.is_types_restriction()): + select.append_selected(rel.children[0].copy(select)) + break + else: + return None + if select.selection: + if origrqlst.where is not None: + select.set_where(origrqlst.where.copy(select)) + if getattr(origrqlst, 'having', None): + select.set_having([sq.copy(select) for sq in origrqlst.having]) + return select + return None + +class SSPlanner(object): + """SingleSourcePlanner: build execution plan for rql queries + + optimized for single source repositories + """ + + def __init__(self, schema, rqlhelper): + self.schema = schema + self.rqlhelper = rqlhelper + + def build_plan(self, plan): + """build an execution plan from a RQL query + + do nothing here, dispatch according to the statement type + """ + build_plan = getattr(self, 'build_%s_plan' % plan.rqlst.TYPE) + for step in build_plan(plan, plan.rqlst): + plan.add_step(step) + + def build_select_plan(self, plan, rqlst): + """build execution plan for a SELECT RQL query. Suppose only one source + is available and so avoid work need for query decomposition among sources + + the rqlst should not be tagged at this point. + """ + plan.preprocess(rqlst) + return (OneFetchStep(plan, rqlst),) + + def build_insert_plan(self, plan, rqlst): + """get an execution plan from an INSERT RQL query""" + # each variable in main variables is a new entity to insert + to_build = {} + cnx = plan.cnx + etype_class = cnx.vreg['etypes'].etype_class + for etype, var in rqlst.main_variables: + # need to do this since entity class is shared w. web client code ! 
+ to_build[var.name] = EditedEntity(etype_class(etype)(cnx)) + plan.add_entity_def(to_build[var.name]) + # add constant values to entity def, mark variables to be selected + to_select = _extract_const_attributes(plan, rqlst, to_build) + # add necessary steps to add relations and update attributes + step = InsertStep(plan) # insert each entity and its relations + step.children += self._compute_relation_steps(plan, rqlst, to_select) + return (step,) + + def _compute_relation_steps(self, plan, rqlst, to_select): + """handle the selection of relations for an insert query""" + eidconsts = _extract_eid_consts(plan, rqlst) + for edef, rdefs in to_select.items(): + # create a select rql st to fetch needed data + select = Select() + eschema = edef.entity.e_schema + for i, (rtype, term, reverse) in enumerate(rdefs): + if getattr(term, 'variable', None) in eidconsts: + value = eidconsts[term.variable] + else: + select.append_selected(term.copy(select)) + value = _FROM_SUBSTEP + if reverse: + rdefs[i] = (rtype, InsertRelationsStep.REVERSE_RELATION, value) + else: + rschema = eschema.subjrels[rtype] + if rschema.final or rschema.inlined: + rdefs[i] = (rtype, InsertRelationsStep.FINAL, value) + else: + rdefs[i] = (rtype, InsertRelationsStep.RELATION, value) + step = InsertRelationsStep(plan, edef, rdefs) + select = _build_substep_query(select, rqlst) + if select is not None: + step.children += self._select_plan(plan, select, rqlst.solutions) + yield step + + def build_delete_plan(self, plan, rqlst): + """get an execution plan from a DELETE RQL query""" + # build a select query to fetch entities to delete + steps = [] + for etype, var in rqlst.main_variables: + step = DeleteEntitiesStep(plan) + step.children += self._sel_variable_step(plan, rqlst, etype, var) + steps.append(step) + for relation in rqlst.main_relations: + step = DeleteRelationsStep(plan, relation.r_type) + step.children += self._sel_relation_steps(plan, rqlst, relation) + steps.append(step) + return steps + + def _sel_variable_step(self, plan, rqlst, etype, varref): + """handle the selection of variables for a delete query""" + select = Select() + varref = varref.copy(select) + select.defined_vars = {varref.name: varref.variable} + select.append_selected(varref) + if rqlst.where is not None: + select.set_where(rqlst.where.copy(select)) + if getattr(rqlst, 'having', None): + select.set_having([x.copy(select) for x in rqlst.having]) + if etype != 'Any': + select.add_type_restriction(varref.variable, etype) + return self._select_plan(plan, select, rqlst.solutions) + + def _sel_relation_steps(self, plan, rqlst, relation): + """handle the selection of relations for a delete query""" + select = Select() + lhs, rhs = relation.get_variable_parts() + select.append_selected(lhs.copy(select)) + select.append_selected(rhs.copy(select)) + select.set_where(relation.copy(select)) + if rqlst.where is not None: + select.add_restriction(rqlst.where.copy(select)) + if getattr(rqlst, 'having', None): + select.set_having([x.copy(select) for x in rqlst.having]) + return self._select_plan(plan, select, rqlst.solutions) + + def build_set_plan(self, plan, rqlst): + """get an execution plan from an SET RQL query""" + getrschema = self.schema.rschema + select = Select() # potential substep query + selectedidx = {} # local state + attributes = set() # edited attributes + updatedefs = [] # definition of update attributes/relations + selidx = residx = 0 # substep selection / resulting rset indexes + # search for eid const in the WHERE clause + eidconsts = 
_extract_eid_consts(plan, rqlst) + # build `updatedefs` describing things to update and add necessary + # variables to the substep selection + for i, relation in enumerate(rqlst.main_relations): + if relation.r_type in VIRTUAL_RTYPES: + raise QueryError('can not assign to %r relation' + % relation.r_type) + lhs, rhs = relation.get_variable_parts() + lhskey = lhs.as_string() + if not lhskey in selectedidx: + if lhs.variable in eidconsts: + eid = eidconsts[lhs.variable] + lhsinfo = (_CONSTANT, eid, residx) + else: + select.append_selected(lhs.copy(select)) + lhsinfo = (_FROM_SUBSTEP, selidx, residx) + selidx += 1 + residx += 1 + selectedidx[lhskey] = lhsinfo + else: + lhsinfo = selectedidx[lhskey][:-1] + (None,) + rhskey = rhs.as_string() + if not rhskey in selectedidx: + if isinstance(rhs, Constant): + rhsinfo = (_CONSTANT, rhs.eval(plan.args), residx) + elif getattr(rhs, 'variable', None) in eidconsts: + eid = eidconsts[rhs.variable] + rhsinfo = (_CONSTANT, eid, residx) + else: + select.append_selected(rhs.copy(select)) + rhsinfo = (_FROM_SUBSTEP, selidx, residx) + selidx += 1 + residx += 1 + selectedidx[rhskey] = rhsinfo + else: + rhsinfo = selectedidx[rhskey][:-1] + (None,) + rschema = getrschema(relation.r_type) + updatedefs.append( (lhsinfo, rhsinfo, rschema) ) + # the update step + step = UpdateStep(plan, updatedefs) + # when necessary add substep to fetch yet unknown values + select = _build_substep_query(select, rqlst) + if select is not None: + # set distinct to avoid potential duplicate key error + select.distinct = True + step.children += self._select_plan(plan, select, rqlst.solutions) + return (step,) + + # internal methods ######################################################## + + def _select_plan(self, plan, select, solutions): + union = Union() + union.append(select) + select.clean_solutions(solutions) + add_types_restriction(self.schema, select) + self.rqlhelper.annotate(union) + return self.build_select_plan(plan, union) + + +# execution steps and helper functions ######################################## + +def varmap_test_repr(varmap, tablesinorder): + if varmap is None: + return varmap + maprepr = {} + for var, sql in varmap.items(): + table, col = sql.split('.') + maprepr[var] = '%s.%s' % (tablesinorder[table], col) + return maprepr + +class Step(object): + """base abstract class for execution step""" + def __init__(self, plan): + self.plan = plan + self.children = [] + + def execute_child(self): + assert len(self.children) == 1 + return self.children[0].execute() + + def execute_children(self): + for step in self.children: + step.execute() + + def execute(self): + """execute this step and store partial (eg this step) results""" + raise NotImplementedError() + + def mytest_repr(self): + """return a representation of this step suitable for test""" + return (self.__class__.__name__,) + + def test_repr(self): + """return a representation of this step suitable for test""" + return self.mytest_repr() + ( + [step.test_repr() for step in self.children],) + + +class OneFetchStep(Step): + """step consisting in fetching data from sources and directly returning + results + """ + def __init__(self, plan, union, inputmap=None): + Step.__init__(self, plan) + self.union = union + self.inputmap = inputmap + + def execute(self): + """call .syntax_tree_search with the given syntax tree on each + source for each solution + """ + self.execute_children() + cnx = self.plan.cnx + args = self.plan.args + inputmap = self.inputmap + union = self.union + # do we have to use a inputmap from a 
previous step ? If so disable + # cachekey + if inputmap or self.plan.cache_key is None: + cachekey = None + # union may have been splited into subqueries, in which case we can't + # use plan.cache_key, rebuild a cache key + elif isinstance(self.plan.cache_key, tuple): + cachekey = list(self.plan.cache_key) + cachekey[0] = union.as_string() + cachekey = tuple(cachekey) + else: + cachekey = union.as_string() + # get results for query + source = cnx.repo.system_source + result = source.syntax_tree_search(cnx, union, args, cachekey, inputmap) + #print 'ONEFETCH RESULT %s' % (result) + return result + + def mytest_repr(self): + """return a representation of this step suitable for test""" + try: + inputmap = varmap_test_repr(self.inputmap, self.plan.tablesinorder) + except AttributeError: + inputmap = self.inputmap + return (self.__class__.__name__, + sorted((r.as_string(kwargs=self.plan.args), r.solutions) + for r in self.union.children), + inputmap) + + +# UPDATE/INSERT/DELETE steps ################################################## + +class InsertRelationsStep(Step): + """step consisting in adding attributes/relations to entity defs from a + previous FetchStep + + relations values comes from the latest result, with one columns for + each relation defined in self.rdefs + + for one entity definition, we'll construct N entity, where N is the + number of the latest result + """ + + FINAL = 0 + RELATION = 1 + REVERSE_RELATION = 2 + + def __init__(self, plan, edef, rdefs): + Step.__init__(self, plan) + # partial entity definition to expand + self.edef = edef + # definition of relations to complete + self.rdefs = rdefs + + def execute(self): + """execute this step""" + base_edef = self.edef + edefs = [] + if self.children: + result = self.execute_child() + else: + result = [[]] + for row in result: + # get a new entity definition for this row + edef = base_edef.clone() + # complete this entity def using row values + index = 0 + for rtype, rorder, value in self.rdefs: + if value is _FROM_SUBSTEP: + value = row[index] + index += 1 + if rorder == InsertRelationsStep.FINAL: + edef.edited_attribute(rtype, value) + elif rorder == InsertRelationsStep.RELATION: + self.plan.add_relation_def( (edef, rtype, value) ) + edef.querier_pending_relations[(rtype, 'subject')] = value + else: + self.plan.add_relation_def( (value, rtype, edef) ) + edef.querier_pending_relations[(rtype, 'object')] = value + edefs.append(edef) + self.plan.substitute_entity_def(base_edef, edefs) + return result + + +class InsertStep(Step): + """step consisting in inserting new entities / relations""" + + def execute(self): + """execute this step""" + for step in self.children: + assert isinstance(step, InsertRelationsStep) + step.plan = self.plan + step.execute() + # insert entities first + result = self.plan.insert_entity_defs() + # then relation + self.plan.insert_relation_defs() + # return eids of inserted entities + return result + + +class DeleteEntitiesStep(Step): + """step consisting in deleting entities""" + + def execute(self): + """execute this step""" + results = self.execute_child() + if results: + todelete = frozenset(int(eid) for eid, in results) + cnx = self.plan.cnx + cnx.repo.glob_delete_entities(cnx, todelete) + return results + +class DeleteRelationsStep(Step): + """step consisting in deleting relations""" + + def __init__(self, plan, rtype): + Step.__init__(self, plan) + self.rtype = rtype + + def execute(self): + """execute this step""" + cnx = self.plan.cnx + delete = cnx.repo.glob_delete_relation + for subj, obj in 
self.execute_child(): + delete(cnx, subj, self.rtype, obj) + + +class UpdateStep(Step): + """step consisting in updating entities / adding relations from relations + definitions and from results fetched in previous step + """ + + def __init__(self, plan, updatedefs): + Step.__init__(self, plan) + self.updatedefs = updatedefs + + def execute(self): + """execute this step""" + cnx = self.plan.cnx + repo = cnx.repo + edefs = {} + relations = {} + # insert relations + if self.children: + result = self.execute_child() + else: + result = [[]] + for i, row in enumerate(result): + newrow = [] + for (lhsinfo, rhsinfo, rschema) in self.updatedefs: + lhsval = _handle_relterm(lhsinfo, row, newrow) + rhsval = _handle_relterm(rhsinfo, row, newrow) + if rschema.final or rschema.inlined: + eid = int(lhsval) + try: + edited = edefs[eid] + except KeyError: + edef = cnx.entity_from_eid(eid) + edefs[eid] = edited = EditedEntity(edef) + edited.edited_attribute(str(rschema), rhsval) + else: + str_rschema = str(rschema) + if str_rschema in relations: + relations[str_rschema].append((lhsval, rhsval)) + else: + relations[str_rschema] = [(lhsval, rhsval)] + result[i] = newrow + # update entities + repo.glob_add_relations(cnx, relations) + for eid, edited in edefs.items(): + repo.glob_update_entity(cnx, edited) + return result + +def _handle_relterm(info, row, newrow): + if info[0] is _CONSTANT: + val = info[1] + else: # _FROM_SUBSTEP + val = row[info[1]] + if info[-1] is not None: + newrow.append(val) + return val diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-cwep002/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-cwep002/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,35 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
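In build_set_plan above, each relation side is described either by a (_CONSTANT, value, residx) triple, resolved at planning time from an eid or literal, or by a (_FROM_SUBSTEP, selidx, residx) triple, resolved against the row fetched by the substep query; _handle_relterm hides that difference from UpdateStep. A standalone restatement of that resolution logic with a tiny worked example (sketch only, not meant to be imported):

_CONSTANT = object()
_FROM_SUBSTEP = object()

def handle_relterm(info, row, newrow):
    # mirror of _handle_relterm: resolve one term description
    if info[0] is _CONSTANT:
        val = info[1]            # value known when the plan was built
    else:                        # _FROM_SUBSTEP
        val = row[info[1]]       # value selected by the substep query
    if info[-1] is not None:     # not yet in the result row: keep it
        newrow.append(val)
    return val

row, newrow = [42, u'new title'], []
assert handle_relterm((_CONSTANT, 1234, 0), row, newrow) == 1234
assert handle_relterm((_FROM_SUBSTEP, 1, 1), row, newrow) == u'new title'
assert newrow == [1234, u'new title']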
+ +from yams.buildobjs import EntityType, RelationDefinition, Int, ComputedRelation + +class Person(EntityType): + salary = Int() + +class works_for(RelationDefinition): + subject = 'Person' + object = 'Company' + cardinality = '?*' + +class Company(EntityType): + total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE ' + 'P works_for X, P salary SA') + +class has_employee(ComputedRelation): + rule = 'O works_for S' + __permissions__ = {'read': ('managers',)} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +card,comment,tag,basket,file,localperms,fakeemail diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/cubes/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakecustomtype/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakecustomtype/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/cubes/fakecustomtype/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,50 @@ +# pylint: disable-msg=W0622 +"""cubicweb-fakeemail packaging information""" + +modname = 'fakecustomtype' +distname = "cubicweb-%s" % modname + +numversion = (1, 0, 0) +version = '.'.join(str(num) for num in numversion) + +license = 'LGPL' +author = "Logilab" +author_email = "contact@logilab.fr" +web = 'http://www.cubicweb.org/project/%s' % distname +description = "whatever" +classifiers = [ + 'Environment :: Web Environment', + 'Framework :: CubicWeb', + 'Programming Language :: Python', + 'Programming Language :: JavaScript', +] + +# used packages +__depends__ = {'cubicweb': '>= 3.19.0', + } + + +# packaging ### + +from os import listdir as _listdir +from os.path import join, isdir +from glob import glob + +THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) + +def listdir(dirpath): + return [join(dirpath, fname) for fname in _listdir(dirpath) + if fname[0] != '.' 
and not fname.endswith('.pyc') + and not fname.endswith('~') + and not isdir(join(dirpath, fname))] + +data_files = [ + # common files + [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], + ] +# check for possible extended cube layout +for dirname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'i18n', 'migration', 'wdoc'): + if isdir(dirname): + data_files.append([join(THIS_CUBE_DIR, dirname), listdir(dirname)]) +# Note: here, you'll need to add subdirectories if you want +# them to be included in the debian package diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakecustomtype/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/cubes/fakecustomtype/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,7 @@ + +from yams.buildobjs import EntityType, make_type + +Numeric = make_type('Numeric') + +class Location(EntityType): + num = Numeric(scale=10, precision=18) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakecustomtype/site_cubicweb.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/cubes/fakecustomtype/site_cubicweb.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +from yams import register_base_type +from logilab.database import get_db_helper +from logilab.database.sqlgen import SQLExpression + +_NUMERIC_PARAMETERS = {'scale': 0, 'precision': None} +register_base_type('Numeric', _NUMERIC_PARAMETERS) + +# Add the datatype to the helper mapping +pghelper = get_db_helper('postgres') + + +def pg_numeric_sqltype(rdef): + """Return a PostgreSQL column type corresponding to rdef + """ + return 'numeric(%s, %s)' % (rdef.precision, rdef.scale) + +pghelper.TYPE_MAPPING['Numeric'] = pg_numeric_sqltype diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakeemail/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakeemail/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/cubes/fakeemail/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,53 @@ +# pylint: disable-msg=W0622 +"""cubicweb-fakeemail packaging information""" + +modname = 'fakeemail' +distname = "cubicweb-%s" % modname + +numversion = (1, 10, 0) +version = '.'.join(str(num) for num in numversion) + +license = 'LGPL' +author = "Logilab" +author_email = "contact@logilab.fr" +web = 'http://www.cubicweb.org/project/%s' % distname +description = "email component for the CubicWeb framework" +classifiers = [ + 'Environment :: Web Environment', + 'Framework :: CubicWeb', + 'Programming Language :: Python', + 'Programming Language :: JavaScript', +] + +# used packages +__depends__ = {'cubicweb': '>= 3.19.0', + 'cubicweb-file': '>= 1.9.0', + 'logilab-common': '>= 0.58.3', + } +__recommends__ = {'cubicweb-comment': None} + + +# packaging ### + +from os import listdir as _listdir +from os.path import join, isdir +from glob import glob + +THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) + +def listdir(dirpath): + return [join(dirpath, fname) for fname in _listdir(dirpath) + if fname[0] != '.' 
and not fname.endswith('.pyc') + and not fname.endswith('~') + and not isdir(join(dirpath, fname))] + +data_files = [ + # common files + [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], + ] +# check for possible extended cube layout +for dirname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'i18n', 'migration', 'wdoc'): + if isdir(dirname): + data_files.append([join(THIS_CUBE_DIR, dirname), listdir(dirname)]) +# Note: here, you'll need to add subdirectories if you want +# them to be included in the debian package diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/cubes/fakeemail/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/cubes/fakeemail/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,86 @@ +"""entity/relation schemas to store email in an cubicweb instance + +:organization: Logilab +:copyright: 2006-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +# pylint: disable-msg=E0611,F0401 +from yams.buildobjs import (SubjectRelation, RelationType, EntityType, + String, Datetime, Int, RelationDefinition) +from yams.reader import context + +from cubicweb.schema import ERQLExpression + + +class Email(EntityType): + """electronic mail""" + subject = String(fulltextindexed=True) + date = Datetime(description=_('UTC time on which the mail was sent')) + messageid = String(required=True, indexed=True) + headers = String(description=_('raw headers')) + + sender = SubjectRelation('EmailAddress', cardinality='?*') + # an email with only Bcc is acceptable, don't require any recipients + recipients = SubjectRelation('EmailAddress') + cc = SubjectRelation('EmailAddress') + + parts = SubjectRelation('EmailPart', cardinality='*1', composite='subject') + attachment = SubjectRelation('File') + + reply_to = SubjectRelation('Email', cardinality='?*') + cites = SubjectRelation('Email') + in_thread = SubjectRelation('EmailThread', cardinality='?*') + + +class EmailPart(EntityType): + """an email attachment""" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), # XXX if E parts X, U has_read_permission E + 'add': ('managers', ERQLExpression('E parts X, U has_update_permission E'),), + 'delete': ('managers', ERQLExpression('E parts X, U has_update_permission E')), + 'update': ('managers', 'owners',), + } + + content = String(fulltextindexed=True) + content_format = String(required=True, maxsize=50) + ordernum = Int(required=True) + alternative = SubjectRelation('EmailPart', symmetric=True) + + +class EmailThread(EntityType): + """discussion thread""" + title = String(required=True, indexed=True, fulltextindexed=True) + see_also = SubjectRelation('EmailThread') + forked_from = SubjectRelation('EmailThread', cardinality='?*') + +class parts(RelationType): + """ """ + fulltext_container = 'subject' + +class sender(RelationType): + """ """ + inlined = True + +class in_thread(RelationType): + """ """ + inlined = True + +class reply_to(RelationType): + """ """ + inlined = True + +class generated_by(RelationType): + """mark an entity as generated from an email""" + cardinality = '?*' + subject = ('TrInfo',) + object = 'Email' + +# if comment is installed +if 'Comment' in context.defined: + class comment_generated_by(RelationDefinition): + subject = 'Comment' + name = 'generated_by' + object = 'Email' diff -r 058bb3dc685f -r 0b59724cb3f2 
cubicweb/server/test/data-migractions/migratedapp/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/migratedapp/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/migratedapp/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/migratedapp/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +card,comment,tag,basket,fakeemail,file diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/migratedapp/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/migratedapp/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,213 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
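The fakecustomtype cube above shows the two halves needed for a custom final type: register_base_type() declares 'Numeric' to yams, and an entry in the db helper's TYPE_MAPPING tells the backend how to render its columns. Supporting another backend follows the same pattern; a hedged sketch for sqlite (assuming the sqlite helper exposes TYPE_MAPPING like the postgres one; sqlite_numeric_sqltype is hypothetical):

from logilab.database import get_db_helper

def sqlite_numeric_sqltype(rdef):
    # sqlite has no parameterized NUMERIC(precision, scale); plain numeric
    # affinity is the closest match
    return 'numeric'

get_db_helper('sqlite').TYPE_MAPPING['Numeric'] = sqlite_numeric_sqltype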
+"""cw.server.migraction test""" +import datetime as dt +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, + SubjectRelation, Bytes, + RichString, String, Int, Boolean, Datetime, Date, Float) +from yams.constraints import SizeConstraint, UniqueConstraint +from cubicweb import _ +from cubicweb.schema import (WorkflowableEntityType, RQLConstraint, + RQLVocabularyConstraint, + ERQLExpression, RRQLExpression) + +class Affaire(EntityType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), + 'update': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), + 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), + } + + ref = String(fulltextindexed=True, indexed=True, + constraints=[SizeConstraint(16)]) + sujet = String(fulltextindexed=True, + constraints=[SizeConstraint(256)]) + concerne = SubjectRelation('Societe') + opt_attr = Bytes() + +class Societe(WorkflowableEntityType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', 'owners'), + 'delete': ('managers', 'owners'), + 'add': ('managers', 'users',) + } + nom = String(maxsize=64, fulltextindexed=True) + web = String(maxsize=128) + tel = Float() + fax = Int() + rncs = String(maxsize=128) + ad1 = String(maxsize=128) + ad2 = String(maxsize=128) + ad3 = String(maxsize=128) + cp = String(maxsize=12) + ville= String(maxsize=32) + +# Division and SubDivision are gone + +# New +class Para(EntityType): + para = String(maxsize=512) + newattr = String() + newinlined = SubjectRelation('Affaire', cardinality='?*', inlined=True) + newnotinlined = SubjectRelation('Affaire', cardinality='?*') + +class Note(Para): + __specializes_schema__ = True + + __permissions__ = {'read': ('managers', 'users', 'guests',), + 'update': ('managers', 'owners',), + 'delete': ('managers', ), + 'add': ('managers', + ERQLExpression('X ecrit_part PE, U in_group G, ' + 'PE require_permission P, P name "add_note", ' + 'P require_group G'),)} + + whatever = Int(default=0) # keep it before `date` for unittest_migraction.test_add_attribute_int + yesno = Boolean(default=False) + date = Datetime() + type = String(maxsize=1) + unique_id = String(maxsize=1, required=True, unique=True) + mydate = Date(default='TODAY') + oldstyledefaultdate = Date(default='2013/01/01') + newstyledefaultdate = Date(default=dt.date(2013, 1, 1)) + shortpara = String(maxsize=64, default='hop') + ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')]) + attachment = SubjectRelation('File') + + +class Frozable(EntityType): + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', ERQLExpression('X frozen False'),), + 'delete': ('managers', ERQLExpression('X frozen False'),) + } + name = String() + frozen = Boolean(default=False, + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', 'owners') + }) + + +class Personne(EntityType): + __permissions__ = { + 'read': ('managers', 'users'), # 'guests' was removed + 'add': ('managers', 'users'), + 'update': ('managers', 'owners'), + 'delete': ('managers', 'owners') + } + __unique_together__ = [('nom', 'prenom', 'datenaiss')] + nom = String(fulltextindexed=True, required=True, maxsize=64) + prenom = String(fulltextindexed=True, maxsize=64) + civility = String(maxsize=1, default='M', fulltextindexed=True) + promo = 
String(vocabulary=('bon','pasbon')) + titre = String(fulltextindexed=True, maxsize=128) + adel = String(maxsize=128) + ass = String(maxsize=128) + web = String(maxsize=128) + tel = Int() + fax = Int() + datenaiss = Datetime() + test = Boolean() + + travaille = SubjectRelation('Societe') + concerne = SubjectRelation('Affaire') + concerne2 = SubjectRelation(('Affaire', 'Note'), cardinality='1*') + connait = SubjectRelation('Personne', symmetric=True) + +class concerne(RelationType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('O owned_by U')), + } + +# `Old` entity type is gonce +# `comments` is gone +# `fiche` is gone +# `multisource_*` rdefs are gone +# `see_also_*` rdefs are gone + +class evaluee(RelationDefinition): + subject = ('Personne', 'CWUser', 'Societe') + object = ('Note') + constraints = [RQLVocabularyConstraint('S owned_by U')] + +class ecrit_par(RelationType): + __permissions__ = {'read': ('managers', 'users', 'guests',), + 'delete': ('managers', ), + 'add': ('managers', + RRQLExpression('O require_permission P, P name "add_note", ' + 'U in_group G, P require_group G'),) + } + inlined = True + cardinality = '?*' + +# `copain` rdef is gone +# `tags` rdef is gone +# `filed_under` rdef is gone +# `require_permission` rdef is gone +# `require_state` rdef is gone +# `personne_composite` rdef is gone +# `personne_inlined` rdef is gone +# `login_user` rdef is gone +# `ambiguous_inlined` rdef is gone + +class Folder(EntityType): + """folders are used to classify entities. They may be defined as a tree. + """ + name = String(required=True, indexed=True, internationalizable=True, + maxsize=64) + description = RichString(fulltextindexed=True) + filed_under = SubjectRelation('Folder', description=_('parent folder')) + + +# New +class Text(Para): + __specializes_schema__ = True + summary = String(maxsize=512) + + +# New +class Folder2(EntityType): + """folders are used to classify entities. They may be defined as a tree. + When you include the Folder entity, all application specific entities + may then be classified using the "filed_under" relation. + """ + name = String(required=True, indexed=True, internationalizable=True, + constraints=[UniqueConstraint(), SizeConstraint(64)]) + description = RichString(fulltextindexed=True) + +# New +class filed_under2(RelationDefinition): + subject ='*' + object = 'Folder2' + + +# New +class New(EntityType): + new_name = String() + +# New +class same_as(RelationDefinition): + subject = ('Societe',) + object = 'ExternalUri' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-migractions/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-migractions/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,290 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, ComputedRelation, + SubjectRelation, RichString, String, Int, Float, + Boolean, Datetime, TZDatetime, Bytes) +from yams.constraints import SizeConstraint +from cubicweb.schema import (WorkflowableEntityType, + RQLConstraint, RQLUniqueConstraint, + RQLVocabularyConstraint, + ERQLExpression, RRQLExpression) +from cubicweb import _ + + +class Affaire(WorkflowableEntityType): + __permissions__ = { + 'read': ('managers', + ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), + 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), + 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), + 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), + } + + ref = String(fulltextindexed=True, indexed=True, + constraints=[SizeConstraint(16)]) + sujet = String(fulltextindexed=True, + constraints=[SizeConstraint(256)]) + descr = RichString(fulltextindexed=True, + description=_('more detailed description')) + + duration = Int() + invoiced = Float() + opt_attr = Bytes() + + depends_on = SubjectRelation('Affaire') + require_permission = SubjectRelation('CWPermission') + concerne = SubjectRelation(('Societe', 'Note')) + todo_by = SubjectRelation('Personne', cardinality='?*') + documented_by = SubjectRelation('Card') + + +class Societe(EntityType): + __unique_together__ = [('nom', 'type', 'cp')] + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), + 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), + 'add': ('managers', 'users',) + } + + nom = String(maxsize=64, fulltextindexed=True) + web = String(maxsize=128) + type = String(maxsize=128) # attribute in common with Note + tel = Int() + fax = Int() + rncs = String(maxsize=128) + ad1 = String(maxsize=128) + ad2 = String(maxsize=128) + ad3 = String(maxsize=128) + cp = String(maxsize=12) + ville= String(maxsize=32) + + +class Division(Societe): + __specializes_schema__ = True + +class SubDivision(Division): + __specializes_schema__ = True + +class travaille_subdivision(RelationDefinition): + subject = 'Personne' + object = 'SubDivision' + +from cubicweb.schemas.base import CWUser +next(CWUser.get_relations('login')).fulltextindexed = True + +class Note(WorkflowableEntityType): + date = String(maxsize=10) + type = String(vocabulary=[u'todo', u'a', u'b', u'T', u'lalala']) + para = String(maxsize=512, + __permissions__ = { + 'add': ('managers', ERQLExpression('X in_state S, S name "todo"')), + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', ERQLExpression('X in_state S, S name "todo"')), + }) + something = String(maxsize=1, + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': (ERQLExpression('NOT X para NULL'),), + 'update': ('managers', 'owners') + }) + migrated_from = SubjectRelation('Note') + attachment = SubjectRelation('File') + inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*', + constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, ' + 'Y type T, Y inline1 A2, A2 todo_by C', + 'S,Y')]) + todo_by = SubjectRelation('CWUser') + + +class Frozable(EntityType): + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', 
ERQLExpression('X frozen False'),), + 'delete': ('managers', ERQLExpression('X frozen False'),) + } + name = String() + frozen = Boolean(default=False, + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', 'owners') + }) + + +class Personne(EntityType): + __unique_together__ = [('nom', 'prenom', 'inline2')] + nom = String(fulltextindexed=True, required=True, maxsize=64) + prenom = String(fulltextindexed=True, maxsize=64) + sexe = String(maxsize=1, default='M', fulltextindexed=True) + promo = String(vocabulary=('bon','pasbon')) + titre = String(fulltextindexed=True, maxsize=128) + adel = String(maxsize=128) + ass = String(maxsize=128) + web = String(maxsize=128) + tel = Int() + fax = Int() + datenaiss = Datetime() + tzdatenaiss = TZDatetime() + test = Boolean(__permissions__={ + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'update': ('managers',), + }) + description = String() + firstname = String(fulltextindexed=True, maxsize=64) + + concerne = SubjectRelation('Affaire') + connait = SubjectRelation('Personne') + inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') + + +class Old(EntityType): + name = String(__permissions__ = { + 'read' : ('managers', 'users', 'guests'), + 'add' : ('managers', 'users', 'guests'), + 'update' : () + }) + + +class connait(RelationType): + symmetric = True + +class concerne(RelationType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('O owned_by U')), + } + +class travaille(RelationDefinition): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('O owned_by U')), + } + subject = 'Personne' + object = 'Societe' + constraints = [RQLVocabularyConstraint('S owned_by U'), + RQLVocabularyConstraint('S created_by U')] + +class comments(RelationDefinition): + subject = 'Comment' + object = 'Personne' + +class fiche(RelationDefinition): + inlined = True + subject = 'Personne' + object = 'Card' + cardinality = '??' + +class multisource_inlined_rel(RelationDefinition): + inlined = True + cardinality = '?*' + subject = ('Card', 'Note') + object = ('Affaire', 'Note') + + +class see_also_1(RelationDefinition): + name = 'see_also' + subject = object = 'Folder' + +class see_also_2(RelationDefinition): + name = 'see_also' + subject = ('Bookmark', 'Note') + object = ('Bookmark', 'Note') + +class evaluee(RelationDefinition): + subject = ('Personne', 'CWUser', 'Societe') + object = ('Note') + constraints = [ + RQLVocabularyConstraint('S created_by U'), + RQLVocabularyConstraint('S owned_by U'), + ] + +class ecrit_par(RelationType): + inlined = True + +class ecrit_par_1(RelationDefinition): + name = 'ecrit_par' + subject = 'Note' + object = 'Personne' + cardinality = '?*' + +class ecrit_par_2(RelationDefinition): + name = 'ecrit_par' + subject = 'Note' + object = 'CWUser' + cardinality='?*' + + +class copain(RelationDefinition): + subject = object = 'CWUser' + +class tags(RelationDefinition): + subject = 'Tag' + object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire') + +class Folder(EntityType): + """folders are used to classify entities. They may be defined as a tree. 
+ """ + name = String(required=True, indexed=True, internationalizable=True, + maxsize=64) + description = RichString(fulltextindexed=True) + filed_under = SubjectRelation('Folder', description=_('parent folder')) + +class filed_under(RelationDefinition): + subject = ('Note', 'Affaire') + object = 'Folder' + +class require_permission(RelationDefinition): + subject = ('Card', 'Note', 'Personne') + object = 'CWPermission' + +class require_state(RelationDefinition): + subject = 'CWPermission' + object = 'State' + +class personne_composite(RelationDefinition): + subject='Personne' + object='Personne' + composite='subject' + +class personne_inlined(RelationDefinition): + subject='Personne' + object='Personne' + cardinality='?*' + inlined=True + + +class login_user(RelationDefinition): + subject = 'Personne' + object = 'CWUser' + cardinality = '??' + +class ambiguous_inlined(RelationDefinition): + subject = ('Affaire', 'Note') + object = 'CWUser' + inlined = True + cardinality = '?*' + + +class user_login(ComputedRelation): + rule = 'O login_user S' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/schema/Company.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schema2sql/schema/Company.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,67 @@ +# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from yams.buildobjs import EntityType, RelationType, RelationDefinition, \ + SubjectRelation, String + +class Company(EntityType): + name = String() + +class Subcompany(Company): + __specializes_schema__ = True + subcompany_of = SubjectRelation('Company') + +class Division(Company): + __specializes_schema__ = True + division_of = SubjectRelation('Company') + +class Subdivision(Division): + __specializes_schema__ = True + subdivision_of = SubjectRelation('Company') + +class Employee(EntityType): + works_for = SubjectRelation('Company') + +class require_permission(RelationType): + """link a permission to the entity. This permission should be used in the + security definition of the entity's type to be useful. 
+ """ + fulltext_container = 'subject' + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'delete': ('managers',), + } + + +class missing_require_permission(RelationDefinition): + name = 'require_permission' + subject = 'Company' + object = 'EPermission' + +class EPermission(EntityType): + """entity type that may be used to construct some advanced security configuration + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + 'update': ('managers', 'owners',), + } + name = String(required=True, indexed=True, internationalizable=True, + fulltextindexed=True, maxsize=100, + description=_('name or identifier of the permission')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/schema/Dates.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schema2sql/schema/Dates.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,28 @@ +# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from datetime import time, date +from yams.buildobjs import EntityType, Datetime, Date, Time +from yams.constraints import TODAY, BoundaryConstraint + +class Datetest(EntityType): + dt1 = Datetime(default=u'now') + dt2 = Datetime(default=u'today') + d1 = Date(default=u'today', constraints=[BoundaryConstraint('<=', TODAY())]) + d2 = Date(default=date(2007, 12, 11)) + t1 = Time(default=time(8, 40)) + t2 = Time(default=time(9, 45)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/schema/State.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schema2sql/schema/State.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,81 @@ +# copyright 2004-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, + SubjectRelation, Int, String, Boolean) +from yams.constraints import SizeConstraint, UniqueConstraint + +from . 
import RESTRICTED_RTYPE_PERMS + +class State(EntityType): + """used to associate simple states to an entity + type and/or to define workflows + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users',), + 'delete': ('managers', 'owners',), + 'update': ('managers', 'owners',), + } + + # attributes + eid = Int(required=True, uid=True) + name = String(required=True, + indexed=True, internationalizable=True, + constraints=[SizeConstraint(256)]) + description = String(fulltextindexed=True) + # relations + state_of = SubjectRelation('Eetype', cardinality='+*') + next_state = SubjectRelation('State', cardinality='**') + + +class state_of(RelationType): + """link a state to one or more entity type""" + __permissions__ = RESTRICTED_RTYPE_PERMS + +class next_state(RelationType): + """define a workflow by associating a state to possible following states + """ + __permissions__ = RESTRICTED_RTYPE_PERMS + +class initial_state(RelationType): + """indicate which state should be used by default when an entity using states + is created + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users',), + 'delete': ('managers', 'users',), + } + subject = 'Eetype' + object = 'State' + cardinality = '?*' + inlined = True + +class Eetype(EntityType): + """define an entity type, used to build the application schema""" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + 'update': ('managers', 'owners',), + } + name = String(required=True, indexed=True, internationalizable=True, + constraints=[UniqueConstraint(), SizeConstraint(64)]) + description = String(fulltextindexed=True) + meta = Boolean() + final = Boolean() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/schema/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schema2sql/schema/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,23 @@ +# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +"""test schema""" +RESTRICTED_RTYPE_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/schema/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schema2sql/schema/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,112 @@ +# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. 
+# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from yams.buildobjs import (EntityType, RelationDefinition, RelationType, + SubjectRelation, String, Int, Float, Date, Boolean) +from yams.constraints import Attribute, BoundaryConstraint + +class Affaire(EntityType): + sujet = String(maxsize=128) + ref = String(maxsize=12) + + concerne = SubjectRelation('Societe') + obj_wildcard = SubjectRelation('*') + sym_rel = SubjectRelation('Person', symmetric=True) + inline_rel = SubjectRelation('Person', inlined=True, cardinality='?*') + +class subj_wildcard(RelationDefinition): + subject = '*' + object = 'Affaire' + + +class Person(EntityType): + __unique_together__ = [('nom', 'prenom')] + nom = String(maxsize=64, fulltextindexed=True, required=True) + prenom = String(maxsize=64, fulltextindexed=True) + sexe = String(maxsize=1, default='M') + promo = String(vocabulary=('bon','pasbon')) + titre = String(maxsize=128, fulltextindexed=True) + adel = String(maxsize=128) + ass = String(maxsize=128) + web = String(maxsize=128) + tel = Int(__permissions__={'read': (), + 'add': ('managers',), + 'update': ('managers',)}) + fax = Int() + datenaiss = Date() + test = Boolean() + salary = Float() + travaille = SubjectRelation('Societe', + __permissions__={'read': (), + 'add': (), + 'delete': ('managers',), + }) + + evaluee = SubjectRelation('Note') + +class Salaried(Person): + __specializes_schema__ = True + +class Societe(EntityType): + nom = String(maxsize=64, fulltextindexed=True) + web = String(maxsize=128) + tel = Int() + fax = Int(constraints=[BoundaryConstraint('<=', Attribute('tel'))]) + rncs = String(maxsize=32) + ad1 = String(maxsize=128) + ad2 = String(maxsize=128) + ad3 = String(maxsize=128) + cp = String(maxsize=12) + ville = String(maxsize=32) + + evaluee = SubjectRelation('Note') + + +class Note(EntityType): + date = String(maxsize=10) + type = String(maxsize=1) + para = String(maxsize=512) + + +class pkginfo(EntityType): + modname = String(maxsize=30, required=True) + version = String(maxsize=10, required=True, default='0.1') + copyright = String(required=True) + license = String(vocabulary=('GPL', 'ZPL')) + short_desc = String(maxsize=80, required=True) + long_desc = String(required=True, fulltextindexed=True) + author = String(maxsize=100, required=True) + author_email = String(maxsize=100, required=True) + mailinglist = String(maxsize=100) + debian_handler = String(vocabulary=('machin', 'bidule')) + + +class evaluee(RelationType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'delete': ('managers',), + } + +class concerne(RelationDefinition): + subject = 'Person' + object = 'Affaire' + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'delete': ('managers',), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schema2sql/schema/toignore --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schema2sql/schema/toignore Sat Jan 16 13:48:51 2016 +0100 @@ 
-0,0 +1,1 @@ +coucou diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schemaserial/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schemaserial/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,31 @@ +# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from yams.buildobjs import EntityType, SubjectRelation, String, make_type +BabarTestType = make_type('BabarTestType') + +class Affaire(EntityType): + nom = String(unique=True, maxsize=64) + +class Personne(EntityType): + __unique_together__ = [('nom', 'prenom', 'inline2')] + nom = String(fulltextindexed=True, required=True, maxsize=64) + prenom = String(fulltextindexed=True, maxsize=64) + inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') + + custom_field_of_jungle = BabarTestType(jungle_speed=42) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data-schemaserial/site_cubicweb.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data-schemaserial/site_cubicweb.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,29 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from logilab.database import FunctionDescr +from logilab.database.sqlite import register_sqlite_pyfunc +from rql.utils import register_function + +class DUMB_SORT(FunctionDescr): + pass + +register_function(DUMB_SORT) +def dumb_sort(something): + return something +register_sqlite_pyfunc(dumb_sort) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +card,comment,tag,basket,file,localperms diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/hooks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/hooks.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,48 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from cubicweb.server.hook import Hook + +CALLED_EVENTS = {} + +class StartupHook(Hook): + __regid__ = 'mystartup' + events = ('server_startup',) + def __call__(self): + CALLED_EVENTS['server_startup'] = True + +class ShutdownHook(Hook): + __regid__ = 'myshutdown' + events = ('server_shutdown',) + def __call__(self): + CALLED_EVENTS['server_shutdown'] = True + + +class LoginHook(Hook): + __regid__ = 'mylogin' + events = ('session_open',) + def __call__(self): + CALLED_EVENTS['session_open'] = self._cw.user.login + +class LogoutHook(Hook): + __regid__ = 'mylogout' + events = ('session_close',) + def __call__(self): + CALLED_EVENTS['session_close'] = self._cw.user.login diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/ldap_test.ldif --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/ldap_test.ldif Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,74 @@ +dn: dc=cubicweb,dc=test +structuralObjectClass: organization +objectClass: dcObject +objectClass: organization +o: cubicweb +dc: cubicweb + +dn: ou=People,dc=cubicweb,dc=test +objectClass: organizationalUnit +ou: People +structuralObjectClass: organizationalUnit + +dn: ou=Group,dc=cubicweb,dc=test +objectClass: organizationalUnit +ou: Group + +dn: cn=logilab,ou=Group,dc=cubicweb,dc=test +gidNumber: 2000 +objectClass: posixGroup +objectClass: top +cn: logilab +memberUid: adim + +dn: cn=dir,ou=Group,dc=cubicweb,dc=test +gidNumber: 2002 +objectClass: posixGroup +objectClass: top +cn: dir +memberUid: adim +memberUid: syt + +dn: uid=syt,ou=People,dc=cubicweb,dc=test +loginShell: /bin/bash +objectClass: OpenLDAPperson +objectClass: posixAccount +objectClass: top +objectClass: shadowAccount +cn: Sylvain Thenault +sn: Thenault +shadowMax: 99999 +gidNumber: 1004 +uid: syt +homeDirectory: /home/syt +shadowFlag: 134538764 +uidNumber: 1004 +givenName: Sylvain +telephoneNumber: 106 +displayName: sthenault +gecos: Sylvain Thenault +mail: sylvain.thenault@logilab.fr +mail: syt@logilab.fr +userPassword: syt + +dn: uid=adim,ou=People,dc=cubicweb,dc=test +loginShell: /bin/bash +objectClass: OpenLDAPperson +objectClass: posixAccount +objectClass: top +objectClass: shadowAccount +cn: Adrien Di Mascio +sn: Di Mascio +shadowMax: 99999 +gidNumber: 1006 +uid: adim +homeDirectory: /home/adim +uidNumber: 1006 +givenName: Adrien +telephoneNumber: 109 +displayName: adimascio +gecos: Adrien Di Mascio +mail: adim@logilab.fr +mail: adrien.dimascio@logilab.fr +userPassword: adim + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/migration/postcreate.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/migration/postcreate.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,37 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb post creation script, set note's workflow + +""" + +wf = add_workflow(u'note workflow', 'Note') +todo = wf.add_state(u'todo', initial=True) +done = wf.add_state(u'done') +wf.add_transition(u'redoit', done, todo) +wf.add_transition(u'markasdone', todo, done) +commit() + +wf = add_workflow(u'affaire workflow', 'Affaire') +pitetre = wf.add_state(u'pitetre', initial=True) +encours = wf.add_state(u'en cours') +finie = wf.add_state(u'finie') +bennon = wf.add_state(u'ben non') +wf.add_transition(u'abort', pitetre, bennon) +wf.add_transition(u'start', pitetre, encours) +wf.add_transition(u'end', encours, finie) +commit() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,311 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
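# ---------------------------------------------------------------------------
# [Editor's sketch -- illustration only, not part of this changeset]
# The postcreate script above (cubicweb/server/test/data/migration/postcreate.py)
# runs inside CubicWeb's migration shell, where helpers such as add_workflow()
# and commit() are injected into the script's namespace rather than imported.
# Wrapped as a plain function here so the same pattern can be read outside
# that shell:
def create_note_workflow(add_workflow, commit):
    """Recreate the 'note workflow' exactly as the postcreate script does."""
    wf = add_workflow(u'note workflow', 'Note')
    todo = wf.add_state(u'todo', initial=True)
    done = wf.add_state(u'done')
    wf.add_transition(u'redoit', done, todo)
    wf.add_transition(u'markasdone', todo, done)
    commit()
# ---------------------------------------------------------------------------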
+ +from yams.buildobjs import (EntityType, RelationType, RelationDefinition, ComputedRelation, + SubjectRelation, RichString, String, Int, Float, + Boolean, Datetime, TZDatetime, Bytes) +from yams.constraints import SizeConstraint +from cubicweb.schema import (WorkflowableEntityType, + RQLConstraint, RQLUniqueConstraint, + RQLVocabularyConstraint, + ERQLExpression, RRQLExpression) +from cubicweb import _ + +class Affaire(WorkflowableEntityType): + __permissions__ = { + 'read': ('managers', + ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), + 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), + 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), + 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), + } + + ref = String(fulltextindexed=True, indexed=True, + constraints=[SizeConstraint(16)]) + sujet = String(fulltextindexed=True, + constraints=[SizeConstraint(256)]) + descr = RichString(fulltextindexed=True, + description=_('more detailed description')) + + duration = Int() + invoiced = Float() + opt_attr = Bytes() + + depends_on = SubjectRelation('Affaire') + require_permission = SubjectRelation('CWPermission') + concerne = SubjectRelation(('Societe', 'Note')) + todo_by = SubjectRelation('Personne', cardinality='?*') + documented_by = SubjectRelation('Card') + + +class Societe(EntityType): + __unique_together__ = [('nom', 'type', 'cp')] + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), + 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), + 'add': ('managers', 'users',) + } + + nom = String(maxsize=64, fulltextindexed=True) + web = String(maxsize=128) + type = String(maxsize=128) # attribute in common with Note + tel = Int() + fax = Int() + rncs = String(maxsize=128) + ad1 = String(maxsize=128) + ad2 = String(maxsize=128) + ad3 = String(maxsize=128) + cp = String(maxsize=12) + ville= String(maxsize=32) + + +class Division(Societe): + __specializes_schema__ = True + +class SubDivision(Division): + __specializes_schema__ = True + +class travaille_subdivision(RelationDefinition): + subject = 'Personne' + object = 'SubDivision' + +from cubicweb.schemas.base import CWUser +next(CWUser.get_relations('login')).fulltextindexed = True + +class Note(WorkflowableEntityType): + date = String(maxsize=10) + type = String(vocabulary=[u'todo', u'a', u'b', u'T', u'lalala']) + para = String(maxsize=512, + __permissions__ = { + 'add': ('managers', ERQLExpression('X in_state S, S name "todo"')), + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', ERQLExpression('X in_state S, S name "todo"')), + }) + something = String(maxsize=1, + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': (ERQLExpression('NOT X para NULL'),), + 'update': ('managers', 'owners') + }) + migrated_from = SubjectRelation('Note') + attachment = SubjectRelation('File') + inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*', + constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, ' + 'Y type T, Y inline1 A2, A2 todo_by C', + 'S,Y')]) + todo_by = SubjectRelation('CWUser') + + +class Frozable(EntityType): + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', ERQLExpression('X frozen False'),), + 'delete': ('managers', ERQLExpression('X frozen False'),) + } + name = String() + frozen = 
Boolean(default=False, + __permissions__ = { + 'read': ('managers', 'users'), + 'add': ('managers', 'users'), + 'update': ('managers', 'owners') + }) + + +class Personne(EntityType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), # 'guests' will be removed + 'add': ('managers', 'users'), + 'update': ('managers', 'owners'), + 'delete': ('managers', 'owners') + } + __unique_together__ = [('nom', 'prenom', 'inline2')] + nom = String(fulltextindexed=True, required=True, maxsize=64) + prenom = String(fulltextindexed=True, maxsize=64) + sexe = String(maxsize=1, default='M', fulltextindexed=True) + promo = String(vocabulary=('bon','pasbon')) + titre = String(fulltextindexed=True, maxsize=128) + adel = String(maxsize=128) + ass = String(maxsize=128) + web = String(maxsize=128) + tel = Int() + fax = Int() + datenaiss = Datetime() + tzdatenaiss = TZDatetime() + test = Boolean(__permissions__={ + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'update': ('managers',), + }) + description = String() + firstname = String(fulltextindexed=True, maxsize=64) + + concerne = SubjectRelation('Affaire') + connait = SubjectRelation('Personne') + inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') + + +class Old(EntityType): + name = String(__permissions__ = { + 'read' : ('managers', 'users', 'guests'), + 'add' : ('managers', 'users', 'guests'), + 'update' : () + }) + + +class Email(EntityType): + subject = String(fulltextindexed=True) + messageid = String(required=True, indexed=True, unique=True) + sender = SubjectRelation('EmailAddress', cardinality='?*') + recipients = SubjectRelation('EmailAddress') + attachment = SubjectRelation('File') + + +class EmailPart(EntityType): + pass + + +class EmailThread(EntityType): + see_also = SubjectRelation('EmailThread') + + +class connait(RelationType): + symmetric = True + +class concerne(RelationType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('O owned_by U')), + } + +class travaille(RelationDefinition): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('O owned_by U')), + } + subject = 'Personne' + object = 'Societe' + constraints = [RQLVocabularyConstraint('S owned_by U'), + RQLVocabularyConstraint('S created_by U')] + +class comments(RelationDefinition): + subject = 'Comment' + object = 'Personne' + +class fiche(RelationDefinition): + inlined = True + subject = 'Personne' + object = 'Card' + cardinality = '??' 
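# [Editor's note -- illustration only, not part of this changeset]
# In the ERQLExpression / RRQLExpression permissions used throughout this test
# schema, X stands for the entity being checked and U for the user performing
# the action; for relation permissions, S and O are the subject and object of
# the relation. For instance, 'X frozen False' on Frozable only allows updates
# and deletions while the entity's `frozen` attribute is False, and
# 'U has_update_permission S' on `travaille` only lets a user add the relation
# if they are allowed to update its subject.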
+ +class multisource_inlined_rel(RelationDefinition): + inlined = True + cardinality = '?*' + subject = ('Card', 'Note') + object = ('Affaire', 'Note') + + +class see_also_1(RelationDefinition): + name = 'see_also' + subject = object = 'Folder' + +class see_also_2(RelationDefinition): + name = 'see_also' + subject = ('Bookmark', 'Note') + object = ('Bookmark', 'Note') + +class evaluee(RelationDefinition): + subject = ('Personne', 'CWUser', 'Societe') + object = ('Note') + constraints = [ + RQLVocabularyConstraint('S created_by U'), + RQLVocabularyConstraint('S owned_by U'), + ] + +class ecrit_par(RelationType): + inlined = True + +class ecrit_par_1(RelationDefinition): + name = 'ecrit_par' + subject = 'Note' + object ='Personne' + cardinality = '?*' + +class ecrit_par_2(RelationDefinition): + name = 'ecrit_par' + subject = 'Note' + object ='CWUser' + cardinality='?*' + + +class copain(RelationDefinition): + subject = object = 'CWUser' + +class tags(RelationDefinition): + subject = 'Tag' + object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire') + +class Folder(EntityType): + """folders are used to classify entities. They may be defined as a tree. + """ + name = String(required=True, indexed=True, internationalizable=True, + maxsize=64) + description = RichString(fulltextindexed=True) + filed_under = SubjectRelation('Folder', description=_('parent folder')) + +class filed_under(RelationDefinition): + subject = ('Note', 'Affaire') + object = 'Folder' + +class require_permission(RelationDefinition): + subject = ('Card', 'Note', 'Personne') + object = 'CWPermission' + +class require_state(RelationDefinition): + subject = 'CWPermission' + object = 'State' + +class personne_composite(RelationDefinition): + subject='Personne' + object='Personne' + composite='subject' + +class personne_inlined(RelationDefinition): + subject='Personne' + object='Personne' + cardinality='?*' + inlined=True + + +class login_user(RelationDefinition): + subject = 'Personne' + object = 'CWUser' + cardinality = '??' + +class ambiguous_inlined(RelationDefinition): + subject = ('Affaire', 'Note') + object = 'CWUser' + inlined = True + cardinality = '?*' + + +class user_login(ComputedRelation): + rule = 'O login_user S' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/site_cubicweb.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/site_cubicweb.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,29 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
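# [Editor's sketch -- illustration only, not part of this changeset]
# `user_login` above is a ComputedRelation: its pairs are derived from the rule
# 'O login_user S', i.e. it is the read-only inverse of the explicit
# `login_user` relation (CWUser user_login Personne iff Personne login_user
# CWUser). A hypothetical helper querying it over an open connection `cnx`:
def personnes_for_login(cnx, login):
    # computed relations are used in RQL exactly like stored ones
    return cnx.execute('Any P WHERE U user_login P, U login %(l)s', {'l': login})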
+ +from logilab.database import FunctionDescr +from logilab.database.sqlite import register_sqlite_pyfunc +from rql.utils import register_function + +class DUMB_SORT(FunctionDescr): + pass + +register_function(DUMB_SORT) +def dumb_sort(something): + return something +register_sqlite_pyfunc(dumb_sort) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/slapd.conf.in --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/slapd.conf.in Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,53 @@ +# This is the main slapd configuration file. See slapd.conf(5) for more +# info on the configuration options. + +####################################################################### +# Global Directives: + +# Features to permit +#allow bind_v2 + +# Schema and objectClass definitions +include /etc/ldap/schema/core.schema +include /etc/ldap/schema/cosine.schema +include /etc/ldap/schema/nis.schema +include /etc/ldap/schema/inetorgperson.schema +include /etc/ldap/schema/openldap.schema +include /etc/ldap/schema/misc.schema + +# Where the pid file is put. The init.d script +# will not stop the server if you change this. +pidfile %(apphome)s/test-slapd.pid + +# List of arguments that were passed to the server +argsfile %(apphome)s/slapd.args + +# Read slapd.conf(5) for possible values +loglevel sync +# none + +# Where the dynamically loaded modules are stored +modulepath /usr/lib/ldap +moduleload back_hdb +moduleload back_bdb +moduleload back_monitor + +# The maximum number of entries that is returned for a search operation +sizelimit 500 + +# The tool-threads parameter sets the actual amount of cpu's that is used +# for indexing. +tool-threads 1 + +database bdb + +# The base of your directory in database #1 +suffix "dc=cubicweb,dc=test" + +# rootdn directive for specifying a superuser on the database. This is needed +# for syncrepl. and ldapdelete easyness +rootdn "cn=admin,dc=cubicweb,dc=test" +rootpw "cw" +# Where the database file are physically stored for database #1 +directory "%(testdir)s" + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/sources_extern --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/sources_extern Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +[system] +db-driver = sqlite +db-name = tmpdb-extern +db-encoding = UTF-8 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/data/sources_multi --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/data/sources_multi Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,5 @@ +[system] +db-driver = sqlite +adapter = native +db-name = tmpdb-multi +db-encoding = UTF-8 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/datacomputed/migratedapp/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/datacomputed/migratedapp/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,66 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from yams.buildobjs import (EntityType, RelationDefinition, ComputedRelation, + Int, Float) + + +class Employee(EntityType): + pass + + +class employees(RelationDefinition): + subject = 'Company' + object = 'Employee' + + +class associates(RelationDefinition): + subject = 'Company' + object = 'Employee' + + +class works_for(ComputedRelation): + rule = 'O employees S, NOT EXISTS (O associates S)' + + +class Company(EntityType): + score = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note NN') + score100 = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note100 NN') + + +class Note(EntityType): + note = Int() + note100 = Int(formula='Any N*100 WHERE X note N') + + +class concerns(RelationDefinition): + subject = 'Note' + object = 'Employee' + + +class whatever(ComputedRelation): + rule = 'S employees E, O associates E' + + +class renamed(ComputedRelation): + rule = 'S employees E, O concerns E' + + +class perm_changes(ComputedRelation): + __permissions__ = {'read': ('managers',)} + rule = 'S employees E, O concerns E' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/datacomputed/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/datacomputed/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,65 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
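# [Editor's note -- illustration only, not part of this changeset]
# In the `migratedapp` schema above, attributes declared with formula= are
# computed attributes: the RQL snippet is evaluated with X bound to the entity,
# so note100 = Int(formula='Any N*100 WHERE X note N') always holds 100 * note.
# That schema and the one in this file (datacomputed/schema.py, below) form the
# pair exercised by the computed-relation migration tests; they differ in the
# note100 formula (N*20 vs N*100), a computed relation named `to_be_renamed`
# vs `renamed`, and the read permission of `perm_changes` (('managers', 'users')
# vs ('managers',)). A minimal sketch of the same computed-attribute idiom,
# using a hypothetical entity type that is not part of the patch:
from yams.buildobjs import EntityType, Int

class Invoice(EntityType):
    amount = Int()
    amount_cents = Int(formula='Any A*100 WHERE X amount A')  # derived from amount
# ---------------------------------------------------------------------------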
+ +from yams.buildobjs import EntityType, RelationDefinition, ComputedRelation, Int, Float + + +class Employee(EntityType): + pass + + +class employees(RelationDefinition): + subject = 'Company' + object = 'Employee' + + +class associates(RelationDefinition): + subject = 'Company' + object = 'Employee' + + +class Company(EntityType): + score100 = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note100 NN') + + +class Note(EntityType): + note = Int() + note20 = Int(formula='Any N*20 WHERE X note N') + note100 = Int(formula='Any N*20 WHERE X note N') + + +class concerns(RelationDefinition): + subject = 'Note' + object = 'Employee' + + +class notes(ComputedRelation): + rule = 'S employees E, O concerns E' + + +class whatever(ComputedRelation): + rule = 'S employees E, O concerns E' + + +class to_be_renamed(ComputedRelation): + rule = 'S employees E, O concerns E' + + +class perm_changes(ComputedRelation): + __permissions__ = {'read': ('managers', 'users')} + rule = 'S employees E, O concerns E' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,8 @@ +psycopg2 +ldap3 +cubicweb-basket +cubicweb-card +cubicweb-comment +cubicweb-file +cubicweb-localperms +cubicweb-tag diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_checkintegrity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_checkintegrity.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,70 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +import sys + +from six import PY2 +if PY2: + from StringIO import StringIO +else: + from io import StringIO + +from logilab.common.testlib import TestCase, unittest_main +from cubicweb.devtools import get_test_db_handler, TestServerConfiguration + + +from cubicweb.server.checkintegrity import check, reindex_entities + +class CheckIntegrityTC(TestCase): + + def setUp(self): + handler = get_test_db_handler(TestServerConfiguration(apphome=self.datadir)) + handler.build_db_cache() + self.repo, _cnx = handler.get_repo_and_cnx() + sys.stderr = sys.stdout = StringIO() + + def tearDown(self): + sys.stderr = sys.__stderr__ + sys.stdout = sys.__stdout__ + self.repo.shutdown() + + def test_checks(self): + with self.repo.internal_cnx() as cnx: + check(self.repo, cnx, ('entities', 'relations', 'text_index', 'metadata'), + reindex=False, fix=True, withpb=False) + + def test_reindex_all(self): + with self.repo.internal_cnx() as cnx: + cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') + cnx.commit() + self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) + reindex_entities(self.repo.schema, cnx, withpb=False) + self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) + + def test_reindex_etype(self): + with self.repo.internal_cnx() as cnx: + cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') + cnx.execute('INSERT Affaire X: X ref "toto"') + cnx.commit() + reindex_entities(self.repo.schema, cnx, withpb=False, + etypes=('Personne',)) + self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) + self.assertTrue(cnx.execute('Any X WHERE X has_text "toto"')) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_datafeed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_datafeed.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,169 @@ +# coding: utf-8 +# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
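# [Editor's note -- illustration only, not part of this changeset]
# CheckIntegrityTC above redirects sys.stdout and sys.stderr to a StringIO in
# setUp so that the progress output of check() and reindex_entities() does not
# clutter the test run, then restores the real streams in tearDown before
# shutting the repository down. On Python 3 only, an equivalent capture could
# be written with the standard library's context managers, for example:
import contextlib
import io

def run_quietly(func, *args, **kwargs):
    buf = io.StringIO()
    with contextlib.redirect_stdout(buf), contextlib.redirect_stderr(buf):
        return func(*args, **kwargs)
# ---------------------------------------------------------------------------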
+ +from datetime import timedelta +from contextlib import contextmanager + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.sources import datafeed + + +class DataFeedTC(CubicWebTC): + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + with self.base_parser(cnx): + cnx.create_entity('CWSource', name=u'ô myfeed', type=u'datafeed', + parser=u'testparser', url=u'ignored', + config=u'synchronization-interval=1min') + cnx.commit() + + @contextmanager + def base_parser(self, session): + class AParser(datafeed.DataFeedParser): + __regid__ = 'testparser' + def process(self, url, raise_on_error=False): + entity = self.extid2entity('http://www.cubicweb.org/', 'Card', + item={'title': u'cubicweb.org', + 'content': u'the cw web site'}, + raise_on_error=raise_on_error) + if not self.created_during_pull(entity): + self.notify_updated(entity) + def before_entity_copy(self, entity, sourceparams): + entity.cw_edited.update(sourceparams['item']) + + with self.temporary_appobjects(AParser): + if u'ô myfeed' in self.repo.sources_by_uri: + yield self.repo.sources_by_uri[u'ô myfeed']._get_parser(session) + else: + yield + + def test(self): + self.assertIn(u'ô myfeed', self.repo.sources_by_uri) + dfsource = self.repo.sources_by_uri[u'ô myfeed'] + self.assertNotIn('use_cwuri_as_url', dfsource.__dict__) + self.assertEqual({'type': u'datafeed', 'uri': u'ô myfeed', 'use-cwuri-as-url': True}, + dfsource.public_config) + self.assertEqual(dfsource.use_cwuri_as_url, True) + self.assertEqual(dfsource.latest_retrieval, None) + self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60)) + self.assertFalse(dfsource.fresh()) + # ensure source's logger name has been unormalized + self.assertEqual(dfsource.info.__self__.name, 'cubicweb.sources.o myfeed') + + with self.repo.internal_cnx() as cnx: + with self.base_parser(cnx): + stats = dfsource.pull_data(cnx, force=True) + cnx.commit() + # test import stats + self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) + self.assertEqual(len(stats['created']), 1) + entity = cnx.execute('Card X').get_entity(0, 0) + self.assertIn(entity.eid, stats['created']) + self.assertEqual(stats['updated'], set()) + # test imported entities + self.assertEqual(entity.title, 'cubicweb.org') + self.assertEqual(entity.content, 'the cw web site') + self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') + self.assertEqual(entity.cw_source[0].name, u'ô myfeed') + self.assertEqual(entity.cw_metainformation(), + {'type': 'Card', + 'source': {'uri': u'ô myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, + 'extid': b'http://www.cubicweb.org/'} + ) + self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/') + # test repo cache keys + self.assertEqual(self.repo._type_source_cache[entity.eid], + ('Card', b'http://www.cubicweb.org/', u'ô myfeed')) + self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], + entity.eid) + # test repull + stats = dfsource.pull_data(cnx, force=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(stats['updated'], set((entity.eid,))) + # test repull with caches reseted + self.repo._type_source_cache.clear() + self.repo._extid_cache.clear() + stats = dfsource.pull_data(cnx, force=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(stats['updated'], set((entity.eid,))) + self.assertEqual(self.repo._type_source_cache[entity.eid], + ('Card', b'http://www.cubicweb.org/', u'ô myfeed')) + self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], + 
entity.eid) + + self.assertEqual(dfsource.source_cwuris(cnx), + {b'http://www.cubicweb.org/': (entity.eid, 'Card')}) + self.assertTrue(dfsource.latest_retrieval) + self.assertTrue(dfsource.fresh()) + + # test_rename_source + with self.admin_access.repo_cnx() as cnx: + cnx.entity_from_eid(dfsource.eid).cw_set(name=u"myrenamedfeed") + cnx.commit() + entity = cnx.execute('Card X').get_entity(0, 0) + self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') + self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed') + self.assertEqual(entity.cw_metainformation(), + {'type': 'Card', + 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, + 'extid': b'http://www.cubicweb.org/'} + ) + self.assertEqual(self.repo._type_source_cache[entity.eid], + ('Card', b'http://www.cubicweb.org/', 'myrenamedfeed')) + self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], + entity.eid) + + # test_delete_source + cnx.execute('DELETE CWSource S WHERE S name "myrenamedfeed"') + cnx.commit() + self.assertFalse(cnx.execute('Card X WHERE X title "cubicweb.org"')) + self.assertFalse(cnx.execute('Any X WHERE X has_text "cubicweb.org"')) + + def test_parser_retrieve_url_local(self): + with self.admin_access.repo_cnx() as cnx: + with self.base_parser(cnx) as parser: + value = parser.retrieve_url('a string') + self.assertEqual(200, value.getcode()) + self.assertEqual('a string', value.geturl()) + + def test_update_url(self): + dfsource = self.repo.sources_by_uri[u'ô myfeed'] + with self.admin_access.repo_cnx() as cnx: + cnx.entity_from_eid(dfsource.eid).cw_set(url=u"http://pouet.com\nhttp://pouet.org") + self.assertEqual(dfsource.urls, [u'ignored']) + cnx.commit() + self.assertEqual(dfsource.urls, [u"http://pouet.com", u"http://pouet.org"]) + + +class DataFeedConfigTC(CubicWebTC): + + def test_use_cwuri_as_url_override(self): + with self.admin_access.client_cnx() as cnx: + cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', + parser=u'testparser', url=u'ignored', + config=u'use-cwuri-as-url=no') + cnx.commit() + dfsource = self.repo.sources_by_uri['myfeed'] + self.assertEqual(dfsource.use_cwuri_as_url, False) + self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': False}, + dfsource.public_config) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_hook.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_hook.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
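# [Editor's note -- illustration only, not part of this changeset]
# In the datafeed tests above, base_parser() registers the AParser class only
# for the duration of the test through self.temporary_appobjects(AParser).
# Its process() method maps the external identifier 'http://www.cubicweb.org/'
# to a local Card entity via extid2entity(), which is why a second pull_data()
# reports the entity as 'updated' instead of creating a duplicate, and why the
# repository's extid cache maps that URL back to the same eid even after the
# caches are cleared or the source is renamed.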
+"""unit/functional tests for cubicweb.server.hook""" + +from logilab.common.testlib import TestCase, unittest_main, mock_object + +from cubicweb.devtools import TestServerConfiguration, fake +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server import hook +from cubicweb.hooks import integrity, syncschema + +class OperationsTC(CubicWebTC): + + def setUp(self): + CubicWebTC.setUp(self) + self.hm = self.repo.hm + + def test_late_operation(self): + with self.admin_access.repo_cnx() as cnx: + l1 = hook.LateOperation(cnx) + l2 = hook.LateOperation(cnx) + l3 = hook.Operation(cnx) + self.assertEqual(cnx.pending_operations, [l3, l1, l2]) + + def test_single_last_operation(self): + with self.admin_access.repo_cnx() as cnx: + l0 = hook.SingleLastOperation(cnx) + l1 = hook.LateOperation(cnx) + l2 = hook.LateOperation(cnx) + l3 = hook.Operation(cnx) + self.assertEqual(cnx.pending_operations, [l3, l1, l2, l0]) + l4 = hook.SingleLastOperation(cnx) + self.assertEqual(cnx.pending_operations, [l3, l1, l2, l4]) + + def test_global_operation_order(self): + with self.admin_access.repo_cnx() as cnx: + op1 = syncschema.RDefDelOp(cnx) + op2 = integrity._CheckORelationOp(cnx) + op3 = syncschema.MemSchemaNotifyChanges(cnx) + self.assertEqual([op1, op2, op3], cnx.pending_operations) + +class HookCalled(Exception): pass + +config = TestServerConfiguration('data') +config.bootstrap_cubes() +schema = config.load_schema() + +def tearDownModule(*args): + global config, schema + del config, schema + +class AddAnyHook(hook.Hook): + __regid__ = 'addany' + category = 'cat1' + events = ('before_add_entity',) + def __call__(self): + raise HookCalled() + + +class HooksRegistryTC(TestCase): + + def setUp(self): + """ called before each test from this class """ + self.vreg = mock_object(config=config, schema=schema) + self.o = hook.HooksRegistry(self.vreg) + + def test_register_bad_hook1(self): + class _Hook(hook.Hook): + events = ('before_add_entiti',) + with self.assertRaises(Exception) as cm: + self.o.register(_Hook) + self.assertEqual(str(cm.exception), 'bad event before_add_entiti on %s._Hook' % __name__) + + def test_register_bad_hook2(self): + class _Hook(hook.Hook): + events = None + with self.assertRaises(Exception) as cm: + self.o.register(_Hook) + self.assertEqual(str(cm.exception), 'bad .events attribute None on %s._Hook' % __name__) + + def test_register_bad_hook3(self): + class _Hook(hook.Hook): + events = 'before_add_entity' + with self.assertRaises(Exception) as cm: + self.o.register(_Hook) + self.assertEqual(str(cm.exception), 'bad event b on %s._Hook' % __name__) + + def test_call_hook(self): + self.o.register(AddAnyHook) + dis = set() + cw = fake.FakeSession() + cw.is_hook_activated = lambda cls: cls.category not in dis + self.assertRaises(HookCalled, + self.o.call_hooks, 'before_add_entity', cw) + dis.add('cat1') + self.o.call_hooks('before_add_entity', cw) # disabled hooks category, not called + dis.remove('cat1') + self.assertRaises(HookCalled, + self.o.call_hooks, 'before_add_entity', cw) + self.o.unregister(AddAnyHook) + self.o.call_hooks('before_add_entity', cw) # nothing to call + + +class SystemHooksTC(CubicWebTC): + + def test_startup_shutdown(self): + import hooks # cubicweb/server/test/data/hooks.py + self.assertEqual(hooks.CALLED_EVENTS['server_startup'], True) + # don't actually call repository.shutdown ! 
+ self.repo.hm.call_hooks('server_shutdown', repo=self.repo) + self.assertEqual(hooks.CALLED_EVENTS['server_shutdown'], True) + + def test_session_open_close(self): + import hooks # cubicweb/server/test/data/hooks.py + anonaccess = self.new_access('anon') + with anonaccess.repo_cnx() as cnx: + self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon') + anonaccess.close() + self.assertEqual(hooks.CALLED_EVENTS['session_close'], 'anon') + + +# class RelationHookTC(TestCase): +# """testcase for relation hooks grouping""" +# def setUp(self): +# """ called before each test from this class """ +# self.o = HooksManager(schema) +# self.called = [] + +# def test_before_add_relation(self): +# """make sure before_xxx_relation hooks are called directly""" +# self.o.register(self._before_relation_hook, +# 'before_add_relation', 'concerne') +# self.assertEqual(self.called, []) +# self.o.call_hooks('before_add_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.assertEqual(self.called, [(1, 'concerne', 2)]) + +# def test_after_add_relation(self): +# """make sure after_xxx_relation hooks are deferred""" +# self.o.register(self._after_relation_hook, +# 'after_add_relation', 'concerne') +# self.assertEqual(self.called, []) +# self.o.call_hooks('after_add_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.o.call_hooks('after_add_relation', 'concerne', 'USER', +# 3, 'concerne', 4) +# self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) + +# def test_before_delete_relation(self): +# """make sure before_xxx_relation hooks are called directly""" +# self.o.register(self._before_relation_hook, +# 'before_delete_relation', 'concerne') +# self.assertEqual(self.called, []) +# self.o.call_hooks('before_delete_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.assertEqual(self.called, [(1, 'concerne', 2)]) + +# def test_after_delete_relation(self): +# """make sure after_xxx_relation hooks are deferred""" +# self.o.register(self._after_relation_hook, +# 'after_delete_relation', 'concerne') +# self.o.call_hooks('after_delete_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.o.call_hooks('after_delete_relation', 'concerne', 'USER', +# 3, 'concerne', 4) +# self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) + + +# def _before_relation_hook(self, cnxset, subject, r_type, object): +# self.called.append((subject, r_type, object)) + +# def _after_relation_hook(self, cnxset, subject, r_type, object): +# self.called.append((subject, r_type, object)) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_ldapsource.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_ldapsource.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,491 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb.server.sources.ldapfeed unit and functional tests + +Those tests expect to have slapd, python-ldap3 and ldapscripts packages installed. +""" +from __future__ import print_function + +import os +import sys +import shutil +import time +import subprocess +import tempfile +import unittest +from os.path import join + +from six import string_types +from six.moves import range + +from cubicweb import AuthenticationError +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.httptest import get_available_port + + +CONFIG_LDAPFEED = u''' +user-base-dn=ou=People,dc=cubicweb,dc=test +group-base-dn=ou=Group,dc=cubicweb,dc=test +user-attrs-map=uid=login,mail=email,userPassword=upassword +group-attrs-map=cn=name,memberUid=member +''' +CONFIG_LDAPUSER = u''' +user-base-dn=ou=People,dc=cubicweb,dc=test +user-attrs-map=uid=login,mail=email,userPassword=upassword +''' + +URL = None + + +def create_slapd_configuration(cls): + global URL + slapddir = tempfile.mkdtemp('cw-unittest-ldap') + config = cls.config + slapdconf = join(config.apphome, "slapd.conf") + confin = open(join(config.apphome, "slapd.conf.in")).read() + confstream = open(slapdconf, 'w') + confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir}) + confstream.close() + # fill ldap server with some data + ldiffile = join(config.apphome, "ldap_test.ldif") + config.info('Initing ldap database') + cmdline = ['/usr/sbin/slapadd', '-f', slapdconf, '-l', ldiffile, '-c'] + PIPE = subprocess.PIPE + slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE) + stdout, stderr = slapproc.communicate() + if slapproc.returncode: + print('slapadd returned with status: %s' + % slapproc.returncode, file=sys.stderr) + sys.stdout.write(stdout) + sys.stderr.write(stderr) + + # ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f') + port = get_available_port(range(9000, 9100)) + host = 'localhost:%s' % port + ldapuri = 'ldap://%s' % host + cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"] + config.info('Starting slapd:', ' '.join(cmdline)) + PIPE = subprocess.PIPE + cls.slapd_process = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE) + time.sleep(0.2) + if cls.slapd_process.poll() is None: + config.info('slapd started with pid %s', cls.slapd_process.pid) + else: + raise EnvironmentError('Cannot start slapd with cmdline="%s" (from directory "%s")' % + (" ".join(cmdline), os.getcwd())) + URL = u'ldap://%s' % host + return slapddir + + +def terminate_slapd(cls): + config = cls.config + if cls.slapd_process and cls.slapd_process.returncode is None: + config.info('terminating slapd') + if hasattr(cls.slapd_process, 'terminate'): + cls.slapd_process.terminate() + else: + import signal + os.kill(cls.slapd_process.pid, signal.SIGTERM) + stdout, stderr = cls.slapd_process.communicate() + if cls.slapd_process.returncode: + print('slapd returned with status: %s' + % cls.slapd_process.returncode, file=sys.stderr) + sys.stdout.write(stdout) + sys.stderr.write(stderr) + config.info('DONE') + + +class LDAPFeedTestBase(CubicWebTC): + test_db_id = 'ldap-feed' + loglevel = 'ERROR' + + @classmethod + def setUpClass(cls): + if not os.path.exists('/usr/sbin/slapd'): + raise unittest.SkipTest('slapd not found') + from cubicweb.cwctl import init_cmdline_log_threshold + init_cmdline_log_threshold(cls.config, cls.loglevel) + cls._tmpdir = create_slapd_configuration(cls) + + 
@classmethod + def tearDownClass(cls): + terminate_slapd(cls) + try: + shutil.rmtree(cls._tmpdir) + except: + pass + + @classmethod + def pre_setup_database(cls, cnx, config): + cnx.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed', + url=URL, config=CONFIG_LDAPFEED) + + cnx.commit() + return cls.pull(cnx) + + @classmethod + def pull(self, cnx): + lfsource = cnx.repo.sources_by_uri['ldap'] + stats = lfsource.pull_data(cnx, force=True, raise_on_error=True) + cnx.commit() + return stats + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('DELETE Any E WHERE E cw_source S, S name "ldap"') + cnx.execute('SET S config %(conf)s, S url %(url)s ' + 'WHERE S is CWSource, S name "ldap"', + {"conf": CONFIG_LDAPFEED, 'url': URL}) + cnx.commit() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + + def add_ldap_entry(self, dn, mods): + """ + add an LDAP entity + """ + modcmd = ['dn: %s' % dn, 'changetype: add'] + for key, values in mods.items(): + if isinstance(values, string_types): + values = [values] + for value in values: + modcmd.append('%s: %s' % (key, value)) + self._ldapmodify(modcmd) + + def delete_ldap_entry(self, dn): + """ + delete an LDAP entity + """ + modcmd = ['dn: %s' % dn, 'changetype: delete'] + self._ldapmodify(modcmd) + + def update_ldap_entry(self, dn, mods): + """ + modify one or more attributes of an LDAP entity + """ + modcmd = ['dn: %s' % dn, 'changetype: modify'] + for (kind, key), values in mods.items(): + modcmd.append('%s: %s' % (kind, key)) + if isinstance(values, string_types): + values = [values] + for value in values: + modcmd.append('%s: %s' % (key, value)) + modcmd.append('-') + self._ldapmodify(modcmd) + + def _ldapmodify(self, modcmd): + uri = self.repo.sources_by_uri['ldap'].urls[0] + updatecmd = ['ldapmodify', '-H', uri, '-v', '-x', '-D', + 'cn=admin,dc=cubicweb,dc=test', '-w', 'cw'] + PIPE = subprocess.PIPE + p = subprocess.Popen(updatecmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) + p.stdin.write('\n'.join(modcmd).encode('ascii')) + p.stdin.close() + if p.wait(): + raise RuntimeError("ldap update failed: %s" % ('\n'.join(p.stderr.readlines()))) + + +class CheckWrongGroup(LDAPFeedTestBase): + """ + A testcase for situations where the default group for CWUser + created from LDAP is wrongly configured. + """ + + def test_wrong_group(self): + with self.admin_access.repo_cnx() as cnx: + source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0) + config = source.repo_source.check_config(source) + # inject a bogus group here, along with at least a valid one + config['user-default-group'] = ('thisgroupdoesnotexists', 'users') + source.repo_source.update_config(source, config) + cnx.commit() + # here we emitted an error log entry + source.repo_source.pull_data(cnx, force=True, raise_on_error=True) + cnx.commit() + + +class LDAPFeedUserTC(LDAPFeedTestBase): + """ + A testcase for CWUser support in ldapfeed (basic tests and authentication). 
+ """ + + def assertMetadata(self, entity): + self.assertTrue(entity.creation_date) + self.assertTrue(entity.modification_date) + + def test_authenticate(self): + source = self.repo.sources_by_uri['ldap'] + with self.admin_access.repo_cnx() as cnx: + # ensure we won't be logged against + self.assertRaises(AuthenticationError, + source.authenticate, cnx, 'toto', 'toto') + self.assertTrue(source.authenticate(cnx, 'syt', 'syt')) + sessionid = self.repo.connect('syt', password='syt') + self.assertTrue(sessionid) + self.repo.close(sessionid) + + def test_base(self): + with self.admin_access.repo_cnx() as cnx: + # check a known one + rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) + e = rset.get_entity(0, 0) + self.assertEqual(e.login, 'syt') + e.complete() + self.assertMetadata(e) + self.assertEqual(e.firstname, None) + self.assertEqual(e.surname, None) + self.assertIn('users', set(g.name for g in e.in_group)) + self.assertEqual(e.owned_by[0].login, 'syt') + self.assertEqual(e.created_by, ()) + addresses = [pe.address for pe in e.use_email] + addresses.sort() + self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], + addresses) + self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr', + 'syt@logilab.fr']) + # email content should be indexed on the user + rset = cnx.execute('CWUser X WHERE X has_text "thenault"') + self.assertEqual(rset.rows, [[e.eid]]) + + def test_copy_to_system_source(self): + "make sure we can 'convert' an LDAP user into a system one" + with self.admin_access.repo_cnx() as cnx: + source = self.repo.sources_by_uri['ldap'] + eid = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0] + cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid}) + cnx.commit() + source.reset_caches() + rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) + self.assertEqual(len(rset), 1) + e = rset.get_entity(0, 0) + self.assertEqual(e.eid, eid) + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', + 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'CWUser', + 'extid': None}) + self.assertEqual(e.cw_source[0].name, 'system') + self.assertTrue(e.creation_date) + self.assertTrue(e.modification_date) + source.pull_data(cnx) + rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) + self.assertEqual(len(rset), 1) + self.assertTrue(self.repo.system_source.authenticate(cnx, 'syt', password='syt')) + # make sure the pull from ldap have not "reverted" user as a ldap-feed user + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', + 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'CWUser', + 'extid': None}) + # and that the password stored in the system source is not empty or so + user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) + user.cw_clear_all_caches() + cu = cnx.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';") + pwd = cu.fetchall()[0][0] + self.assertIsNotNone(pwd) + self.assertTrue(str(pwd)) + + +class LDAPFeedUserDeletionTC(LDAPFeedTestBase): + """ + A testcase for situations where users are deleted from or + unavailable in the LDAP database. 
+ """ + + def test_a_filter_inactivate(self): + """ filtered out people should be deactivated, unable to authenticate """ + with self.admin_access.repo_cnx() as cnx: + source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0) + config = source.repo_source.check_config(source) + # filter with adim's phone number + config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109') + source.repo_source.update_config(source, config) + cnx.commit() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt') + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'deactivated') + self.assertEqual(cnx.execute('Any N WHERE U login "adim", ' + 'U in_state S, S name N').rows[0][0], + 'activated') + # unfilter, syt should be activated again + config['user-filter'] = u'' + source.repo_source.update_config(source, config) + cnx.commit() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'activated') + self.assertEqual(cnx.execute('Any N WHERE U login "adim", ' + 'U in_state S, S name N').rows[0][0], + 'activated') + + def test_delete(self): + """ delete syt, pull, check deactivation, repull, + read syt, pull, check activation + """ + self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test') + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt') + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'deactivated') + with self.repo.internal_cnx() as cnx: + # check that it doesn't choke + self.pull(cnx) + # reinsert syt + self.add_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test', + {'objectClass': ['OpenLDAPperson', 'posixAccount', 'top', + 'shadowAccount'], + 'cn': 'Sylvain Thenault', + 'sn': 'Thenault', + 'gidNumber': '1004', + 'uid': 'syt', + 'homeDirectory': '/home/syt', + 'shadowFlag': '134538764', + 'uidNumber': '1004', + 'givenName': 'Sylvain', + 'telephoneNumber': '106', + 'displayName': 'sthenault', + 'gecos': 'Sylvain Thenault', + 'mail': ['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], + 'userPassword': 'syt', + }) + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'activated') + + def test_reactivate_deleted(self): + # test reactivating BY HAND the user isn't enough to + # authenticate, as the native source refuse to authenticate + # user from other sources + self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test') + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + # reactivate user (which source is still ldap-feed) + user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) + user.cw_adapt_to('IWorkflowable').fire_transition('activate') + cnx.commit() + with self.assertRaises(AuthenticationError): + self.repo.connect('syt', password='syt') + + # ok now let's try to make it a system user + cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid}) + cnx.commit() + # and that we can now authenticate again + 
self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='toto') + sessionid = self.repo.connect('syt', password='syt') + self.assertTrue(sessionid) + self.repo.close(sessionid) + + +class LDAPFeedGroupTC(LDAPFeedTestBase): + """ + A testcase for group support in ldapfeed. + """ + + def test_groups_exist(self): + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('CWGroup X WHERE X name "dir"') + self.assertEqual(len(rset), 1) + + rset = cnx.execute('CWGroup X WHERE X cw_source S, S name "ldap"') + self.assertEqual(len(rset), 2) + + def test_group_deleted(self): + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('CWGroup X WHERE X name "dir"') + self.assertEqual(len(rset), 1) + + def test_in_group(self): + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'dir'}) + dirgroup = rset.get_entity(0, 0) + self.assertEqual(set(['syt', 'adim']), + set([u.login for u in dirgroup.reverse_in_group])) + rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'}) + logilabgroup = rset.get_entity(0, 0) + self.assertEqual(set(['adim']), + set([u.login for u in logilabgroup.reverse_in_group])) + + def test_group_member_added(self): + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], 'adim') + + try: + self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', + {('add', 'memberUid'): ['syt']}) + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 2) + members = set([u[0] for u in rset]) + self.assertEqual(set(['adim', 'syt']), members) + + finally: + # back to normal ldap setup + self.tearDownClass() + self.setUpClass() + + def test_group_member_deleted(self): + with self.repo.internal_cnx() as cnx: + self.pull(cnx) # ensure we are sync'ed + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], 'adim') + + try: + self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', + {('delete', 'memberUid'): ['adim']}) + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 0, rset.rows) + finally: + # back to normal ldap setup + self.tearDownClass() + self.setUpClass() + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_migractions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_migractions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,933 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for module cubicweb.server.migractions""" + +from datetime import date +import os, os.path as osp +from contextlib import contextmanager + +from logilab.common.testlib import unittest_main, Tags, tag +from logilab.common import tempattr + +from yams.constraints import UniqueConstraint + +from cubicweb import ConfigurationError, ValidationError, ExecutionError +from cubicweb.devtools import startpgcluster, stoppgcluster +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.server.migractions import ServerMigrationHelper + +import cubicweb.devtools + + +HERE = osp.dirname(osp.abspath(__file__)) + + +def setUpModule(): + startpgcluster(__file__) + + +migrschema = None +def tearDownModule(*args): + global migrschema + del migrschema + if hasattr(MigrationCommandsTC, 'origschema'): + del MigrationCommandsTC.origschema + if hasattr(MigrationCommandsComputedTC, 'origschema'): + del MigrationCommandsComputedTC.origschema + stoppgcluster(__file__) + + +class MigrationConfig(cubicweb.devtools.TestServerConfiguration): + default_sources = cubicweb.devtools.DEFAULT_PSQL_SOURCES + CUBES_PATH = cubicweb.devtools.TestServerConfiguration.CUBES_PATH + [ + osp.join(HERE, 'data-migractions', 'cubes')] + + +class MigrationTC(CubicWebTC): + + appid = 'data-migractions' + + configcls = MigrationConfig + + tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions')) + + def _init_repo(self): + super(MigrationTC, self)._init_repo() + # we have to read schema from the database to get eid for schema entities + self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) + # hack to read the schema from data/migrschema + config = self.config + config.appid = osp.join(self.appid, 'migratedapp') + config._apphome = osp.join(HERE, config.appid) + global migrschema + migrschema = config.load_schema() + config.appid = self.appid + config._apphome = osp.join(HERE, self.appid) + + def setUp(self): + self.configcls.cls_adjust_sys_path() + super(MigrationTC, self).setUp() + + def tearDown(self): + super(MigrationTC, self).tearDown() + self.repo.vreg['etypes'].clear_caches() + + @contextmanager + def mh(self): + with self.admin_access.repo_cnx() as cnx: + yield cnx, ServerMigrationHelper(self.repo.config, migrschema, + repo=self.repo, cnx=cnx, + interactive=False) + + def table_sql(self, mh, tablename): + result = mh.sqlexec("SELECT table_name FROM information_schema.tables WHERE LOWER(table_name)=%(table)s", + {'table': tablename.lower()}) + if result: + return result[0][0] + return None # no such table + + def table_schema(self, mh, tablename): + result = mh.sqlexec("SELECT column_name, data_type, character_maximum_length FROM information_schema.columns " + "WHERE LOWER(table_name) = %(table)s", {'table': tablename.lower()}) + assert result, 'no table %s' % tablename + return dict((x[0], (x[1], x[2])) for x in 
result) + + +class MigrationCommandsTC(MigrationTC): + + def _init_repo(self): + super(MigrationCommandsTC, self)._init_repo() + assert 'Folder' in migrschema + + def test_add_attribute_bool(self): + with self.mh() as (cnx, mh): + self.assertNotIn('yesno', self.schema) + cnx.create_entity('Note') + cnx.commit() + mh.cmd_add_attribute('Note', 'yesno') + self.assertIn('yesno', self.schema) + self.assertEqual(self.schema['yesno'].subjects(), ('Note',)) + self.assertEqual(self.schema['yesno'].objects(), ('Boolean',)) + self.assertEqual(self.schema['Note'].default('yesno'), False) + # test default value set on existing entities + note = cnx.execute('Note X').get_entity(0, 0) + self.assertEqual(note.yesno, False) + # test default value set for next entities + self.assertEqual(cnx.create_entity('Note').yesno, False) + + def test_add_attribute_int(self): + with self.mh() as (cnx, mh): + self.assertNotIn('whatever', self.schema) + cnx.create_entity('Note') + cnx.commit() + orderdict = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' + 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) + mh.cmd_add_attribute('Note', 'whatever') + self.assertIn('whatever', self.schema) + self.assertEqual(self.schema['whatever'].subjects(), ('Note',)) + self.assertEqual(self.schema['whatever'].objects(), ('Int',)) + self.assertEqual(self.schema['Note'].default('whatever'), 0) + # test default value set on existing entities + note = cnx.execute('Note X').get_entity(0, 0) + self.assertIsInstance(note.whatever, int) + self.assertEqual(note.whatever, 0) + # test default value set for next entities + self.assertEqual(cnx.create_entity('Note').whatever, 0) + # test attribute order + orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' + 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) + whateverorder = migrschema['whatever'].rdef('Note', 'Int').order + for k, v in orderdict.items(): + if v >= whateverorder: + orderdict[k] = v+1 + orderdict['whatever'] = whateverorder + self.assertDictEqual(orderdict, orderdict2) + #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()], + # ['modification_date', 'creation_date', 'owned_by', + # 'eid', 'ecrit_par', 'inline1', 'date', 'type', + # 'whatever', 'date', 'in_basket']) + # NB: commit instead of rollback make following test fail with py2.5 + # this sounds like a pysqlite/2.5 bug (the same eid is affected to + # two different entities) + + def test_add_attribute_varchar(self): + with self.mh() as (cnx, mh): + self.assertNotIn('whatever', self.schema) + cnx.create_entity('Note') + cnx.commit() + self.assertNotIn('shortpara', self.schema) + mh.cmd_add_attribute('Note', 'shortpara') + self.assertIn('shortpara', self.schema) + self.assertEqual(self.schema['shortpara'].subjects(), ('Note', )) + self.assertEqual(self.schema['shortpara'].objects(), ('String', )) + # test created column is actually a varchar(64) + fields = self.table_schema(mh, '%sNote' % SQL_PREFIX) + self.assertEqual(fields['%sshortpara' % SQL_PREFIX], ('character varying', 64)) + # test default value set on existing entities + self.assertEqual(cnx.execute('Note X').get_entity(0, 0).shortpara, 'hop') + # test default value set for next entities + self.assertEqual(cnx.create_entity('Note').shortpara, 'hop') + + def test_add_datetime_with_default_value_attribute(self): + with self.mh() as (cnx, mh): + self.assertNotIn('mydate', self.schema) + self.assertNotIn('oldstyledefaultdate', self.schema) + self.assertNotIn('newstyledefaultdate', 
self.schema) + mh.cmd_add_attribute('Note', 'mydate') + mh.cmd_add_attribute('Note', 'oldstyledefaultdate') + mh.cmd_add_attribute('Note', 'newstyledefaultdate') + self.assertIn('mydate', self.schema) + self.assertIn('oldstyledefaultdate', self.schema) + self.assertIn('newstyledefaultdate', self.schema) + self.assertEqual(self.schema['mydate'].subjects(), ('Note', )) + self.assertEqual(self.schema['mydate'].objects(), ('Date', )) + testdate = date(2005, 12, 13) + eid1 = mh.rqlexec('INSERT Note N')[0][0] + eid2 = mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] + d1 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] + d2 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] + d3 = mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0] + d4 = mh.rqlexec('Any D WHERE X eid %(x)s, X newstyledefaultdate D', {'x': eid1})[0][0] + self.assertEqual(d1, date.today()) + self.assertEqual(d2, testdate) + myfavoritedate = date(2013, 1, 1) + self.assertEqual(d3, myfavoritedate) + self.assertEqual(d4, myfavoritedate) + + def test_drop_chosen_constraints_ctxmanager(self): + with self.mh() as (cnx, mh): + with mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint): + mh.cmd_add_attribute('Note', 'unique_id') + # make sure the maxsize constraint is not dropped + self.assertRaises(ValidationError, + mh.rqlexec, + 'INSERT Note N: N unique_id "xyz"') + mh.rollback() + # make sure the unique constraint is dropped + mh.rqlexec('INSERT Note N: N unique_id "x"') + mh.rqlexec('INSERT Note N: N unique_id "x"') + mh.rqlexec('DELETE Note N') + + def test_drop_required_ctxmanager(self): + with self.mh() as (cnx, mh): + with mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None, + droprequired=True): + mh.cmd_add_attribute('Note', 'unique_id') + mh.rqlexec('INSERT Note N') + mh.rqlexec('SET N unique_id "x"') + # make sure the required=True was restored + self.assertRaises(ValidationError, mh.rqlexec, 'INSERT Note N') + mh.rollback() + + def test_rename_attribute(self): + with self.mh() as (cnx, mh): + self.assertNotIn('civility', self.schema) + eid1 = mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0] + eid2 = mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0] + mh.cmd_rename_attribute('Personne', 'sexe', 'civility') + self.assertNotIn('sexe', self.schema) + self.assertIn('civility', self.schema) + # test data has been backported + c1 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0] + self.assertEqual(c1, 'M') + c2 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0] + self.assertEqual(c2, None) + + def test_workflow_actions(self): + with self.mh() as (cnx, mh): + wf = mh.cmd_add_workflow(u'foo', ('Personne', 'Email'), + ensure_workflowable=False) + for etype in ('Personne', 'Email'): + s1 = mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' % + etype)[0][0] + self.assertEqual(s1, "foo") + s1 = mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' % + etype)[0][0] + self.assertEqual(s1, "foo") + + def test_add_entity_type(self): + with self.mh() as (cnx, mh): + self.assertNotIn('Folder2', self.schema) + self.assertNotIn('filed_under2', self.schema) + mh.cmd_add_entity_type('Folder2') + self.assertIn('Folder2', self.schema) + self.assertIn('Old', self.schema) + self.assertTrue(cnx.execute('CWEType X WHERE X name "Folder2"')) + self.assertIn('filed_under2', self.schema) + self.assertTrue(cnx.execute('CWRType X WHERE X name 
"filed_under2"')) + self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()), + ['created_by', 'creation_date', 'cw_source', 'cwuri', + 'description', 'description_format', + 'eid', + 'filed_under2', 'has_text', + 'identity', 'in_basket', 'is', 'is_instance_of', + 'modification_date', 'name', 'owned_by']) + self.assertCountEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], + ['filed_under2', 'identity']) + # Old will be missing as it has been renamed into 'New' in the migrated + # schema while New hasn't been added here. + self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), + sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old')) + self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) + eschema = self.schema.eschema('Folder2') + for cstr in eschema.rdef('name').constraints: + self.assertTrue(hasattr(cstr, 'eid')) + + def test_add_cube_with_custom_final_type(self): + with self.mh() as (cnx, mh): + try: + mh.cmd_add_cube('fakecustomtype') + self.assertIn('Numeric', self.schema) + self.assertTrue(self.schema['Numeric'].final) + rdef = self.schema['num'].rdefs[('Location', 'Numeric')] + self.assertEqual(rdef.scale, 10) + self.assertEqual(rdef.precision, 18) + fields = self.table_schema(mh, '%sLocation' % SQL_PREFIX) + self.assertEqual(fields['%snum' % SQL_PREFIX], ('numeric', None)) # XXX + finally: + mh.cmd_drop_cube('fakecustomtype') + + def test_add_drop_entity_type(self): + with self.mh() as (cnx, mh): + mh.cmd_add_entity_type('Folder2') + wf = mh.cmd_add_workflow(u'folder2 wf', 'Folder2', + ensure_workflowable=False) + todo = wf.add_state(u'todo', initial=True) + done = wf.add_state(u'done') + wf.add_transition(u'redoit', done, todo) + wf.add_transition(u'markasdone', todo, done) + cnx.commit() + eschema = self.schema.eschema('Folder2') + mh.cmd_drop_entity_type('Folder2') + self.assertNotIn('Folder2', self.schema) + self.assertFalse(cnx.execute('CWEType X WHERE X name "Folder2"')) + # test automatic workflow deletion + self.assertFalse(cnx.execute('Workflow X WHERE NOT X workflow_of ET')) + self.assertFalse(cnx.execute('State X WHERE NOT X state_of WF')) + self.assertFalse(cnx.execute('Transition X WHERE NOT X transition_of WF')) + + def test_rename_entity_type(self): + with self.mh() as (cnx, mh): + entity = mh.create_entity('Old', name=u'old') + self.repo.type_and_source_from_eid(entity.eid, entity._cw) + mh.cmd_rename_entity_type('Old', 'New') + mh.cmd_rename_attribute('New', 'name', 'new_name') + + def test_add_drop_relation_type(self): + with self.mh() as (cnx, mh): + mh.cmd_add_entity_type('Folder2', auto=False) + mh.cmd_add_relation_type('filed_under2') + self.assertIn('filed_under2', self.schema) + # Old will be missing as it has been renamed into 'New' in the migrated + # schema while New hasn't been added here. 
+ self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), + sorted(str(e) for e in self.schema.entities() + if not e.final and e != 'Old')) + self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) + mh.cmd_drop_relation_type('filed_under2') + self.assertNotIn('filed_under2', self.schema) + # this should not crash + mh.cmd_drop_relation_type('filed_under2') + + def test_add_relation_definition_nortype(self): + with self.mh() as (cnx, mh): + mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire') + self.assertEqual(self.schema['concerne2'].subjects(), + ('Personne',)) + self.assertEqual(self.schema['concerne2'].objects(), + ('Affaire', )) + self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, + '1*') + mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note') + self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) + mh.create_entity('Personne', nom=u'tot') + mh.create_entity('Affaire') + mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire') + cnx.commit() + mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire') + self.assertIn('concerne2', self.schema) + mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note') + self.assertNotIn('concerne2', self.schema) + + def test_drop_relation_definition_existant_rtype(self): + with self.mh() as (cnx, mh): + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Division', 'Note', 'Societe', 'SubDivision']) + mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced types + self.maxeid = cnx.execute('Any MAX(X)')[0][0] + + def test_drop_relation_definition_with_specialization(self): + with self.mh() as (cnx, mh): + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Note']) + mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced types + self.maxeid = cnx.execute('Any MAX(X)')[0][0] + + def test_rename_relation(self): + self.skipTest('implement me') + + def 
test_change_relation_props_non_final(self): + with self.mh() as (cnx, mh): + rschema = self.schema['concerne'] + card = rschema.rdef('Affaire', 'Societe').cardinality + self.assertEqual(card, '**') + try: + mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', + cardinality='?*') + card = rschema.rdef('Affaire', 'Societe').cardinality + self.assertEqual(card, '?*') + finally: + mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', + cardinality='**') + + def test_change_relation_props_final(self): + with self.mh() as (cnx, mh): + rschema = self.schema['adel'] + card = rschema.rdef('Personne', 'String').fulltextindexed + self.assertEqual(card, False) + try: + mh.cmd_change_relation_props('Personne', 'adel', 'String', + fulltextindexed=True) + card = rschema.rdef('Personne', 'String').fulltextindexed + self.assertEqual(card, True) + finally: + mh.cmd_change_relation_props('Personne', 'adel', 'String', + fulltextindexed=False) + + def test_sync_schema_props_perms_rqlconstraints(self): + with self.mh() as (cnx, mh): + # Drop one of the RQLConstraint. + rdef = self.schema['evaluee'].rdefs[('Personne', 'Note')] + oldconstraints = rdef.constraints + self.assertIn('S created_by U', + [cstr.expression for cstr in oldconstraints]) + mh.cmd_sync_schema_props_perms('evaluee', commit=True) + newconstraints = rdef.constraints + self.assertNotIn('S created_by U', + [cstr.expression for cstr in newconstraints]) + + # Drop all RQLConstraint. + rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] + oldconstraints = rdef.constraints + self.assertEqual(len(oldconstraints), 2) + mh.cmd_sync_schema_props_perms('travaille', commit=True) + rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] + newconstraints = rdef.constraints + self.assertEqual(len(newconstraints), 0) + + @tag('longrun') + def test_sync_schema_props_perms(self): + with self.mh() as (cnx, mh): + nbrqlexpr_start = cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0] + migrschema['titre'].rdefs[('Personne', 'String')].order = 7 + migrschema['adel'].rdefs[('Personne', 'String')].order = 6 + migrschema['ass'].rdefs[('Personne', 'String')].order = 5 + migrschema['Personne'].description = 'blabla bla' + migrschema['titre'].description = 'usually a title' + migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person' + delete_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'delete', 'concerne') + add_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'add', 'concerne') + + # make sure properties (e.g. 
etype descriptions) are synced by the + # second call to sync_schema + mh.cmd_sync_schema_props_perms(syncprops=False, commit=False) + mh.cmd_sync_schema_props_perms(commit=False) + + self.assertEqual(cnx.execute('Any D WHERE X name "Personne", X description D')[0][0], + 'blabla bla') + self.assertEqual(cnx.execute('Any D WHERE X name "titre", X description D')[0][0], + 'usually a title') + self.assertEqual(cnx.execute('Any D WHERE X relation_type RT, RT name "titre",' + 'X from_entity FE, FE name "Personne",' + 'X description D')[0][0], + 'title for this person') + rinorder = [n for n, in cnx.execute( + 'Any N ORDERBY O,N WHERE X is CWAttribute, X relation_type RT, RT name N,' + 'X from_entity FE, FE name "Personne",' + 'X ordernum O')] + expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre', + u'web', u'tel', u'fax', u'datenaiss', u'test', u'tzdatenaiss', + u'description', u'firstname', + u'creation_date', u'cwuri', u'modification_date'] + self.assertEqual(expected, rinorder) + + # test permissions synchronization #################################### + # new rql expr to add note entity + eexpr = self._erqlexpr_entity(cnx, 'add', 'Note') + self.assertEqual(eexpr.expression, + 'X ecrit_part PE, U in_group G, ' + 'PE require_permission P, P name "add_note", P require_group G') + self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note']) + self.assertEqual(eexpr.reverse_read_permission, ()) + self.assertEqual(eexpr.reverse_delete_permission, ()) + self.assertEqual(eexpr.reverse_update_permission, ()) + self.assertTrue(self._rrqlexpr_rset(cnx, 'add', 'para')) + # no rqlexpr to delete para attribute + self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'para')) + # new rql expr to add ecrit_par relation + rexpr = self._rrqlexpr_entity(cnx, 'add', 'ecrit_par') + self.assertEqual(rexpr.expression, + 'O require_permission P, P name "add_note", ' + 'U in_group G, P require_group G') + self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) + self.assertEqual(rexpr.reverse_read_permission, ()) + self.assertEqual(rexpr.reverse_delete_permission, ()) + # no more rqlexpr to delete and add travaille relation + self.assertFalse(self._rrqlexpr_rset(cnx, 'add', 'travaille')) + self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'travaille')) + # no more rqlexpr to delete and update Societe entity + self.assertFalse(self._erqlexpr_rset(cnx, 'update', 'Societe')) + self.assertFalse(self._erqlexpr_rset(cnx, 'delete', 'Societe')) + # no more rqlexpr to read Affaire entity + self.assertFalse(self._erqlexpr_rset(cnx, 'read', 'Affaire')) + # rqlexpr to update Affaire entity has been updated + eexpr = self._erqlexpr_entity(cnx, 'update', 'Affaire') + self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U') + # no change for rqlexpr to add and delete Affaire entity + self.assertEqual(len(self._erqlexpr_rset(cnx, 'delete', 'Affaire')), 1) + self.assertEqual(len(self._erqlexpr_rset(cnx, 'add', 'Affaire')), 1) + # no change for rqlexpr to add and delete concerne relation + self.assertEqual(len(self._rrqlexpr_rset(cnx, 'delete', 'concerne')), + len(delete_concerne_rqlexpr)) + self.assertEqual(len(self._rrqlexpr_rset(cnx, 'add', 'concerne')), + len(add_concerne_rqlexpr)) + # * migrschema involve: + # * 7 erqlexprs deletions (2 in (Affaire + Societe + Note.para) + 1 Note.something + # * 2 rrqlexprs deletions (travaille) + # * 1 update (Affaire update) + # * 2 new (Note add, ecrit_par add) + # * 2 implicit new for attributes (Note.para, 
Person.test) + # remaining orphan rql expr which should be deleted at commit (composite relation) + # unattached expressions -> pending deletion on commit + self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",' + 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' + 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], + 7) + self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",' + 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' + 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], + 2) + # finally + self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], + nbrqlexpr_start + 1 + 2 + 2 + 2) + cnx.commit() + # unique_together test + self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1) + self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0], + ('nom', 'prenom', 'datenaiss')) + rset = cnx.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"') + self.assertEqual(len(rset), 1) + relations = [r.name for r in rset.get_entity(0, 0).relations] + self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss')) + + def _erqlexpr_rset(self, cnx, action, ertype): + rql = 'RQLExpression X WHERE ET is CWEType, ET %s_permission X, ET name %%(name)s' % action + return cnx.execute(rql, {'name': ertype}) + + def _erqlexpr_entity(self, cnx, action, ertype): + rset = self._erqlexpr_rset(cnx, action, ertype) + self.assertEqual(len(rset), 1) + return rset.get_entity(0, 0) + + def _rrqlexpr_rset(self, cnx, action, ertype): + rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action + return cnx.execute(rql, {'name': ertype}) + + def _rrqlexpr_entity(self, cnx, action, ertype): + rset = self._rrqlexpr_rset(cnx, action, ertype) + self.assertEqual(len(rset), 1) + return rset.get_entity(0, 0) + + def test_set_size_constraint(self): + with self.mh() as (cnx, mh): + # existing previous value + try: + mh.cmd_set_size_constraint('CWEType', 'name', 128) + finally: + mh.cmd_set_size_constraint('CWEType', 'name', 64) + # non existing previous value + try: + mh.cmd_set_size_constraint('CWEType', 'description', 256) + finally: + mh.cmd_set_size_constraint('CWEType', 'description', None) + + @tag('longrun') + def test_add_drop_cube_and_deps(self): + with self.mh() as (cnx, mh): + schema = self.repo.schema + self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs), + sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), + ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), + ('Note', 'Note'), ('Note', 'Bookmark')])) + try: + mh.cmd_drop_cube('fakeemail', removedeps=True) + # file was there because it's an email dependancy, should have been removed + self.assertNotIn('fakeemail', self.config.cubes()) + self.assertNotIn(self.config.cube_dir('fakeemail'), self.config.cubes_path()) + self.assertNotIn('file', self.config.cubes()) + self.assertNotIn(self.config.cube_dir('file'), self.config.cubes_path()) + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', + 'sender', 'in_thread', 'reply_to', 'data_format'): + self.assertNotIn(ertype, schema) + self.assertEqual(sorted(schema['see_also'].rdefs), + sorted([('Folder', 'Folder'), + ('Bookmark', 'Bookmark'), + ('Bookmark', 'Note'), + ('Note', 'Note'), + ('Note', 'Bookmark')])) + self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note']) + 
self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note']) + self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.fakeemail"').rowcount, 0) + self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) + finally: + mh.cmd_add_cube('fakeemail') + self.assertIn('fakeemail', self.config.cubes()) + self.assertIn(self.config.cube_dir('fakeemail'), self.config.cubes_path()) + self.assertIn('file', self.config.cubes()) + self.assertIn(self.config.cube_dir('file'), self.config.cubes_path()) + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', + 'sender', 'in_thread', 'reply_to', 'data_format'): + self.assertIn(ertype, schema) + self.assertEqual(sorted(schema['see_also'].rdefs), + sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), + ('Bookmark', 'Bookmark'), + ('Bookmark', 'Note'), + ('Note', 'Note'), + ('Note', 'Bookmark')])) + self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) + self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) + from cubes.fakeemail.__pkginfo__ import version as email_version + from cubes.file.__pkginfo__ import version as file_version + self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.fakeemail"')[0][0], + email_version) + self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], + file_version) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced + # types (and their associated tables!) + self.maxeid = cnx.execute('Any MAX(X)')[0][0] + # why this commit is necessary is unclear to me (though without it + # next test may fail complaining of missing tables + cnx.commit() + + + @tag('longrun') + def test_add_drop_cube_no_deps(self): + with self.mh() as (cnx, mh): + cubes = set(self.config.cubes()) + schema = self.repo.schema + try: + mh.cmd_drop_cube('fakeemail') + cubes.remove('fakeemail') + self.assertNotIn('fakeemail', self.config.cubes()) + self.assertIn('file', self.config.cubes()) + for ertype in ('Email', 'EmailThread', 'EmailPart', + 'sender', 'in_thread', 'reply_to'): + self.assertNotIn(ertype, schema) + finally: + mh.cmd_add_cube('fakeemail') + self.assertIn('fakeemail', self.config.cubes()) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced + # types (and their associated tables!) 
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0] # XXXXXXX KILL KENNY + # why this commit is necessary is unclear to me (though without it + # next test may fail complaining of missing tables + cnx.commit() + + def test_drop_dep_cube(self): + with self.mh() as (cnx, mh): + with self.assertRaises(ConfigurationError) as cm: + mh.cmd_drop_cube('file') + self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency") + + @tag('longrun') + def test_introduce_base_class(self): + with self.mh() as (cnx, mh): + mh.cmd_add_entity_type('Para') + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), + ['Note']) + self.assertEqual(self.schema['Note'].specializes().type, 'Para') + mh.cmd_add_entity_type('Text') + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), + ['Note', 'Text']) + self.assertEqual(self.schema['Text'].specializes().type, 'Para') + # test columns have been actually added + text = cnx.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0) + note = cnx.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0) + aff = cnx.execute('INSERT Affaire X').get_entity(0, 0) + self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': text.eid, 'y': aff.eid})) + self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': note.eid, 'y': aff.eid})) + self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': text.eid, 'y': aff.eid})) + self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': note.eid, 'y': aff.eid})) + # XXX remove specializes by ourselves, else tearDown fails when removing + # Para because of Note inheritance. This could be fixed by putting the + # MemSchemaCWETypeDel(session, name) operation in the + # after_delete_entity(CWEType) hook, since in that case the MemSchemaSpecializesDel + # operation would be removed before, but I'm not sure this is a desired behaviour. + # + # also we need more tests about introducing/removing base classes or + # specialization relationship... 
+ cnx.execute('DELETE X specializes Y WHERE Y name "Para"') + cnx.commit() + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), + []) + self.assertEqual(self.schema['Note'].specializes(), None) + self.assertEqual(self.schema['Text'].specializes(), None) + + def test_add_symmetric_relation_type(self): + with self.mh() as (cnx, mh): + self.assertFalse(self.table_sql(mh, 'same_as_relation')) + mh.cmd_add_relation_type('same_as') + self.assertTrue(self.table_sql(mh, 'same_as_relation')) + + def test_change_attribute_type(self): + with self.mh() as (cnx, mh): + mh.cmd_create_entity('Societe', tel=1) + mh.commit() + mh.change_attribute_type('Societe', 'tel', 'Float') + self.assertNotIn(('Societe', 'Int'), self.schema['tel'].rdefs) + self.assertIn(('Societe', 'Float'), self.schema['tel'].rdefs) + self.assertEqual(self.schema['tel'].rdefs[('Societe', 'Float')].object, 'Float') + tel = mh.rqlexec('Any T WHERE X tel T')[0][0] + self.assertEqual(tel, 1.0) + self.assertIsInstance(tel, float) + + def test_drop_required_inlined_relation(self): + with self.mh() as (cnx, mh): + bob = mh.cmd_create_entity('Personne', nom=u'bob') + note = mh.cmd_create_entity('Note', ecrit_par=bob) + mh.commit() + rdef = mh.fs_schema.rschema('ecrit_par').rdefs[('Note', 'Personne')] + with tempattr(rdef, 'cardinality', '1*'): + mh.sync_schema_props_perms('ecrit_par', syncperms=False) + mh.cmd_drop_relation_type('ecrit_par') + self.assertNotIn('%secrit_par' % SQL_PREFIX, + self.table_schema(mh, '%sPersonne' % SQL_PREFIX)) + + def test_drop_inlined_rdef_delete_data(self): + with self.mh() as (cnx, mh): + note = mh.cmd_create_entity('Note', ecrit_par=cnx.user.eid) + mh.commit() + mh.drop_relation_definition('Note', 'ecrit_par', 'CWUser') + self.assertFalse(mh.sqlexec('SELECT * FROM cw_Note WHERE cw_ecrit_par IS NOT NULL')) + +class MigrationCommandsComputedTC(MigrationTC): + """ Unit tests for computed relations and attributes + """ + appid = 'datacomputed' + + def setUp(self): + MigrationTC.setUp(self) + # ensure vregistry is reloaded, needed by generated hooks for computed + # attributes + self.repo.vreg.set_schema(self.repo.schema) + + def test_computed_relation_add_relation_definition(self): + self.assertNotIn('works_for', self.schema) + with self.mh() as (cnx, mh): + with self.assertRaises(ExecutionError) as exc: + mh.cmd_add_relation_definition('Employee', 'works_for', 'Company') + self.assertEqual(str(exc.exception), + 'Cannot add a relation definition for a computed ' + 'relation (works_for)') + + def test_computed_relation_drop_relation_definition(self): + self.assertIn('notes', self.schema) + with self.mh() as (cnx, mh): + with self.assertRaises(ExecutionError) as exc: + mh.cmd_drop_relation_definition('Company', 'notes', 'Note') + self.assertEqual(str(exc.exception), + 'Cannot drop a relation definition for a computed ' + 'relation (notes)') + + def test_computed_relation_add_relation_type(self): + self.assertNotIn('works_for', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_add_relation_type('works_for') + self.assertIn('works_for', self.schema) + self.assertEqual(self.schema['works_for'].rule, + 'O employees S, NOT EXISTS (O associates S)') + self.assertEqual(self.schema['works_for'].objects(), ('Company',)) + self.assertEqual(self.schema['works_for'].subjects(), ('Employee',)) + self.assertFalse(self.table_sql(mh, 'works_for_relation')) + e = cnx.create_entity('Employee') + a = cnx.create_entity('Employee') + cnx.create_entity('Company', employees=e, associates=a) + cnx.commit() 
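+            # per the rule above, only e (employee but not associate) works_for the company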
+ company = cnx.execute('Company X').get_entity(0, 0) + self.assertEqual([e.eid], + [x.eid for x in company.reverse_works_for]) + mh.rollback() + + def test_computed_relation_drop_relation_type(self): + self.assertIn('notes', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_drop_relation_type('notes') + self.assertNotIn('notes', self.schema) + + def test_computed_relation_sync_schema_props_perms(self): + self.assertIn('whatever', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_sync_schema_props_perms('whatever') + self.assertEqual(self.schema['whatever'].rule, + 'S employees E, O associates E') + self.assertEqual(self.schema['whatever'].objects(), ('Company',)) + self.assertEqual(self.schema['whatever'].subjects(), ('Company',)) + self.assertFalse(self.table_sql(mh, 'whatever_relation')) + + def test_computed_relation_sync_schema_props_perms_security(self): + with self.mh() as (cnx, mh): + rdef = next(iter(self.schema['perm_changes'].rdefs.values())) + self.assertEqual(rdef.permissions, + {'add': (), 'delete': (), + 'read': ('managers', 'users')}) + mh.cmd_sync_schema_props_perms('perm_changes') + self.assertEqual(self.schema['perm_changes'].permissions, + {'read': ('managers',)}) + rdef = next(iter(self.schema['perm_changes'].rdefs.values())) + self.assertEqual(rdef.permissions, + {'add': (), 'delete': (), + 'read': ('managers',)}) + + def test_computed_relation_sync_schema_props_perms_on_rdef(self): + self.assertIn('whatever', self.schema) + with self.mh() as (cnx, mh): + with self.assertRaises(ExecutionError) as exc: + mh.cmd_sync_schema_props_perms( + ('Company', 'whatever', 'Person')) + self.assertEqual(str(exc.exception), + 'Cannot synchronize a relation definition for a computed ' + 'relation (whatever)') + + def test_computed_relation_rename_relation_type(self): + with self.mh() as (cnx, mh): + mh.cmd_rename_relation_type('to_be_renamed', 'renamed') + self.assertIn('renamed', self.schema) + self.assertNotIn('to_be_renamed', self.schema) + + # computed attributes migration ############################################ + + def setup_add_score(self): + with self.admin_access.client_cnx() as cnx: + assert not cnx.execute('Company X') + c = cnx.create_entity('Company') + e1 = cnx.create_entity('Employee', reverse_employees=c) + cnx.create_entity('Note', note=2, concerns=e1) + e2 = cnx.create_entity('Employee', reverse_employees=c) + cnx.create_entity('Note', note=4, concerns=e2) + cnx.commit() + + def assert_score_initialized(self, mh): + self.assertEqual(self.schema['score'].rdefs['Company', 'Float'].formula, + 'Any AVG(NN) WHERE X employees E, N concerns E, N note NN') + fields = self.table_schema(mh, '%sCompany' % SQL_PREFIX) + self.assertEqual(fields['%sscore' % SQL_PREFIX], ('double precision', None)) + self.assertEqual([[3.0]], + mh.rqlexec('Any CS WHERE C score CS, C is Company').rows) + + def test_computed_attribute_add_relation_type(self): + self.assertNotIn('score', self.schema) + self.setup_add_score() + with self.mh() as (cnx, mh): + mh.cmd_add_relation_type('score') + self.assertIn('score', self.schema) + self.assertEqual(self.schema['score'].objects(), ('Float',)) + self.assertEqual(self.schema['score'].subjects(), ('Company',)) + self.assert_score_initialized(mh) + + def test_computed_attribute_add_attribute(self): + self.assertNotIn('score', self.schema) + self.setup_add_score() + with self.mh() as (cnx, mh): + mh.cmd_add_attribute('Company', 'score') + self.assertIn('score', self.schema) + self.assert_score_initialized(mh) + + def 
assert_computed_attribute_dropped(self): + self.assertNotIn('note20', self.schema) + with self.mh() as (cnx, mh): + fields = self.table_schema(mh, '%sNote' % SQL_PREFIX) + self.assertNotIn('%snote20' % SQL_PREFIX, fields) + + def test_computed_attribute_drop_type(self): + self.assertIn('note20', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_drop_relation_type('note20') + self.assert_computed_attribute_dropped() + + def test_computed_attribute_drop_relation_definition(self): + self.assertIn('note20', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_drop_relation_definition('Note', 'note20', 'Int') + self.assert_computed_attribute_dropped() + + def test_computed_attribute_drop_attribute(self): + self.assertIn('note20', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_drop_attribute('Note', 'note20') + self.assert_computed_attribute_dropped() + + def test_computed_attribute_sync_schema_props_perms_rtype(self): + self.assertIn('note100', self.schema) + with self.mh() as (cnx, mh): + mh.cmd_sync_schema_props_perms('note100') + rdef = self.schema['note100'].rdefs['Note', 'Int'] + self.assertEqual(rdef.formula_select.as_string(), + 'Any (N * 100) WHERE X note N, X is Note') + self.assertEqual(rdef.formula, 'Any N*100 WHERE X note N') + + def test_computed_attribute_sync_schema_props_perms_rdef(self): + self.setup_add_score() + with self.mh() as (cnx, mh): + mh.cmd_sync_schema_props_perms(('Note', 'note100', 'Int')) + self.assertEqual([[200], [400]], + cnx.execute('Any N ORDERBY N WHERE X note100 N').rows) + self.assertEqual([[300]], + cnx.execute('Any CS WHERE C score100 CS, C is Company').rows) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_postgres.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_postgres.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,186 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from datetime import datetime +from threading import Thread + +from six.moves import range + +from logilab.common.testlib import SkipTest + +import logilab.database as lgdb +from cubicweb import ValidationError +from cubicweb.devtools import PostgresApptestConfiguration, startpgcluster, stoppgcluster +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.predicates import is_instance +from cubicweb.entities.adapters import IFTIndexableAdapter + +from unittest_querier import FixedOffset + + +def setUpModule(): + startpgcluster(__file__) + + +def tearDownModule(): + stoppgcluster(__file__) + + +class PostgresTimeoutConfiguration(PostgresApptestConfiguration): + def __init__(self, *args, **kwargs): + self.default_sources = PostgresApptestConfiguration.default_sources.copy() + self.default_sources['system'] = PostgresApptestConfiguration.default_sources['system'].copy() + self.default_sources['system']['db-statement-timeout'] = 200 + super(PostgresTimeoutConfiguration, self).__init__(*args, **kwargs) + + +class PostgresFTITC(CubicWebTC): + configcls = PostgresTimeoutConfiguration + + @classmethod + def setUpClass(cls): + cls.orig_connect_hooks = lgdb.SQL_CONNECT_HOOKS['postgres'][:] + + @classmethod + def tearDownClass(cls): + lgdb.SQL_CONNECT_HOOKS['postgres'] = cls.orig_connect_hooks + + def test_eid_range(self): + # concurrent allocation of eid ranges + source = self.session.repo.sources_by_uri['system'] + range1 = [] + range2 = [] + def allocate_eid_ranges(session, target): + for x in range(1, 10): + eid = source.create_eid(session, count=x) + target.extend(range(eid-x, eid)) + + t1 = Thread(target=lambda: allocate_eid_ranges(self.session, range1)) + t2 = Thread(target=lambda: allocate_eid_ranges(self.session, range2)) + t1.start() + t2.start() + t1.join() + t2.join() + self.assertEqual(range1, sorted(range1)) + self.assertEqual(range2, sorted(range2)) + self.assertEqual(set(), set(range1) & set(range2)) + + def test_occurence_count(self): + with self.admin_access.repo_cnx() as cnx: + c1 = cnx.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = cnx.create_entity('Card', title=u'c3', + content=u'cubicweb') + c3 = cnx.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + cnx.commit() + self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC ' + 'WHERE X has_text "cubicweb"').rows, + [[c1.eid,], [c3.eid,], [c2.eid,]]) + + + def test_attr_weight(self): + class CardIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = is_instance('Card') + attr_weight = {'title': 'A'} + with self.temporary_appobjects(CardIFTIndexableAdapter): + with self.admin_access.repo_cnx() as cnx: + c1 = cnx.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = cnx.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + c3 = cnx.create_entity('Card', title=u'cubicweb', + content=u'autre chose') + cnx.commit() + self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC ' + 'WHERE X has_text "cubicweb"').rows, + [[c3.eid,], [c1.eid,], [c2.eid,]]) + + def test_entity_weight(self): + class PersonneIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = is_instance('Personne') + entity_weight = 2.0 + with self.temporary_appobjects(PersonneIFTIndexableAdapter): + with self.admin_access.repo_cnx() as cnx: + c1 = cnx.create_entity('Personne', nom=u'c1', prenom=u'cubicweb') + c2 = cnx.create_entity('Comment', content=u'cubicweb cubicweb', + comments=c1) + c3 = cnx.create_entity('Comment', content=u'cubicweb cubicweb 
cubicweb', + comments=c1) + cnx.commit() + self.assertEqual(cnx.execute('Any X ORDERBY FTIRANK(X) DESC ' + 'WHERE X has_text "cubicweb"').rows, + [[c1.eid,], [c3.eid,], [c2.eid,]]) + + def test_tz_datetime(self): + with self.admin_access.repo_cnx() as cnx: + bob = cnx.create_entity('Personne', nom=u'bob', + tzdatenaiss=datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))) + datenaiss = cnx.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0] + self.assertIsNotNone(datenaiss.tzinfo) + self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0)) + cnx.commit() + cnx.create_entity('Personne', nom=u'boby', + tzdatenaiss=datetime(1977, 6, 7, 2, 0)) + datenaiss = cnx.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0] + self.assertIsNotNone(datenaiss.tzinfo) + self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0)) + rset = cnx.execute("Any X WHERE X tzdatenaiss %(d)s", + {'d': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) + self.assertEqual(rset.rows, [[bob.eid]]) + + def test_constraint_validationerror(self): + with self.admin_access.repo_cnx() as cnx: + with cnx.allow_all_hooks_but('integrity'): + with self.assertRaises(ValidationError) as cm: + cnx.execute("INSERT Note N: N type 'nogood'") + self.assertEqual(cm.exception.errors, + {'type-subject': u'invalid value %(KEY-value)s, it must be one of %(KEY-choices)s'}) + self.assertEqual(cm.exception.msgargs, + {'type-subject-value': u'"nogood"', + 'type-subject-choices': u'"todo", "a", "b", "T", "lalala"'}) + + def test_statement_timeout(self): + with self.admin_access.repo_cnx() as cnx: + cnx.system_sql('select pg_sleep(0.1)') + with self.assertRaises(Exception): + cnx.system_sql('select pg_sleep(0.3)') + + +class PostgresLimitSizeTC(CubicWebTC): + configcls = PostgresApptestConfiguration + + def test(self): + with self.admin_access.repo_cnx() as cnx: + def sql(string): + return cnx.system_sql(string).fetchone()[0] + yield self.assertEqual, sql("SELECT limit_size('
<div>hello</div>', 'text/html', 20)"), \ + '<div>hello</div>' + yield self.assertEqual, sql("SELECT limit_size('<div>hello</div>', 'text/html', 2)"), \ + 'he...' + yield self.assertEqual, sql("SELECT limit_size('<br/>
hello', 'text/html', 2)"), \ + 'he...' + yield self.assertEqual, sql("SELECT limit_size('hello', 'text/html', 2)"), \ + 'he...' + yield self.assertEqual, sql("SELECT limit_size('a>b', 'text/html', 2)"), \ + 'a>...' + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_querier.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_querier.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1668 @@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for modules cubicweb.server.querier and cubicweb.server.ssplanner +""" + +from datetime import date, datetime, timedelta, tzinfo + +import pytz + +from six import PY2, integer_types, binary_type, text_type + +from logilab.common.testlib import TestCase, unittest_main +from rql import BadRQLQuery + +from cubicweb import QueryError, Unauthorized, Binary +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.server.utils import crypt_password +from cubicweb.server.querier import manual_build_descr, _make_description +from cubicweb.devtools import get_test_db_handler, TestServerConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.repotest import tuplify, BaseQuerierTC + + +class FixedOffset(tzinfo): + def __init__(self, hours=0): + self.hours = hours + def utcoffset(self, dt): + return timedelta(hours=self.hours) + def dst(self, dt): + return timedelta(0) + + +# register priority/severity sorting registered procedure +from rql.utils import register_function, FunctionDescr + +class group_sort_value(FunctionDescr): + supported_backends = ('sqlite',) + rtype = 'Int' +try: + register_function(group_sort_value) +except AssertionError: + pass +from cubicweb.server.sqlutils import SQL_CONNECT_HOOKS +def init_sqlite_connexion(cnx): + def group_sort_value(text): + return {"managers": "3", "users": "2", "guests": "1", "owners": "0"}[text] + cnx.create_function("GROUP_SORT_VALUE", 1, group_sort_value) +SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion) + + +def setUpClass(cls, *args): + global repo, cnx + config = TestServerConfiguration(apphome=UtilsTC.datadir) + handler = get_test_db_handler(config) + handler.build_db_cache() + repo, cnx = handler.get_repo_and_cnx() + cls.repo = repo + +def tearDownClass(cls, *args): + global repo, cnx + repo.shutdown() + del repo, cnx + + +class Variable: + def __init__(self, name): + self.name = name + self.children = [] + + def get_type(self, solution, args=None): + return solution[self.name] + def as_string(self): + return self.name + +class Function: + def __init__(self, name, varname): + self.name = name + self.children = [Variable(varname)] + def 
get_type(self, solution, args=None): + return 'Int' + +class MakeDescriptionTC(TestCase): + def test_known_values(self): + solution = {'A': 'Int', 'B': 'CWUser'} + self.assertEqual(_make_description((Function('max', 'A'), Variable('B')), {}, solution), + ['Int','CWUser']) + + +class UtilsTC(BaseQuerierTC): + setUpClass = classmethod(setUpClass) + tearDownClass = classmethod(tearDownClass) + + def get_max_eid(self): + # no need for cleanup here + return None + def cleanup(self): + # no need for cleanup here + pass + + def test_preprocess_1(self): + with self.session.new_cnx() as cnx: + reid = cnx.execute('Any X WHERE X is CWRType, X name "owned_by"')[0][0] + rqlst = self._prepare(cnx, 'Any COUNT(RDEF) WHERE RDEF relation_type X, X eid %(x)s', + {'x': reid}) + self.assertEqual([{'RDEF': 'CWAttribute'}, {'RDEF': 'CWRelation'}], + rqlst.solutions) + + def test_preprocess_2(self): + with self.session.new_cnx() as cnx: + teid = cnx.execute("INSERT Tag X: X name 'tag'")[0][0] + #geid = self.execute("CWGroup G WHERE G name 'users'")[0][0] + #self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", + # {'g': geid, 't': teid}, 'g') + rqlst = self._prepare(cnx, 'Any X WHERE E eid %(x)s, E tags X', {'x': teid}) + # the query may be optimized, should keep only one solution + # (any one, etype will be discarded) + self.assertEqual(1, len(rqlst.solutions)) + + def assertRQLEqual(self, expected, got): + from rql import parse + self.assertMultiLineEqual(text_type(parse(expected)), + text_type(parse(got))) + + def test_preprocess_security(self): + s = self.user_groups_session('users') + with s.new_cnx() as cnx: + plan = self._prepare_plan(cnx, 'Any ETN,COUNT(X) GROUPBY ETN ' + 'WHERE X is ET, ET name ETN') + union = plan.rqlst + plan.preprocess(union) + self.assertEqual(len(union.children), 1) + self.assertEqual(len(union.children[0].with_), 1) + subq = union.children[0].with_[0].query + self.assertEqual(len(subq.children), 4) + self.assertEqual([t.as_string() for t in union.children[0].selection], + ['ETN','COUNT(X)']) + self.assertEqual([t.as_string() for t in union.children[0].groupby], + ['ETN']) + partrqls = sorted(((rqlst.as_string(), rqlst.solutions) for rqlst in subq.children)) + rql, solutions = partrqls[0] + self.assertRQLEqual(rql, + 'Any ETN,X WHERE X is ET, ET name ETN, (EXISTS(X owned_by %(B)s))' + ' OR ((((EXISTS(D concerne C?, C owned_by %(B)s, ' + ' X identity D, C is Division, D is Affaire))' + ' OR (EXISTS(H concerne G?, G owned_by %(B)s, G is SubDivision, ' + ' X identity H, H is Affaire)))' + ' OR (EXISTS(I concerne F?, F owned_by %(B)s, F is Societe, ' + ' X identity I, I is Affaire)))' + ' OR (EXISTS(J concerne E?, E owned_by %(B)s, E is Note, ' + ' X identity J, J is Affaire)))' + ', ET is CWEType, X is Affaire') + self.assertEqual(solutions, [{'C': 'Division', + 'D': 'Affaire', + 'E': 'Note', + 'F': 'Societe', + 'G': 'SubDivision', + 'H': 'Affaire', + 'I': 'Affaire', + 'J': 'Affaire', + 'X': 'Affaire', + 'ET': 'CWEType', 'ETN': 'String'}]) + rql, solutions = partrqls[1] + self.assertRQLEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, ' + 'X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWComputedRType, ' + ' CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, ' + ' CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, ' + ' Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, ' + ' Frozable, Note, Old, Personne, RQLExpression, Societe, State, SubDivision, ' + ' SubWorkflowExitPoint, Tag, 
TrInfo, Transition, Workflow, WorkflowTransition)') + self.assertCountEqual(solutions, + [{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}]) + rql, solutions = partrqls[2] + self.assertEqual(rql, + 'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(%(D)s use_email X), ' + 'ET is CWEType, X is EmailAddress') + self.assertEqual(solutions, [{'X': 'EmailAddress', 'ET': 'CWEType', 'ETN': 'String'}]) + rql, solutions = partrqls[3] + self.assertEqual(rql, + 'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(X owned_by %(C)s), ' + 'ET is CWEType, X is Basket') + self.assertEqual(solutions, [{'X': 'Basket', 'ET': 'CWEType', 'ETN': 'String'}]) + + def test_preprocess_security_aggregat(self): + s = self.user_groups_session('users') + with s.new_cnx() as cnx: + plan = self._prepare_plan(cnx, 'Any MAX(X)') + union = plan.rqlst + plan.preprocess(union) + self.assertEqual(len(union.children), 1) + self.assertEqual(len(union.children[0].with_), 1) + subq = union.children[0].with_[0].query + self.assertEqual(len(subq.children), 4) + self.assertEqual([t.as_string() for t in union.children[0].selection], + ['MAX(X)']) + + def test_preprocess_nonregr(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any S ORDERBY SI WHERE NOT S ecrit_par O, S para SI') + self.assertEqual(len(rqlst.solutions), 1) + + def test_build_description(self): + # should return an empty result set + rset = 
self.qexecute('Any X WHERE X eid %(x)s', {'x': self.session.user.eid}) + self.assertEqual(rset.description[0][0], 'CWUser') + rset = self.qexecute('Any 1') + self.assertEqual(rset.description[0][0], 'Int') + rset = self.qexecute('Any TRUE') + self.assertEqual(rset.description[0][0], 'Boolean') + rset = self.qexecute('Any "hop"') + self.assertEqual(rset.description[0][0], 'String') + rset = self.qexecute('Any TODAY') + self.assertEqual(rset.description[0][0], 'Date') + rset = self.qexecute('Any NOW') + self.assertEqual(rset.description[0][0], 'Datetime') + rset = self.qexecute('Any %(x)s', {'x': 1}) + self.assertEqual(rset.description[0][0], 'Int') + if PY2: + rset = self.qexecute('Any %(x)s', {'x': long(1)}) + self.assertEqual(rset.description[0][0], 'Int') + rset = self.qexecute('Any %(x)s', {'x': True}) + self.assertEqual(rset.description[0][0], 'Boolean') + rset = self.qexecute('Any %(x)s', {'x': 1.0}) + self.assertEqual(rset.description[0][0], 'Float') + rset = self.qexecute('Any %(x)s', {'x': datetime.now()}) + self.assertEqual(rset.description[0][0], 'Datetime') + rset = self.qexecute('Any %(x)s', {'x': 'str'}) + self.assertEqual(rset.description[0][0], 'String') + rset = self.qexecute('Any %(x)s', {'x': u'str'}) + self.assertEqual(rset.description[0][0], 'String') + + def test_build_descr1(self): + with self.session.new_cnx() as cnx: + rset = cnx.execute('(Any U,L WHERE U login L) UNION ' + '(Any G,N WHERE G name N, G is CWGroup)') + # rset.req = self.session + orig_length = len(rset) + rset.rows[0][0] = 9999999 + description = manual_build_descr(cnx, rset.syntax_tree(), None, rset.rows) + self.assertEqual(len(description), orig_length - 1) + self.assertEqual(len(rset.rows), orig_length - 1) + self.assertNotEqual(rset.rows[0][0], 9999999) + + def test_build_descr2(self): + rset = self.qexecute('Any X,Y WITH X,Y BEING ((Any G,NULL WHERE G is CWGroup) UNION ' + '(Any U,G WHERE U in_group G))') + for x, y in rset.description: + if y is not None: + self.assertEqual(y, 'CWGroup') + + def test_build_descr3(self): + rset = self.qexecute('(Any G,NULL WHERE G is CWGroup) UNION ' + '(Any U,G WHERE U in_group G)') + for x, y in rset.description: + if y is not None: + self.assertEqual(y, 'CWGroup') + + +class QuerierTC(BaseQuerierTC): + setUpClass = classmethod(setUpClass) + tearDownClass = classmethod(tearDownClass) + + def test_unknown_eid(self): + # should return an empty result set + self.assertFalse(self.qexecute('Any X WHERE X eid 99999999')) + + def test_typed_eid(self): + # should return an empty result set + rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': '1'}) + self.assertIsInstance(rset[0][0], integer_types) + + def test_bytes_storage(self): + feid = self.qexecute('INSERT File X: X data_name "foo.pdf", ' + 'X data_format "text/plain", X data %(data)s', + {'data': Binary(b"xxx")})[0][0] + fdata = self.qexecute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0] + self.assertIsInstance(fdata, Binary) + self.assertEqual(fdata.getvalue(), b'xxx') + + # selection queries tests ################################################# + + def test_select_1(self): + rset = self.qexecute('Any X ORDERBY X WHERE X is CWGroup') + result, descr = rset.rows, rset.description + self.assertEqual(tuplify(result), [(2,), (3,), (4,), (5,)]) + self.assertEqual(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) + + def test_select_2(self): + rset = self.qexecute('Any X ORDERBY N WHERE X is CWGroup, X name N') + self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)]) + 
self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) + rset = self.qexecute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N') + self.assertEqual(tuplify(rset.rows), [(5,), (4,), (3,), (2,)]) + + def test_select_3(self): + rset = self.qexecute('Any N GROUPBY N WHERE X is CWGroup, X name N') + result, descr = rset.rows, rset.description + result.sort() + self.assertEqual(tuplify(result), [('guests',), ('managers',), ('owners',), ('users',)]) + self.assertEqual(descr, [('String',), ('String',), ('String',), ('String',)]) + + def test_select_is(self): + rset = self.qexecute('Any X, TN ORDERBY TN LIMIT 10 WHERE X is T, T name TN') + result, descr = rset.rows, rset.description + self.assertEqual(result[0][1], descr[0][0]) + + def test_select_is_aggr(self): + rset = self.qexecute('Any TN, COUNT(X) GROUPBY TN ORDERBY 2 DESC WHERE X is T, T name TN') + result, descr = rset.rows, rset.description + self.assertEqual(descr[0][0], 'String') + self.assertEqual(descr[0][1], 'Int') + self.assertEqual(result[0][0], 'RQLExpression') # XXX may change as schema evolve + + def test_select_groupby_orderby(self): + rset = self.qexecute('Any N GROUPBY N ORDERBY N WHERE X is CWGroup, X name N') + self.assertEqual(tuplify(rset.rows), [('guests',), ('managers',), ('owners',), ('users',)]) + self.assertEqual(rset.description, [('String',), ('String',), ('String',), ('String',)]) + + def test_select_complex_groupby(self): + rset = self.qexecute('Any N GROUPBY N WHERE X name N') + rset = self.qexecute('Any N,MAX(D) GROUPBY N LIMIT 5 WHERE X name N, X creation_date D') + + def test_select_inlined_groupby(self): + seid = self.qexecute('State X WHERE X name "deactivated"')[0][0] + rset = self.qexecute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid) + + def test_select_groupby_funccall(self): + rset = self.qexecute('Any YEAR(CD), COUNT(X) GROUPBY YEAR(CD) ' + 'WHERE X is CWUser, X creation_date CD') + self.assertListEqual(rset.rows, [[date.today().year, 2]]) + + def test_select_groupby_colnumber(self): + rset = self.qexecute('Any YEAR(CD), COUNT(X) GROUPBY 1 ' + 'WHERE X is CWUser, X creation_date CD') + self.assertListEqual(rset.rows, [[date.today().year, 2]]) + + def test_select_complex_orderby(self): + rset1 = self.qexecute('Any N ORDERBY N WHERE X name N') + self.assertEqual(sorted(rset1.rows), rset1.rows) + rset = self.qexecute('Any N ORDERBY N LIMIT 5 OFFSET 1 WHERE X name N') + self.assertEqual(rset.rows[0][0], rset1.rows[1][0]) + self.assertEqual(len(rset), 5) + + def test_select_5(self): + rset = self.qexecute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup') + self.assertEqual(tuplify(rset.rows), + [(2, 'guests',), + (3, 'managers',), + (4, 'owners',), + (5, 'users',)]) + self.assertEqual(rset.description, + [('CWGroup', 'String',), + ('CWGroup', 'String',), + ('CWGroup', 'String',), + ('CWGroup', 'String',)]) + + def test_select_6(self): + self.qexecute("INSERT Personne X: X nom 'bidule'")[0] + rset = self.qexecute('Any Y where X name TMP, Y nom in (TMP, "bidule")') + #self.assertEqual(rset.description, [('Personne',), ('Personne',)]) + self.assertIn(('Personne',), rset.description) + rset = self.qexecute('DISTINCT Any Y where X name TMP, Y nom in (TMP, "bidule")') + self.assertIn(('Personne',), rset.description) + + def test_select_not_attr(self): + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + seid = self.qexecute("INSERT Societe X: X nom 'chouette'")[0][0] + rset = self.qexecute('Personne X WHERE NOT X nom 
"bidule"') + self.assertEqual(len(rset.rows), 0, rset.rows) + rset = self.qexecute('Personne X WHERE NOT X nom "bid"') + self.assertEqual(len(rset.rows), 1, rset.rows) + self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") + rset = self.qexecute('Personne X WHERE NOT X travaille S') + self.assertEqual(len(rset.rows), 0, rset.rows) + + def test_select_is_in(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Societe X: X nom 'chouette'") + self.assertEqual(len(self.qexecute("Any X WHERE X is IN (Personne, Societe)")), + 2) + + def test_select_not_rel(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Societe X: X nom 'chouette'") + self.qexecute("INSERT Personne X: X nom 'autre'") + self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") + rset = self.qexecute('Personne X WHERE NOT X travaille S') + self.assertEqual(len(rset.rows), 1, rset.rows) + rset = self.qexecute('Personne X WHERE NOT X travaille S, S nom "chouette"') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_nonregr_inlined(self): + self.qexecute("INSERT Note X: X para 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + self.qexecute("INSERT Personne X: X nom 'autre'") + self.qexecute("SET X ecrit_par P WHERE X para 'bidule', P nom 'chouette'") + rset = self.qexecute('Any U,T ORDERBY T DESC WHERE U is CWUser, ' + 'N ecrit_par U, N type T')#, {'x': self.ueid}) + self.assertEqual(len(rset.rows), 0) + + def test_select_nonregr_edition_not(self): + groupeids = set((2, 3, 4)) + groupreadperms = set(r[0] for r in self.qexecute('Any Y WHERE X name "CWGroup", ' + 'Y eid IN(2, 3, 4), X read_permission Y')) + rset = self.qexecute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", ' + 'Y eid IN(2, 3, 4), NOT X read_permission Y') + self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) + rset = self.qexecute('DISTINCT Any Y WHERE X name "CWGroup", ' + 'Y eid IN(2, 3, 4), NOT X read_permission Y') + self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) + + def test_select_outer_join(self): + peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + peid2 = self.qexecute("INSERT Personne X: X nom 'autre'")[0][0] + seid1 = self.qexecute("INSERT Societe X: X nom 'chouette'")[0][0] + seid2 = self.qexecute("INSERT Societe X: X nom 'chouetos'")[0][0] + rset = self.qexecute('Any X,S ORDERBY X WHERE X travaille S?') + self.assertEqual(rset.rows, [[peid1, None], [peid2, None]]) + self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") + rset = self.qexecute('Any X,S ORDERBY X WHERE X travaille S?') + self.assertEqual(rset.rows, [[peid1, seid1], [peid2, None]]) + rset = self.qexecute('Any S,X ORDERBY S WHERE X? travaille S') + self.assertEqual(rset.rows, [[seid1, peid1], [seid2, None]]) + + def test_select_outer_join_optimized(self): + peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + rset = self.qexecute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}) + self.assertEqual(rset.rows, [[peid1]]) + rset = self.qexecute('Any X WHERE X eid %(x)s, X require_permission P?', + {'x':peid1}) + self.assertEqual(rset.rows, [[peid1]]) + + def test_select_left_outer_join(self): + rset = self.qexecute('DISTINCT Any G WHERE U? in_group G') + self.assertEqual(len(rset), 4) + rset = self.qexecute('DISTINCT Any G WHERE U? 
in_group G, U eid %(x)s', + {'x': self.session.user.eid}) + self.assertEqual(len(rset), 4) + + def test_select_ambigous_outer_join(self): + teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0] + self.qexecute("INSERT Tag X: X name 'tagbis'")[0][0] + geid = self.qexecute("CWGroup G WHERE G name 'users'")[0][0] + self.qexecute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", + {'g': geid, 't': teid}) + rset = self.qexecute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN") + self.assertIn(['users', 'tag'], rset.rows) + self.assertIn(['activated', None], rset.rows) + rset = self.qexecute("Any GN,TN ORDERBY GN WHERE T tags G?, T name TN, G name GN") + self.assertEqual(rset.rows, [[None, 'tagbis'], ['users', 'tag']]) + + def test_select_not_inline_rel(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Note X: X type 'a'") + self.qexecute("INSERT Note X: X type 'b'") + self.qexecute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") + rset = self.qexecute('Note X WHERE NOT X ecrit_par P') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_not_unlinked_multiple_solutions(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Note X: X type 'a'") + self.qexecute("INSERT Note X: X type 'b'") + self.qexecute("SET Y evaluee X WHERE X type 'a', Y nom 'bidule'") + rset = self.qexecute('Note X WHERE NOT Y evaluee X') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_date_extraction(self): + self.qexecute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s", + {'d': datetime(2001, 2,3, 12,13)}) + test_data = [('YEAR', 2001), ('MONTH', 2), ('DAY', 3), + ('HOUR', 12), ('MINUTE', 13), ('WEEKDAY', 6)] + for funcname, result in test_data: + rset = self.qexecute('Any %s(D) WHERE X is Personne, X datenaiss D' + % funcname) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.rows[0][0], result) + self.assertEqual(rset.description, [('Int',)]) + + def test_regexp_based_pattern_matching(self): + peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + peid2 = self.qexecute("INSERT Personne X: X nom 'cidule'")[0][0] + rset = self.qexecute('Any X WHERE X is Personne, X nom REGEXP "^b"') + self.assertEqual(len(rset.rows), 1, rset.rows) + self.assertEqual(rset.rows[0][0], peid1) + rset = self.qexecute('Any X WHERE X is Personne, X nom REGEXP "idu"') + self.assertEqual(len(rset.rows), 2, rset.rows) + + def test_select_aggregat_count(self): + rset = self.qexecute('Any COUNT(X)') + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) + + def test_select_aggregat_sum(self): + rset = self.qexecute('Any SUM(O) WHERE X ordernum O') + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) + + def test_select_aggregat_min(self): + rset = self.qexecute('Any MIN(X) WHERE X is Personne') + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Personne',)]) + rset = self.qexecute('Any MIN(O) WHERE X ordernum O') + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) + + def test_select_aggregat_max(self): + rset = self.qexecute('Any MAX(X) WHERE X is Personne') + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Personne',)]) + rset = self.qexecute('Any 
MAX(O) WHERE X ordernum O') + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) + + def test_select_custom_aggregat_concat_string(self): + rset = self.qexecute('Any GROUP_CONCAT(N) WHERE X is CWGroup, X name N') + self.assertTrue(rset) + self.assertEqual(sorted(rset[0][0].split(', ')), ['guests', 'managers', + 'owners', 'users']) + + def test_select_custom_regproc_limit_size(self): + rset = self.qexecute('Any TEXT_LIMIT_SIZE(N, 3) WHERE X is CWGroup, X name N, X name "managers"') + self.assertTrue(rset) + self.assertEqual(rset[0][0], 'man...') + self.qexecute("INSERT Basket X: X name 'bidule', X description 'hop hop', X description_format 'text/html'") + rset = self.qexecute('Any LIMIT_SIZE(D, DF, 3) WHERE X is Basket, X description D, X description_format DF') + self.assertTrue(rset) + self.assertEqual(rset[0][0], 'hop...') + + def test_select_regproc_orderby(self): + rset = self.qexecute('DISTINCT Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, X name "managers"') + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][1], 'managers') + rset = self.qexecute('Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, NOT U in_group X, U login "admin"') + self.assertEqual(len(rset), 3) + self.assertEqual(rset[0][1], 'owners') + + def test_select_aggregat_sort(self): + rset = self.qexecute('Any G, COUNT(U) GROUPBY G ORDERBY 2 WHERE U in_group G') + self.assertEqual(len(rset.rows), 2) + self.assertEqual(len(rset.rows[0]), 2) + self.assertEqual(rset.description[0], ('CWGroup', 'Int',)) + + def test_select_aggregat_having(self): + rset = self.qexecute('Any N,COUNT(RDEF) GROUPBY N ORDERBY 2,N ' + 'WHERE RT name N, RDEF relation_type RT ' + 'HAVING COUNT(RDEF) > 10') + self.assertListEqual(rset.rows, + [[u'description_format', 13], + [u'description', 14], + [u'name', 19], + [u'created_by', 45], + [u'creation_date', 45], + [u'cw_source', 45], + [u'cwuri', 45], + [u'in_basket', 45], + [u'is', 45], + [u'is_instance_of', 45], + [u'modification_date', 45], + [u'owned_by', 45]]) + + def test_select_aggregat_having_dumb(self): + # dumb but should not raise an error + rset = self.qexecute('Any U,COUNT(X) GROUPBY U ' + 'WHERE U eid %(x)s, X owned_by U ' + 'HAVING COUNT(X) > 10', {'x': self.ueid}) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.rows[0][0], self.ueid) + + def test_select_having_non_aggregat_1(self): + rset = self.qexecute('Any L WHERE X login L, X creation_date CD ' + 'HAVING YEAR(CD) = %s' % date.today().year) + self.assertListEqual(rset.rows, + [[u'admin'], + [u'anon']]) + + def test_select_having_non_aggregat_2(self): + rset = self.qexecute('Any L GROUPBY L WHERE X login L, X in_group G, ' + 'X creation_date CD HAVING YEAR(CD) = %s OR COUNT(G) > 1' + % date.today().year) + self.assertListEqual(rset.rows, + [[u'admin'], + [u'anon']]) + + def test_select_complex_sort(self): + """need sqlite including http://www.sqlite.org/cvstrac/tktview?tn=3773 fix""" + rset = self.qexecute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D') + result = rset.rows + result.sort() + self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,), (5,)]) + + def test_select_upper(self): + rset = self.qexecute('Any X, UPPER(L) ORDERBY L WHERE X is CWUser, X login L') + self.assertEqual(len(rset.rows), 2) + self.assertEqual(rset.rows[0][1], 'ADMIN') + self.assertEqual(rset.description[0], ('CWUser', 'String',)) + self.assertEqual(rset.rows[1][1], 'ANON') + self.assertEqual(rset.description[1], 
('CWUser', 'String',)) + eid = rset.rows[0][0] + rset = self.qexecute('Any UPPER(L) WHERE X eid %s, X login L'%eid) + self.assertEqual(rset.rows[0][0], 'ADMIN') + self.assertEqual(rset.description, [('String',)]) + + def test_select_float_abs(self): + # test positive number + eid = self.qexecute('INSERT Affaire A: A invoiced %(i)s', {'i': 1.2})[0][0] + rset = self.qexecute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid}) + self.assertEqual(rset.rows[0][0], 1.2) + # test negative number + eid = self.qexecute('INSERT Affaire A: A invoiced %(i)s', {'i': -1.2})[0][0] + rset = self.qexecute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid}) + self.assertEqual(rset.rows[0][0], 1.2) + + def test_select_int_abs(self): + # test positive number + eid = self.qexecute('INSERT Affaire A: A duration %(d)s', {'d': 12})[0][0] + rset = self.qexecute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid}) + self.assertEqual(rset.rows[0][0], 12) + # test negative number + eid = self.qexecute('INSERT Affaire A: A duration %(d)s', {'d': -12})[0][0] + rset = self.qexecute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid}) + self.assertEqual(rset.rows[0][0], 12) + +## def test_select_simplified(self): +## ueid = self.session.user.eid +## rset = self.qexecute('Any L WHERE %s login L'%ueid) +## self.assertEqual(rset.rows[0][0], 'admin') +## rset = self.qexecute('Any L WHERE %(x)s login L', {'x':ueid}) +## self.assertEqual(rset.rows[0][0], 'admin') + + def test_select_searchable_text_1(self): + rset = self.qexecute(u"INSERT Personne X: X nom 'bidle'") + rset = self.qexecute(u"INSERT Societe X: X nom 'bidle'") + rset = self.qexecute("INSERT Societe X: X nom 'chouette'") + rset = self.qexecute('Any X where X has_text %(text)s', {'text': u'bidle'}) + self.assertEqual(len(rset.rows), 2, rset.rows) + rset = self.qexecute(u'Any N where N has_text "bidle"') + self.assertEqual(len(rset.rows), 2, rset.rows) + biduleeids = [r[0] for r in rset.rows] + rset = self.qexecute(u'Any N where NOT N has_text "bidle"') + self.assertFalse([r[0] for r in rset.rows if r[0] in biduleeids]) + # duh? 
+ rset = self.qexecute('Any X WHERE X has_text %(text)s', {'text': u'a'}) + + def test_select_searchable_text_2(self): + rset = self.qexecute("INSERT Personne X: X nom 'bidule'") + rset = self.qexecute("INSERT Personne X: X nom 'chouette'") + rset = self.qexecute("INSERT Societe X: X nom 'bidule'") + rset = self.qexecute('Personne N where N has_text "bidule"') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_searchable_text_3(self): + rset = self.qexecute("INSERT Personne X: X nom 'bidule', X sexe 'M'") + rset = self.qexecute("INSERT Personne X: X nom 'bidule', X sexe 'F'") + rset = self.qexecute("INSERT Societe X: X nom 'bidule'") + rset = self.qexecute('Any X where X has_text "bidule" and X sexe "M"') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_multiple_searchable_text(self): + self.qexecute(u"INSERT Personne X: X nom 'bidle'") + self.qexecute("INSERT Societe X: X nom 'chouette', S travaille X") + self.qexecute(u"INSERT Personne X: X nom 'bidle'") + rset = self.qexecute('Personne X WHERE X has_text %(text)s, X travaille S, S has_text %(text2)s', + {'text': u'bidle', + 'text2': u'chouette',} + ) + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_no_descr(self): + rset = self.qexecute('Any X WHERE X is CWGroup', build_descr=0) + rset.rows.sort() + self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)]) + self.assertEqual(rset.description, ()) + + def test_select_limit_offset(self): + rset = self.qexecute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N') + self.assertEqual(tuplify(rset.rows), [(2,), (3,)]) + self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',)]) + rset = self.qexecute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N') + self.assertEqual(tuplify(rset.rows), [(4,), (5,)]) + + def test_select_symmetric(self): + self.qexecute("INSERT Personne X: X nom 'machin'") + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + self.qexecute("INSERT Personne X: X nom 'trucmuche'") + self.qexecute("SET X connait Y WHERE X nom 'chouette', Y nom 'bidule'") + self.qexecute("SET X connait Y WHERE X nom 'machin', Y nom 'chouette'") + rset = self.qexecute('Any P WHERE P connait P2') + self.assertEqual(len(rset.rows), 4, rset.rows) + rset = self.qexecute('Any P WHERE NOT P connait P2') + self.assertEqual(len(rset.rows), 1, rset.rows) # trucmuche + rset = self.qexecute('Any P WHERE P connait P2, P2 nom "bidule"') + self.assertEqual(len(rset.rows), 1, rset.rows) + rset = self.qexecute('Any P WHERE P2 connait P, P2 nom "bidule"') + self.assertEqual(len(rset.rows), 1, rset.rows) + rset = self.qexecute('Any P WHERE P connait P2, P2 nom "chouette"') + self.assertEqual(len(rset.rows), 2, rset.rows) + rset = self.qexecute('Any P WHERE P2 connait P, P2 nom "chouette"') + self.assertEqual(len(rset.rows), 2, rset.rows) + + def test_select_inline(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Note X: X type 'a'") + self.qexecute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") + rset = self.qexecute('Any N where N ecrit_par X, X nom "bidule"') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_creation_date(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + rset = self.qexecute('Any D WHERE X nom "bidule", X creation_date D') + self.assertEqual(len(rset.rows), 1) + + def test_select_or_relation(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") 
+ self.qexecute("INSERT Societe X: X nom 'logilab'") + self.qexecute("INSERT Societe X: X nom 'caesium'") + self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'logilab'") + rset = self.qexecute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, ' + 'S1 nom "logilab", S2 nom "caesium"') + self.assertEqual(len(rset.rows), 1) + self.qexecute("SET P travaille S WHERE P nom 'chouette', S nom 'caesium'") + rset = self.qexecute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, ' + 'S1 nom "logilab", S2 nom "caesium"') + self.assertEqual(len(rset.rows), 2) + + def test_select_or_sym_relation(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + self.qexecute("INSERT Personne X: X nom 'truc'") + self.qexecute("SET P connait S WHERE P nom 'bidule', S nom 'chouette'") + rset = self.qexecute('DISTINCT Any P WHERE S connait P, S nom "chouette"') + self.assertEqual(len(rset.rows), 1, rset.rows) + rset = self.qexecute('DISTINCT Any P WHERE P connait S or S connait P, S nom "chouette"') + self.assertEqual(len(rset.rows), 1, rset.rows) + self.qexecute("SET P connait S WHERE P nom 'chouette', S nom 'truc'") + rset = self.qexecute('DISTINCT Any P WHERE S connait P, S nom "chouette"') + self.assertEqual(len(rset.rows), 2, rset.rows) + rset = self.qexecute('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"') + self.assertEqual(len(rset.rows), 2, rset.rows) + + def test_select_follow_relation(self): + self.qexecute("INSERT Affaire X: X sujet 'cool'") + self.qexecute("INSERT Societe X: X nom 'chouette'") + self.qexecute("SET A concerne S WHERE A is Affaire, S is Societe") + self.qexecute("INSERT Note X: X para 'truc'") + self.qexecute("SET S evaluee N WHERE S is Societe, N is Note") + self.qexecute("INSERT Societe X: X nom 'bidule'") + self.qexecute("INSERT Note X: X para 'troc'") + self.qexecute("SET S evaluee N WHERE S nom 'bidule', N para 'troc'") + rset = self.qexecute('DISTINCT Any A,N WHERE A concerne S, S evaluee N') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_select_ordered_distinct_1(self): + self.assertRaises(BadRQLQuery, + self.qexecute, 'DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R') + + def test_select_ordered_distinct_2(self): + self.qexecute("INSERT Affaire X: X sujet 'minor'") + self.qexecute("INSERT Affaire X: X sujet 'zou'") + self.qexecute("INSERT Affaire X: X sujet 'abcd'") + rset = self.qexecute('DISTINCT Any S ORDERBY S WHERE A is Affaire, A sujet S') + self.assertEqual(rset.rows, [['abcd'], ['minor'], ['zou']]) + + def test_select_ordered_distinct_3(self): + rset = self.qexecute('DISTINCT Any N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N') + self.assertEqual(rset.rows, [['owners'], ['guests'], ['users'], ['managers']]) + + def test_select_or_value(self): + rset = self.qexecute('Any U WHERE U in_group G, G name "owners" OR G name "users"') + self.assertEqual(len(rset.rows), 0) + rset = self.qexecute('Any U WHERE U in_group G, G name "guests" OR G name "managers"') + self.assertEqual(len(rset.rows), 2) + + def test_select_explicit_eid(self): + rset = self.qexecute('Any X,E WHERE X owned_by U, X eid E, U eid %(u)s', + {'u': self.session.user.eid}) + self.assertTrue(rset) + self.assertEqual(rset.description[0][1], 'Int') + +# def test_select_rewritten_optional(self): +# eid = self.qexecute("INSERT Affaire X: X sujet 'cool'")[0][0] +# rset = self.qexecute('Any X WHERE X eid %(x)s, EXISTS(X owned_by U) OR EXISTS(X concerne S?, S owned_by U)', +# 
{'x': eid}, 'x') +# self.assertEqual(rset.rows, [[eid]]) + + def test_today_bug(self): + self.qexecute("INSERT Tag X: X name 'bidule', X creation_date NOW") + self.qexecute("INSERT Tag Y: Y name 'toto'") + rset = self.qexecute("Any D WHERE X name in ('bidule', 'toto') , X creation_date D") + self.assertIsInstance(rset.rows[0][0], datetime) + rset = self.qexecute('Tag X WHERE X creation_date TODAY') + self.assertEqual(len(rset.rows), 2) + + def test_sqlite_patch(self): + """this test monkey patch done by sqlutils._install_sqlite_querier_patch""" + self.qexecute("INSERT Personne X: X nom 'bidule', X datenaiss NOW, X tzdatenaiss NOW") + rset = self.qexecute('Any MAX(D) WHERE X is Personne, X datenaiss D') + self.assertIsInstance(rset[0][0], datetime) + rset = self.qexecute('Any MAX(D) WHERE X is Personne, X tzdatenaiss D') + self.assertIsInstance(rset[0][0], datetime) + self.assertEqual(rset[0][0].tzinfo, pytz.utc) + + def test_today(self): + self.qexecute("INSERT Tag X: X name 'bidule', X creation_date TODAY") + self.qexecute("INSERT Tag Y: Y name 'toto'") + rset = self.qexecute('Tag X WHERE X creation_date TODAY') + self.assertEqual(len(rset.rows), 2) + + def test_select_boolean(self): + rset = self.qexecute('Any N WHERE X is CWEType, X name N, X final %(val)s', + {'val': True}) + self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes', + 'Date', 'Datetime', + 'Decimal', 'Float', + 'Int', 'Interval', + 'Password', 'String', + 'TZDatetime', 'TZTime', + 'Time']) + rset = self.qexecute('Any N WHERE X is CWEType, X name N, X final TRUE') + self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes', + 'Date', 'Datetime', + 'Decimal', 'Float', + 'Int', 'Interval', + 'Password', 'String', + 'TZDatetime', 'TZTime', + 'Time']) + with self.session.new_cnx() as cnx: + cnx.create_entity('Personne', nom=u'louis', test=True) + self.assertEqual(len(cnx.execute('Any X WHERE X test %(val)s', {'val': True})), 1) + self.assertEqual(len(cnx.execute('Any X WHERE X test TRUE')), 1) + self.assertEqual(len(cnx.execute('Any X WHERE X test %(val)s', {'val': False})), 0) + self.assertEqual(len(cnx.execute('Any X WHERE X test FALSE')), 0) + + def test_select_constant(self): + rset = self.qexecute('Any X, "toto" ORDERBY X WHERE X is CWGroup') + self.assertEqual(rset.rows, + [list(x) for x in zip((2,3,4,5), ('toto','toto','toto','toto',))]) + self.assertIsInstance(rset[0][1], text_type) + self.assertEqual(rset.description, + list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), + ('String', 'String', 'String', 'String',)))) + rset = self.qexecute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'}) + self.assertEqual(rset.rows, + list(map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))) + self.assertIsInstance(rset[0][1], text_type) + self.assertEqual(rset.description, + list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), + ('String', 'String', 'String', 'String',)))) + rset = self.qexecute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", ' + 'X in_group G, G name GN') + + def test_select_union(self): + rset = self.qexecute('Any X,N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X name N, X transition_of WF, WF workflow_of E, E name %(name)s)' + ' UNION ' + '(Any X,N WHERE X name N, X state_of WF, WF workflow_of E, E name %(name)s))', + {'name': 'CWUser'}) + self.assertEqual([x[1] for x in rset.rows], + ['activate', 'activated', 'deactivate', 'deactivated']) + self.assertEqual(rset.description, + [('Transition', 'String'), ('State', 
'String'), + ('Transition', 'String'), ('State', 'String')]) + + def test_select_union_aggregat(self): + # meaningless, the goal in to have group by done on different attribute + # for each sub-query + self.qexecute('(Any N,COUNT(X) GROUPBY N WHERE X name N, X is State)' + ' UNION ' + '(Any N,COUNT(X) GROUPBY N ORDERBY 2 WHERE X login N)') + + def test_select_union_aggregat_independant_group(self): + with self.session.new_cnx() as cnx: + cnx.execute('INSERT State X: X name "hop"') + cnx.execute('INSERT State X: X name "hop"') + cnx.execute('INSERT Transition X: X name "hop"') + cnx.execute('INSERT Transition X: X name "hop"') + rset = cnx.execute('Any N,NX ORDERBY 2 WITH N,NX BEING ' + '((Any N,COUNT(X) GROUPBY N WHERE X name N, ' + ' X is State HAVING COUNT(X)>1)' + ' UNION ' + '(Any N,COUNT(X) GROUPBY N WHERE X name N, ' + ' X is Transition HAVING COUNT(X)>1))') + self.assertEqual(rset.rows, [[u'hop', 2], [u'hop', 2]]) + + def test_select_union_selection_with_diff_variables(self): + rset = self.qexecute('(Any N WHERE X name N, X is State)' + ' UNION ' + '(Any NN WHERE XX name NN, XX is Transition)') + self.assertEqual(sorted(r[0] for r in rset.rows), + ['abort', 'activate', 'activated', 'ben non', + 'deactivate', 'deactivated', 'done', 'en cours', + 'end', 'finie', 'markasdone', 'pitetre', 'redoit', + 'start', 'todo']) + + def test_select_union_description_diff_var(self): + eid1 = self.qexecute('CWGroup X WHERE X name "managers"')[0][0] + eid2 = self.qexecute('CWUser X WHERE X login "admin"')[0][0] + rset = self.qexecute('(Any X WHERE X eid %(x)s)' + ' UNION ' + '(Any Y WHERE Y eid %(y)s)', + {'x': eid1, 'y': eid2}) + self.assertEqual(rset.description[:], [('CWGroup',), ('CWUser',)]) + + def test_exists(self): + geid = self.qexecute("INSERT CWGroup X: X name 'lulufanclub'")[0][0] + self.qexecute("SET U in_group G WHERE G name 'lulufanclub'") + peid = self.qexecute("INSERT Personne X: X prenom 'lulu', X nom 'petit'")[0][0] + rset = self.qexecute("Any X WHERE X prenom 'lulu'," + "EXISTS (U in_group G, G name 'lulufanclub' OR G name 'managers');") + self.assertEqual(rset.rows, [[peid]]) + + def test_identity(self): + eid = self.qexecute('Any X WHERE X identity Y, Y eid 1')[0][0] + self.assertEqual(eid, 1) + eid = self.qexecute('Any X WHERE Y identity X, Y eid 1')[0][0] + self.assertEqual(eid, 1) + login = self.qexecute('Any L WHERE X login "admin", X identity Y, Y login L')[0][0] + self.assertEqual(login, 'admin') + + def test_select_date_mathexp(self): + rset = self.qexecute('Any X, TODAY - CD WHERE X is CWUser, X creation_date CD') + self.assertTrue(rset) + self.assertEqual(rset.description[0][1], 'Interval') + eid, = self.qexecute("INSERT Personne X: X nom 'bidule'")[0] + rset = self.qexecute('Any X, NOW - CD WHERE X is Personne, X creation_date CD') + self.assertEqual(rset.description[0][1], 'Interval') + + def test_select_subquery_aggregat_1(self): + # percent users by groups + self.qexecute('SET X in_group G WHERE G name "users"') + rset = self.qexecute('Any GN, COUNT(X)*100/T GROUPBY GN ORDERBY 2,1' + ' WHERE G name GN, X in_group G' + ' WITH T BEING (Any COUNT(U) WHERE U is CWUser)') + self.assertEqual(rset.rows, [[u'guests', 50], [u'managers', 50], [u'users', 100]]) + self.assertEqual(rset.description, [('String', 'Int'), ('String', 'Int'), ('String', 'Int')]) + + def test_select_subquery_aggregat_2(self): + expected = self.qexecute('Any X, 0, COUNT(T) GROUPBY X ' + 'WHERE X is Workflow, T transition_of X').rows + rset = self.qexecute(''' +Any P1,B,E WHERE P1 identity P2 WITH + 
P1,B BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, + T? transition_of P, T type "auto"), + P2,E BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, + T? transition_of P, T type "normal")''') + self.assertEqual(sorted(rset.rows), sorted(expected)) + + def test_select_subquery_const(self): + rset = self.qexecute('Any X WITH X BEING ((Any NULL) UNION (Any "toto"))') + self.assertEqual(rset.rows, [[None], ['toto']]) + self.assertEqual(rset.description, [(None,), ('String',)]) + + # insertion queries tests ################################################# + + def test_insert_is(self): + eid, = self.qexecute("INSERT Personne X: X nom 'bidule'")[0] + etype, = self.qexecute("Any TN WHERE X is T, X eid %s, T name TN" % eid)[0] + self.assertEqual(etype, 'Personne') + self.qexecute("INSERT Personne X: X nom 'managers'") + + def test_insert_1(self): + rset = self.qexecute("INSERT Personne X: X nom 'bidule'") + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('Personne',)]) + rset = self.qexecute('Personne X WHERE X nom "bidule"') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne',)]) + + def test_insert_1_multiple(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + rset = self.qexecute("INSERT Societe Y: Y nom N, P travaille Y WHERE P nom N") + self.assertEqual(len(rset.rows), 2) + self.assertEqual(rset.description, [('Societe',), ('Societe',)]) + + def test_insert_2(self): + rset = self.qexecute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'") + self.assertEqual(rset.description, [('Personne', 'Personne')]) + rset = self.qexecute('Personne X WHERE X nom "bidule" or X nom "tutu"') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne',), ('Personne',)]) + + def test_insert_3(self): + self.qexecute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y") + rset = self.qexecute('Personne X WHERE X nom "admin"') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne',)]) + + def test_insert_4(self): + self.qexecute("INSERT Societe Y: Y nom 'toto'") + self.qexecute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'") + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) + + def test_insert_4bis(self): + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", + {'x': str(peid)})[0][0] + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) + self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", + {'x': str(seid)}) + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) + + def test_insert_4ter(self): + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", + {'x': text_type(peid)})[0][0] + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) + self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", + {'x': text_type(seid)}) + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) + + def test_insert_5(self): + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Societe Y: Y nom 'toto', X 
travaille Y WHERE X nom 'bidule'") + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) + + def test_insert_5bis(self): + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", + {'x': peid}) + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) + + def test_insert_6(self): + self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y") + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) + + def test_insert_7(self): + self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " + "X travaille Y WHERE U login 'admin', U login N") + rset = self.qexecute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) + + def test_insert_7_2(self): + self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " + "X travaille Y WHERE U login N") + rset = self.qexecute('Any X, Y WHERE Y nom "toto", X travaille Y') + self.assertEqual(len(rset), 2) + self.assertEqual(rset.description, [('Personne', 'Societe',), + ('Personne', 'Societe',)]) + + def test_insert_8(self): + self.qexecute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y " + "WHERE U login 'admin', U login N") + rset = self.qexecute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y') + self.assertTrue(rset.rows) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) + + def test_insert_9(self): + self.qexecute("INSERT Societe X: X nom 'Lo'") + self.qexecute("INSERT Societe X: X nom 'Gi'") + self.qexecute("INSERT SubDivision X: X nom 'Lab'") + rset = self.qexecute("INSERT Personne X: X nom N, X travaille Y, X travaille_subdivision Z " + "WHERE Y is Societe, Z is SubDivision, Y nom N") + self.assertEqual(len(rset), 2) + self.assertEqual(rset.description, [('Personne',), ('Personne',)]) + # self.assertSetEqual(set(x.nom for x in rset.entities()), + # ['Lo', 'Gi']) + # self.assertSetEqual(set(y.nom for x in rset.entities() for y in x.travaille), + # ['Lo', 'Gi']) + # self.assertEqual([y.nom for x in rset.entities() for y in x.travaille_subdivision], + # ['Lab', 'Lab']) + + def test_insert_query_error(self): + self.assertRaises(Exception, + self.qexecute, + "INSERT Personne X: X nom 'toto', X is Personne") + self.assertRaises(Exception, + self.qexecute, + "INSERT Personne X: X nom 'toto', X is_instance_of Personne") + self.assertRaises(QueryError, + self.qexecute, + "INSERT Personne X: X nom 'toto', X has_text 'tutu'") + + self.assertRaises(QueryError, + self.qexecute, + "INSERT CWUser X: X login 'toto', X eid %s" % cnx.user(self.session).eid) + + def test_insertion_description_with_where(self): + rset = self.qexecute('INSERT CWUser E, EmailAddress EM: E login "X", E upassword "X", ' + 'E primary_email EM, EM address "X", E in_group G ' + 'WHERE G name "managers"') + self.assertEqual(list(rset.description[0]), ['CWUser', 'EmailAddress']) + + # deletion queries tests ################################################## + + def test_delete_1(self): + self.qexecute("INSERT Personne Y: Y nom 'toto'") + rset = 
self.qexecute('Personne X WHERE X nom "toto"') + self.assertEqual(len(rset.rows), 1) + drset = self.qexecute("DELETE Personne Y WHERE Y nom 'toto'") + self.assertEqual(drset.rows, rset.rows) + rset = self.qexecute('Personne X WHERE X nom "toto"') + self.assertEqual(len(rset.rows), 0) + + def test_delete_2(self): + rset = self.qexecute("INSERT Personne X, Personne Y, Societe Z : " + "X nom 'syt', Y nom 'adim', Z nom 'Logilab', X travaille Z, Y travaille Z") + self.assertEqual(len(rset), 1) + self.assertEqual(len(rset[0]), 3) + self.assertEqual(rset.description[0], ('Personne', 'Personne', 'Societe')) + self.assertEqual(self.qexecute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt') + rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') + self.assertEqual(len(rset.rows), 2, rset.rows) + self.qexecute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilabo'") + rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') + self.assertEqual(len(rset.rows), 2, rset.rows) + self.qexecute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilab'") + rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') + self.assertEqual(len(rset.rows), 0, rset.rows) + + def test_delete_3(self): + s = self.user_groups_session('users') + with s.new_cnx() as cnx: + peid, = self.o.execute(cnx, "INSERT Personne P: P nom 'toto'")[0] + seid, = self.o.execute(cnx, "INSERT Societe S: S nom 'logilab'")[0] + self.o.execute(cnx, "SET P travaille S") + cnx.commit() + rset = self.qexecute('Personne P WHERE P travaille S') + self.assertEqual(len(rset.rows), 1) + self.qexecute("DELETE X travaille Y WHERE X eid %s, Y eid %s" % (peid, seid)) + rset = self.qexecute('Personne P WHERE P travaille S') + self.assertEqual(len(rset.rows), 0) + + def test_delete_symmetric(self): + teid1 = self.qexecute("INSERT Folder T: T name 'toto'")[0][0] + teid2 = self.qexecute("INSERT Folder T: T name 'tutu'")[0][0] + self.qexecute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) + rset = self.qexecute('Any X,Y WHERE X see_also Y') + self.assertEqual(len(rset) , 2, rset.rows) + self.qexecute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) + rset = self.qexecute('Any X,Y WHERE X see_also Y') + self.assertEqual(len(rset) , 0) + self.qexecute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) + rset = self.qexecute('Any X,Y WHERE X see_also Y') + self.assertEqual(len(rset) , 2) + self.qexecute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid2, teid1)) + rset = self.qexecute('Any X,Y WHERE X see_also Y') + self.assertEqual(len(rset) , 0) + + def test_nonregr_delete_cache(self): + """test that relations are properly cleaned when an entity is deleted + (using cachekey on sql generation returned always the same query for an eid, + whatever the relation) + """ + aeid, = self.qexecute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')[0] + # XXX would be nice if the rql below was enough... 
+ #'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y' + eeid, = self.qexecute('INSERT Email X: X messageid "<1234>", X subject "test", ' + 'X sender Y, X recipients Y WHERE Y is EmailAddress')[0] + self.qexecute("DELETE Email X") + with self.session.new_cnx() as cnx: + sqlc = cnx.cnxset.cu + sqlc.execute('SELECT * FROM recipients_relation') + self.assertEqual(len(sqlc.fetchall()), 0) + sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid) + self.assertEqual(len(sqlc.fetchall()), 0) + + def test_nonregr_delete_cache2(self): + eid = self.qexecute("INSERT Folder T: T name 'toto'")[0][0] + # fill the cache + self.qexecute("Any X WHERE X eid %(x)s", {'x': eid}) + self.qexecute("Any X WHERE X eid %s" % eid) + self.qexecute("Folder X WHERE X eid %(x)s", {'x': eid}) + self.qexecute("Folder X WHERE X eid %s" % eid) + self.qexecute("DELETE Folder T WHERE T eid %s" % eid) + rset = self.qexecute("Any X WHERE X eid %(x)s", {'x': eid}) + self.assertEqual(rset.rows, []) + rset = self.qexecute("Any X WHERE X eid %s" % eid) + self.assertEqual(rset.rows, []) + rset = self.qexecute("Folder X WHERE X eid %(x)s", {'x': eid}) + self.assertEqual(rset.rows, []) + rset = self.qexecute("Folder X WHERE X eid %s" %eid) + self.assertEqual(rset.rows, []) + + # update queries tests #################################################### + + def test_update_1(self): + peid = self.qexecute("INSERT Personne Y: Y nom 'toto'")[0][0] + rset = self.qexecute('Personne X WHERE X nom "toto"') + self.assertEqual(len(rset.rows), 1) + rset = self.qexecute("SET X nom 'tutu', X prenom 'original' WHERE X is Personne, X nom 'toto'") + self.assertEqual(tuplify(rset.rows), [(peid, 'tutu', 'original')]) + rset = self.qexecute('Any Y, Z WHERE X is Personne, X nom Y, X prenom Z') + self.assertEqual(tuplify(rset.rows), [('tutu', 'original')]) + + def test_update_2(self): + peid, seid = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")[0] + rset = self.qexecute("SET X travaille Y WHERE X nom 'bidule', Y nom 'toto'") + self.assertEqual(tuplify(rset.rows), [(peid, seid)]) + rset = self.qexecute('Any X, Y WHERE X travaille Y') + self.assertEqual(len(rset.rows), 1) + + def test_update_2bis(self): + rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") + eid1, eid2 = rset[0][0], rset[0][1] + self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s", + {'x': str(eid1), 'y': str(eid2)}) + rset = self.qexecute('Any X, Y WHERE X travaille Y') + self.assertEqual(len(rset.rows), 1) + # test add of an existant relation but with NOT X rel Y protection + self.assertFalse(self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s," + "NOT X travaille Y", + {'x': str(eid1), 'y': str(eid2)})) + + def test_update_2ter(self): + rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") + eid1, eid2 = rset[0][0], rset[0][1] + self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s", + {'x': text_type(eid1), 'y': text_type(eid2)}) + rset = self.qexecute('Any X, Y WHERE X travaille Y') + self.assertEqual(len(rset.rows), 1) + + def test_update_multiple1(self): + peid1 = self.qexecute("INSERT Personne Y: Y nom 'tutu'")[0][0] + peid2 = self.qexecute("INSERT Personne Y: Y nom 'toto'")[0][0] + self.qexecute("SET X nom 'tutu', Y nom 'toto' WHERE X nom 'toto', Y nom 'tutu'") + self.assertEqual(self.qexecute('Any X WHERE X nom "toto"').rows, [[peid1]]) + self.assertEqual(self.qexecute('Any X WHERE X nom "tutu"').rows, [[peid2]]) 
+ + def test_update_multiple2(self): + with self.session.new_cnx() as cnx: + ueid = cnx.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] + peid1 = cnx.execute("INSERT Personne Y: Y nom 'turlu'")[0][0] + peid2 = cnx.execute("INSERT Personne Y: Y nom 'tutu'")[0][0] + cnx.execute('SET P1 owned_by U, P2 owned_by U ' + 'WHERE P1 eid %s, P2 eid %s, U eid %s' % (peid1, peid2, ueid)) + self.assertTrue(cnx.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' + % (peid1, ueid))) + self.assertTrue(cnx.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' + % (peid2, ueid))) + + def test_update_math_expr(self): + orders = [r[0] for r in self.qexecute('Any O ORDERBY O WHERE ST name "Personne", ' + 'X from_entity ST, X ordernum O')] + for i,v in enumerate(orders): + if v != orders[0]: + splitidx = i + break + self.qexecute('SET X ordernum Y+1 WHERE X from_entity SE, SE name "Personne", ' + 'X ordernum Y, X ordernum >= %(order)s', + {'order': orders[splitidx]}) + orders2 = [r[0] for r in self.qexecute('Any O ORDERBY O WHERE ST name "Personne", ' + 'X from_entity ST, X ordernum O')] + orders = orders[:splitidx] + [o+1 for o in orders[splitidx:]] + self.assertEqual(orders2, orders) + + def test_update_string_concat(self): + beid = self.qexecute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] + self.qexecute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', + {'suffix': u'-moved'}) + newname = self.qexecute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] + self.assertEqual(newname, 'toto-moved') + + def test_update_not_exists(self): + rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") + eid1, eid2 = rset[0][0], rset[0][1] + rset = self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s, " + "NOT EXISTS(Z ecrit_par X)", + {'x': text_type(eid1), 'y': text_type(eid2)}) + self.assertEqual(tuplify(rset.rows), [(eid1, eid2)]) + + def test_update_query_error(self): + self.qexecute("INSERT Personne Y: Y nom 'toto'") + self.assertRaises(Exception, self.qexecute, "SET X nom 'toto', X is Personne") + self.assertRaises(QueryError, self.qexecute, "SET X nom 'toto', X has_text 'tutu' " + "WHERE X is Personne") + self.assertRaises(QueryError, + self.qexecute, + "SET X login 'tutu', X eid %s" % cnx.user(self.session).eid) + + + # HAVING on write queries test ############################################# + + def test_update_having(self): + peid1 = self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0] + peid2 = self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2")[0][0] + rset = self.qexecute("SET X tel 3 WHERE X tel TEL HAVING TEL&1=1") + self.assertEqual(tuplify(rset.rows), [(peid1, 3)]) + + def test_insert_having(self): + self.skipTest('unsupported yet') + self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0] + self.assertFalse(self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2 " + "WHERE X tel XT HAVING XT&2=2")) + self.assertTrue(self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2 " + "WHERE X tel XT HAVING XT&1=1")) + + def test_delete_having(self): + self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0] + self.assertFalse(self.qexecute("DELETE Personne Y WHERE X tel XT HAVING XT&2=2")) + self.assertTrue(self.qexecute("DELETE Personne Y WHERE X tel XT HAVING XT&1=1")) + + # upassword encryption tests ################################################# + + def test_insert_upassword(self): + rset = self.qexecute("INSERT CWUser X: X login 'bob', X upassword 'toto', " + "X in_group G WHERE G 
name 'users'") + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('CWUser',)]) + self.assertRaises(Unauthorized, + self.qexecute, "Any P WHERE X is CWUser, X login 'bob', X upassword P") + with self.session.new_cnx() as cnx: + cursor = cnx.cnxset.cu + cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" + % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) + passwd = binary_type(cursor.fetchone()[0]) + self.assertEqual(passwd, crypt_password('toto', passwd)) + rset = self.qexecute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", + {'pwd': Binary(passwd)}) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('CWUser',)]) + + def test_update_upassword(self): + with self.session.new_cnx() as cnx: + rset = cnx.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s", + {'pwd': 'toto'}) + self.assertEqual(rset.description[0][0], 'CWUser') + rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'", + {'pwd': b'tutu'}) + cursor = cnx.cnxset.cu + cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" + % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) + passwd = binary_type(cursor.fetchone()[0]) + self.assertEqual(passwd, crypt_password('tutu', passwd)) + rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", + {'pwd': Binary(passwd)}) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('CWUser',)]) + + # ZT datetime tests ######################################################## + + def test_tz_datetime(self): + self.qexecute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s", + {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) + datenaiss = self.qexecute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0] + self.assertIsNotNone(datenaiss.tzinfo) + self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0)) + + def test_tz_datetime_cache_nonregr(self): + datenaiss = datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1)) + self.qexecute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s", + {'date': datenaiss}) + self.assertTrue(self.qexecute("Any X WHERE X tzdatenaiss %(d)s", {'d': datenaiss})) + self.assertFalse(self.qexecute("Any X WHERE X tzdatenaiss %(d)s", {'d': datenaiss - timedelta(1)})) + + # non regression tests ##################################################### + + def test_nonregr_1(self): + teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0] + self.qexecute("SET X tags Y WHERE X name 'tag', Y is State, Y name 'activated'") + rset = self.qexecute('Any X WHERE T tags X') + self.assertEqual(len(rset.rows), 1, rset.rows) + rset = self.qexecute('Any T WHERE T tags X, X is State') + self.assertEqual(rset.rows, [[teid]]) + rset = self.qexecute('Any T WHERE T tags X') + self.assertEqual(rset.rows, [[teid]]) + + def test_nonregr_2(self): + teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0] + geid = self.qexecute("CWGroup G WHERE G name 'users'")[0][0] + self.qexecute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", + {'g': geid, 't': teid}) + rset = self.qexecute('Any X WHERE E eid %(x)s, E tags X', + {'x': teid}) + self.assertEqual(rset.rows, [[geid]]) + + def test_nonregr_3(self): + """bad sql generated on the second query (destination_state is not + detected as an inlined relation) + """ + rset = self.qexecute('Any S,ES,T WHERE S state_of WF, WF workflow_of ET, ET name "CWUser",' + 'ES allowed_transition T, T destination_state S') + self.assertEqual(len(rset.rows), 2) + + def test_nonregr_4(self): + # fix 
variables' type, else we get (nb of entity types with a 'name' attribute)**3 +        # union queries and that makes for instance a 266KB SQL query which is refused +        # by the server (or client lib) +        rset = self.qexecute('Any ER,SE,OE WHERE SE name "Comment", ER name "comments", OE name "Comment",' +                            'ER is CWRType, SE is CWEType, OE is CWEType') +        self.assertEqual(len(rset), 1) + +    def test_nonregr_5(self): +        # jpl #15505: equivalent queries returning different result sets +        teid1 = self.qexecute("INSERT Folder X: X name 'hop'")[0][0] +        teid2 = self.qexecute("INSERT Folder X: X name 'hip'")[0][0] +        neid = self.qexecute("INSERT Note X: X todo_by U, X filed_under T " +                            "WHERE U login 'admin', T name 'hop'")[0][0] +        weid = self.qexecute("INSERT Affaire X: X concerne N, X filed_under T " +                            "WHERE N is Note, T name 'hip'")[0][0] +        rset1 = self.qexecute('Any N,U WHERE N filed_under T, T eid %s,' +                             'N todo_by U, W concerne N,' +                             'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2)) +        rset2 = self.qexecute('Any N,U WHERE N filed_under T, T eid %s,' +                             'N todo_by U, W concerne N,' +                             'W filed_under A, A eid %s' % (teid1, teid2)) +        rset3 = self.qexecute('Any N,U WHERE N todo_by U, T eid %s,' +                             'N filed_under T, W concerne N,' +                             'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2)) +        rset4 = self.qexecute('Any N,U WHERE N todo_by U, T eid %s,' +                             'N filed_under T, W concerne N,' +                             'W filed_under A, A eid %s' % (teid1, teid2)) +        self.assertEqual(rset1.rows, rset2.rows) +        self.assertEqual(rset1.rows, rset3.rows) +        self.assertEqual(rset1.rows, rset4.rows) + +    def test_nonregr_6(self): +        self.qexecute('Any N,COUNT(S) GROUPBY N ORDERBY COUNT(N) WHERE S name N, S is State') + +    def test_sqlite_encoding(self): +        """XXX this test was trying to show a bug in the use of lower() which only +        occurs with non-ascii strings and a misconfigured locale +        """ +        self.qexecute("INSERT Tag X: X name %(name)s," +                     "X modification_date %(modification_date)s," +                     "X creation_date %(creation_date)s", +                     {'name': u'\xe9name0', +                      'modification_date': '2003/03/12 11:00', +                      'creation_date': '2000/07/03 11:00'}) +        rset = self.qexecute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,' +                            'X owned_by U, U eid %(x)s', +                            {'x':self.session.user.eid}) +        self.assertEqual(rset.rows, [[u'\xe9name0']]) + + +    def test_nonregr_description(self): +        """check that a correct description is built in the case where inferred +        solutions may be merged into one by the querier while all solutions +        are needed to build the result's description +        """ +        self.qexecute("INSERT Personne X: X nom 'bidule'") +        self.qexecute("INSERT Societe Y: Y nom 'toto'") +        beid = self.qexecute("INSERT Basket B: B name 'mybasket'")[0][0] +        self.qexecute("SET X in_basket B WHERE X is Personne") +        self.qexecute("SET X in_basket B WHERE X is Societe") +        rset = self.qexecute('Any X WHERE X in_basket B, B eid %s' % beid) +        self.assertEqual(len(rset), 2) +        self.assertEqual(rset.description, [('Personne',), ('Societe',)]) + + +    def test_nonregr_cache_1(self): +        peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] +        beid = self.qexecute("INSERT Basket X: X name 'tag'")[0][0] +        self.qexecute("SET X in_basket Y WHERE X is Personne, Y eid %(y)s", +                     {'y': beid}) +        rset = self.qexecute("Any X WHERE X in_basket B, B eid %(x)s", +                            {'x': beid}) +        self.assertEqual(rset.rows, [[peid]]) +        rset = self.qexecute("Any X WHERE X in_basket B, B eid %(x)s", +                            {'x': beid}) +        self.assertEqual(rset.rows, [[peid]]) + +    def test_nonregr_has_text_cache(self): +        eid1 = self.qexecute("INSERT Personne 
X: X nom 'bidule'")[0][0] + eid2 = self.qexecute("INSERT Personne X: X nom 'tag'")[0][0] + rset = self.qexecute("Any X WHERE X has_text %(text)s", {'text': u'bidule'}) + self.assertEqual(rset.rows, [[eid1]]) + rset = self.qexecute("Any X WHERE X has_text %(text)s", {'text': u'tag'}) + self.assertEqual(rset.rows, [[eid2]]) + + def test_nonregr_sortterm_management(self): + """Error: Variable has no attribute 'sql' in rql2sql.py (visit_variable) + + cause: old variable ref inserted into a fresh rqlst copy + (in RQLSpliter._complex_select_plan) + + need sqlite including http://www.sqlite.org/cvstrac/tktview?tn=3773 fix + """ + self.qexecute('Any X ORDERBY D DESC WHERE X creation_date D') + + def test_nonregr_extra_joins(self): + ueid = self.session.user.eid + teid1 = self.qexecute("INSERT Folder X: X name 'folder1'")[0][0] + teid2 = self.qexecute("INSERT Folder X: X name 'folder2'")[0][0] + neid1 = self.qexecute("INSERT Note X: X para 'note1'")[0][0] + neid2 = self.qexecute("INSERT Note X: X para 'note2'")[0][0] + self.qexecute("SET X filed_under Y WHERE X eid %s, Y eid %s" + % (neid1, teid1)) + self.qexecute("SET X filed_under Y WHERE X eid %s, Y eid %s" + % (neid2, teid2)) + self.qexecute("SET X todo_by Y WHERE X is Note, Y eid %s" % ueid) + rset = self.qexecute('Any N WHERE N todo_by U, N is Note, U eid %s, N filed_under T, T eid %s' + % (ueid, teid1)) + self.assertEqual(len(rset), 1) + + def test_nonregr_XXX(self): + teid = self.qexecute('Transition S WHERE S name "deactivate"')[0][0] + rset = self.qexecute('Any O WHERE O is State, ' + 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) + self.assertEqual(len(rset), 2) + rset = self.qexecute('Any O WHERE O is State, NOT S destination_state O, ' + 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) + self.assertEqual(len(rset), 1) + + + def test_nonregr_set_datetime(self): + # huum, psycopg specific + self.qexecute('SET X creation_date %(date)s WHERE X eid 1', {'date': date.today()}) + + def test_nonregr_u_owned_by_u(self): + ueid = self.qexecute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " + "WHERE G name 'users'")[0][0] + rset = self.qexecute("CWUser U") + self.assertEqual(len(rset), 3) # bob + admin + anon + rset = self.qexecute("Any U WHERE NOT U owned_by U") + # even admin created at repo initialization time should belong to itself + self.assertEqual(len(rset), 0) + + def test_nonreg_update_index(self): + # this is the kind of queries generated by "cubicweb-ctl db-check -ry" + self.qexecute("SET X description D WHERE X is State, X description D") + + def test_nonregr_is(self): + uteid = self.qexecute('Any ET WHERE ET name "CWUser"')[0][0] + self.qexecute('Any X, ET WHERE X is ET, ET eid %s' % uteid) + + def test_nonregr_orderby(self): + seid = self.qexecute('Any X WHERE X name "activated"')[0][0] + self.qexecute('Any X,S, MAX(T) GROUPBY X,S ORDERBY S ' + 'WHERE X is CWUser, T tags X, S eid IN(%s), X in_state S' % seid) + + def test_nonregr_solution_cache(self): + self.skipTest('XXX should be fixed or documented') # (doesn't occur if cache key is provided.) 
+ rset = self.qexecute('Any X WHERE X is CWUser, X eid %(x)s', {'x':self.ueid}) + self.assertEqual(len(rset), 1) + rset = self.qexecute('Any X WHERE X is CWUser, X eid %(x)s', {'x':12345}) + self.assertEqual(len(rset), 0) + + def test_nonregr_final_norestr(self): + self.assertRaises(BadRQLQuery, self.qexecute, 'Date X') + + def test_nonregr_eid_cmp(self): + peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + peid2 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + rset = self.qexecute('Any X,Y WHERE X is Personne, Y is Personne, ' + 'X nom XD, Y nom XD, X eid Z, Y eid > Z') + self.assertEqual(rset.rows, [[peid1, peid2]]) + rset = self.qexecute('Any X,Y WHERE X nom XD, Y nom XD, X eid Z, Y eid > Z') + self.assertEqual(rset.rows, [[peid1, peid2]]) + + def test_nonregr_has_text_ambiguity_1(self): + peid = self.qexecute("INSERT CWUser X: X login 'bidule', X upassword 'bidule', " + "X in_group G WHERE G name 'users'")[0][0] + aeid = self.qexecute("INSERT Affaire X: X ref 'bidule'")[0][0] + rset = self.qexecute('Any X WHERE X is CWUser, X has_text "bidule"') + self.assertEqual(rset.rows, [[peid]]) + rset = self.qexecute('Any X WHERE X is CWUser, X has_text "bidule", ' + 'X in_state S, S name SN') + self.assertEqual(rset.rows, [[peid]]) + + + def test_nonregr_sql_cache(self): + # different SQL generated when 'name' is None or not (IS NULL). + self.assertFalse(self.qexecute('Any X WHERE X is CWEType, X name %(name)s', + {'name': None})) + self.assertTrue(self.qexecute('Any X WHERE X is CWEType, X name %(name)s', + {'name': 'CWEType'})) + + +class NonRegressionTC(CubicWebTC): + + def test_has_text_security_cache_bug(self): + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'user', ('users',)) + aff1 = cnx.create_entity('Societe', nom=u'aff1') + aff2 = cnx.create_entity('Societe', nom=u'aff2') + cnx.commit() + with self.new_access('user').repo_cnx() as cnx: + res = cnx.execute('Any X WHERE X has_text %(text)s', {'text': u'aff1'}) + self.assertEqual(res.rows, [[aff1.eid]]) + res = cnx.execute('Any X WHERE X has_text %(text)s', {'text': u'aff2'}) + self.assertEqual(res.rows, [[aff2.eid]]) + + def test_set_relations_eid(self): + with self.admin_access.repo_cnx() as cnx: + # create 3 email addresses + a1 = cnx.create_entity('EmailAddress', address=u'a1') + a2 = cnx.create_entity('EmailAddress', address=u'a2') + a3 = cnx.create_entity('EmailAddress', address=u'a3') + # SET relations using '>=' operator on eids + cnx.execute('SET U use_email A WHERE U login "admin", A eid >= %s' % a2.eid) + self.assertEqual( + [[a2.eid], [a3.eid]], + cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) + # DELETE + cnx.execute('DELETE U use_email A WHERE U login "admin", A eid > %s' % a2.eid) + self.assertEqual( + [[a2.eid]], + cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) + cnx.execute('DELETE U use_email A WHERE U login "admin"') + # SET relations using '<' operator on eids + cnx.execute('SET U use_email A WHERE U login "admin", A eid < %s' % a2.eid) + self.assertEqual( + [[a1.eid]], + cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_repository.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,751 @@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for module cubicweb.server.repository""" + +import threading +import time +import logging + +from six.moves import range + +from yams.constraints import UniqueConstraint +from yams import register_base_type, unregister_base_type + +from logilab.database import get_db_helper + +from cubicweb import (BadConnectionId, ValidationError, + UnknownEid, AuthenticationError, Unauthorized, QueryError) +from cubicweb.predicates import is_instance +from cubicweb.schema import RQLConstraint +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.repotest import tuplify +from cubicweb.server import hook +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.server.hook import Hook +from cubicweb.server.sources import native +from cubicweb.server.session import SessionClosedError + + +class RepositoryTC(CubicWebTC): + """ singleton providing access to a persistent storage for entities + and relation + """ + + def test_unique_together_constraint(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') + with self.assertRaises(ValidationError) as wraperr: + cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') + self.assertEqual( + {'cp': u'%(KEY-rtype)s is part of violated unicity constraint', + 'nom': u'%(KEY-rtype)s is part of violated unicity constraint', + 'type': u'%(KEY-rtype)s is part of violated unicity constraint', + '': u'some relations violate a unicity constraint'}, + wraperr.exception.args[1]) + + def test_unique_together_schema(self): + person = self.repo.schema.eschema('Personne') + self.assertEqual(len(person._unique_together), 1) + self.assertItemsEqual(person._unique_together[0], + ('nom', 'prenom', 'inline2')) + + def test_all_entities_have_owner(self): + with self.admin_access.repo_cnx() as cnx: + self.assertFalse(cnx.execute('Any X WHERE NOT X owned_by U')) + + def test_all_entities_have_is(self): + with self.admin_access.repo_cnx() as cnx: + self.assertFalse(cnx.execute('Any X WHERE NOT X is ET')) + + def test_all_entities_have_cw_source(self): + with self.admin_access.repo_cnx() as cnx: + self.assertFalse(cnx.execute('Any X WHERE NOT X cw_source S')) + + def test_connect(self): + cnxid = self.repo.connect(self.admlogin, password=self.admpassword) + self.assertTrue(cnxid) + self.repo.close(cnxid) + self.assertRaises(AuthenticationError, + self.repo.connect, self.admlogin, password='nimportnawak') + self.assertRaises(AuthenticationError, + self.repo.connect, self.admlogin, password='') + self.assertRaises(AuthenticationError, + self.repo.connect, self.admlogin, password=None) + self.assertRaises(AuthenticationError, + self.repo.connect, None, password=None) + self.assertRaises(AuthenticationError, + self.repo.connect, self.admlogin) 
+        self.assertRaises(AuthenticationError, +                          self.repo.connect, None) + +    def test_login_upassword_accent(self): +        with self.admin_access.repo_cnx() as cnx: +            cnx.execute('INSERT CWUser X: X login %(login)s, X upassword %(passwd)s, ' +                        'X in_group G WHERE G name "users"', +                        {'login': u"barnab\xe9", 'passwd': u"h\xe9h\xe9h\xe9".encode('UTF8')}) +            cnx.commit() +        repo = self.repo +        cnxid = repo.connect(u"barnab\xe9", password=u"h\xe9h\xe9h\xe9".encode('UTF8')) +        self.assertTrue(cnxid) +        repo.close(cnxid) + +    def test_rollback_on_execute_validation_error(self): +        class ValidationErrorAfterHook(Hook): +            __regid__ = 'valerror-after-hook' +            __select__ = Hook.__select__ & is_instance('CWGroup') +            events = ('after_update_entity',) +            def __call__(self): +                raise ValidationError(self.entity.eid, {}) + +        with self.admin_access.repo_cnx() as cnx: +            with self.temporary_appobjects(ValidationErrorAfterHook): +                self.assertRaises(ValidationError, +                                  cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') +                self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) +                with self.assertRaises(QueryError) as cm: +                    cnx.commit() +                self.assertEqual(str(cm.exception), 'transaction must be rolled back') +                cnx.rollback() +                self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) + +    def test_rollback_on_execute_unauthorized(self): +        class UnauthorizedAfterHook(Hook): +            __regid__ = 'unauthorized-after-hook' +            __select__ = Hook.__select__ & is_instance('CWGroup') +            events = ('after_update_entity',) +            def __call__(self): +                raise Unauthorized() + +        with self.admin_access.repo_cnx() as cnx: +            with self.temporary_appobjects(UnauthorizedAfterHook): +                self.assertRaises(Unauthorized, +                                  cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') +                self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) +                with self.assertRaises(QueryError) as cm: +                    cnx.commit() +                self.assertEqual(str(cm.exception), 'transaction must be rolled back') +                cnx.rollback() +                self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) + + +    def test_close(self): +        repo = self.repo +        cnxid = repo.connect(self.admlogin, password=self.admpassword) +        self.assertTrue(cnxid) +        repo.close(cnxid) + + +    def test_initial_schema(self): +        schema = self.repo.schema +        # check order of attributes is respected +        notin = set(('eid', 'is', 'is_instance_of', 'identity', +                     'creation_date', 'modification_date', 'cwuri', +                     'owned_by', 'created_by', 'cw_source', +                     'update_permission', 'read_permission', +                     'add_permission', 'in_basket')) +        self.assertListEqual(['relation_type', +                              'from_entity', 'to_entity', +                              'constrained_by', +                              'cardinality', 'ordernum', 'formula', +                              'indexed', 'fulltextindexed', 'internationalizable', +                              'defaultval', 'extra_props', +                              'description', 'description_format'], +                             [r.type +                              for r in schema.eschema('CWAttribute').ordered_relations() +                              if r.type not in notin]) + +        self.assertEqual(schema.eschema('CWEType').main_attribute(), 'name') +        self.assertEqual(schema.eschema('State').main_attribute(), 'name') + +        constraints = schema.rschema('name').rdef('CWEType', 'String').constraints +        self.assertEqual(len(constraints), 2) +        for cstr in constraints[:]: +            if isinstance(cstr, UniqueConstraint): +                constraints.remove(cstr) +                break +        else: +            self.fail('unique constraint not found') +        sizeconstraint = constraints[0] +        self.assertEqual(sizeconstraint.min, None) +        self.assertEqual(sizeconstraint.max, 64) + +        constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints + 
self.assertEqual(len(constraints), 1) + cstr = constraints[0] + self.assertIsInstance(cstr, RQLConstraint) + self.assertEqual(cstr.expression, 'O final TRUE') + + ownedby = schema.rschema('owned_by') + self.assertEqual(ownedby.objects('CWEType'), ('CWUser',)) + + def test_internal_api(self): + repo = self.repo + cnxid = repo.connect(self.admlogin, password=self.admpassword) + session = repo._get_session(cnxid) + with session.new_cnx() as cnx: + self.assertEqual(repo.type_and_source_from_eid(2, cnx), + ('CWGroup', None, 'system')) + self.assertEqual(repo.type_from_eid(2, cnx), 'CWGroup') + repo.close(cnxid) + + def test_public_api(self): + self.assertEqual(self.repo.get_schema(), self.repo.schema) + self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native', + 'uri': 'system', + 'use-cwuri-as-url': False} + }) + # .properties() return a result set + self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U') + + def test_schema_is_relation(self): + with self.admin_access.repo_cnx() as cnx: + no_is_rset = cnx.execute('Any X WHERE NOT X is ET') + self.assertFalse(no_is_rset, no_is_rset.description) + + def test_delete_if_singlecard1(self): + with self.admin_access.repo_cnx() as cnx: + note = cnx.create_entity('Affaire') + p1 = cnx.create_entity('Personne', nom=u'toto') + cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', + {'x': note.eid, 'p': p1.eid}) + rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s', + {'x': note.eid}) + self.assertEqual(len(rset), 1) + p2 = cnx.create_entity('Personne', nom=u'tutu') + cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', + {'x': note.eid, 'p': p2.eid}) + rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s', + {'x': note.eid}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset.rows[0][0], p2.eid) + + def test_delete_if_object_inlined_singlecard(self): + with self.admin_access.repo_cnx() as cnx: + c = cnx.create_entity('Card', title=u'Carte') + cnx.create_entity('Personne', nom=u'Vincent', fiche=c) + cnx.create_entity('Personne', nom=u'Florent', fiche=c) + cnx.commit() + self.assertEqual(len(c.reverse_fiche), 1) + + def test_delete_computed_relation_nonregr(self): + with self.admin_access.repo_cnx() as cnx: + c = cnx.create_entity('Personne', nom=u'Adam', login_user=cnx.user.eid) + cnx.commit() + c.cw_delete() + cnx.commit() + + def test_cw_set_in_before_update(self): + # local hook + class DummyBeforeHook(Hook): + __regid__ = 'dummy-before-hook' + __select__ = Hook.__select__ & is_instance('EmailAddress') + events = ('before_update_entity',) + def __call__(self): + # safety belt: avoid potential infinite recursion if the test + # fails (i.e. 
RuntimeError not raised) + pendings = self._cw.transaction_data.setdefault('pending', set()) + if self.entity.eid not in pendings: + pendings.add(self.entity.eid) + self.entity.cw_set(alias=u'foo') + + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(DummyBeforeHook): + addr = cnx.create_entity('EmailAddress', address=u'a@b.fr') + addr.cw_set(address=u'a@b.com') + rset = cnx.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA', + {'x': addr.eid}) + self.assertEqual(rset.rows, [[u'a@b.com', u'foo']]) + + def test_cw_set_in_before_add(self): + # local hook + class DummyBeforeHook(Hook): + __regid__ = 'dummy-before-hook' + __select__ = Hook.__select__ & is_instance('EmailAddress') + events = ('before_add_entity',) + def __call__(self): + # cw_set is forbidden within before_add_entity() + self.entity.cw_set(alias=u'foo') + + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(DummyBeforeHook): + # XXX will fail with python -O + self.assertRaises(AssertionError, cnx.create_entity, + 'EmailAddress', address=u'a@b.fr') + + def test_multiple_edit_cw_set(self): + """make sure cw_edited doesn't get cluttered + by previous entities on multiple set + """ + # local hook + class DummyBeforeHook(Hook): + _test = self # keep reference to test instance + __regid__ = 'dummy-before-hook' + __select__ = Hook.__select__ & is_instance('Affaire') + events = ('before_update_entity',) + def __call__(self): + # invoiced attribute shouldn't be considered "edited" before the hook + self._test.assertFalse('invoiced' in self.entity.cw_edited, + 'cw_edited cluttered by previous update') + self.entity.cw_edited['invoiced'] = 10 + + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(DummyBeforeHook): + cnx.create_entity('Affaire', ref=u'AFF01') + cnx.create_entity('Affaire', ref=u'AFF02') + cnx.execute('SET A duration 10 WHERE A is Affaire') + + + def test_user_friendly_error(self): + from cubicweb.entities.adapters import IUserFriendlyUniqueTogether + class MyIUserFriendlyUniqueTogether(IUserFriendlyUniqueTogether): + __select__ = IUserFriendlyUniqueTogether.__select__ & is_instance('Societe') + def raise_user_exception(self): + raise ValidationError(self.entity.eid, {'hip': 'hop'}) + + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(MyIUserFriendlyUniqueTogether): + s = cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013') + cnx.commit() + with self.assertRaises(ValidationError) as cm: + cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013') + self.assertEqual(cm.exception.errors, {'hip': 'hop'}) + cnx.rollback() + cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'31400') + with self.assertRaises(ValidationError) as cm: + s.cw_set(cp=u'31400') + self.assertEqual(cm.exception.entity, s.eid) + self.assertEqual(cm.exception.errors, {'hip': 'hop'}) + cnx.rollback() + + def test_attribute_cache(self): + with self.admin_access.repo_cnx() as cnx: + bk = cnx.create_entity('Bookmark', title=u'index', path=u'/') + cnx.commit() + self.assertEqual(bk.title, 'index') + bk.cw_set(title=u'root') + self.assertEqual(bk.title, 'root') + cnx.commit() + self.assertEqual(bk.title, 'root') + +class SchemaDeserialTC(CubicWebTC): + + appid = 'data-schemaserial' + + @classmethod + def setUpClass(cls): + register_base_type('BabarTestType', ('jungle_speed',)) + helper = get_db_helper('sqlite') + helper.TYPE_MAPPING['BabarTestType'] = 'TEXT' + helper.TYPE_CONVERTERS['BabarTestType'] = lambda 
x: '"%s"' % x + super(SchemaDeserialTC, cls).setUpClass() + + + @classmethod + def tearDownClass(cls): + unregister_base_type('BabarTestType') + helper = get_db_helper('sqlite') + helper.TYPE_MAPPING.pop('BabarTestType', None) + helper.TYPE_CONVERTERS.pop('BabarTestType', None) + super(SchemaDeserialTC, cls).tearDownClass() + + def test_deserialization_base(self): + """Check the following deserialization + + * all CWEtype has name + * Final type + * CWUniqueTogetherConstraint + * _unique_together__ content""" + origshema = self.repo.schema + try: + self.repo.config.repairing = True # avoid versions checking + self.repo.set_schema(self.repo.deserialize_schema()) + table = SQL_PREFIX + 'CWEType' + namecol = SQL_PREFIX + 'name' + finalcol = SQL_PREFIX + 'final' + with self.admin_access.repo_cnx() as cnx: + cu = cnx.system_sql('SELECT %s FROM %s WHERE %s is NULL' + % (namecol, table, finalcol)) + self.assertEqual(cu.fetchall(), []) + cu = cnx.system_sql('SELECT %s FROM %s ' + 'WHERE %s=%%(final)s ORDER BY %s' + % (namecol, table, finalcol, namecol), + {'final': True}) + self.assertEqual(cu.fetchall(), + [(u'BabarTestType',), + (u'BigInt',), (u'Boolean',), (u'Bytes',), + (u'Date',), (u'Datetime',), + (u'Decimal',),(u'Float',), + (u'Int',), + (u'Interval',), (u'Password',), + (u'String',), + (u'TZDatetime',), (u'TZTime',), (u'Time',)]) + sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to " + "FROM cw_CWUniqueTogetherConstraint as cstr, " + " relations_relation as rel, " + " cw_CWEType as etype " + "WHERE cstr.cw_eid = rel.eid_from " + " AND cstr.cw_constraint_of = etype.cw_eid " + " AND etype.cw_name = 'Personne' " + ";") + cu = cnx.system_sql(sql) + rows = cu.fetchall() + self.assertEqual(len(rows), 3) + person = self.repo.schema.eschema('Personne') + self.assertEqual(len(person._unique_together), 1) + self.assertItemsEqual(person._unique_together[0], + ('nom', 'prenom', 'inline2')) + + finally: + self.repo.set_schema(origshema) + + def test_custom_attribute_param(self): + origshema = self.repo.schema + try: + self.repo.config.repairing = True # avoid versions checking + self.repo.set_schema(self.repo.deserialize_schema()) + pes = self.repo.schema['Personne'] + attr = pes.rdef('custom_field_of_jungle') + self.assertIn('jungle_speed', vars(attr)) + self.assertEqual(42, attr.jungle_speed) + finally: + self.repo.set_schema(origshema) + + + +class DataHelpersTC(CubicWebTC): + + def test_type_from_eid(self): + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(self.repo.type_from_eid(2, cnx), 'CWGroup') + + def test_type_from_eid_raise(self): + with self.admin_access.repo_cnx() as cnx: + self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, cnx) + + def test_add_delete_info(self): + with self.admin_access.repo_cnx() as cnx: + entity = self.repo.vreg['etypes'].etype_class('Personne')(cnx) + entity.eid = -1 + entity.complete = lambda x: None + self.repo.add_info(cnx, entity, self.repo.system_source) + cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1') + data = cu.fetchall() + self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)]) + self.repo._delete_cascade_multi(cnx, [entity]) + self.repo.system_source.delete_info_multi(cnx, [entity]) + cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1') + data = cu.fetchall() + self.assertEqual(data, []) + + +class FTITC(CubicWebTC): + + def test_fulltext_container_entity(self): + with self.admin_access.repo_cnx() as cnx: + assert self.schema.rschema('use_email').fulltext_container == 'subject' + toto = 
cnx.create_entity('EmailAddress', address=u'toto@logilab.fr') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) + self.assertEqual(rset.rows, []) + cnx.user.cw_set(use_email=toto) + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) + self.assertEqual(rset.rows, [[cnx.user.eid]]) + cnx.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s', + {'y': toto.eid}) + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) + self.assertEqual(rset.rows, []) + tutu = cnx.create_entity('EmailAddress', address=u'tutu@logilab.fr') + cnx.user.cw_set(use_email=tutu) + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) + self.assertEqual(rset.rows, [[cnx.user.eid]]) + tutu.cw_set(address=u'hip@logilab.fr') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) + self.assertEqual(rset.rows, []) + rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'}) + self.assertEqual(rset.rows, [[cnx.user.eid]]) + + def test_no_uncessary_ftiindex_op(self): + with self.admin_access.repo_cnx() as cnx: + cnx.create_entity('Workflow', + name=u'dummy workflow', + description=u'huuuuu') + self.assertFalse(any(x for x in cnx.pending_operations + if isinstance(x, native.FTIndexEntityOp))) + + +class DBInitTC(CubicWebTC): + + def test_versions_inserted(self): + with self.admin_access.repo_cnx() as cnx: + inserted = [r[0] + for r in cnx.execute('Any K ORDERBY K ' + 'WHERE P pkey K, P pkey ~= "system.version.%"')] + self.assertEqual(inserted, + [u'system.version.basket', + u'system.version.card', + u'system.version.comment', + u'system.version.cubicweb', + u'system.version.file', + u'system.version.localperms', + u'system.version.tag']) + +CALLED = [] + +class InlineRelHooksTC(CubicWebTC): + """test relation hooks are called for inlined relations + """ + def setUp(self): + CubicWebTC.setUp(self) + CALLED[:] = () + + def test_inline_relation(self): + """make sure _relation hooks are called for inlined relation""" + + class EcritParHook(hook.Hook): + __regid__ = 'inlinedrelhook' + __select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par') + events = ('before_add_relation', 'after_add_relation', + 'before_delete_relation', 'after_delete_relation') + def __call__(self): + CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto)) + + with self.temporary_appobjects(EcritParHook): + with self.admin_access.repo_cnx() as cnx: + eidp = cnx.execute('INSERT Personne X: X nom "toto"')[0][0] + eidn = cnx.execute('INSERT Note X: X type "T"')[0][0] + cnx.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') + self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), + ('after_add_relation', eidn, 'ecrit_par', eidp)]) + CALLED[:] = () + cnx.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"') + self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp), + ('after_delete_relation', eidn, 'ecrit_par', eidp)]) + CALLED[:] = () + eidn = cnx.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0] + self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), + ('after_add_relation', eidn, 'ecrit_par', eidp)]) + + def test_unique_contraint(self): + with self.admin_access.repo_cnx() as cnx: + toto = cnx.create_entity('Personne', nom=u'toto') + a01 = cnx.create_entity('Affaire', ref=u'A01', todo_by=toto) + cnx.commit() + cnx.create_entity('Note', type=u'todo', inline1=a01) + cnx.commit() + 
cnx.create_entity('Note', type=u'todo', inline1=a01) + with self.assertRaises(ValidationError) as cm: + cnx.commit() + self.assertEqual(cm.exception.errors, + {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, ' + 'A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'}) + + def test_add_relations_at_creation_with_del_existing_rel(self): + with self.admin_access.repo_cnx() as cnx: + person = cnx.create_entity('Personne', + nom=u'Toto', + prenom=u'Lanturlu', + sexe=u'M') + users_rql = 'Any U WHERE U is CWGroup, U name "users"' + users = cnx.execute(users_rql).get_entity(0, 0) + cnx.create_entity('CWUser', + login=u'Toto', + upassword=u'firstname', + firstname=u'firstname', + surname=u'surname', + reverse_login_user=person, + in_group=users) + cnx.commit() + + +class PerformanceTest(CubicWebTC): + def setUp(self): + super(PerformanceTest, self).setUp() + logger = logging.getLogger('cubicweb.session') + #logger.handlers = [logging.StreamHandler(sys.stdout)] + logger.setLevel(logging.INFO) + self.info = logger.info + + def tearDown(self): + super(PerformanceTest, self).tearDown() + logger = logging.getLogger('cubicweb.session') + logger.setLevel(logging.CRITICAL) + + def test_composite_deletion(self): + with self.admin_access.repo_cnx() as cnx: + personnes = [] + t0 = time.time() + for i in range(2000): + p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') + for j in range(0, 2000, 100): + abraham.cw_set(personne_composite=personnes[j:j+100]) + t1 = time.time() + self.info('creation: %.2gs', (t1 - t0)) + cnx.commit() + t2 = time.time() + self.info('commit creation: %.2gs', (t2 - t1)) + cnx.execute('DELETE Personne P WHERE P eid %(eid)s', {'eid': abraham.eid}) + t3 = time.time() + self.info('deletion: %.2gs', (t3 - t2)) + cnx.commit() + t4 = time.time() + self.info("commit deletion: %2gs", (t4 - t3)) + + def test_add_relation_non_inlined(self): + with self.admin_access.repo_cnx() as cnx: + personnes = [] + for i in range(2000): + p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + cnx.commit() + t0 = time.time() + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M', + personne_composite=personnes[:100]) + t1 = time.time() + self.info('creation: %.2gs', (t1 - t0)) + for j in range(100, 2000, 100): + abraham.cw_set(personne_composite=personnes[j:j+100]) + t2 = time.time() + self.info('more relations: %.2gs', (t2-t1)) + cnx.commit() + t3 = time.time() + self.info('commit creation: %.2gs', (t3 - t2)) + + def test_add_relation_inlined(self): + with self.admin_access.repo_cnx() as cnx: + personnes = [] + for i in range(2000): + p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + cnx.commit() + t0 = time.time() + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M', + personne_inlined=personnes[:100]) + t1 = time.time() + self.info('creation: %.2gs', (t1 - t0)) + for j in range(100, 2000, 100): + abraham.cw_set(personne_inlined=personnes[j:j+100]) + t2 = time.time() + self.info('more relations: %.2gs', (t2-t1)) + cnx.commit() + t3 = time.time() + self.info('commit creation: %.2gs', (t3 - t2)) + + + def test_session_add_relation(self): + """ to be compared with test_session_add_relations""" + with self.admin_access.repo_cnx() as cnx: + personnes = [] + for i in range(2000): + p = 
cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') + cnx.commit() + t0 = time.time() + add_relation = cnx.add_relation + for p in personnes: + add_relation(abraham.eid, 'personne_composite', p.eid) + cnx.commit() + t1 = time.time() + self.info('add relation: %.2gs', t1-t0) + + def test_session_add_relations (self): + """ to be compared with test_session_add_relation""" + with self.admin_access.repo_cnx() as cnx: + personnes = [] + for i in range(2000): + p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') + cnx.commit() + t0 = time.time() + add_relations = cnx.add_relations + relations = [('personne_composite', [(abraham.eid, p.eid) for p in personnes])] + add_relations(relations) + cnx.commit() + t1 = time.time() + self.info('add relations: %.2gs', t1-t0) + + def test_session_add_relation_inlined(self): + """ to be compared with test_session_add_relations""" + with self.admin_access.repo_cnx() as cnx: + personnes = [] + for i in range(2000): + p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') + cnx.commit() + t0 = time.time() + add_relation = cnx.add_relation + for p in personnes: + add_relation(abraham.eid, 'personne_inlined', p.eid) + cnx.commit() + t1 = time.time() + self.info('add relation (inlined): %.2gs', t1-t0) + + def test_session_add_relations_inlined (self): + """ to be compared with test_session_add_relation""" + with self.admin_access.repo_cnx() as cnx: + personnes = [] + for i in range(2000): + p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') + personnes.append(p) + abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') + cnx.commit() + t0 = time.time() + add_relations = cnx.add_relations + relations = [('personne_inlined', [(abraham.eid, p.eid) for p in personnes])] + add_relations(relations) + cnx.commit() + t1 = time.time() + self.info('add relations (inlined): %.2gs', t1-t0) + + def test_optional_relation_reset_1(self): + with self.admin_access.repo_cnx() as cnx: + p1 = cnx.create_entity('Personne', nom=u'Vincent') + p2 = cnx.create_entity('Personne', nom=u'Florent') + w = cnx.create_entity('Affaire', ref=u'wc') + w.cw_set(todo_by=[p1,p2]) + w.cw_clear_all_caches() + cnx.commit() + self.assertEqual(len(w.todo_by), 1) + self.assertEqual(w.todo_by[0].eid, p2.eid) + + def test_optional_relation_reset_2(self): + with self.admin_access.repo_cnx() as cnx: + p1 = cnx.create_entity('Personne', nom=u'Vincent') + p2 = cnx.create_entity('Personne', nom=u'Florent') + w = cnx.create_entity('Affaire', ref=u'wc') + w.cw_set(todo_by=p1) + cnx.commit() + w.cw_set(todo_by=p2) + w.cw_clear_all_caches() + cnx.commit() + self.assertEqual(len(w.todo_by), 1) + self.assertEqual(w.todo_by[0].eid, p2.eid) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_rql2sql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_rql2sql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2231 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for module cubicweb.server.sources.rql2sql""" +from __future__ import print_function + +import sys +import os +from datetime import date +from logilab.common.testlib import TestCase, unittest_main, mock_object + +from rql import BadRQLQuery +from rql.utils import register_function, FunctionDescr + +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.repotest import RQLGeneratorTC +from cubicweb.server.sources.rql2sql import remove_unused_solutions + + +# add a dumb registered procedure +class stockproc(FunctionDescr): + supported_backends = ('postgres', 'sqlite', 'mysql') +try: + register_function(stockproc) +except AssertionError as ex: + pass # already registered + + +from logilab import database as db +def monkey_patch_import_driver_module(driver, drivers, quiet=True): + if not driver in drivers: + raise db.UnknownDriver(driver) + for modname in drivers[driver]: + try: + if not quiet: + sys.stderr.write('Trying %s\n' % modname) + module = db.load_module_from_name(modname, use_sys=False) + break + except ImportError: + if not quiet: + sys.stderr.write('%s is not available\n' % modname) + continue + else: + return mock_object(STRING=1, BOOLEAN=2, BINARY=3, DATETIME=4, NUMBER=5), drivers[driver][0] + return module, modname + + +def setUpModule(): + global config, schema + config = TestServerConfiguration('data', apphome=CWRQLTC.datadir) + config.bootstrap_cubes() + schema = config.load_schema() + schema['in_state'].inlined = True + schema['state_of'].inlined = False + schema['comments'].inlined = False + db._backup_import_driver_module = db._import_driver_module + db._import_driver_module = monkey_patch_import_driver_module + +def tearDownModule(): + global config, schema + del config, schema + db._import_driver_module = db._backup_import_driver_module + del db._backup_import_driver_module + +PARSER = [ + (r"Personne P WHERE P nom 'Zig\'oto';", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE _P.cw_nom=Zig\'oto'''), + + (r'Personne P WHERE P nom ~= "Zig\"oto%";', + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE _P.cw_nom ILIKE Zig"oto%'''), + ] + +BASIC = [ + ("Any AS WHERE AS is Affaire", + '''SELECT _AS.cw_eid +FROM cw_Affaire AS _AS'''), + + ("Any X WHERE X is Affaire", + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X'''), + + ("Any X WHERE X eid 0", + '''SELECT 0'''), + + ("Personne P", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P'''), + + ("Personne P WHERE P test TRUE", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE _P.cw_test=True'''), + + ("Personne P WHERE P test false", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE _P.cw_test=False'''), + + ("Personne P WHERE P eid -1", + '''SELECT -1'''), + + ("Personne P WHERE S is Societe, P travaille S, S nom 'Logilab';", + '''SELECT rel_travaille0.eid_from +FROM 
cw_Societe AS _S, travaille_relation AS rel_travaille0 +WHERE rel_travaille0.eid_to=_S.cw_eid AND _S.cw_nom=Logilab'''), + + ("Personne P WHERE P concerne A, A concerne S, S nom 'Logilab', S is Societe;", + '''SELECT rel_concerne0.eid_from +FROM concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1, cw_Societe AS _S +WHERE rel_concerne0.eid_to=rel_concerne1.eid_from AND rel_concerne1.eid_to=_S.cw_eid AND _S.cw_nom=Logilab'''), + + ("Note N WHERE X evaluee N, X nom 'Logilab';", + '''SELECT rel_evaluee0.eid_to +FROM cw_Division AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab +UNION ALL +SELECT rel_evaluee0.eid_to +FROM cw_Personne AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab +UNION ALL +SELECT rel_evaluee0.eid_to +FROM cw_Societe AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab +UNION ALL +SELECT rel_evaluee0.eid_to +FROM cw_SubDivision AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab'''), + + ("Note N WHERE X evaluee N, X nom in ('Logilab', 'Caesium');", + '''SELECT rel_evaluee0.eid_to +FROM cw_Division AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium) +UNION ALL +SELECT rel_evaluee0.eid_to +FROM cw_Personne AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium) +UNION ALL +SELECT rel_evaluee0.eid_to +FROM cw_Societe AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium) +UNION ALL +SELECT rel_evaluee0.eid_to +FROM cw_SubDivision AS _X, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium)'''), + + ("Any N WHERE G is CWGroup, G name N, E eid 12, E read_permission G", + '''SELECT _G.cw_name +FROM cw_CWGroup AS _G, read_permission_relation AS rel_read_permission0 +WHERE rel_read_permission0.eid_from=12 AND rel_read_permission0.eid_to=_G.cw_eid'''), + + ('Any Y WHERE U login "admin", U login Y', # stupid but valid... 
+ """SELECT _U.cw_login +FROM cw_CWUser AS _U +WHERE _U.cw_login=admin"""), + + ('Any T WHERE T tags X, X is State', + '''SELECT rel_tags0.eid_from +FROM cw_State AS _X, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_to=_X.cw_eid'''), + + ('Any X,Y WHERE X eid 0, Y eid 1, X concerne Y', + '''SELECT 0, 1 +FROM concerne_relation AS rel_concerne0 +WHERE rel_concerne0.eid_from=0 AND rel_concerne0.eid_to=1'''), + + ("Any X WHERE X prenom 'lulu'," + "EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');", + '''SELECT _X.cw_eid +FROM cw_Personne AS _X +WHERE _X.cw_prenom=lulu AND EXISTS(SELECT 1 FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group1, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers)))'''), + + ("Any X WHERE X prenom 'lulu'," + "NOT EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');", + '''SELECT _X.cw_eid +FROM cw_Personne AS _X +WHERE _X.cw_prenom=lulu AND NOT (EXISTS(SELECT 1 FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group1, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers))))'''), + + ('Any X WHERE X title V, NOT X wikiid V, NOT X title "parent", X is Card', + '''SELECT _X.cw_eid +FROM cw_Card AS _X +WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)'''), + + ("Any -AS WHERE AS is Affaire", + '''SELECT -_AS.cw_eid +FROM cw_Affaire AS _AS'''), + +] + +BASIC_WITH_LIMIT = [ + ("Personne P LIMIT 20 OFFSET 10", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +LIMIT 20 +OFFSET 10'''), + ("Any P ORDERBY N LIMIT 1 WHERE P is Personne, P travaille S, S eid %(eid)s, P nom N, P nom %(text)s", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P, travaille_relation AS rel_travaille0 +WHERE rel_travaille0.eid_from=_P.cw_eid AND rel_travaille0.eid_to=12345 AND _P.cw_nom=hip hop momo +ORDER BY _P.cw_nom +LIMIT 1'''), + ] + + +ADVANCED = [ + ("Societe S WHERE S2 is Societe, S2 nom SN, S nom 'Logilab' OR S nom SN", + '''SELECT _S.cw_eid +FROM cw_Societe AS _S, cw_Societe AS _S2 +WHERE ((_S.cw_nom=Logilab) OR (_S2.cw_nom=_S.cw_nom))'''), + + ("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'", + '''SELECT _S.cw_eid +FROM cw_Societe AS _S +WHERE ((_S.cw_nom=Logilab) OR (_S.cw_nom=Caesium))'''), + + ('Any X WHERE X nom "toto", X eid IN (9700, 9710, 1045, 674)', + '''SELECT _X.cw_eid +FROM cw_Division AS _X +WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674) +UNION ALL +SELECT _X.cw_eid +FROM cw_Personne AS _X +WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674) +UNION ALL +SELECT _X.cw_eid +FROM cw_Societe AS _X +WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674) +UNION ALL +SELECT _X.cw_eid +FROM cw_SubDivision AS _X +WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674)'''), + + ('Any Y, COUNT(N) GROUPBY Y WHERE Y evaluee N;', + '''SELECT rel_evaluee0.eid_from, COUNT(rel_evaluee0.eid_to) +FROM evaluee_relation AS rel_evaluee0 +GROUP BY rel_evaluee0.eid_from'''), + + ("Any X WHERE X concerne B or C concerne X", + '''SELECT _X.cw_eid +FROM concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1, cw_Affaire AS _X +WHERE ((rel_concerne0.eid_from=_X.cw_eid) OR (rel_concerne1.eid_to=_X.cw_eid))'''), + + ("Any X WHERE X travaille S or X concerne A", + '''SELECT 
_X.cw_eid +FROM concerne_relation AS rel_concerne1, cw_Personne AS _X, travaille_relation AS rel_travaille0 +WHERE ((rel_travaille0.eid_from=_X.cw_eid) OR (rel_concerne1.eid_from=_X.cw_eid))'''), + + ("Any N WHERE A evaluee N or N ecrit_par P", + '''SELECT _N.cw_eid +FROM cw_Note AS _N, evaluee_relation AS rel_evaluee0 +WHERE ((rel_evaluee0.eid_to=_N.cw_eid) OR (_N.cw_ecrit_par IS NOT NULL))'''), + + ("Any N WHERE A evaluee N or EXISTS(N todo_by U)", + '''SELECT _N.cw_eid +FROM cw_Note AS _N, evaluee_relation AS rel_evaluee0 +WHERE ((rel_evaluee0.eid_to=_N.cw_eid) OR (EXISTS(SELECT 1 FROM todo_by_relation AS rel_todo_by1 WHERE rel_todo_by1.eid_from=_N.cw_eid)))'''), + + ("Any N WHERE A evaluee N or N todo_by U", + '''SELECT _N.cw_eid +FROM cw_Note AS _N, evaluee_relation AS rel_evaluee0, todo_by_relation AS rel_todo_by1 +WHERE ((rel_evaluee0.eid_to=_N.cw_eid) OR (rel_todo_by1.eid_from=_N.cw_eid))'''), + + ("Any X WHERE X concerne B or C concerne X, B eid 12, C eid 13", + '''SELECT _X.cw_eid +FROM concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1, cw_Affaire AS _X +WHERE ((rel_concerne0.eid_from=_X.cw_eid AND rel_concerne0.eid_to=12) OR (rel_concerne1.eid_from=13 AND rel_concerne1.eid_to=_X.cw_eid))'''), + + ('Any X WHERE X created_by U, X concerne B OR C concerne X, B eid 12, C eid 13', + '''SELECT rel_created_by0.eid_from +FROM concerne_relation AS rel_concerne1, concerne_relation AS rel_concerne2, created_by_relation AS rel_created_by0 +WHERE ((rel_concerne1.eid_from=rel_created_by0.eid_from AND rel_concerne1.eid_to=12) OR (rel_concerne2.eid_from=13 AND rel_concerne2.eid_to=rel_created_by0.eid_from))'''), + + ('Any P WHERE P travaille_subdivision S1 OR P travaille_subdivision S2, S1 nom "logilab", S2 nom "caesium"', + '''SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_SubDivision AS _S1, cw_SubDivision AS _S2, travaille_subdivision_relation AS rel_travaille_subdivision0, travaille_subdivision_relation AS rel_travaille_subdivision1 +WHERE ((rel_travaille_subdivision0.eid_from=_P.cw_eid AND rel_travaille_subdivision0.eid_to=_S1.cw_eid) OR (rel_travaille_subdivision1.eid_from=_P.cw_eid AND rel_travaille_subdivision1.eid_to=_S2.cw_eid)) AND _S1.cw_nom=logilab AND _S2.cw_nom=caesium'''), + + ('Any X WHERE T tags X', + '''SELECT rel_tags0.eid_to +FROM tags_relation AS rel_tags0'''), + + ('Any X WHERE X in_basket B, B eid 12', + '''SELECT rel_in_basket0.eid_from +FROM in_basket_relation AS rel_in_basket0 +WHERE rel_in_basket0.eid_to=12'''), + + ('Any SEN,RN,OEN WHERE X from_entity SE, SE eid 44, X relation_type R, R eid 139, X to_entity OE, OE eid 42, R name RN, SE name SEN, OE name OEN', + '''SELECT _SE.cw_name, _R.cw_name, _OE.cw_name +FROM cw_CWAttribute AS _X, cw_CWEType AS _OE, cw_CWEType AS _SE, cw_CWRType AS _R +WHERE _X.cw_from_entity=44 AND _SE.cw_eid=44 AND _X.cw_relation_type=139 AND _R.cw_eid=139 AND _X.cw_to_entity=42 AND _OE.cw_eid=42 +UNION ALL +SELECT _SE.cw_name, _R.cw_name, _OE.cw_name +FROM cw_CWEType AS _OE, cw_CWEType AS _SE, cw_CWRType AS _R, cw_CWRelation AS _X +WHERE _X.cw_from_entity=44 AND _SE.cw_eid=44 AND _X.cw_relation_type=139 AND _R.cw_eid=139 AND _X.cw_to_entity=42 AND _OE.cw_eid=42'''), + + # Any O WHERE NOT S corrected_in O, S eid %(x)s, S concerns P, O version_of P, O in_state ST, NOT ST name "published", O modification_date MTIME ORDERBY MTIME DESC LIMIT 9 + ('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P', + '''SELECT _O.cw_eid +FROM cw_Note AS _S, cw_Personne AS _O +WHERE (_S.cw_ecrit_par IS NULL OR 
_S.cw_ecrit_par!=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''), + + ('Any N WHERE N todo_by U, N is Note, U eid 2, N filed_under T, T eid 3', + # N would actually be invariant if U eid 2 had given a specific type to U + '''SELECT _N.cw_eid +FROM cw_Note AS _N, filed_under_relation AS rel_filed_under1, todo_by_relation AS rel_todo_by0 +WHERE rel_todo_by0.eid_from=_N.cw_eid AND rel_todo_by0.eid_to=2 AND rel_filed_under1.eid_from=_N.cw_eid AND rel_filed_under1.eid_to=3'''), + + ('Any N WHERE N todo_by U, U eid 2, P evaluee N, P eid 3', + '''SELECT rel_evaluee1.eid_to +FROM evaluee_relation AS rel_evaluee1, todo_by_relation AS rel_todo_by0 +WHERE rel_evaluee1.eid_to=rel_todo_by0.eid_from AND rel_todo_by0.eid_to=2 AND rel_evaluee1.eid_from=3'''), + + + (' Any X,U WHERE C owned_by U, NOT X owned_by U, C eid 1, X eid 2', + '''SELECT 2, rel_owned_by0.eid_to +FROM owned_by_relation AS rel_owned_by0 +WHERE rel_owned_by0.eid_from=1 AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=2 AND rel_owned_by0.eid_to=rel_owned_by1.eid_to))'''), + + ('Any GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))', + '''SELECT _G.cw_name +FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group0 +WHERE rel_in_group0.eid_to=_G.cw_eid AND ((_G.cw_name=managers) OR (EXISTS(SELECT 1 FROM copain_relation AS rel_copain1, cw_CWUser AS _T WHERE rel_copain1.eid_from=rel_in_group0.eid_from AND rel_copain1.eid_to=_T.cw_eid AND _T.cw_login IN(comme, cochon))))'''), + + ('Any C WHERE C is Card, EXISTS(X documented_by C)', + """SELECT _C.cw_eid +FROM cw_Card AS _C +WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_to=_C.cw_eid)"""), + + ('Any C WHERE C is Card, EXISTS(X documented_by C, X eid 12)', + """SELECT _C.cw_eid +FROM cw_Card AS _C +WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_from=12 AND rel_documented_by0.eid_to=_C.cw_eid)"""), + + ('Any T WHERE C is Card, C title T, EXISTS(X documented_by C, X eid 12)', + """SELECT _C.cw_title +FROM cw_Card AS _C +WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_from=12 AND rel_documented_by0.eid_to=_C.cw_eid)"""), + + ('Any GN,L WHERE X in_group G, X login L, G name GN, EXISTS(X copain T, T login L, T login IN("comme", "cochon"))', + '''SELECT _G.cw_name, _X.cw_login +FROM cw_CWGroup AS _G, cw_CWUser AS _X, in_group_relation AS rel_in_group0 +WHERE rel_in_group0.eid_from=_X.cw_eid AND rel_in_group0.eid_to=_G.cw_eid AND EXISTS(SELECT 1 FROM copain_relation AS rel_copain1, cw_CWUser AS _T WHERE rel_copain1.eid_from=_X.cw_eid AND rel_copain1.eid_to=_T.cw_eid AND _T.cw_login=_X.cw_login AND _T.cw_login IN(comme, cochon))'''), + + ('Any X,S, MAX(T) GROUPBY X,S ORDERBY S WHERE X is CWUser, T tags X, S eid IN(32), X in_state S', + '''SELECT _X.cw_eid, 32, MAX(rel_tags0.eid_from) +FROM cw_CWUser AS _X, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_to=_X.cw_eid AND _X.cw_in_state=32 +GROUP BY _X.cw_eid'''), + + + ('Any X WHERE Y evaluee X, Y is CWUser', + '''SELECT rel_evaluee0.eid_to +FROM cw_CWUser AS _Y, evaluee_relation AS rel_evaluee0 +WHERE rel_evaluee0.eid_from=_Y.cw_eid'''), + + ('Any L WHERE X login "admin", X identity Y, Y login L', + '''SELECT _Y.cw_login +FROM cw_CWUser AS _X, cw_CWUser AS _Y +WHERE _X.cw_login=admin AND _X.cw_eid=_Y.cw_eid'''), + + ('Any L WHERE X login "admin", 
NOT X identity Y, Y login L', + '''SELECT _Y.cw_login +FROM cw_CWUser AS _X, cw_CWUser AS _Y +WHERE _X.cw_login=admin AND NOT (_X.cw_eid=_Y.cw_eid)'''), + + ('Any L WHERE X login "admin", X identity Y?, Y login L', + '''SELECT _Y.cw_login +FROM cw_CWUser AS _X LEFT OUTER JOIN cw_CWUser AS _Y ON (_X.cw_eid=_Y.cw_eid) +WHERE _X.cw_login=admin'''), + + ('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket,Folder,Tag)', + '''SELECT _X.cw_name +FROM cw_Basket AS _X +UNION ALL +SELECT _X.cw_name +FROM cw_Folder AS _X +UNION ALL +SELECT _X.cw_name +FROM cw_Tag AS _X +ORDER BY 1'''), + + # DISTINCT, can use relation under exists scope as principal + ('DISTINCT Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), EXISTS(X read_permission Y)', + '''SELECT DISTINCT _X.cw_eid, rel_read_permission0.eid_to +FROM cw_CWEType AS _X, read_permission_relation AS rel_read_permission0 +WHERE _X.cw_name=CWGroup AND rel_read_permission0.eid_to IN(1, 2, 3) AND EXISTS(SELECT 1 WHERE rel_read_permission0.eid_from=_X.cw_eid)'''), + + # no distinct, Y can't be invariant + ('Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), EXISTS(X read_permission Y)', + '''SELECT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_CWGroup AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) +UNION ALL +SELECT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_RQLExpression AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''), + + # DISTINCT but NEGATED exists, can't be invariant + ('DISTINCT Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)', + '''SELECT DISTINCT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_CWGroup AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)) +UNION +SELECT DISTINCT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_RQLExpression AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''), + + # should generate the same query as above + ('DISTINCT Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), NOT X read_permission Y', + '''SELECT DISTINCT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_CWGroup AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)) +UNION +SELECT DISTINCT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_RQLExpression AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''), + + # negated relation, can't be invariant + ('Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), NOT X read_permission Y', + '''SELECT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_CWGroup AS _Y +WHERE 
_X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)) +UNION ALL +SELECT _X.cw_eid, _Y.cw_eid +FROM cw_CWEType AS _X, cw_RQLExpression AS _Y +WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''), + + ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N, X is IN (Basket, Folder, Tag);', + '''SELECT (MAX(T1.C0) + MIN(T1.C0)), T1.C1 FROM (SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_Basket AS _X +UNION ALL +SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_Folder AS _X +UNION ALL +SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_Tag AS _X) AS T1 +GROUP BY T1.C1'''), + + ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X data_name N, X data D, X data_format DF;', + '''SELECT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))), _X.cw_data_name +FROM cw_File AS _X +GROUP BY _X.cw_data_name,_X.cw_data_format +ORDER BY 1,2,_X.cw_data_format'''), + + # ambiguity in EXISTS() -> should union the sub-query + ('Any T WHERE T is Tag, NOT T name in ("t1", "t2"), EXISTS(T tags X, X is IN (CWUser, CWGroup))', + '''SELECT _T.cw_eid +FROM cw_Tag AS _T +WHERE NOT (_T.cw_name IN(t1, t2)) AND EXISTS(SELECT 1 FROM cw_CWGroup AS _X, tags_relation AS rel_tags0 WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X, tags_relation AS rel_tags1 WHERE rel_tags1.eid_from=_T.cw_eid AND rel_tags1.eid_to=_X.cw_eid)'''), + + # must not use a relation in EXISTS scope to inline a variable + ('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)', + '''SELECT _U.cw_eid +FROM cw_CWUser AS _U +WHERE _U.cw_eid IN(1, 2) AND EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_to=_U.cw_eid)'''), + + ('Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)', + '''SELECT _U.cw_eid +FROM cw_CWUser AS _U +WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE _U.cw_eid IN(1, 2) AND rel_owned_by0.eid_to=_U.cw_eid)'''), + + ('Any COUNT(U) WHERE EXISTS (P owned_by U, P is IN (Note, Affaire))', + '''SELECT COUNT(_U.cw_eid) +FROM cw_CWUser AS _U +WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=_U.cw_eid)'''), + + ('Any MAX(X)', + '''SELECT MAX(_X.eid) +FROM entities AS _X'''), + + ('Any MAX(X) WHERE X is Note', + '''SELECT MAX(_X.cw_eid) +FROM cw_Note AS _X'''), + + ('Any X WHERE X eid > 12', + '''SELECT _X.eid +FROM entities AS _X +WHERE _X.eid>12'''), + + ('Any X WHERE X eid > 12, X is Note', + """SELECT _X.eid +FROM entities AS _X +WHERE _X.type='Note' AND _X.eid>12"""), + + ('Any X, T WHERE X eid > 12, X title T, X is IN (Bookmark, Card)', + """SELECT _X.cw_eid, _X.cw_title +FROM cw_Bookmark AS _X +WHERE _X.cw_eid>12 +UNION ALL +SELECT _X.cw_eid, _X.cw_title +FROM cw_Card AS _X +WHERE _X.cw_eid>12"""), + + ('Any X', + '''SELECT _X.eid +FROM entities AS _X'''), + + ('Any X GROUPBY X WHERE X eid 12', + '''SELECT 12'''), + + ('Any X GROUPBY X ORDERBY Y WHERE X eid 12, X login Y', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE _X.cw_eid=12 +GROUP BY _X.cw_eid,_X.cw_login +ORDER BY 
_X.cw_login'''), + + ('Any U,COUNT(X) GROUPBY U WHERE U eid 12, X owned_by U HAVING COUNT(X) > 10', + '''SELECT rel_owned_by0.eid_to, COUNT(rel_owned_by0.eid_from) +FROM owned_by_relation AS rel_owned_by0 +WHERE rel_owned_by0.eid_to=12 +GROUP BY rel_owned_by0.eid_to +HAVING COUNT(rel_owned_by0.eid_from)>10'''), + + + ("Any X WHERE X eid 0, X test TRUE", + '''SELECT _X.cw_eid +FROM cw_Personne AS _X +WHERE _X.cw_eid=0 AND _X.cw_test=True'''), + + ('Any 1 WHERE X in_group G, X is CWUser', + '''SELECT 1 +FROM in_group_relation AS rel_in_group0'''), + + ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"', + '''SELECT _X.cw_eid +FROM cw_CWEType AS _X +WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''), + ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"', + '''SELECT _X.cw_eid +FROM cw_CWEType AS _X +WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''), + + ('DISTINCT Any X, SUM(C) GROUPBY X ORDERBY SUM(C) DESC WHERE H todo_by X, H duration C', + '''SELECT DISTINCT rel_todo_by0.eid_to, SUM(_H.cw_duration) +FROM cw_Affaire AS _H, todo_by_relation AS rel_todo_by0 +WHERE rel_todo_by0.eid_from=_H.cw_eid +GROUP BY rel_todo_by0.eid_to +ORDER BY 2 DESC'''), + + ('Any R2 WHERE R2 concerne R, R eid RE, R2 eid > RE', + '''SELECT _R2.eid +FROM concerne_relation AS rel_concerne0, entities AS _R2 +WHERE _R2.eid=rel_concerne0.eid_from AND _R2.eid>rel_concerne0.eid_to'''), + + ('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y', + '''SELECT _X.cw_eid +FROM cw_Note AS _X +WHERE _X.cw_eid IN(999998, 999999) AND NOT (EXISTS(SELECT 1 FROM cw_source_relation AS rel_cw_source0 WHERE rel_cw_source0.eid_from=_X.cw_eid))'''), + + # Test for https://www.cubicweb.org/ticket/5503548 + ('''Any X + WHERE X is CWSourceSchemaConfig, + EXISTS(X created_by U, U login L), + X cw_schema X_CW_SCHEMA, + X owned_by X_OWNED_BY? 
+ ''', '''SELECT _X.cw_eid +FROM cw_CWSourceSchemaConfig AS _X LEFT OUTER JOIN owned_by_relation AS rel_owned_by1 ON (rel_owned_by1.eid_from=_X.cw_eid) +WHERE EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0, cw_CWUser AS _U WHERE rel_created_by0.eid_from=_X.cw_eid AND rel_created_by0.eid_to=_U.cw_eid) AND _X.cw_cw_schema IS NOT NULL +''') + ] + +ADVANCED_WITH_GROUP_CONCAT = [ + ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN, X is CWGroup", + '''SELECT _X.cw_eid, GROUP_CONCAT(_T.cw_name) +FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid +GROUP BY _X.cw_eid,_X.cw_name +ORDER BY _X.cw_name'''), + + ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN", + '''SELECT T1.C0, GROUP_CONCAT(T1.C1) FROM (SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2 +FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid +UNION ALL +SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2 +FROM cw_State AS _X, cw_Tag AS _T, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid +UNION ALL +SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2 +FROM cw_Tag AS _T, cw_Tag AS _X, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid) AS T1 +GROUP BY T1.C0,T1.C2 +ORDER BY T1.C2'''), + +] + +ADVANCED_WITH_LIMIT_OR_ORDERBY = [ + ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))', + '''SELECT COUNT(rel_concerne0.eid_from), _C.cw_nom +FROM concerne_relation AS rel_concerne0, cw_Societe AS _C +WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM cw_Card AS _N, documented_by_relation AS rel_documented_by2 WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published))) +GROUP BY _C.cw_nom +ORDER BY 1 DESC +LIMIT 10'''), + ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI', + '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1 +FROM cw_Note AS _S +WHERE _S.cw_ecrit_par IS NULL +ORDER BY 2) AS T1'''), + + ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', + '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 +FROM cw_File AS _X +GROUP BY _X.cw_data_name,_X.cw_data_format +ORDER BY 2,3) AS T1 +'''), + + ('DISTINCT Any X ORDERBY stockproc(X) WHERE U login X', + '''SELECT T1.C0 FROM (SELECT DISTINCT _U.cw_login AS C0, STOCKPROC(_U.cw_login) AS C1 +FROM cw_CWUser AS _U +ORDER BY 2) AS T1'''), + + ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y', + '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _X.cw_login AS C1 +FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_CWUser AS _X +WHERE rel_bookmarked_by0.eid_to=_X.cw_eid +ORDER BY 2) AS T1'''), + + ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN', + '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 +FROM cw_Affaire AS _X, cw_State AS _S +WHERE 
_X.cw_in_state=_S.cw_eid +UNION +SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 +FROM cw_CWUser AS _X, cw_State AS _S +WHERE _X.cw_in_state=_S.cw_eid +UNION +SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 +FROM cw_Note AS _X, cw_State AS _S +WHERE _X.cw_in_state=_S.cw_eid +ORDER BY 2) AS T1'''), + + ('Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT S use_email O, S eid 1, O is EmailAddress, O address AA, O alias AB, O modification_date AC, ' + 'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2', + '''SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date +FROM cw_EmailAddress AS _O +WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests)) +ORDER BY 4 DESC'''), + + + ] + +MULTIPLE_SEL = [ + ("DISTINCT Any X,Y where P is Personne, P nom X , P prenom Y;", + '''SELECT DISTINCT _P.cw_nom, _P.cw_prenom +FROM cw_Personne AS _P'''), + ("Any X,Y where P is Personne, P nom X , P prenom Y, not P nom NULL;", + '''SELECT _P.cw_nom, _P.cw_prenom +FROM cw_Personne AS _P +WHERE NOT (_P.cw_nom IS NULL)'''), + ("Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE", + '''SELECT _X.cw_eid, _Y.cw_eid +FROM cw_Personne AS _X, cw_Personne AS _Y +WHERE _Y.cw_nom=_X.cw_nom AND NOT (_Y.cw_eid=_X.cw_eid)'''), + + ('Any X,Y WHERE X is Personne, Y is Personne, X nom XD, Y nom XD, X eid Z, Y eid > Z', + '''SELECT _X.cw_eid, _Y.cw_eid +FROM cw_Personne AS _X, cw_Personne AS _Y +WHERE _Y.cw_nom=_X.cw_nom AND _Y.cw_eid>_X.cw_eid'''), + ] + + +NEGATIONS = [ + + ("Personne X WHERE NOT X evaluee Y;", + '''SELECT _X.cw_eid +FROM cw_Personne AS _X +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_X.cw_eid))'''), + + ("Note N WHERE NOT X evaluee N, X eid 0", + '''SELECT _N.cw_eid +FROM cw_Note AS _N +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=0 AND rel_evaluee0.eid_to=_N.cw_eid))'''), + + ('Any X WHERE NOT X travaille S, X is Personne', + '''SELECT _X.cw_eid +FROM cw_Personne AS _X +WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid))'''), + + ("Personne P where NOT P concerne A", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid))'''), + + ("Affaire A where not P concerne A", + '''SELECT _A.cw_eid +FROM cw_Affaire AS _A +WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_to=_A.cw_eid))'''), + ("Personne P where not P concerne A, A sujet ~= 'TEST%'", + '''SELECT _P.cw_eid +FROM cw_Affaire AS _A, cw_Personne AS _P +WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid AND rel_concerne0.eid_to=_A.cw_eid)) AND _A.cw_sujet ILIKE TEST%'''), + + ('Any S WHERE NOT T eid 28258, T tags S', + '''SELECT rel_tags0.eid_to +FROM tags_relation AS rel_tags0 +WHERE NOT (rel_tags0.eid_from=28258)'''), + + ('Any S WHERE T is Tag, T name TN, NOT T eid 28258, T tags S, S name SN', + '''SELECT _S.cw_eid +FROM 
cw_CWGroup AS _S, cw_Tag AS _T, tags_relation AS rel_tags0 +WHERE NOT (_T.cw_eid=28258) AND rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_S.cw_eid +UNION ALL +SELECT _S.cw_eid +FROM cw_State AS _S, cw_Tag AS _T, tags_relation AS rel_tags0 +WHERE NOT (_T.cw_eid=28258) AND rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_S.cw_eid +UNION ALL +SELECT _S.cw_eid +FROM cw_Tag AS _S, cw_Tag AS _T, tags_relation AS rel_tags0 +WHERE NOT (_T.cw_eid=28258) AND rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_S.cw_eid'''), + + ('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid 6', + '''SELECT 5, rel_created_by0.eid_to +FROM created_by_relation AS rel_created_by0 +WHERE rel_created_by0.eid_from=5 AND NOT (rel_created_by0.eid_to=6)'''), + + ('Note X WHERE NOT Y evaluee X', + '''SELECT _X.cw_eid +FROM cw_Note AS _X +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_to=_X.cw_eid))'''), + + ('Any Y WHERE NOT Y evaluee X', + '''SELECT _Y.cw_eid +FROM cw_CWUser AS _Y +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) +UNION ALL +SELECT _Y.cw_eid +FROM cw_Division AS _Y +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) +UNION ALL +SELECT _Y.cw_eid +FROM cw_Personne AS _Y +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) +UNION ALL +SELECT _Y.cw_eid +FROM cw_Societe AS _Y +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) +UNION ALL +SELECT _Y.cw_eid +FROM cw_SubDivision AS _Y +WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))'''), + + ('Any X WHERE NOT Y evaluee X, Y is CWUser', + '''SELECT _X.cw_eid +FROM cw_Note AS _X +WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _Y, evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid AND rel_evaluee0.eid_to=_X.cw_eid))'''), + + ('Any X,RT WHERE X relation_type RT, NOT X is CWAttribute', + '''SELECT _X.cw_eid, _X.cw_relation_type +FROM cw_CWRelation AS _X +WHERE _X.cw_relation_type IS NOT NULL'''), + + ('Any K,V WHERE P is CWProperty, P pkey K, P value V, NOT P for_user U', + '''SELECT _P.cw_pkey, _P.cw_value +FROM cw_CWProperty AS _P +WHERE _P.cw_for_user IS NULL'''), + + ('Any S WHERE NOT X in_state S, X is IN(Affaire, CWUser)', + '''SELECT _S.cw_eid +FROM cw_State AS _S +WHERE NOT (EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid))'''), + + ('Any S WHERE NOT(X in_state S, S name "somename"), X is CWUser', + '''SELECT _S.cw_eid +FROM cw_State AS _S +WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid AND _S.cw_name=somename))'''), + ] + +HAS_TEXT_LG_INDEXER = [ + ('Any X WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + ('Personne X WHERE X has_text "toto tata"', + """SELECT DISTINCT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.eid AND _X.type='Personne'"""), + ('Personne X WHERE X has_text %(text)s', + """SELECT DISTINCT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo')) AND appears0.uid=_X.eid AND 
_X.type='Personne' +"""), + ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION +SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu""") + ] + + + +# XXXFIXME fail +# ('Any X,RT WHERE X relation_type RT?, NOT X is CWAttribute', +# '''SELECT _X.cw_eid, _X.cw_relation_type +# FROM cw_CWRelation AS _X'''), + + +OUTER_JOIN = [ + + ('Any U,G WHERE U login L, G name L?, G is CWGroup', + '''SELECT _U.cw_eid, _G.cw_eid +FROM cw_CWUser AS _U LEFT OUTER JOIN cw_CWGroup AS _G ON (_G.cw_name=_U.cw_login)'''), + + ('Any X,S WHERE X travaille S?', + '''SELECT _X.cw_eid, rel_travaille0.eid_to +FROM cw_Personne AS _X LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=_X.cw_eid)''' + ), + ('Any S,X WHERE X? travaille S, S is Societe', + '''SELECT _S.cw_eid, rel_travaille0.eid_from +FROM cw_Societe AS _S LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_to=_S.cw_eid)''' + ), + + ('Any N,A WHERE N inline1 A?', + '''SELECT _N.cw_eid, _N.cw_inline1 +FROM cw_Note AS _N'''), + + ('Any SN WHERE X from_state S?, S name SN', + '''SELECT _S.cw_name +FROM cw_TrInfo AS _X LEFT OUTER JOIN cw_State AS _S ON (_X.cw_from_state=_S.cw_eid)''' + ), + + ('Any A,N WHERE N? inline1 A', + '''SELECT _A.cw_eid, _N.cw_eid +FROM cw_Affaire AS _A LEFT OUTER JOIN cw_Note AS _N ON (_N.cw_inline1=_A.cw_eid)''' + ), + + ('Any A,B,C,D,E,F,G WHERE A eid 12,A creation_date B,A modification_date C,A comment D,A from_state E?,A to_state F?,A wf_info_for G?', + '''SELECT _A.cw_eid, _A.cw_creation_date, _A.cw_modification_date, _A.cw_comment, _A.cw_from_state, _A.cw_to_state, _A.cw_wf_info_for +FROM cw_TrInfo AS _A +WHERE _A.cw_eid=12'''), + + ('Any FS,TS,C,D,U ORDERBY D DESC WHERE WF wf_info_for X,WF from_state FS?, WF to_state TS, WF comment C,WF creation_date D, WF owned_by U, X eid 1', + '''SELECT _WF.cw_from_state, _WF.cw_to_state, _WF.cw_comment, _WF.cw_creation_date, rel_owned_by0.eid_to +FROM cw_TrInfo AS _WF, owned_by_relation AS rel_owned_by0 +WHERE _WF.cw_wf_info_for=1 AND _WF.cw_to_state IS NOT NULL AND rel_owned_by0.eid_from=_WF.cw_eid +ORDER BY 4 DESC'''), + + ('Any X WHERE X is Affaire, S is Societe, EXISTS(X owned_by U OR (X concerne S?, S owned_by U))', + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X +WHERE EXISTS(SELECT 1 FROM cw_CWUser AS _U, owned_by_relation AS rel_owned_by0, owned_by_relation AS rel_owned_by2, cw_Affaire AS _A LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_A.cw_eid) LEFT OUTER JOIN cw_Societe AS _S ON (rel_concerne1.eid_to=_S.cw_eid) WHERE ((rel_owned_by0.eid_from=_A.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid) OR (rel_owned_by2.eid_from=_S.cw_eid AND rel_owned_by2.eid_to=_U.cw_eid)) AND _X.cw_eid=_A.cw_eid)'''), + + ('Any C,M WHERE C travaille G?, G evaluee M?, G is Societe', + '''SELECT _C.cw_eid, rel_evaluee1.eid_to +FROM cw_Personne AS _C LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=_C.cw_eid) LEFT OUTER JOIN cw_Societe AS _G ON (rel_travaille0.eid_to=_G.cw_eid) LEFT OUTER JOIN evaluee_relation AS rel_evaluee1 ON (rel_evaluee1.eid_from=_G.cw_eid)''' + ), + + ('Any A,C WHERE A documented_by C?, (C is 
NULL) OR (EXISTS(C require_permission F, ' + 'F name "read", F require_group E, U in_group E)), U eid 1', + '''SELECT _A.cw_eid, rel_documented_by0.eid_to +FROM cw_Affaire AS _A LEFT OUTER JOIN documented_by_relation AS rel_documented_by0 ON (rel_documented_by0.eid_from=_A.cw_eid) +WHERE ((rel_documented_by0.eid_to IS NULL) OR (EXISTS(SELECT 1 FROM cw_CWPermission AS _F, in_group_relation AS rel_in_group3, require_group_relation AS rel_require_group2, require_permission_relation AS rel_require_permission1 WHERE rel_documented_by0.eid_to=rel_require_permission1.eid_from AND rel_require_permission1.eid_to=_F.cw_eid AND _F.cw_name=read AND rel_require_group2.eid_from=_F.cw_eid AND rel_in_group3.eid_to=rel_require_group2.eid_to AND rel_in_group3.eid_from=1)))'''), + + ("Any X WHERE X eid 12, P? connait X", + '''SELECT _X.cw_eid +FROM cw_Personne AS _X LEFT OUTER JOIN connait_relation AS rel_connait0 ON (rel_connait0.eid_to=_X.cw_eid) +WHERE _X.cw_eid=12''' + ), + ("Any P WHERE X eid 12, P? concerne X, X todo_by S", + '''SELECT rel_concerne1.eid_from +FROM todo_by_relation AS rel_todo_by0 LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_to=12) +WHERE rel_todo_by0.eid_from=12''' + ), + + ('Any GN, TN ORDERBY GN WHERE T tags G?, T name TN, G name GN', + ''' +SELECT _T0.C1, _T.cw_name +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN (SELECT _G.cw_eid AS C0, _G.cw_name AS C1 +FROM cw_CWGroup AS _G +UNION ALL +SELECT _G.cw_eid AS C0, _G.cw_name AS C1 +FROM cw_State AS _G +UNION ALL +SELECT _G.cw_eid AS C0, _G.cw_name AS C1 +FROM cw_Tag AS _G) AS _T0 ON (rel_tags0.eid_to=_T0.C0) +ORDER BY 1'''), + + + # optional variable with additional restriction + ('Any T,G WHERE T tags G?, G name "hop", G is CWGroup', + '''SELECT _T.cw_eid, _G.cw_eid +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWGroup AS _G ON (rel_tags0.eid_to=_G.cw_eid AND _G.cw_name=hop)'''), + + # optional variable with additional invariant restriction + ('Any T,G WHERE T tags G?, G eid 12', + '''SELECT _T.cw_eid, rel_tags0.eid_to +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=12)'''), + + # optional variable with additional restriction appearing before the relation + ('Any T,G WHERE G name "hop", T tags G?, G is CWGroup', + '''SELECT _T.cw_eid, _G.cw_eid +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWGroup AS _G ON (rel_tags0.eid_to=_G.cw_eid AND _G.cw_name=hop)'''), + + # optional variable with additional restriction on inlined relation + # XXX the expected result should be as the query below. So what, raise BadRQLQuery ? 
+ ('Any T,G,S WHERE T tags G?, G in_state S, S name "hop", G is CWUser', + '''SELECT _T.cw_eid, _G.cw_eid, _S.cw_eid +FROM cw_State AS _S, cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWUser AS _G ON (rel_tags0.eid_to=_G.cw_eid) +WHERE _G.cw_in_state=_S.cw_eid AND _S.cw_name=hop +'''), + + # optional variable with additional invariant restriction on an inlined relation + ('Any T,G,S WHERE T tags G, G in_state S?, S eid 1, G is CWUser', + '''SELECT rel_tags0.eid_from, _G.cw_eid, _G.cw_in_state +FROM cw_CWUser AS _G, tags_relation AS rel_tags0 +WHERE rel_tags0.eid_to=_G.cw_eid AND (_G.cw_in_state=1 OR _G.cw_in_state IS NULL)'''), + + # two optional variables with additional invariant restriction on an inlined relation + ('Any T,G,S WHERE T tags G?, G in_state S?, S eid 1, G is CWUser', + '''SELECT _T.cw_eid, _G.cw_eid, _G.cw_in_state +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWUser AS _G ON (rel_tags0.eid_to=_G.cw_eid AND (_G.cw_in_state=1 OR _G.cw_in_state IS NULL))'''), + + # two optional variables with additional restriction on an inlined relation + ('Any T,G,S WHERE T tags G?, G in_state S?, S name "hop", G is CWUser', + '''SELECT _T.cw_eid, _G.cw_eid, _S.cw_eid +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWUser AS _G ON (rel_tags0.eid_to=_G.cw_eid) LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop)'''), + + # two optional variables with additional restriction on an ambiguous inlined relation + ('Any T,G,S WHERE T tags G?, G in_state S?, S name "hop"', + ''' +SELECT _T.cw_eid, _T0.C0, _T0.C1 +FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN (SELECT _G.cw_eid AS C0, _S.cw_eid AS C1 +FROM cw_Affaire AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop) +UNION ALL +SELECT _G.cw_eid AS C0, _S.cw_eid AS C1 +FROM cw_CWUser AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop) +UNION ALL +SELECT _G.cw_eid AS C0, _S.cw_eid AS C1 +FROM cw_Note AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop)) AS _T0 ON (rel_tags0.eid_to=_T0.C0)'''), + + ('Any O,AD WHERE NOT S inline1 O, S eid 123, O todo_by AD?', + '''SELECT _O.cw_eid, rel_todo_by0.eid_to +FROM cw_Note AS _S, cw_Affaire AS _O LEFT OUTER JOIN todo_by_relation AS rel_todo_by0 ON (rel_todo_by0.eid_from=_O.cw_eid) +WHERE (_S.cw_inline1 IS NULL OR _S.cw_inline1!=_O.cw_eid) AND _S.cw_eid=123'''), + + ('Any X,AE WHERE X multisource_inlined_rel S?, S ambiguous_inlined A, A modification_date AE', + '''SELECT _X.cw_eid, _T0.C2 +FROM cw_Card AS _X LEFT OUTER JOIN (SELECT _S.cw_eid AS C0, _A.cw_eid AS C1, _A.cw_modification_date AS C2 +FROM cw_Affaire AS _S, cw_CWUser AS _A +WHERE _S.cw_ambiguous_inlined=_A.cw_eid +UNION ALL +SELECT _S.cw_eid AS C0, _A.cw_eid AS C1, _A.cw_modification_date AS C2 +FROM cw_CWUser AS _A, cw_Note AS _S +WHERE _S.cw_ambiguous_inlined=_A.cw_eid) AS _T0 ON (_X.cw_multisource_inlined_rel=_T0.C0) +UNION ALL +SELECT _X.cw_eid, _T0.C2 +FROM cw_Note AS _X LEFT OUTER JOIN (SELECT _S.cw_eid AS C0, _A.cw_eid AS C1, _A.cw_modification_date AS C2 +FROM cw_Affaire AS _S, cw_CWUser AS _A +WHERE _S.cw_ambiguous_inlined=_A.cw_eid +UNION ALL +SELECT _S.cw_eid AS C0, _A.cw_eid AS C1, _A.cw_modification_date AS C2 +FROM cw_CWUser AS _A, cw_Note AS _S +WHERE 
_S.cw_ambiguous_inlined=_A.cw_eid) AS _T0 ON (_X.cw_multisource_inlined_rel=_T0.C0)''' + ), + + ('Any X,T,OT WHERE X tags T, OT? tags X, X is Tag, X eid 123', + '''SELECT rel_tags0.eid_from, rel_tags0.eid_to, rel_tags1.eid_from +FROM tags_relation AS rel_tags0 LEFT OUTER JOIN tags_relation AS rel_tags1 ON (rel_tags1.eid_to=123) +WHERE rel_tags0.eid_from=123'''), + + ('Any CASE, CALIBCFG, CFG ' + 'WHERE CASE eid 1, CFG ecrit_par CASE, CALIBCFG? ecrit_par CASE', + '''SELECT _CFG.cw_ecrit_par, _CALIBCFG.cw_eid, _CFG.cw_eid +FROM cw_Note AS _CFG LEFT OUTER JOIN cw_Note AS _CALIBCFG ON (_CALIBCFG.cw_ecrit_par=1) +WHERE _CFG.cw_ecrit_par=1'''), + + ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)=UPPER(GL)?', + '''SELECT _U.cw_eid, _G.cw_eid +FROM cw_CWUser AS _U LEFT OUTER JOIN cw_CWGroup AS _G ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''), + + ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)?=UPPER(GL)', + '''SELECT _U.cw_eid, _G.cw_eid +FROM cw_CWGroup AS _G LEFT OUTER JOIN cw_CWUser AS _U ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''), + + ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)?=UPPER(GL)?', + '''SELECT _U.cw_eid, _G.cw_eid +FROM cw_CWUser AS _U FULL OUTER JOIN cw_CWGroup AS _G ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''), + + ('Any H, COUNT(X), SUM(XCE)/1000 ' + 'WHERE X type "0", X date XSCT, X para XCE, X? ecrit_par F, F eid 999999, F is Personne, ' + 'DH is Affaire, DH ref H ' + 'HAVING XSCT?=H', + '''SELECT _DH.cw_ref, COUNT(_X.cw_eid), (SUM(_X.cw_para) / 1000) +FROM cw_Affaire AS _DH LEFT OUTER JOIN cw_Note AS _X ON (_X.cw_date=_DH.cw_ref AND _X.cw_type=0 AND _X.cw_ecrit_par=999999)'''), + + ('Any C WHERE X ecrit_par C?, X? inline1 F, F eid 1, X type XT, Z is Personne, Z nom ZN HAVING ZN=XT?', + '''SELECT _X.cw_ecrit_par +FROM cw_Personne AS _Z LEFT OUTER JOIN cw_Note AS _X ON (_Z.cw_nom=_X.cw_type AND _X.cw_inline1=1)'''), + ] + +VIRTUAL_VARS = [ + + ('Any X WHERE X is CWUser, X creation_date > D1, Y creation_date D1, Y login "SWEB09"', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X, cw_CWUser AS _Y +WHERE _X.cw_creation_date>_Y.cw_creation_date AND _Y.cw_login=SWEB09'''), + + ('Any X WHERE X is CWUser, Y creation_date D1, Y login "SWEB09", X creation_date > D1', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X, cw_CWUser AS _Y +WHERE _Y.cw_login=SWEB09 AND _X.cw_creation_date>_Y.cw_creation_date'''), + + ('Personne P WHERE P travaille S, S tel T, S fax T, S is Societe', + '''SELECT rel_travaille0.eid_from +FROM cw_Societe AS _S, travaille_relation AS rel_travaille0 +WHERE rel_travaille0.eid_to=_S.cw_eid AND _S.cw_tel=_S.cw_fax'''), + + ("Personne P where X eid 0, X creation_date D, P tzdatenaiss < D, X is Affaire", + '''SELECT _P.cw_eid +FROM cw_Affaire AS _X, cw_Personne AS _P +WHERE _X.cw_eid=0 AND _P.cw_tzdatenaiss<_X.cw_creation_date'''), + + ("Any N,T WHERE N is Note, N type T;", + '''SELECT _N.cw_eid, _N.cw_type +FROM cw_Note AS _N'''), + + ("Personne P where X is Personne, X tel T, X fax F, P fax T+F", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_Personne AS _X +WHERE _P.cw_fax=(_X.cw_tel + _X.cw_fax)'''), + + ("Personne P where X tel T, X fax F, P fax IN (T,F)", + '''SELECT _P.cw_eid +FROM cw_Division AS _X, cw_Personne AS _P +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax) +UNION ALL +SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_Personne AS _X +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax) +UNION ALL +SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_Societe AS _X +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax) +UNION ALL +SELECT 
_P.cw_eid +FROM cw_Personne AS _P, cw_SubDivision AS _X +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax)'''), + + ("Personne P where X tel T, X fax F, P fax IN (T,F,0832542332)", + '''SELECT _P.cw_eid +FROM cw_Division AS _X, cw_Personne AS _P +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332) +UNION ALL +SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_Personne AS _X +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332) +UNION ALL +SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_Societe AS _X +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332) +UNION ALL +SELECT _P.cw_eid +FROM cw_Personne AS _P, cw_SubDivision AS _X +WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332)'''), + ] + +FUNCS = [ + ("Any COUNT(P) WHERE P is Personne", + '''SELECT COUNT(_P.cw_eid) +FROM cw_Personne AS _P'''), + ] + +INLINE = [ + + ('Any P WHERE N eid 1, N ecrit_par P, NOT P owned_by P2', + '''SELECT _N.cw_ecrit_par +FROM cw_Note AS _N +WHERE _N.cw_eid=1 AND _N.cw_ecrit_par IS NOT NULL AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE _N.cw_ecrit_par=rel_owned_by0.eid_from))'''), + + ('Any P, L WHERE N ecrit_par P, P nom L, N eid 0', + '''SELECT _P.cw_eid, _P.cw_nom +FROM cw_Note AS _N, cw_Personne AS _P +WHERE _N.cw_ecrit_par=_P.cw_eid AND _N.cw_eid=0'''), + + ('Any N WHERE NOT N ecrit_par P, P nom "toto"', + '''SELECT _N.cw_eid +FROM cw_Note AS _N, cw_Personne AS _P +WHERE (_N.cw_ecrit_par IS NULL OR _N.cw_ecrit_par!=_P.cw_eid) AND _P.cw_nom=toto'''), + + ('Any P WHERE NOT N ecrit_par P, P nom "toto"', + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE NOT (EXISTS(SELECT 1 FROM cw_Note AS _N WHERE _N.cw_ecrit_par=_P.cw_eid)) AND _P.cw_nom=toto'''), + + ('Any P WHERE N ecrit_par P, N eid 0', + '''SELECT _N.cw_ecrit_par +FROM cw_Note AS _N +WHERE _N.cw_ecrit_par IS NOT NULL AND _N.cw_eid=0'''), + + ('Any P WHERE N ecrit_par P, P is Personne, N eid 0', + '''SELECT _P.cw_eid +FROM cw_Note AS _N, cw_Personne AS _P +WHERE _N.cw_ecrit_par=_P.cw_eid AND _N.cw_eid=0'''), + + ('Any P WHERE NOT N ecrit_par P, P is Personne, N eid 512', + '''SELECT _P.cw_eid +FROM cw_Note AS _N, cw_Personne AS _P +WHERE (_N.cw_ecrit_par IS NULL OR _N.cw_ecrit_par!=_P.cw_eid) AND _N.cw_eid=512'''), + + ('Any S,ES,T WHERE S state_of ET, ET name "CWUser", ES allowed_transition T, T destination_state S', + # XXX "_T.cw_destination_state IS NOT NULL" could be avoided here but it's not worth it + '''SELECT _T.cw_destination_state, rel_allowed_transition1.eid_from, _T.cw_eid +FROM allowed_transition_relation AS rel_allowed_transition1, cw_Transition AS _T, cw_Workflow AS _ET, state_of_relation AS rel_state_of0 +WHERE _T.cw_destination_state=rel_state_of0.eid_from AND rel_state_of0.eid_to=_ET.cw_eid AND _ET.cw_name=CWUser AND rel_allowed_transition1.eid_to=_T.cw_eid AND _T.cw_destination_state IS NOT NULL'''), + + ('Any O WHERE S eid 0, S in_state O', + '''SELECT _S.cw_in_state +FROM cw_Affaire AS _S +WHERE _S.cw_eid=0 AND _S.cw_in_state IS NOT NULL +UNION ALL +SELECT _S.cw_in_state +FROM cw_CWUser AS _S +WHERE _S.cw_eid=0 AND _S.cw_in_state IS NOT NULL +UNION ALL +SELECT _S.cw_in_state +FROM cw_Note AS _S +WHERE _S.cw_eid=0 AND _S.cw_in_state IS NOT NULL'''), + + ('Any X WHERE NOT Y for_user X, X eid 123', + '''SELECT 123 +WHERE NOT (EXISTS(SELECT 1 FROM cw_CWProperty AS _Y WHERE _Y.cw_for_user=123))'''), + + ('DISTINCT Any X WHERE X from_entity OET, NOT X from_entity NET, OET name "Image", NET eid 1', + '''SELECT DISTINCT _X.cw_eid +FROM cw_CWAttribute AS _X, cw_CWEType AS _OET +WHERE _X.cw_from_entity=_OET.cw_eid AND 
(_X.cw_from_entity IS NULL OR _X.cw_from_entity!=1) AND _OET.cw_name=Image +UNION +SELECT DISTINCT _X.cw_eid +FROM cw_CWEType AS _OET, cw_CWRelation AS _X +WHERE _X.cw_from_entity=_OET.cw_eid AND (_X.cw_from_entity IS NULL OR _X.cw_from_entity!=1) AND _OET.cw_name=Image'''), + + ] + +INTERSECT = [ + ('Any SN WHERE NOT X in_state S, S name SN', + '''SELECT _S.cw_name +FROM cw_State AS _S +WHERE NOT (EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_Note AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid))'''), + + ('Any PN WHERE NOT X travaille S, X nom PN, S is IN(Division, Societe)', + '''SELECT _X.cw_nom +FROM cw_Personne AS _X +WHERE NOT (EXISTS(SELECT 1 FROM cw_Division AS _S, travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid UNION SELECT 1 FROM cw_Societe AS _S, travaille_relation AS rel_travaille1 WHERE rel_travaille1.eid_from=_X.cw_eid AND rel_travaille1.eid_to=_S.cw_eid))'''), + + ('Any PN WHERE NOT X travaille S, S nom PN, S is IN(Division, Societe)', + '''SELECT _S.cw_nom +FROM cw_Division AS _S +WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid)) +UNION ALL +SELECT _S.cw_nom +FROM cw_Societe AS _S +WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid))'''), + + ('Personne X WHERE NOT X travaille S, S nom "chouette"', + '''SELECT _X.cw_eid +FROM cw_Division AS _S, cw_Personne AS _X +WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette +UNION ALL +SELECT _X.cw_eid +FROM cw_Personne AS _X, cw_Societe AS _S +WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette +UNION ALL +SELECT _X.cw_eid +FROM cw_Personne AS _X, cw_SubDivision AS _S +WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette'''), + + ('Any X WHERE X is ET, ET eid 2', + '''SELECT rel_is0.eid_from +FROM is_relation AS rel_is0 +WHERE rel_is0.eid_to=2'''), + + ] +class CWRQLTC(RQLGeneratorTC): + backend = 'sqlite' + + def setUp(self): + self.__class__.schema = schema + super(CWRQLTC, self).setUp() + + def test_nonregr_sol(self): + delete = self.rqlhelper.parse( + 'DELETE X read_permission READ_PERMISSIONSUBJECT,X add_permission ADD_PERMISSIONSUBJECT,' + 'X in_basket IN_BASKETSUBJECT,X delete_permission DELETE_PERMISSIONSUBJECT,' + 'X update_permission UPDATE_PERMISSIONSUBJECT,' + 'X created_by CREATED_BYSUBJECT,X is ISSUBJECT,X is_instance_of IS_INSTANCE_OFSUBJECT,' + 'X owned_by OWNED_BYSUBJECT,X specializes SPECIALIZESSUBJECT,ISOBJECT is X,' + 'SPECIALIZESOBJECT specializes X,IS_INSTANCE_OFOBJECT is_instance_of X,' + 'TO_ENTITYOBJECT to_entity X,FROM_ENTITYOBJECT from_entity X ' + 'WHERE X is CWEType') + self.rqlhelper.compute_solutions(delete) + def var_sols(var): + s = set() + for sol in delete.solutions: + s.add(sol.get(var)) + return s + self.assertEqual(var_sols('FROM_ENTITYOBJECT'), set(('CWAttribute', 'CWRelation'))) + self.assertEqual(var_sols('FROM_ENTITYOBJECT'), delete.defined_vars['FROM_ENTITYOBJECT'].stinfo['possibletypes']) + self.assertEqual(var_sols('ISOBJECT'), + set(x.type for x in 
self.schema.entities() if not x.final)) + self.assertEqual(var_sols('ISOBJECT'), delete.defined_vars['ISOBJECT'].stinfo['possibletypes']) + + +def strip(text): + return '\n'.join(l.strip() for l in text.strip().splitlines()) + +class PostgresSQLGeneratorTC(RQLGeneratorTC): + backend = 'postgres' + + def setUp(self): + self.__class__.schema = schema + super(PostgresSQLGeneratorTC, self).setUp() + + def _norm_sql(self, sql): + return sql.strip() + + def _check(self, rql, sql, varmap=None, args=None): + if args is None: + args = {'text': 'hip hop momo', 'eid': 12345} + try: + union = self._prepare(rql) + r, nargs, cbs = self.o.generate(union, args, + varmap=varmap) + args.update(nargs) + self.assertMultiLineEqual(strip(r % args), self._norm_sql(sql)) + except Exception as ex: + if 'r' in locals(): + try: + print((r%args).strip()) + except KeyError: + print('strange, missing substitution') + print(r, nargs) + print('!=') + print(sql.strip()) + print('RQL:', rql) + raise + + def _parse(self, rqls): + for rql, sql in rqls: + yield self._check, rql, sql + + def _checkall(self, rql, sql): + if isinstance(rql, tuple): + rql, args = rql + else: + args = None + try: + rqlst = self._prepare(rql) + r, args, cbs = self.o.generate(rqlst, args) + self.assertEqual((r.strip(), args), sql) + except Exception as ex: + print(rql) + if 'r' in locals(): + print(r.strip()) + print('!=') + print(sql[0].strip()) + raise + return + + def test1(self): + self._checkall(('Any count(RDEF) WHERE RDEF relation_type X, X eid %(x)s', {'x': None}), + ("""SELECT COUNT(T1.C0) FROM (SELECT _RDEF.cw_eid AS C0 +FROM cw_CWAttribute AS _RDEF +WHERE _RDEF.cw_relation_type=%(x)s +UNION ALL +SELECT _RDEF.cw_eid AS C0 +FROM cw_CWRelation AS _RDEF +WHERE _RDEF.cw_relation_type=%(x)s) AS T1""", {}), + ) + + def test2(self): + self._checkall(('Any X WHERE C comments X, C eid %(x)s', {'x': None}), + ('''SELECT rel_comments0.eid_to +FROM comments_relation AS rel_comments0 +WHERE rel_comments0.eid_from=%(x)s''', {}) + ) + + def test_cache_1(self): + self._check('Any X WHERE X in_basket B, B eid 12', + '''SELECT rel_in_basket0.eid_from +FROM in_basket_relation AS rel_in_basket0 +WHERE rel_in_basket0.eid_to=12''') + + self._check('Any X WHERE X in_basket B, B eid 12', + '''SELECT rel_in_basket0.eid_from +FROM in_basket_relation AS rel_in_basket0 +WHERE rel_in_basket0.eid_to=12''') + + def test_varmap1(self): + self._check('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', + '''SELECT T00.x, T00.l +FROM T00, cw_CWGroup AS _G, in_group_relation AS rel_in_group0 +WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=_G.cw_eid AND _G.cw_name=users''', + varmap={'X': 'T00.x', 'X.login': 'T00.l'}) + + def test_varmap2(self): + self._check('Any X,L,GN WHERE X is CWUser, X in_group G, X login L, G name GN', + '''SELECT T00.x, T00.l, _G.cw_name +FROM T00, cw_CWGroup AS _G, in_group_relation AS rel_in_group0 +WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=_G.cw_eid''', + varmap={'X': 'T00.x', 'X.login': 'T00.l'}) + + def test_varmap3(self): + self._check('Any %(x)s,D WHERE F data D, F is File', + 'SELECT 728, _TDF0.C0\nFROM _TDF0', + args={'x': 728}, + varmap={'F.data': '_TDF0.C0', 'D': '_TDF0.C0'}) + + def test_is_null_transform(self): + union = self._prepare('Any X WHERE X login %(login)s') + r, args, cbs = self.o.generate(union, {'login': None}) + self.assertMultiLineEqual((r % args).strip(), + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE _X.cw_login IS NULL''') + + def test_today(self): + for t in 
self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X +WHERE DATE(_X.cw_creation_date)=CAST(clock_timestamp() AS DATE)'''), + ("Personne P where not P datenaiss TODAY", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE NOT (DATE(_P.cw_datenaiss)=CAST(clock_timestamp() AS DATE))'''), + ]): + yield t + + def test_date_extraction(self): + self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", + '''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER) +FROM cw_Personne AS _P''') + + def test_weekday_extraction(self): + self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", + '''SELECT (CAST(EXTRACT(DOW from _P.cw_creation_date) AS INTEGER) + 1) +FROM cw_Personne AS _P''') + + def test_substring(self): + self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne", + '''SELECT SUBSTR(_P.cw_nom, 1, 1) +FROM cw_Personne AS _P''') + + def test_cast(self): + self._check("Any CAST(String, P) WHERE P is Personne", + '''SELECT CAST(_P.cw_eid AS text) +FROM cw_Personne AS _P''') + + def test_regexp(self): + self._check("Any X WHERE X login REGEXP '[0-9].*'", + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE _X.cw_login ~ [0-9].* +''') + + def test_parser_parse(self): + for t in self._parse(PARSER): + yield t + + def test_basic_parse(self): + for t in self._parse(BASIC + BASIC_WITH_LIMIT): + yield t + + def test_advanced_parse(self): + for t in self._parse(ADVANCED + ADVANCED_WITH_LIMIT_OR_ORDERBY + ADVANCED_WITH_GROUP_CONCAT): + yield t + + def test_outer_join_parse(self): + for t in self._parse(OUTER_JOIN): + yield t + + def test_virtual_vars_parse(self): + for t in self._parse(VIRTUAL_VARS): + yield t + + def test_multiple_sel_parse(self): + for t in self._parse(MULTIPLE_SEL): + yield t + + def test_functions(self): + for t in self._parse(FUNCS): + yield t + + def test_negation(self): + for t in self._parse(NEGATIONS): + yield t + + def test_intersection(self): + for t in self._parse(INTERSECT): + yield t + + def test_union(self): + for t in self._parse(( + ('(Any N ORDERBY 1 WHERE X name N, X is State)' + ' UNION ' + '(Any NN ORDERBY 1 WHERE XX name NN, XX is Transition)', + '''(SELECT _X.cw_name +FROM cw_State AS _X +ORDER BY 1) +UNION ALL +(SELECT _XX.cw_name +FROM cw_Transition AS _XX +ORDER BY 1)'''), + )): + yield t + + def test_subquery(self): + for t in self._parse(( + + ('Any X,N ' + 'WHERE NOT EXISTS(X owned_by U) ' + 'WITH X,N BEING ' + '((Any X,N WHERE X name N, X is State)' + ' UNION ' + '(Any XX,NN WHERE XX name NN, XX is Transition))', + '''SELECT _T0.C0, _T0.C1 +FROM ((SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_State AS _X) +UNION ALL +(SELECT _XX.cw_eid AS C0, _XX.cw_name AS C1 +FROM cw_Transition AS _XX)) AS _T0 +WHERE NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_T0.C0))'''), + + ('Any N ORDERBY 1 WITH N BEING ' + '((Any N WHERE X name N, X is State)' + ' UNION ' + '(Any NN WHERE XX name NN, XX is Transition))', + '''SELECT _T0.C0 +FROM ((SELECT _X.cw_name AS C0 +FROM cw_State AS _X) +UNION ALL +(SELECT _XX.cw_name AS C0 +FROM cw_Transition AS _XX)) AS _T0 +ORDER BY 1'''), + + ('Any N,NX ORDERBY NX WITH N,NX BEING ' + '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)' + ' UNION ' + '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))', + '''SELECT _T0.C0, _T0.C1 +FROM ((SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 +FROM cw_State AS _X +GROUP BY _X.cw_name +HAVING 
COUNT(_X.cw_eid)>1) +UNION ALL +(SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 +FROM cw_Transition AS _X +GROUP BY _X.cw_name +HAVING COUNT(_X.cw_eid)>1)) AS _T0 +ORDER BY 2'''), + + ('Any N,COUNT(X) GROUPBY N HAVING COUNT(X)>1 ' + 'WITH X, N BEING ((Any X, N WHERE X name N, X is State) UNION ' + ' (Any X, N WHERE X name N, X is Transition))', + '''SELECT _T0.C1, COUNT(_T0.C0) +FROM ((SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_State AS _X) +UNION ALL +(SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_Transition AS _X)) AS _T0 +GROUP BY _T0.C1 +HAVING COUNT(_T0.C0)>1'''), + + ('Any ETN,COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN ' + 'WITH X BEING ((Any X WHERE X is Societe) UNION (Any X WHERE X is Affaire, (EXISTS(X owned_by 1)) OR ((EXISTS(D concerne B?, B owned_by 1, X identity D, B is Note)) OR (EXISTS(F concerne E?, E owned_by 1, E is Societe, X identity F)))))', + '''SELECT _ET.cw_name, COUNT(_T0.C0) +FROM ((SELECT _X.cw_eid AS C0 +FROM cw_Societe AS _X) +UNION ALL +(SELECT _X.cw_eid AS C0 +FROM cw_Affaire AS _X +WHERE ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_owned_by0.eid_to=1)) OR (((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by2, cw_Affaire AS _D LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_D.cw_eid) LEFT OUTER JOIN cw_Note AS _B ON (rel_concerne1.eid_to=_B.cw_eid) WHERE rel_owned_by2.eid_from=_B.cw_eid AND rel_owned_by2.eid_to=1 AND _X.cw_eid=_D.cw_eid)) OR (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by4, cw_Affaire AS _F LEFT OUTER JOIN concerne_relation AS rel_concerne3 ON (rel_concerne3.eid_from=_F.cw_eid) LEFT OUTER JOIN cw_Societe AS _E ON (rel_concerne3.eid_to=_E.cw_eid) WHERE rel_owned_by4.eid_from=_E.cw_eid AND rel_owned_by4.eid_to=1 AND _X.cw_eid=_F.cw_eid))))))) AS _T0, cw_CWEType AS _ET, is_relation AS rel_is0 +WHERE rel_is0.eid_from=_T0.C0 AND rel_is0.eid_to=_ET.cw_eid +GROUP BY _ET.cw_name'''), + + ('Any A WHERE A ordernum O, A is CWAttribute WITH O BEING (Any MAX(O) WHERE A ordernum O, A is CWAttribute)', + '''SELECT _A.cw_eid +FROM (SELECT MAX(_A.cw_ordernum) AS C0 +FROM cw_CWAttribute AS _A) AS _T0, cw_CWAttribute AS _A +WHERE _A.cw_ordernum=_T0.C0'''), + + ('Any O1 HAVING O1=O2? WITH O1 BEING (Any MAX(O) WHERE A ordernum O, A is CWAttribute), O2 BEING (Any MAX(O) WHERE A ordernum O, A is CWRelation)', + '''SELECT _T0.C0 +FROM (SELECT MAX(_A.cw_ordernum) AS C0 +FROM cw_CWAttribute AS _A) AS _T0 LEFT OUTER JOIN (SELECT MAX(_A.cw_ordernum) AS C0 +FROM cw_CWRelation AS _A) AS _T1 ON (_T0.C0=_T1.C0)'''), + + ('''Any TT1,STD,STDD WHERE TT2 identity TT1? + WITH TT1,STDD BEING (Any T,SUM(TD) GROUPBY T WHERE T is Affaire, T duration TD, TAG? 
tags T, TAG name "t"), + TT2,STD BEING (Any T,SUM(TD) GROUPBY T WHERE T is Affaire, T duration TD)''', + '''SELECT _T0.C0, _T1.C1, _T0.C1 +FROM (SELECT _T.cw_eid AS C0, SUM(_T.cw_duration) AS C1 +FROM cw_Affaire AS _T +GROUP BY _T.cw_eid) AS _T1 LEFT OUTER JOIN (SELECT _T.cw_eid AS C0, SUM(_T.cw_duration) AS C1 +FROM cw_Affaire AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_to=_T.cw_eid) LEFT OUTER JOIN cw_Tag AS _TAG ON (rel_tags0.eid_from=_TAG.cw_eid AND _TAG.cw_name=t) +GROUP BY _T.cw_eid) AS _T0 ON (_T1.C0=_T0.C0)'''), + + )): + yield t + + + def test_subquery_error(self): + rql = ('Any N WHERE X name N WITH X BEING ' + '((Any X WHERE X is State)' + ' UNION ' + ' (Any X WHERE X is Transition))') + rqlst = self._prepare(rql) + self.assertRaises(BadRQLQuery, self.o.generate, rqlst) + + def test_inline(self): + for t in self._parse(INLINE): + yield t + + def test_has_text(self): + for t in self._parse(( + ('Any X WHERE X has_text "toto tata"', + """SELECT appears0.uid +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), + + ('Personne X WHERE X has_text "toto tata"', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne'"""), + + ('Personne X WHERE X has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne'"""), + + ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION ALL +SELECT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu"""), + + ('Personne X where X has_text %(text)s, X travaille S, S has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo')"""), + + ('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "toto tata"', + """SELECT appears0.uid +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight DESC"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT T1.C0 FROM (SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 
'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION ALL +SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +ORDER BY 2) AS T1"""), + + ('Personne X ORDERBY FTIRANK(X),FTIRANK(S) WHERE X has_text %(text)s, X travaille S, S has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight,ts_rank(appears2.words, to_tsquery('default', 'hip&hop&momo'))*appears2.weight"""), + + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT appears0.uid, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), + + + ('Any X WHERE NOT A tags X, X has_text "pouet"', + '''SELECT appears1.uid +FROM appears AS appears1 +WHERE NOT (EXISTS(SELECT 1 FROM tags_relation AS rel_tags0 WHERE appears1.uid=rel_tags0.eid_to)) AND appears1.words @@ to_tsquery('default', 'pouet') +'''), + + )): + yield t + + + def test_from_clause_needed(self): + queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')", + '''SELECT 1 +WHERE EXISTS(SELECT 1 FROM cw_CWGroup AS _T WHERE _T.cw_name=managers)'''), + ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6', + '''SELECT 5, 6 +WHERE NOT (EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6))'''), + ] + for t in self._parse(queries): + yield t + + def test_ambigous_exists_no_from_clause(self): + self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))', + '''SELECT COUNT(1) +WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''') + + def test_attr_map_sqlcb(self): + def generate_ref(gen, linkedvar, rel): + linkedvar.accept(gen) + return 'VERSION_DATA(%s)' % linkedvar._q_sql + self.o.attr_map['Affaire.ref'] = (generate_ref, False) + try: + self._check('Any R WHERE X ref R', + '''SELECT VERSION_DATA(_X.cw_eid) +FROM cw_Affaire AS _X''') + self._check('Any X WHERE X ref 1', + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X +WHERE VERSION_DATA(_X.cw_eid)=1''') + finally: + self.o.attr_map.clear() + + def test_attr_map_sourcecb(self): + cb = lambda x,y: None + self.o.attr_map['Affaire.ref'] = (cb, True) + try: + union = self._prepare('Any R WHERE X ref R') + r, nargs, cbs = self.o.generate(union, args={}) + self.assertMultiLineEqual(r.strip(), 'SELECT _X.cw_ref\nFROM cw_Affaire AS _X') + self.assertEqual(cbs, {0: [cb]}) + finally: + self.o.attr_map.clear() + + + def test_concat_string(self): + self._check('Any "A"+R WHERE X ref R', + '''SELECT (A || _X.cw_ref) +FROM cw_Affaire AS _X''') + + def 
test_or_having_fake_terms_base(self): + self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE ((CAST(EXTRACT(YEAR from _X.cw_creation_date) AS INTEGER)=2010) OR (_X.cw_creation_date IS NULL))''') + + def test_or_having_fake_terms_exists(self): + # crash with rql <= 0.29.0 + self._check('Any X WHERE X is CWUser, EXISTS(B bookmarked_by X, B creation_date D) HAVING D=2010 OR D=NULL, D=1 OR D=NULL', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE EXISTS(SELECT 1 FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_Bookmark AS _B WHERE rel_bookmarked_by0.eid_from=_B.cw_eid AND rel_bookmarked_by0.eid_to=_X.cw_eid AND ((_B.cw_creation_date=1) OR (_B.cw_creation_date IS NULL)) AND ((_B.cw_creation_date=2010) OR (_B.cw_creation_date IS NULL)))''') + + def test_or_having_fake_terms_nocrash(self): + # crash with rql <= 0.29.0 + self._check('Any X WHERE X is CWUser, X creation_date D HAVING D=2010 OR D=NULL, D=1 OR D=NULL', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE ((_X.cw_creation_date=1) OR (_X.cw_creation_date IS NULL)) AND ((_X.cw_creation_date=2010) OR (_X.cw_creation_date IS NULL))''') + + def test_not_no_where(self): + # XXX will check if some in_group relation exists, that's it. + # We can't actually know if we want to check if there are some + # X without in_group relation, or some G without it. + self._check('Any 1 WHERE NOT X in_group G, X is CWUser', + '''SELECT 1 +WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''') + + def test_nonregr_outer_join_multiple(self): + self._check('Any COUNT(P1148),G GROUPBY G ' + 'WHERE G owned_by D, D eid 1122, K1148 bookmarked_by P1148, ' + 'K1148 eid 1148, P1148? in_group G', + '''SELECT COUNT(rel_bookmarked_by1.eid_to), _G.cw_eid +FROM owned_by_relation AS rel_owned_by0, cw_CWGroup AS _G LEFT OUTER JOIN in_group_relation AS rel_in_group2 ON (rel_in_group2.eid_to=_G.cw_eid) LEFT OUTER JOIN bookmarked_by_relation AS rel_bookmarked_by1 ON (rel_in_group2.eid_from=rel_bookmarked_by1.eid_to) +WHERE rel_owned_by0.eid_from=_G.cw_eid AND rel_owned_by0.eid_to=1122 AND rel_bookmarked_by1.eid_from=1148 +GROUP BY _G.cw_eid''' + ) + + def test_nonregr_outer_join_multiple2(self): + self._check('Any COUNT(P1148),G GROUPBY G ' + 'WHERE G owned_by D, D eid 1122, K1148 bookmarked_by P1148?, ' + 'K1148 eid 1148, P1148? 
in_group G', + '''SELECT COUNT(rel_bookmarked_by1.eid_to), _G.cw_eid +FROM owned_by_relation AS rel_owned_by0, cw_CWGroup AS _G LEFT OUTER JOIN in_group_relation AS rel_in_group2 ON (rel_in_group2.eid_to=_G.cw_eid) LEFT OUTER JOIN bookmarked_by_relation AS rel_bookmarked_by1 ON (rel_bookmarked_by1.eid_from=1148 AND rel_in_group2.eid_from=rel_bookmarked_by1.eid_to) +WHERE rel_owned_by0.eid_from=_G.cw_eid AND rel_owned_by0.eid_to=1122 +GROUP BY _G.cw_eid''') + + def test_groupby_orderby_insertion_dont_modify_intention(self): + self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' + 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' + 'WHERE X creation_date XSCT, X modification_date XECT, ' + 'X ordernum XCE, X is CWAttribute', + '''SELECT ((CAST(EXTRACT(YEAR from _X.cw_modification_date) AS INTEGER) * 100) + CAST(EXTRACT(MONTH from _X.cw_modification_date) AS INTEGER)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) +FROM cw_CWAttribute AS _X +GROUP BY CAST(EXTRACT(YEAR from _X.cw_modification_date) AS INTEGER),CAST(EXTRACT(MONTH from _X.cw_modification_date) AS INTEGER) +ORDER BY 1'''), + + def test_modulo(self): + self._check('Any 5 % 2', '''SELECT (5 % 2)''') + + +class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC): + backend = 'sqlserver2005' + def _norm_sql(self, sql): + return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ') + + def test_has_text(self): + for t in self._parse(HAS_TEXT_LG_INDEXER): + yield t + + def test_regexp(self): + self.skipTest('regexp-based pattern matching not implemented in sqlserver') + + def test_or_having_fake_terms_base(self): + self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE ((DATEPART(YEAR, _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''') + + def test_date_extraction(self): + self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", + '''SELECT DATEPART(MONTH, _P.cw_creation_date) +FROM cw_Personne AS _P''') + + def test_weekday_extraction(self): + self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", + '''SELECT DATEPART(WEEKDAY, _P.cw_creation_date) +FROM cw_Personne AS _P''') + + def test_basic_parse(self): + for t in self._parse(BASIC):# + BASIC_WITH_LIMIT): + yield t + + def test_advanced_parse(self): + for t in self._parse(ADVANCED):# + ADVANCED_WITH_LIMIT_OR_ORDERBY): + yield t + + def test_limit_offset(self): + WITH_LIMIT = [ + ("Personne P LIMIT 20 OFFSET 10", + '''WITH orderedrows AS ( +SELECT +_L01 +, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber +FROM ( +SELECT _P.cw_eid AS _L01 FROM cw_Personne AS _P +) AS _SQ1 ) +SELECT +_L01 +FROM orderedrows WHERE +__RowNumber <= 30 AND __RowNumber > 10 + '''), + + ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))', + '''WITH orderedrows AS ( +SELECT +_L01, _L02 +, ROW_NUMBER() OVER (ORDER BY _L01 DESC) AS __RowNumber +FROM ( +SELECT COUNT(rel_concerne0.eid_from) AS _L01, _C.cw_nom AS _L02 FROM concerne_relation AS rel_concerne0, cw_Societe AS _C +WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM cw_Card AS _N, documented_by_relation AS rel_documented_by2 WHERE 
rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published))) +GROUP BY _C.cw_nom +) AS _SQ1 ) +SELECT +_L01, _L02 +FROM orderedrows WHERE +__RowNumber <= 10 + '''), + + ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', + '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 +FROM cw_File AS _X +GROUP BY _X.cw_data_name,_X.cw_data_format) AS T1 +ORDER BY T1.C1,T1.C2 +'''), + + + ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y', + '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _X.cw_login AS C1 +FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_CWUser AS _X +WHERE rel_bookmarked_by0.eid_to=_X.cw_eid) AS T1 +ORDER BY T1.C1 + '''), + + ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN', + '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 +FROM cw_Affaire AS _X, cw_State AS _S +WHERE _X.cw_in_state=_S.cw_eid +UNION +SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 +FROM cw_CWUser AS _X, cw_State AS _S +WHERE _X.cw_in_state=_S.cw_eid +UNION +SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 +FROM cw_Note AS _X, cw_State AS _S +WHERE _X.cw_in_state=_S.cw_eid) AS T1 +ORDER BY T1.C1'''), + + ('Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT S use_email O, S eid 1, O is EmailAddress, O address AA, O alias AB, O modification_date AC, ' + 'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2', + ''' +SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date +FROM cw_EmailAddress AS _O +WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests)) +ORDER BY 4 DESC'''), + + ("Any P ORDERBY N LIMIT 1 WHERE P is Personne, P travaille S, S eid %(eid)s, P nom N, P nom %(text)s", + '''WITH orderedrows AS ( +SELECT +_L01 +, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber +FROM ( +SELECT _P.cw_eid AS _L01 FROM cw_Personne AS _P, travaille_relation AS rel_travaille0 +WHERE rel_travaille0.eid_from=_P.cw_eid AND rel_travaille0.eid_to=12345 AND _P.cw_nom=hip hop momo +) AS _SQ1 ) +SELECT +_L01 +FROM orderedrows WHERE +__RowNumber <= 1'''), + + ("Any P ORDERBY N LIMIT 1 WHERE P is Personne, P nom N", + '''WITH orderedrows AS ( +SELECT +_L01 +, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber +FROM ( +SELECT _P.cw_eid AS _L01 FROM cw_Personne AS _P +) AS _SQ1 ) +SELECT +_L01 +FROM orderedrows WHERE +__RowNumber <= 1 +'''), + + ("Any PN, N, P ORDERBY N LIMIT 1 WHERE P is Personne, P nom N, P prenom PN", + '''WITH orderedrows AS ( +SELECT +_L01, _L02, _L03 +, ROW_NUMBER() OVER (ORDER BY _L02) AS __RowNumber +FROM ( +SELECT _P.cw_prenom AS _L01, _P.cw_nom AS _L02, _P.cw_eid AS _L03 FROM cw_Personne AS _P +) AS _SQ1 ) +SELECT +_L01, _L02, _L03 +FROM orderedrows WHERE +__RowNumber <= 1 +'''), + ] + for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY): + yield t + + def test_cast(self): + self._check("Any CAST(String, P) WHERE P is Personne", + '''SELECT CAST(_P.cw_eid AS 
nvarchar(max)) +FROM cw_Personne AS _P''') + + def test_groupby_orderby_insertion_dont_modify_intention(self): + self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' + 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' + 'WHERE X creation_date XSCT, X modification_date XECT, ' + 'X ordernum XCE, X is CWAttribute', + '''SELECT ((DATEPART(YEAR, _X.cw_modification_date) * 100) + DATEPART(MONTH, _X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) +FROM cw_CWAttribute AS _X +GROUP BY DATEPART(YEAR, _X.cw_modification_date),DATEPART(MONTH, _X.cw_modification_date) +ORDER BY 1''') + + def test_today(self): + for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X +WHERE DATE(_X.cw_creation_date)=%s''' % self.dbhelper.sql_current_date()), + + ("Personne P where not P datenaiss TODAY", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE NOT (DATE(_P.cw_datenaiss)=%s)''' % self.dbhelper.sql_current_date()), + ]): + yield t + + +class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC): + backend = 'sqlite' + + def _norm_sql(self, sql): + return sql.strip().replace(' ILIKE ', ' LIKE ') + + def test_date_extraction(self): + self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", + '''SELECT MONTH(_P.cw_creation_date) +FROM cw_Personne AS _P''') + + def test_weekday_extraction(self): + # custom impl. in cw.server.sqlutils + self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", + '''SELECT WEEKDAY(_P.cw_creation_date) +FROM cw_Personne AS _P''') + + def test_regexp(self): + self._check("Any X WHERE X login REGEXP '[0-9].*'", + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE _X.cw_login REGEXP [0-9].* +''') + + + def test_union(self): + for t in self._parse(( + ('(Any N ORDERBY 1 WHERE X name N, X is State)' + ' UNION ' + '(Any NN ORDERBY 1 WHERE XX name NN, XX is Transition)', + '''SELECT _X.cw_name +FROM cw_State AS _X +ORDER BY 1 +UNION ALL +SELECT _XX.cw_name +FROM cw_Transition AS _XX +ORDER BY 1'''), + )): + yield t + + + def test_subquery(self): + # NOTE: no paren around UNION with sqlitebackend + for t in self._parse(( + + ('Any N ORDERBY 1 WITH N BEING ' + '((Any N WHERE X name N, X is State)' + ' UNION ' + '(Any NN WHERE XX name NN, XX is Transition))', + '''SELECT _T0.C0 +FROM (SELECT _X.cw_name AS C0 +FROM cw_State AS _X +UNION ALL +SELECT _XX.cw_name AS C0 +FROM cw_Transition AS _XX) AS _T0 +ORDER BY 1'''), + + ('Any N,NX ORDERBY NX WITH N,NX BEING ' + '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)' + ' UNION ' + '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))', + '''SELECT _T0.C0, _T0.C1 +FROM (SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 +FROM cw_State AS _X +GROUP BY _X.cw_name +HAVING COUNT(_X.cw_eid)>1 +UNION ALL +SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 +FROM cw_Transition AS _X +GROUP BY _X.cw_name +HAVING COUNT(_X.cw_eid)>1) AS _T0 +ORDER BY 2'''), + + ('Any N,COUNT(X) GROUPBY N HAVING COUNT(X)>1 ' + 'WITH X, N BEING ((Any X, N WHERE X name N, X is State) UNION ' + ' (Any X, N WHERE X name N, X is Transition))', + '''SELECT _T0.C1, COUNT(_T0.C0) +FROM (SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_State AS _X +UNION ALL +SELECT _X.cw_eid AS C0, _X.cw_name AS C1 +FROM cw_Transition AS _X) AS _T0 +GROUP BY _T0.C1 +HAVING COUNT(_T0.C0)>1'''), + )): + yield t + + def test_has_text(self): + for t in self._parse(( + ('Any X WHERE X has_text "toto 
tata"', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + + ('Any X WHERE X has_text %(text)s', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo'))"""), + + ('Personne X WHERE X has_text "toto tata"', + """SELECT DISTINCT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.eid AND _X.type='Personne'"""), + + ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION +SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION +SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +"""), + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid, 1.0 +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + )): + yield t + + + def test_or_having_fake_terms_base(self): + self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE ((YEAR(_X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''') + + def test_groupby_orderby_insertion_dont_modify_intention(self): + self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' + 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' + 'WHERE X creation_date XSCT, X modification_date XECT, ' + 'X ordernum XCE, X is CWAttribute', + '''SELECT ((YEAR(_X.cw_modification_date) * 100) + MONTH(_X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) +FROM cw_CWAttribute AS _X +GROUP BY YEAR(_X.cw_modification_date),MONTH(_X.cw_modification_date) +ORDER BY 1'''), + + def test_today(self): + for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X +WHERE DATE(_X.cw_creation_date)=CURRENT_DATE'''), + + ("Personne P where not P datenaiss TODAY", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE NOT (DATE(_P.cw_datenaiss)=CURRENT_DATE)'''), + ]): + yield t + + +class MySQLGenerator(PostgresSQLGeneratorTC): + backend = 'mysql' + + def _norm_sql(self, sql): + sql = sql.strip().replace(' ILIKE ', ' LIKE ') + newsql = [] + latest = None + for line in sql.splitlines(False): + firstword = line.split(None, 1)[0] + if 
firstword == 'WHERE' and latest == 'SELECT': + newsql.append('FROM (SELECT 1) AS _T') + newsql.append(line) + latest = firstword + return '\n'.join(newsql) + + def test_date_extraction(self): + self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", + '''SELECT EXTRACT(MONTH from _P.cw_creation_date) +FROM cw_Personne AS _P''') + + def test_weekday_extraction(self): + self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", + '''SELECT DAYOFWEEK(_P.cw_creation_date) +FROM cw_Personne AS _P''') + + def test_cast(self): + self._check("Any CAST(String, P) WHERE P is Personne", + '''SELECT CAST(_P.cw_eid AS mediumtext) +FROM cw_Personne AS _P''') + + def test_regexp(self): + self._check("Any X WHERE X login REGEXP '[0-9].*'", + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE _X.cw_login REGEXP [0-9].* +''') + + def test_from_clause_needed(self): + queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')", + '''SELECT 1 +FROM (SELECT 1) AS _T +WHERE EXISTS(SELECT 1 FROM cw_CWGroup AS _T WHERE _T.cw_name=managers)'''), + ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6', + '''SELECT 5, 6 +FROM (SELECT 1) AS _T +WHERE NOT (EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6))'''), + ] + for t in self._parse(queries): + yield t + + + def test_has_text(self): + queries = [ + ('Any X WHERE X has_text "toto tata"', + """SELECT appears0.uid +FROM appears AS appears0 +WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE)"""), + ('Personne X WHERE X has_text "toto tata"', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=_X.eid AND _X.type='Personne'"""), + ('Personne X WHERE X has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE MATCH (appears0.words) AGAINST ('hip hop momo' IN BOOLEAN MODE) AND appears0.uid=_X.eid AND _X.type='Personne'"""), + ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION ALL +SELECT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +""") + ] + for t in self._parse(queries): + yield t + + + def test_ambigous_exists_no_from_clause(self): + self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))', + '''SELECT COUNT(1) +FROM (SELECT 1) AS _T +WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''') + + def test_groupby_multiple_outerjoins(self): + self._check('Any A,U,P,group_concat(TN) GROUPBY A,U,P WHERE A is Affaire, A concerne N, N todo_by U?, T? 
tags A, T name TN, A todo_by P?', + '''SELECT _A.cw_eid, rel_todo_by1.eid_to, rel_todo_by3.eid_to, GROUP_CONCAT(_T.cw_name) +FROM concerne_relation AS rel_concerne0, cw_Affaire AS _A LEFT OUTER JOIN tags_relation AS rel_tags2 ON (rel_tags2.eid_to=_A.cw_eid) LEFT OUTER JOIN cw_Tag AS _T ON (rel_tags2.eid_from=_T.cw_eid) LEFT OUTER JOIN todo_by_relation AS rel_todo_by3 ON (rel_todo_by3.eid_from=_A.cw_eid), cw_Note AS _N LEFT OUTER JOIN todo_by_relation AS rel_todo_by1 ON (rel_todo_by1.eid_from=_N.cw_eid) +WHERE rel_concerne0.eid_from=_A.cw_eid AND rel_concerne0.eid_to=_N.cw_eid +GROUP BY _A.cw_eid,rel_todo_by1.eid_to,rel_todo_by3.eid_to''') + + def test_substring(self): + self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne", + '''SELECT SUBSTRING(_P.cw_nom, 1, 1) +FROM cw_Personne AS _P''') + + + def test_or_having_fake_terms_base(self): + self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', + '''SELECT _X.cw_eid +FROM cw_CWUser AS _X +WHERE ((EXTRACT(YEAR from _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''') + + + def test_not_no_where(self): + self._check('Any 1 WHERE NOT X in_group G, X is CWUser', + '''SELECT 1 +FROM (SELECT 1) AS _T +WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''') + + def test_groupby_orderby_insertion_dont_modify_intention(self): + self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' + 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' + 'WHERE X creation_date XSCT, X modification_date XECT, ' + 'X ordernum XCE, X is CWAttribute', + '''SELECT ((EXTRACT(YEAR from _X.cw_modification_date) * 100) + EXTRACT(MONTH from _X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) +FROM cw_CWAttribute AS _X +GROUP BY EXTRACT(YEAR from _X.cw_modification_date),EXTRACT(MONTH from _X.cw_modification_date) +ORDER BY 1'''), + + def test_today(self): + for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", + '''SELECT _X.cw_eid +FROM cw_Affaire AS _X +WHERE DATE(_X.cw_creation_date)=CURRENT_DATE'''), + + ("Personne P where not P datenaiss TODAY", + '''SELECT _P.cw_eid +FROM cw_Personne AS _P +WHERE NOT (DATE(_P.cw_datenaiss)=CURRENT_DATE)'''), + ]): + yield t + +class removeUnsusedSolutionsTC(TestCase): + def test_invariant_not_varying(self): + rqlst = mock_object(defined_vars={}) + rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) + rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) + self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, + {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None), + ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, + {'A': 'FootGroup', 'B': 'FootTeam'}], + {}, set('B')) + ) + + def test_invariant_varying(self): + rqlst = mock_object(defined_vars={}) + rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) + rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) + self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, + {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None), + ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) + ) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_rqlannotation.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_rqlannotation.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,445 
@@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for modules cubicweb.server.rqlannotation""" + +from cubicweb.devtools import TestServerConfiguration, get_test_db_handler +from cubicweb.devtools.repotest import BaseQuerierTC + +class SQLGenAnnotatorTC(BaseQuerierTC): + + def setUp(self): + handler = get_test_db_handler(TestServerConfiguration( + 'data2', apphome=SQLGenAnnotatorTC.datadir)) + handler.build_db_cache() + repo, _cnx = handler.get_repo_and_cnx() + self.__class__.repo = repo + super(SQLGenAnnotatorTC, self).setUp() + + def get_max_eid(self): + # no need for cleanup here + return None + + def cleanup(self): + # no need for cleanup here + pass + + def test_0_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any SEN,RN,OEN WHERE X from_entity SE, ' + 'SE eid 44, X relation_type R, R eid 139, ' + 'X to_entity OE, OE eid 42, R name RN, SE name SEN, ' + 'OE name OEN') + self.assertEqual(rqlst.defined_vars['SE']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['OE']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['R']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['SE'].stinfo['attrvar'], None) + self.assertEqual(rqlst.defined_vars['OE'].stinfo['attrvar'], None) + self.assertEqual(rqlst.defined_vars['R'].stinfo['attrvar'], None) + + def test_0_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any O WHERE NOT S ecrit_par O, S eid 1, ' + 'S inline1 P, O inline2 P') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['O'].stinfo['attrvar'], None) + + def test_0_4(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any A,B,C WHERE A eid 12,A comment B, ' + 'A ?wf_info_for C') + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) + self.assertTrue(rqlst.defined_vars['B'].stinfo['attrvar']) + self.assertEqual(rqlst.defined_vars['C']._q_invariant, False) + self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'}, + {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'}, + {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}]) + + def test_0_5(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any P WHERE N ecrit_par P, N eid 0') + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) + + def test_0_6(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any P WHERE NOT N ecrit_par P, N eid 512') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + + def test_0_7(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Personne X,Y where X nom NX, ' + 'Y nom NX, X eid XE, not Y eid XE') + 
self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + self.assertTrue(rqlst.defined_vars['XE'].stinfo['attrvar']) + + def test_0_8(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any P WHERE X eid 0, NOT X connait P') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + #self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(len(rqlst.solutions), 1, rqlst.solutions) + + def test_0_10(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X concerne Y, Y is Note') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_0_11(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X todo_by Y, X is Affaire') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_0_12(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Personne P WHERE P concerne A, ' + 'A concerne S, S nom "Logilab"') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) + + def test_1_0(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,Y WHERE X created_by Y, ' + 'X eid 5, NOT Y eid 6') + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_1_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,Y WHERE X created_by Y, X eid 5, ' + 'NOT Y eid IN (6,7)') + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X identity Y, Y eid 1') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + + def test_7(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Personne X,Y where X nom NX, Y nom NX, ' + 'X eid XE, not Y eid XE') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_8(self): + with self.session.new_cnx() as cnx: + # DISTINCT Any P WHERE P require_group %(g)s, + # NOT %(u)s has_group_permission P, P is CWPermission + rqlst = self._prepare(cnx, 'DISTINCT Any X WHERE A concerne X, ' + 'NOT N migrated_from X, ' + 'X is Note, N eid 1') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + + def test_diff_scope_identity_deamb(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X concerne Y, Y is Note, ' + 'EXISTS(Y identity Z, Z migrated_from N)') + self.assertEqual(rqlst.defined_vars['Z']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_optional_inlined(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,S where X from_state S?') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) + + def test_optional_inlined_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any N,A WHERE N? 
inline1 A') + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) + + def test_optional_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,S WHERE X travaille S?') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) + + def test_greater_eid(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X eid > 5') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_greater_eid_typed(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X eid > 5, X is Note') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_max_eid(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any MAX(X)') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_max_eid_typed(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any MAX(X) WHERE X is Note') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + + def test_all_entities(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_all_typed_entity(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X is Note') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + + def test_has_text_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X has_text "toto tata"') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, + 'has_text') + + def test_has_text_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X is Personne, ' + 'X has_text "coucou"') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, + 'has_text') + + def test_not_relation_1(self): + with self.session.new_cnx() as cnx: + # P can't be invariant since deambiguification caused by "NOT X require_permission P" + # is not considered by generated sql (NOT EXISTS(...)) + rqlst = self._prepare(cnx, 'Any P,G WHERE P require_group G, ' + 'NOT X require_permission P') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['G']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_not_relation_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'TrInfo X WHERE X eid 2, ' + 'NOT X from_state Y, Y is State') + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + + def test_not_relation_3(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X, Y WHERE X eid 1, Y eid in (2, 3)') + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_not_relation_4_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note X WHERE NOT Y evaluee X') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_not_relation_4_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X') + 
self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_not_relation_4_3(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any Y WHERE NOT Y evaluee X') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_not_relation_4_4(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X, Y is CWUser') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_not_relation_4_5(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X, ' + 'Y eid %s, X is Note' % self.ueid) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.solutions, [{'X': 'Note'}]) + + def test_not_relation_5_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", ' + 'Y eid IN(1, 2, 3), NOT X read_permission Y') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_not_relation_5_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", ' + 'Y eid IN(1, 2, 3), NOT X read_permission Y') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_not_relation_6(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Personne P where NOT P concerne A') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) + + def test_not_relation_7(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any K,V WHERE P is CWProperty, ' + 'P pkey K, P value V, NOT P for_user U') + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) + + def test_exists_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)') + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_exists_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)') + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_exists_3(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(X owned_by U, X bookmarked_by U)') + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_exists_4(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", ' + 'Y eid IN(1, 2, 3), EXISTS(X read_permission Y)') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_exists_5(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", ' + 'Y eid IN(1, 2, 3), EXISTS(X read_permission Y)') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, 
False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_not_exists_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any U WHERE NOT EXISTS(X owned_by U, ' + 'X bookmarked_by U)') + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_not_exists_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", ' + 'Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)') + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_not_exists_distinct_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", ' + 'Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)') + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) + + def test_or_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X concerne B OR ' + 'C concerne X, B eid 12, C eid 13') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + + def test_or_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X created_by U, X concerne B OR ' + 'C concerne X, B eid 12, C eid 13') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by') + + def test_or_3(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any N WHERE A evaluee N or EXISTS(N todo_by U)') + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) + + def test_or_exists_1(self): + with self.session.new_cnx() as cnx: + # query generated by security rewriting + rqlst = self._prepare(cnx, 'DISTINCT Any A,S WHERE A is Affaire, S nom "chouette", ' + 'S is IN(Division, Societe, SubDivision),' + '(EXISTS(A owned_by D)) ' + 'OR ((((EXISTS(E concerne C?, C owned_by D, A identity E, ' + ' C is Note, E is Affaire)) ' + 'OR (EXISTS(I concerne H?, H owned_by D, H is Societe, ' + ' A identity I, I is Affaire))) ' + 'OR (EXISTS(J concerne G?, G owned_by D, G is SubDivision, ' + ' A identity J, J is Affaire))) ' + 'OR (EXISTS(K concerne F?, F owned_by D, F is Division, ' + ' A identity K, K is Affaire)))') + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) + + def test_or_exists_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(U in_group G, G name "managers") OR ' + 'EXISTS(X owned_by U, X bookmarked_by U)') + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['G']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + + def test_or_exists_3(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' + 'WHERE C is Societe, S concerne C, C nom CS, ' + '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) + rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' + 'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, ' + '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by 
N, N title "published"))') + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) + + def test_nonregr_ambiguity(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note N WHERE N attachment F') + # N may be an image as well, not invariant + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['F']._q_invariant, True) + + def test_nonregr_ambiguity_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any S,SN WHERE X has_text "tot", X in_state S, S name SN, X is CWUser') + # X use has_text but should not be invariant as ambiguous, and has_text + # may not be its principal + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) + + def test_remove_from_deleted_source_1(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note X WHERE X eid 999998, NOT X cw_source Y') + self.assertNotIn('X', rqlst.defined_vars) # simplified + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_remove_from_deleted_source_2(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + + def test_has_text_security_cache_bug(self): + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X has_text "toto" WITH X BEING ' + '(Any C WHERE C is Societe, C nom CS)') + self.assertTrue(rqlst.parent.has_text_query) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_schema2sql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_schema2sql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,285 @@ +# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for module cubicweb.server.schema2sql +""" + +import os.path as osp + +from logilab.common.testlib import TestCase, unittest_main +from logilab.database import get_db_helper + +from yams.reader import SchemaLoader +from cubicweb.server import schema2sql + +schema2sql.SET_DEFAULT = True + +DATADIR = osp.abspath(osp.join(osp.dirname(__file__), 'data-schema2sql')) + +schema = SchemaLoader().load([DATADIR]) + + +EXPECTED_DATA_NO_DROP = """ +CREATE TABLE Affaire( + sujet varchar(128), + ref varchar(12), + inline_rel integer REFERENCES entities (eid) +); +CREATE INDEX affaire_inline_rel_idx ON Affaire(inline_rel); + +CREATE TABLE Company( + name text +); + +CREATE TABLE Datetest( + dt1 timestamp, + dt2 timestamp, + d1 date, + d2 date, + t1 time, + t2 time +, CONSTRAINT cstredd407706bdfbd2285714dd689e8fcc0 CHECK(d1 <= CAST(clock_timestamp() AS DATE)) +); + +CREATE TABLE Division( + name text +); + +CREATE TABLE EPermission( + name varchar(100) NOT NULL +); +CREATE INDEX epermission_name_idx ON EPermission(name); + +CREATE TABLE Eetype( + name varchar(64) UNIQUE NOT NULL, + description text, + meta boolean, + final boolean, + initial_state integer REFERENCES entities (eid) +); +CREATE INDEX eetype_name_idx ON Eetype(name); +CREATE INDEX eetype_initial_state_idx ON Eetype(initial_state); + +CREATE TABLE Employee( +); + +CREATE TABLE Note( + date varchar(10), + type varchar(1), + para varchar(512) +); + +CREATE TABLE Person( + nom varchar(64) NOT NULL, + prenom varchar(64), + sexe varchar(1) DEFAULT 'M', + promo varchar(6), + titre varchar(128), + adel varchar(128), + ass varchar(128), + web varchar(128), + tel integer, + fax integer, + datenaiss date, + test boolean, + salary float +, CONSTRAINT cstr41fe7db9ce1d5be95de2477e26590386 CHECK(promo IN ('bon', 'pasbon')) +); +CREATE UNIQUE INDEX unique_e6c2d219772dbf1715597f7d9a6b3892 ON Person(nom,prenom); + +CREATE TABLE Salaried( + nom varchar(64) NOT NULL, + prenom varchar(64), + sexe varchar(1) DEFAULT 'M', + promo varchar(6), + titre varchar(128), + adel varchar(128), + ass varchar(128), + web varchar(128), + tel integer, + fax integer, + datenaiss date, + test boolean, + salary float +, CONSTRAINT cstrc8556fcc665865217761cdbcd220cae0 CHECK(promo IN ('bon', 'pasbon')) +); +CREATE UNIQUE INDEX unique_98da0f9de8588baa8966f0b1a6f850a3 ON Salaried(nom,prenom); + +CREATE TABLE Societe( + nom varchar(64), + web varchar(128), + tel integer, + fax integer, + rncs varchar(32), + ad1 varchar(128), + ad2 varchar(128), + ad3 varchar(128), + cp varchar(12), + ville varchar(32) +, CONSTRAINT cstrc51dd462e9f6115506a0fe468d4c8114 CHECK(fax <= tel) +); + +CREATE TABLE State( + eid integer PRIMARY KEY REFERENCES entities (eid), + name varchar(256) NOT NULL, + description text +); +CREATE INDEX state_name_idx ON State(name); + +CREATE TABLE Subcompany( + name text +); + +CREATE TABLE Subdivision( + name text +); + +CREATE TABLE pkginfo( + modname varchar(30) NOT NULL, + version varchar(10) DEFAULT '0.1' NOT NULL, + copyright text NOT NULL, + license varchar(3), + short_desc varchar(80) NOT NULL, + long_desc text NOT NULL, + author varchar(100) NOT NULL, + author_email varchar(100) NOT NULL, + mailinglist varchar(100), + debian_handler varchar(6) +, CONSTRAINT cstr70f766f834557c715815d76f0a0db956 CHECK(license IN ('GPL', 'ZPL')) +, CONSTRAINT cstr831a117424d0007ae0278cc15f344f5e CHECK(debian_handler IN ('machin', 'bidule')) +); + + +CREATE TABLE concerne_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL 
REFERENCES entities (eid), + CONSTRAINT concerne_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX concerne_relation_from_idx ON concerne_relation(eid_from); +CREATE INDEX concerne_relation_to_idx ON concerne_relation(eid_to); + +CREATE TABLE division_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT division_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX division_of_relation_from_idx ON division_of_relation(eid_from); +CREATE INDEX division_of_relation_to_idx ON division_of_relation(eid_to); + +CREATE TABLE evaluee_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT evaluee_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX evaluee_relation_from_idx ON evaluee_relation(eid_from); +CREATE INDEX evaluee_relation_to_idx ON evaluee_relation(eid_to); + +CREATE TABLE next_state_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT next_state_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX next_state_relation_from_idx ON next_state_relation(eid_from); +CREATE INDEX next_state_relation_to_idx ON next_state_relation(eid_to); + +CREATE TABLE obj_wildcard_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT obj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX obj_wildcard_relation_from_idx ON obj_wildcard_relation(eid_from); +CREATE INDEX obj_wildcard_relation_to_idx ON obj_wildcard_relation(eid_to); + +CREATE TABLE require_permission_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT require_permission_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX require_permission_relation_from_idx ON require_permission_relation(eid_from); +CREATE INDEX require_permission_relation_to_idx ON require_permission_relation(eid_to); + +CREATE TABLE state_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT state_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX state_of_relation_from_idx ON state_of_relation(eid_from); +CREATE INDEX state_of_relation_to_idx ON state_of_relation(eid_to); + +CREATE TABLE subcompany_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT subcompany_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX subcompany_of_relation_from_idx ON subcompany_of_relation(eid_from); +CREATE INDEX subcompany_of_relation_to_idx ON subcompany_of_relation(eid_to); + +CREATE TABLE subdivision_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT subdivision_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX subdivision_of_relation_from_idx ON subdivision_of_relation(eid_from); +CREATE INDEX subdivision_of_relation_to_idx ON subdivision_of_relation(eid_to); + +CREATE TABLE subj_wildcard_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT subj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX 
subj_wildcard_relation_from_idx ON subj_wildcard_relation(eid_from); +CREATE INDEX subj_wildcard_relation_to_idx ON subj_wildcard_relation(eid_to); + +CREATE TABLE sym_rel_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT sym_rel_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX sym_rel_relation_from_idx ON sym_rel_relation(eid_from); +CREATE INDEX sym_rel_relation_to_idx ON sym_rel_relation(eid_to); + +CREATE TABLE travaille_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT travaille_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX travaille_relation_from_idx ON travaille_relation(eid_from); +CREATE INDEX travaille_relation_to_idx ON travaille_relation(eid_to); +""" + +class SQLSchemaTC(TestCase): + + def test_known_values(self): + dbhelper = get_db_helper('postgres') + output = schema2sql.schema2sql(dbhelper, schema, skip_relations=('works_for',)) + self.assertMultiLineEqual(EXPECTED_DATA_NO_DROP.strip(), output.strip()) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_schemaserial.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_schemaserial.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,446 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for schema rql (de)serialization""" + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb import Binary +from cubicweb.schema import CubicWebSchemaLoader +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.testlib import CubicWebTC + +from cubicweb.server.schemaserial import (updateeschema2rql, updaterschema2rql, rschema2rql, + eschema2rql, rdef2rql, specialize2rql, + _erperms2rql as erperms2rql) + +from logilab.database import get_db_helper +from yams import register_base_type, unregister_base_type + +schema = config = None +def setUpModule(*args): + register_base_type('BabarTestType', ('jungle_speed',)) + helper = get_db_helper('sqlite') + helper.TYPE_MAPPING['BabarTestType'] = 'TEXT' + helper.TYPE_CONVERTERS['BabarTestType'] = lambda x: '"%s"' % x + + global schema, config + loader = CubicWebSchemaLoader() + apphome = Schema2RQLTC.datadir + '-schemaserial' + config = TestServerConfiguration('data', apphome=apphome) + config.bootstrap_cubes() + schema = loader.load(config) + + +def tearDownModule(*args): + global schema, config + schema = config = None + + unregister_base_type('BabarTestType') + helper = get_db_helper('sqlite') + helper.TYPE_MAPPING.pop('BabarTestType', None) + helper.TYPE_CONVERTERS.pop('BabarTestType', None) + +cstrtypemap = {'RQLConstraint': 'RQLConstraint_eid', + 'SizeConstraint': 'SizeConstraint_eid', + 'StaticVocabularyConstraint': 'StaticVocabularyConstraint_eid', + 'FormatConstraint': 'FormatConstraint_eid', + } + +class Schema2RQLTC(TestCase): + + def test_eschema2rql1(self): + self.assertListEqual([ + ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', + {'description': u'define a final relation: ' + 'link a final relation type from a non final entity ' + 'to a final entity type. 
used to build the instance schema', + 'name': u'CWAttribute', 'final': False})], + list(eschema2rql(schema.eschema('CWAttribute')))) + + def test_eschema2rql2(self): + self.assertListEqual([ + ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', + {'description': u'', 'final': True, 'name': u'String'})], + list(eschema2rql(schema.eschema('String')))) + + def test_eschema2rql_specialization(self): + # x: None since eschema.eid are None + self.assertListEqual([('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', + {'et': None, 'x': None}), + ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', + {'et': None, 'x': None})], + sorted(specialize2rql(schema))) + + def test_esche2rql_custom_type(self): + expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,' + 'X name %(name)s', + {'description': u'', + 'name': u'BabarTestType', 'final': True},)] + got = list(eschema2rql(schema.eschema('BabarTestType'))) + self.assertListEqual(expected, got) + + def test_rschema2rql1(self): + self.assertListEqual([ + ('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'link a relation definition to its relation type', + 'symmetric': False, + 'name': u'relation_type', + 'final' : False, + 'fulltext_container': None, + 'inlined': True}), + + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, 'rt': None, 'oe': None, + 'description': u'', + 'composite': u'object', + 'cardinality': u'1*', + 'ordernum': 1}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, 'ct': u'RQLConstraint_eid', + 'value': u';O;O final TRUE\n'}), + + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, 'rt': None, 'oe': None, + 'description': u'', 'composite': u'object', + 'ordernum': 1, 'cardinality': u'1*'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final FALSE\n'}), + ], + list(rschema2rql(schema.rschema('relation_type'), cstrtypemap))) + + def test_rschema2rql2(self): + self.assertListEqual([ + ('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'', + 'symmetric': False, + 'name': u'add_permission', + 'final': False, + 'fulltext_container': None, + 'inlined': False}), + + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'groups allowed to add entities/relations of this type', + 'composite': None, + 'ordernum': 9999, + 'cardinality': u'**'}), + ('INSERT CWRelation X: X cardinality 
%(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'rql expression allowing to add entities/relations of this type', + 'composite': 'subject', + 'ordernum': 9999, + 'cardinality': u'*?'}), + + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'groups allowed to add entities/relations of this type', + 'composite': None, + 'ordernum': 9999, + 'cardinality': u'**'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'rql expression allowing to add entities/relations of this type', + 'composite': 'subject', + 'ordernum': 9999, + 'cardinality': u'*?'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'cardinality': u'**', + 'composite': None, + 'description': u'groups allowed to add entities/relations of this type', + 'oe': None, + 'ordernum': 9999, + 'rt': None, + 'se': None}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'cardinality': u'*?', + 'composite': u'subject', + 'description': u'rql expression allowing to add entities/relations of this type', + 'oe': None, + 'ordernum': 9999, + 'rt': None, + 'se': None})], + list(rschema2rql(schema.rschema('add_permission'), cstrtypemap))) + + def test_rschema2rql3(self): + self.assertListEqual([ + ('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'', + 'symmetric': False, + 'name': u'cardinality', + 'final': True, + 'fulltext_container': None, + 'inlined': False}), + + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' + 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,' + 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' + 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'subject/object cardinality', + 'internationalizable': True, + 'fulltextindexed': False, + 'ordernum': 5, + 'defaultval': None, + 'indexed': False, + 'formula': None, + 'cardinality': u'?1'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'SizeConstraint_eid', + 'value': u'max=2'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT 
eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'StaticVocabularyConstraint_eid', + 'value': u"u'?1', u'11'"}), + + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' + 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,' + 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE ' + 'WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'subject/object cardinality', + 'internationalizable': True, + 'fulltextindexed': False, + 'ordernum': 5, + 'defaultval': None, + 'indexed': False, + 'formula': None, + 'cardinality': u'?1'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'SizeConstraint_eid', + 'value': u'max=2'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'StaticVocabularyConstraint_eid', + 'value': (u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', " + "u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'")})], + list(rschema2rql(schema.rschema('cardinality'), cstrtypemap))) + + def test_rschema2rql_custom_type(self): + expected = [('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'', + 'final': True, + 'fulltext_container': None, + 'inlined': False, + 'name': u'custom_field_of_jungle', + 'symmetric': False}), + ('INSERT CWAttribute X: X cardinality %(cardinality)s,' + 'X defaultval %(defaultval)s,X description %(description)s,' + 'X extra_props %(extra_props)s,X formula %(formula)s,X indexed %(indexed)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' + 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'cardinality': u'?1', + 'defaultval': None, + 'description': u'', + 'extra_props': b'{"jungle_speed": 42}', + 'formula': None, + 'indexed': False, + 'oe': None, + 'ordernum': 4, + 'rt': None, + 'se': None})] + + got = list(rschema2rql(schema.rschema('custom_field_of_jungle'), cstrtypemap)) + self.assertEqual(2, len(got)) + # this is a custom type attribute with an extra parameter + self.assertIn('extra_props', got[1][1]) + # this extr + extra_props = got[1][1]['extra_props'] + self.assertIsInstance(extra_props, Binary) + got[1][1]['extra_props'] = got[1][1]['extra_props'].getvalue() + self.assertListEqual(expected, got) + + def test_rdef2rql(self): + self.assertListEqual([ + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' + 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,' + 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' + 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'', + 'internationalizable': True, + 'fulltextindexed': False, + 'ordernum': 3, + 'defaultval': Binary.zpickle(u'text/plain'), + 'indexed': False, + 'formula': None, + 'cardinality': u'?1'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'value': u'None', + 
'ct': 'FormatConstraint_eid'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'value': u'max=50', + 'ct': 'SizeConstraint_eid'})], + list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], + cstrtypemap))) + + def test_updateeschema2rql1(self): + self.assertListEqual([('SET X description %(description)s,X final %(final)s,' + 'X name %(name)s WHERE X eid %(x)s', + {'description': u'define a final relation: link a final relation type from' + ' a non final entity to a final entity type. used to build the instance schema', + 'x': 1, 'final': False, 'name': u'CWAttribute'})], + list(updateeschema2rql(schema.eschema('CWAttribute'), 1))) + + def test_updateeschema2rql2(self): + self.assertListEqual([('SET X description %(description)s,X final %(final)s,' + 'X name %(name)s WHERE X eid %(x)s', + {'description': u'', 'x': 1, 'final': True, 'name': u'String'})], + list(updateeschema2rql(schema.eschema('String'), 1))) + + def test_updaterschema2rql1(self): + self.assertListEqual([ + ('SET X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', + {'x': 1, + 'symmetric': False, + 'description': u'link a relation definition to its relation type', + 'final': False, 'fulltext_container': None, + 'inlined': True, + 'name': u'relation_type'})], + list(updaterschema2rql(schema.rschema('relation_type'), 1))) + + def test_updaterschema2rql2(self): + expected = [ + ('SET X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', + {'x': 1, + 'symmetric': False, + 'description': u'', + 'final': False, + 'fulltext_container': None, + 'inlined': False, + 'name': u'add_permission'}) + ] + for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'), 1)): + yield self.assertEqual, expected[i], (rql, args) + +class Perms2RQLTC(TestCase): + GROUP_MAPPING = { + 'managers': 0, + 'users': 1, + 'guests': 2, + 'owners': 3, + } + + def test_eperms2rql1(self): + self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), + ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), + ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X update_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X delete_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0})], + [(rql, kwargs) + for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)]) + + def test_rperms2rql2(self): + self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), + ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), + ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X delete_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0})], + [(rql, kwargs) + for rql, kwargs in erperms2rql(schema.rschema('read_permission').rdef('CWEType', 'CWGroup'), + self.GROUP_MAPPING)]) + + def test_rperms2rql3(self): + self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 
1}), + ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), + ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), + ('SET X update_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0})], + [(rql, kwargs) + for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'), + self.GROUP_MAPPING)]) + + #def test_perms2rql(self): + # self.assertListEqual(perms2rql(schema, self.GROUP_MAPPING), + # ['INSERT CWEType X: X name 'Societe', X final FALSE']) + +class ComputedAttributeAndRelationTC(CubicWebTC): + appid = 'data-cwep002' + + def test(self): + # force to read schema from the database + self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) + schema = self.repo.schema + self.assertEqual([('Company', 'Person')], list(schema['has_employee'].rdefs)) + self.assertEqual(schema['has_employee'].rdef('Company', 'Person').permissions['read'], + (u'managers',)) + self.assertEqual('O works_for S', + schema['has_employee'].rule) + self.assertEqual([('Company', 'Int')], list(schema['total_salary'].rdefs)) + self.assertEqual('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', + schema['total_salary'].rdefs['Company', 'Int'].formula) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_security.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_security.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,688 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""functional tests for server'security""" + +from six.moves import range + +from logilab.common.testlib import unittest_main + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb import Unauthorized, ValidationError, QueryError, Binary +from cubicweb.schema import ERQLExpression +from cubicweb.server.querier import get_local_checks, check_relations_read_access +from cubicweb.server.utils import _CRYPTO_CTX + + +class BaseSecurityTC(CubicWebTC): + + def setup_database(self): + super(BaseSecurityTC, self).setup_database() + with self.admin_access.client_cnx() as cnx: + self.create_user(cnx, u'iaminusersgrouponly') + hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt') + self.create_user(cnx, u'oldpassword', password=Binary(hash.encode('ascii'))) + +class LowLevelSecurityFunctionTC(BaseSecurityTC): + + def test_check_relation_read_access(self): + rql = u'Personne U WHERE U nom "managers"' + rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0] + nom = self.repo.schema['Personne'].rdef('nom') + with self.temporary_permissions((nom, {'read': ('users', 'managers')})): + with self.admin_access.repo_cnx() as cnx: + self.repo.vreg.solutions(cnx, rqlst, None) + check_relations_read_access(cnx, rqlst, {}) + with self.new_access(u'anon').repo_cnx() as cnx: + self.assertRaises(Unauthorized, + check_relations_read_access, + cnx, rqlst, {}) + self.assertRaises(Unauthorized, cnx.execute, rql) + + def test_get_local_checks(self): + rql = u'Personne U WHERE U nom "managers"' + rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0] + with self.temporary_permissions(Personne={'read': ('users', 'managers')}): + with self.admin_access.repo_cnx() as cnx: + self.repo.vreg.solutions(cnx, rqlst, None) + solution = rqlst.solutions[0] + localchecks = get_local_checks(cnx, rqlst, solution) + self.assertEqual({}, localchecks) + with self.new_access(u'anon').repo_cnx() as cnx: + self.assertRaises(Unauthorized, + get_local_checks, + cnx, rqlst, solution) + self.assertRaises(Unauthorized, cnx.execute, rql) + + def test_upassword_not_selectable(self): + with self.admin_access.repo_cnx() as cnx: + self.assertRaises(Unauthorized, + cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P') + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + self.assertRaises(Unauthorized, + cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P') + + def test_update_password(self): + """Ensure that if a user's password is stored with a deprecated hash, + it will be updated on next login + """ + with self.repo.internal_cnx() as cnx: + oldhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " + "WHERE cw_login = 'oldpassword'").fetchone()[0] + oldhash = self.repo.system_source.binary_to_str(oldhash) + self.repo.close(self.repo.connect('oldpassword', password='oldpassword')) + newhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " + "WHERE cw_login = 'oldpassword'").fetchone()[0] + newhash = self.repo.system_source.binary_to_str(newhash) + self.assertNotEqual(oldhash, newhash) + self.assertTrue(newhash.startswith(b'$6$')) + self.repo.close(self.repo.connect('oldpassword', password='oldpassword')) + newnewhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE " + "cw_login = 'oldpassword'").fetchone()[0] + newnewhash = self.repo.system_source.binary_to_str(newnewhash) + self.assertEqual(newhash, newnewhash) + + +class SecurityRewritingTC(BaseSecurityTC): + def hijack_source_execute(self): + def syntax_tree_search(*args, **kwargs): + self.query = (args, kwargs) + return [] + 
self.repo.system_source.syntax_tree_search = syntax_tree_search + + def tearDown(self): + self.repo.system_source.__dict__.pop('syntax_tree_search', None) + super(SecurityRewritingTC, self).tearDown() + + def test_not_relation_read_security(self): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + self.hijack_source_execute() + cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire') + self.assertEqual(self.query[0][1].as_string(), + 'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') + cnx.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') + self.assertEqual(self.query[0][1].as_string(), + 'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') + +class SecurityTC(BaseSecurityTC): + + def setUp(self): + super(SecurityTC, self).setUp() + # implicitly test manager can add some entities + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Affaire X: X sujet 'cool'") + cnx.execute("INSERT Societe X: X nom 'logilab'") + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.execute('INSERT CWGroup X: X name "staff"') + cnx.commit() + + def test_insert_security(self): + with self.new_access(u'anon').repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + self.assertRaises(Unauthorized, cnx.commit) + self.assertEqual(cnx.execute('Personne X').rowcount, 1) + + def test_insert_security_2(self): + with self.new_access(u'anon').repo_cnx() as cnx: + cnx.execute("INSERT Affaire X") + self.assertRaises(Unauthorized, cnx.commit) + # anon has no read permission on Affaire entities, so + # rowcount == 0 + self.assertEqual(cnx.execute('Affaire X').rowcount, 0) + + def test_insert_rql_permission(self): + # test user can only add une affaire related to a societe he owns + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("INSERT Affaire X: X sujet 'cool'") + self.assertRaises(Unauthorized, cnx.commit) + # test nothing has actually been inserted + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Affaire X').rowcount, 1) + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("INSERT Affaire X: X sujet 'cool'") + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'") + cnx.commit() + + def test_update_security_1(self): + with self.new_access(u'anon').repo_cnx() as cnx: + # local security check + cnx.execute( "SET X nom 'bidulechouette' WHERE X is Personne") + self.assertRaises(Unauthorized, cnx.commit) + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) + + def test_update_security_2(self): + with self.temporary_permissions(Personne={'read': ('users', 'managers'), + 'add': ('guests', 'users', 'managers')}): + with self.new_access(u'anon').repo_cnx() as cnx: + self.assertRaises(Unauthorized, cnx.execute, + "SET X nom 'bidulechouette' WHERE X is Personne") + # test nothing has actually been inserted + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) + + def test_update_security_3(self): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'biduuule'") + cnx.execute("INSERT Societe X: X nom 'looogilab'") + cnx.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'") + + def test_insert_immutable_attribute_update(self): + with self.admin_access.repo_cnx() as cnx: + cnx.create_entity('Old', 
name=u'Babar') + cnx.commit() + # this should be equivalent + o = cnx.create_entity('Old') + o.cw_set(name=u'Celeste') + cnx.commit() + + def test_update_rql_permission(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + # test user can only update une affaire related to a societe he owns + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("SET X sujet 'pascool' WHERE X is Affaire") + # this won't actually do anything since the selection query won't return anything + cnx.commit() + # to actually get Unauthorized exception, try to update an entity we can read + cnx.execute("SET X nom 'toto' WHERE X is Societe") + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute("INSERT Affaire X: X sujet 'pascool'") + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'") + cnx.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'") + cnx.commit() + + def test_delete_security(self): + # FIXME: sample below fails because we don't detect "owner" can't delete + # user anyway, and since no user with login == 'bidule' exists, no + # exception is raised + #user._groups = {'guests':1} + #self.assertRaises(Unauthorized, + # self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'") + # check local security + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + self.assertRaises(Unauthorized, cnx.execute, "DELETE CWGroup Y WHERE Y name 'staff'") + + def test_delete_rql_permission(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + # test user can only dele une affaire related to a societe he owns + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + # this won't actually do anything since the selection query won't return anything + cnx.execute("DELETE Affaire X") + cnx.commit() + # to actually get Unauthorized exception, try to delete an entity we can read + self.assertRaises(Unauthorized, cnx.execute, "DELETE Societe S") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + cnx.rollback() + cnx.execute("INSERT Affaire X: X sujet 'pascool'") + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'") + cnx.commit() +## # this one should fail since it will try to delete two affaires, one authorized +## # and the other not +## self.assertRaises(Unauthorized, cnx.execute, "DELETE Affaire X") + cnx.execute("DELETE Affaire X WHERE X sujet 'pascool'") + cnx.commit() + + def test_insert_relation_rql_permission(self): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + # should raise Unauthorized since user don't own S though this won't + # actually do anything since the selection query won't return + # anything + cnx.commit() + # to actually get Unauthorized exception, try to insert a relation + # were we can read both entities + rset = cnx.execute('Personne P') + self.assertEqual(len(rset), 1) + ent = rset.get_entity(0, 0) + self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe')) + self.assertRaises(Unauthorized, ent.cw_check_perm, 'update') + self.assertRaises(Unauthorized, + cnx.execute, "SET P travaille S WHERE P is Personne, S is Societe") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + 
cnx.rollback() + # test nothing has actually been inserted: + self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe')) + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") + cnx.commit() + + def test_delete_relation_rql_permission(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + # this won't actually do anything since the selection query won't return anything + cnx.execute("DELETE A concerne S") + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + # to actually get Unauthorized exception, try to delete a relation we can read + eid = cnx.execute("INSERT Affaire X: X sujet 'pascool'")[0][0] + cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', + {'x': eid}) + cnx.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe") + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + self.assertRaises(Unauthorized, cnx.execute, "DELETE A concerne S") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + cnx.rollback() + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") + cnx.commit() + cnx.execute("DELETE A concerne S WHERE S nom 'chouette'") + cnx.commit() + + + def test_user_can_change_its_upassword(self): + with self.admin_access.repo_cnx() as cnx: + ueid = self.create_user(cnx, u'user').eid + with self.new_access(u'user').repo_cnx() as cnx: + cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', + {'x': ueid, 'passwd': b'newpwd'}) + cnx.commit() + self.repo.close(self.repo.connect('user', password='newpwd')) + + def test_user_cant_change_other_upassword(self): + with self.admin_access.repo_cnx() as cnx: + ueid = self.create_user(cnx, u'otheruser').eid + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', + {'x': ueid, 'passwd': b'newpwd'}) + self.assertRaises(Unauthorized, cnx.commit) + + # read security test + + def test_read_base(self): + with self.temporary_permissions(Personne={'read': ('users', 'managers')}): + with self.new_access(u'anon').repo_cnx() as cnx: + self.assertRaises(Unauthorized, + cnx.execute, 'Personne U where U nom "managers"') + + def test_read_erqlexpr_base(self): + with self.admin_access.repo_cnx() as cnx: + eid = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + rset = cnx.execute('Affaire X') + self.assertEqual(rset.rows, []) + self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) + # cache test + self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) + aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + rset = cnx.execute('Any X WHERE X eid %(x)s', {'x': aff2}) + self.assertEqual(rset.rows, [[aff2]]) + # more cache test w/ NOT eid + rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}) + self.assertEqual(rset.rows, [[aff2]]) + rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}) + self.assertEqual(rset.rows, []) + # test can't update an attribute of an entity that 
can't be readen + self.assertRaises(Unauthorized, cnx.execute, + 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}) + + + def test_entity_created_in_transaction(self): + affschema = self.schema['Affaire'] + with self.temporary_permissions(Affaire={'read': affschema.permissions['add']}): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + # entity created in transaction are readable *by eid* + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2})) + # XXX would be nice if it worked + rset = cnx.execute("Affaire X WHERE X sujet 'cool'") + self.assertEqual(len(rset), 0) + self.assertRaises(Unauthorized, cnx.commit) + + def test_read_erqlexpr_has_text1(self): + with self.admin_access.repo_cnx() as cnx: + aff1 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + card1 = cnx.execute("INSERT Card X: X title 'cool'")[0][0] + cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', + {'x': card1}) + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] + cnx.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) + cnx.commit() + self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}) + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2})) + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':card1})) + rset = cnx.execute("Any X WHERE X has_text 'cool'") + self.assertEqual(sorted(eid for eid, in rset.rows), + [card1, aff2]) + + def test_read_erqlexpr_has_text2(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.execute("INSERT Societe X: X nom 'bidule'") + cnx.commit() + with self.temporary_permissions(Personne={'read': ('managers',)}): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + rset = cnx.execute('Any N WHERE N has_text "bidule"') + self.assertEqual(len(rset.rows), 1, rset.rows) + rset = cnx.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_read_erqlexpr_optional_rel(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.execute("INSERT Societe X: X nom 'bidule'") + cnx.commit() + with self.temporary_permissions(Personne={'read': ('managers',)}): + with self.new_access(u'anon').repo_cnx() as cnx: + rset = cnx.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') + self.assertEqual(len(rset.rows), 1, rset.rows) + + def test_read_erqlexpr_aggregat(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') + self.assertEqual(rset.rows, [[0]]) + aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') + self.assertEqual(rset.rows, [[1]]) + rset = cnx.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN') + values = dict(rset) + self.assertEqual(values['Affaire'], 1) + self.assertEqual(values['Societe'], 2) + rset = cnx.execute('Any ETN, COUNT(X) 
GROUPBY ETN WHERE X is ET, ET name ETN ' + 'WITH X BEING ((Affaire X) UNION (Societe X))') + self.assertEqual(len(rset), 2) + values = dict(rset) + self.assertEqual(values['Affaire'], 1) + self.assertEqual(values['Societe'], 2) + + + def test_attribute_security(self): + with self.admin_access.repo_cnx() as cnx: + # only managers should be able to edit the 'test' attribute of Personne entities + eid = cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org', X test TRUE")[0][0] + cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org', X test TRUE") + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org', X test FALSE") + self.assertRaises(Unauthorized, cnx.commit) + eid = cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org'")[0][0] + cnx.commit() + cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}) + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute('INSERT Frozable F: F name "Foo"') + cnx.commit() + cnx.execute('SET F name "Bar" WHERE F is Frozable') + cnx.commit() + cnx.execute('SET F name "BaBar" WHERE F is Frozable') + cnx.execute('SET F frozen True WHERE F is Frozable') + with self.assertRaises(Unauthorized): + cnx.commit() + cnx.rollback() + cnx.execute('SET F frozen True WHERE F is Frozable') + cnx.commit() + cnx.execute('SET F name "Bar" WHERE F is Frozable') + with self.assertRaises(Unauthorized): + cnx.commit() + + def test_attribute_security_rqlexpr(self): + with self.admin_access.repo_cnx() as cnx: + # Note.para attribute editable by managers or if the note is in "todo" state + note = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) + cnx.commit() + note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') + cnx.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) + self.assertRaises(Unauthorized, cnx.commit) + note2 = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) + cnx.commit() + note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone') + cnx.commit() + self.assertEqual(len(cnx.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', + {'x': note2.eid})), + 0) + cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) + self.assertRaises(Unauthorized, cnx.commit) + note2.cw_adapt_to('IWorkflowable').fire_transition('redoit') + cnx.commit() + cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) + cnx.commit() + cnx.execute("INSERT Note X: X something 'A'") + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute("INSERT Note X: X para 'zogzog', X something 'A'") + cnx.commit() + note = cnx.execute("INSERT Note X").get_entity(0,0) + cnx.commit() + note.cw_set(something=u'B') + cnx.commit() + note.cw_set(something=None, para=u'zogzog') + cnx.commit() + + def test_attribute_read_security(self): + # anon not allowed to see users'login, but they can 
see users + login_rdef = self.repo.schema['CWUser'].rdef('login') + with self.temporary_permissions((login_rdef, {'read': ('users', 'managers')}), + CWUser={'read': ('guests', 'users', 'managers')}): + with self.new_access(u'anon').repo_cnx() as cnx: + rset = cnx.execute('CWUser X') + self.assertTrue(rset) + x = rset.get_entity(0, 0) + x.complete() + self.assertEqual(x.login, None) + self.assertTrue(x.creation_date) + x = rset.get_entity(1, 0) + x.complete() + self.assertEqual(x.login, None) + self.assertTrue(x.creation_date) + + def test_yams_inheritance_and_security_bug(self): + with self.temporary_permissions(Division={'read': ('managers', + ERQLExpression('X owned_by U'))}): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + querier = cnx.repo.querier + rqlst = querier.parse('Any X WHERE X is_instance_of Societe') + querier.solutions(cnx, rqlst, {}) + querier._annotate(rqlst) + plan = querier.plan_factory(rqlst, {}, cnx) + plan.preprocess(rqlst) + self.assertEqual( + rqlst.as_string(), + '(Any X WHERE X is IN(Societe, SubDivision)) UNION ' + '(Any X WHERE X is Division, EXISTS(X owned_by %(B)s))') + + +class BaseSchemaSecurityTC(BaseSecurityTC): + """tests related to the base schema permission configuration""" + + def test_user_can_delete_object_he_created(self): + # even if some other user have changed object'state + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + # due to security test, affaire has to concerne a societe the user owns + cnx.execute('INSERT Societe X: X nom "ARCTIA"') + cnx.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"') + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + affaire = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) + affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') + cnx.commit() + self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), + 1) + self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' + 'X owned_by U, U login "admin"')), + 1) # TrInfo at the above state change + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute('DELETE Affaire X WHERE X ref "ARCT01"') + cnx.commit() + self.assertFalse(cnx.execute('Affaire X')) + + def test_users_and_groups_non_readable_by_guests(self): + with self.repo.internal_cnx() as cnx: + admineid = cnx.execute('CWUser U WHERE U login "admin"').rows[0][0] + with self.new_access(u'anon').repo_cnx() as cnx: + anon = cnx.user + # anonymous user can only read itself + rset = cnx.execute('Any L WHERE X owned_by U, U login L') + self.assertEqual([['anon']], rset.rows) + rset = cnx.execute('CWUser X') + self.assertEqual([[anon.eid]], rset.rows) + # anonymous user can read groups (necessary to check allowed transitions for instance) + self.assertTrue(cnx.execute('CWGroup X')) + # should only be able to read the anonymous user, not another one + self.assertRaises(Unauthorized, + cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid}) + rset = cnx.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) + self.assertEqual([[anon.eid]], rset.rows) + # but can't modify it + cnx.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) + self.assertRaises(Unauthorized, cnx.commit) + + def test_in_group_relation(self): + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + rql = u"DELETE U in_group G WHERE U login 'admin'" + self.assertRaises(Unauthorized, cnx.execute, rql) + rql = u"SET U in_group G WHERE U login 'admin', G name 
'users'" + self.assertRaises(Unauthorized, cnx.execute, rql) + + def test_owned_by(self): + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne" + self.assertRaises(Unauthorized, cnx.execute, rql) + + def test_bookmarked_by_guests_security(self): + with self.admin_access.repo_cnx() as cnx: + beid1 = cnx.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0] + beid2 = cnx.execute('INSERT Bookmark B: B path "?vid=index", B title "index", ' + 'B bookmarked_by U WHERE U login "anon"')[0][0] + cnx.commit() + with self.new_access(u'anon').repo_cnx() as cnx: + anoneid = cnx.user.eid + self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' + 'B bookmarked_by U, U eid %s' % anoneid).rows, + [['index', '?vid=index']]) + self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' + 'B bookmarked_by U, U eid %(x)s', {'x': anoneid}).rows, + [['index', '?vid=index']]) + # can read others bookmarks as well + self.assertEqual(cnx.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows, + [[beid1]]) + self.assertRaises(Unauthorized, cnx.execute,'DELETE B bookmarked_by U') + self.assertRaises(Unauthorized, + cnx.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s', + {'x': anoneid, 'b': beid1}) + + def test_ambigous_ordered(self): + with self.new_access(u'anon').repo_cnx() as cnx: + names = [t for t, in cnx.execute('Any N ORDERBY lower(N) WHERE X name N')] + self.assertEqual(names, sorted(names, key=lambda x: x.lower())) + + def test_in_state_without_update_perm(self): + """check a user change in_state without having update permission on the + subject + """ + with self.admin_access.repo_cnx() as cnx: + eid = cnx.execute('INSERT Affaire X: X ref "ARCT01"')[0][0] + cnx.commit() + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: + # needed to remove rql expr granting update perm to the user + affschema = self.schema['Affaire'] + with self.temporary_permissions(Affaire={'update': affschema.get_groups('update'), + 'read': ('users',)}): + self.assertRaises(Unauthorized, + affschema.check_perm, cnx, 'update', eid=eid) + aff = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) + aff.cw_adapt_to('IWorkflowable').fire_transition('abort') + cnx.commit() + # though changing a user state (even logged user) is reserved to managers + user = cnx.user + # XXX wether it should raise Unauthorized or ValidationError is not clear + # the best would probably ValidationError if the transition doesn't exist + # from the current state but Unauthorized if it exists but user can't pass it + self.assertRaises(ValidationError, + user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate') + + def test_trinfo_security(self): + with self.admin_access.repo_cnx() as cnx: + aff = cnx.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) + iworkflowable = aff.cw_adapt_to('IWorkflowable') + cnx.commit() + iworkflowable.fire_transition('abort') + cnx.commit() + # can change tr info comment + cnx.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', + {'c': u'bouh!'}) + cnx.commit() + aff.cw_clear_relation_cache('wf_info_for', 'object') + trinfo = iworkflowable.latest_trinfo() + self.assertEqual(trinfo.comment, 'bouh!') + # but not from_state/to_state + 
aff.cw_clear_relation_cache('wf_info_for', role='object') + self.assertRaises(Unauthorized, cnx.execute, + 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', + {'ti': trinfo.eid}) + self.assertRaises(Unauthorized, cnx.execute, + 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"', + {'ti': trinfo.eid}) + + def test_emailaddress_security(self): + # check for prexisting email adresse + with self.admin_access.repo_cnx() as cnx: + if cnx.execute('Any X WHERE X is EmailAddress'): + rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X') + msg = ['Preexisting email readable by anon found!'] + tmpl = ' - "%s" used by user "%s"' + for i in range(len(rset)): + email, user = rset.get_entity(i, 0), rset.get_entity(i, 1) + msg.append(tmpl % (email.dc_title(), user.dc_title())) + raise RuntimeError('\n'.join(msg)) + # actual test + cnx.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + cnx.execute('INSERT EmailAddress X: X address "anon", ' + 'U use_email X WHERE U login "anon"').get_entity(0, 0) + cnx.commit() + self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 2) + with self.new_access(u'anon').repo_cnx() as cnx: + self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 1) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_serverctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_serverctl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,26 @@ +import os.path as osp +import shutil + +from cubicweb.devtools import testlib, ApptestConfiguration +from cubicweb.server.serverctl import _local_dump, DBDumpCommand +from cubicweb.server.serverconfig import ServerConfiguration + +class ServerCTLTC(testlib.CubicWebTC): + def setUp(self): + super(ServerCTLTC, self).setUp() + self.orig_config_for = ServerConfiguration.config_for + config_for = lambda appid: ApptestConfiguration(appid, apphome=self.datadir) + ServerConfiguration.config_for = staticmethod(config_for) + + def tearDown(self): + ServerConfiguration.config_for = self.orig_config_for + super(ServerCTLTC, self).tearDown() + + def test_dump(self): + DBDumpCommand(None).run([self.appid]) + shutil.rmtree(osp.join(self.config.apphome, 'backup')) + + +if __name__ == '__main__': + from unittest import main + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_sources_native.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_sources_native.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,38 @@ +# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from logilab.common import tempattr + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.sources.native import FTIndexEntityOp + +class NativeSourceTC(CubicWebTC): + + def test_index_entity_consider_do_fti(self): + source = self.repo.system_source + with tempattr(source, 'do_fti', False): + with self.admin_access.repo_cnx() as cnx: + # when do_fti is set to false, call to index_entity (as may be done from hooks) + # should have no effect + source.index_entity(cnx, cnx.user) + self.assertNotIn(cnx.user.eid, FTIndexEntityOp.get_instance(cnx).get_data()) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_sqlutils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_sqlutils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for module cubicweb.server.sqlutils +""" + +import sys + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.server.sqlutils import * + +from cubicweb.devtools.testlib import CubicWebTC + +BASE_CONFIG = { + 'db-driver' : 'Postgres', + 'db-host' : 'crater', + 'db-name' : 'cubicweb2_test', + 'db-user' : 'toto', + 'db-upassword' : 'toto', + } + +class SQLAdapterMixInTC(TestCase): + + def test_init(self): + o = SQLAdapterMixIn(BASE_CONFIG) + self.assertEqual(o.dbhelper.dbencoding, 'UTF-8') + + def test_init_encoding(self): + config = BASE_CONFIG.copy() + config['db-encoding'] = 'ISO-8859-1' + o = SQLAdapterMixIn(config) + self.assertEqual(o.dbhelper.dbencoding, 'ISO-8859-1') + + +class SQLUtilsTC(CubicWebTC): + + def test_group_concat(self): + with self.admin_access.repo_cnx() as cnx: + g = cnx.create_entity('CWGroup', name=u'héhé') + u = cnx.create_entity('CWUser', login=u'toto', upassword=u'', + in_group=g.eid) + rset = cnx.execute(u'Any L,GROUP_CONCAT(G) GROUPBY L WHERE X login L,' + u'X in_group G, G name GN, NOT G name IN ("users", "héhé")') + self.assertEqual([[u'admin', u'3'], [u'anon', u'2']], + rset.rows) + rset = cnx.execute('Any L,GROUP_CONCAT(GN) GROUPBY L WHERE X login L,' + 'X in_group G, G name GN, NOT G name "users"') + self.assertEqual([[u'admin', u'managers'], [u'anon', u'guests'], [u'toto', u'héhé']], + rset.rows) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_ssplanner.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_ssplanner.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,66 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from cubicweb.devtools import TestServerConfiguration, get_test_db_handler +from cubicweb.devtools.repotest import BasePlannerTC, test_plan +from cubicweb.server.ssplanner import SSPlanner + +# keep cnx so it's not garbage collected and the associated session closed +def setUpModule(*args): + global repo, cnx + handler = get_test_db_handler(TestServerConfiguration( + 'data', apphome=SSPlannerTC.datadir)) + handler.build_db_cache() + global repo, cnx + repo, cnx = handler.get_repo_and_cnx() + +def tearDownModule(*args): + global repo, cnx + del repo, cnx + +class SSPlannerTC(BasePlannerTC): + _test = test_plan + + def setUp(self): + self.__class__.repo = repo + BasePlannerTC.setUp(self) + self.planner = SSPlanner(self.o.schema, self.repo.vreg.rqlhelper) + self.system = self.o._repo.system_source + + def tearDown(self): + BasePlannerTC.tearDown(self) + + def test_ordered_ambigous_sol(self): + self._test('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket, State, Folder)', + [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN, X is IN(Basket, State, Folder)', + [{'X': 'Basket', 'XN': 'String'}, + {'X': 'State', 'XN': 'String'}, + {'X': 'Folder', 'XN': 'String'}])], + None, [])]) + + def test_groupeded_ambigous_sol(self): + self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, State, Folder)', + [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN(Basket, State, Folder)', + [{'X': 'Basket', 'XN': 'String'}, + {'X': 'State', 'XN': 'String'}, + {'X': 'Folder', 'XN': 'String'}])], + None, [])]) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_storage.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_storage.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,354 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for module cubicweb.server.sources.storages""" + +from six import PY2 + +from logilab.common.testlib import unittest_main, tag, Tags +from cubicweb.devtools.testlib import CubicWebTC + +from glob import glob +import os +import os.path as osp +import sys +import shutil +import tempfile + +from cubicweb import Binary, QueryError +from cubicweb.predicates import is_instance +from cubicweb.server.sources import storages +from cubicweb.server.hook import Hook + +class DummyBeforeHook(Hook): + __regid__ = 'dummy-before-hook' + __select__ = Hook.__select__ & is_instance('File') + events = ('before_add_entity',) + + def __call__(self): + self._cw.transaction_data['orig_file_value'] = self.entity.data.getvalue() + + +class DummyAfterHook(Hook): + __regid__ = 'dummy-after-hook' + __select__ = Hook.__select__ & is_instance('File') + events = ('after_add_entity',) + + def __call__(self): + # new value of entity.data should be the same as before + oldvalue = self._cw.transaction_data['orig_file_value'] + assert oldvalue == self.entity.data.getvalue() + +class StorageTC(CubicWebTC): + tempdir = None + tags = CubicWebTC.tags | Tags('Storage', 'BFSS') + + def setup_database(self): + self.tempdir = tempfile.mkdtemp() + bfs_storage = storages.BytesFileSystemStorage(self.tempdir) + self.bfs_storage = bfs_storage + storages.set_attribute_storage(self.repo, 'File', 'data', bfs_storage) + storages.set_attribute_storage(self.repo, 'BFSSTestable', 'opt_attr', bfs_storage) + + def tearDown(self): + super(StorageTC, self).tearDown() + storages.unset_attribute_storage(self.repo, 'File', 'data') + del self.bfs_storage + shutil.rmtree(self.tempdir) + + + def create_file(self, cnx, content=b'the-data'): + return cnx.create_entity('File', data=Binary(content), + data_format=u'text/plain', + data_name=u'foo.pdf') + + def fspath(self, cnx, entity): + fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', + {'f': entity.eid})[0][0].getvalue() + return fspath if PY2 else fspath.decode('utf-8') + + def test_bfss_wrong_fspath_usage(self): + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid}) + with self.assertRaises(NotImplementedError) as cm: + cnx.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid}) + self.assertEqual(str(cm.exception), + 'This callback is only available for BytesFileSystemStorage ' + 'managed attribute. 
Is FSPATH() argument BFSS managed?') + + def test_bfss_storage(self): + with self.admin_access.web_request() as req: + cnx = req.cnx + f1 = self.create_file(req) + filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid)) + self.assertEqual(len(filepaths), 1, filepaths) + expected_filepath = filepaths[0] + # file should be read only + self.assertFalse(os.access(expected_filepath, os.W_OK)) + self.assertEqual(open(expected_filepath).read(), 'the-data') + cnx.rollback() + self.assertFalse(osp.isfile(expected_filepath)) + filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid)) + self.assertEqual(len(filepaths), 0, filepaths) + f1 = self.create_file(req) + cnx.commit() + filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid)) + self.assertEqual(len(filepaths), 1, filepaths) + expected_filepath = filepaths[0] + self.assertEqual(open(expected_filepath).read(), 'the-data') + + # add f1 back to the entity cache with req as _cw + f1 = req.entity_from_eid(f1.eid) + f1.cw_set(data=Binary(b'the new data')) + cnx.rollback() + self.assertEqual(open(expected_filepath).read(), 'the-data') + f1.cw_delete() + self.assertTrue(osp.isfile(expected_filepath)) + cnx.rollback() + self.assertTrue(osp.isfile(expected_filepath)) + f1.cw_delete() + cnx.commit() + self.assertFalse(osp.isfile(expected_filepath)) + + def test_bfss_sqlite_fspath(self): + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name)) + base, ext = osp.splitext(expected_filepath) + self.assertTrue(self.fspath(cnx, f1).startswith(base)) + self.assertTrue(self.fspath(cnx, f1).endswith(ext)) + + def test_bfss_fs_importing_doesnt_touch_path(self): + with self.admin_access.repo_cnx() as cnx: + cnx.transaction_data['fs_importing'] = True + filepath = osp.abspath(__file__) + f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())), + data_format=u'text/plain', data_name=u'foo') + self.assertEqual(self.fspath(cnx, f1), filepath) + + def test_source_storage_transparency(self): + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook): + self.create_file(cnx) + + def test_source_mapped_attribute_error_cases(self): + with self.admin_access.repo_cnx() as cnx: + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X WHERE X data ~= "hop", X is File') + self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction') + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X, Y WHERE X data D, Y data D, ' + 'NOT X identity Y, X is File, Y is File') + self.assertEqual(str(cm.exception), "can't use D as a restriction variable") + # query returning mix of mapped / regular attributes (only file.data + # mapped, not image.data for instance) + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X WITH X BEING (' + ' (Any NULL)' + ' UNION ' + ' (Any D WHERE X data D, X is File)' + ')') + self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') + with self.assertRaises(QueryError) as cm: + cnx.execute('(Any D WHERE X data D, X is File)' + ' UNION ' + '(Any D WHERE X title D, X is Bookmark)') + self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') + + storages.set_attribute_storage(self.repo, 'State', 'name', + storages.BytesFileSystemStorage(self.tempdir)) + try: + with self.assertRaises(QueryError) as cm: + cnx.execute('Any D WHERE X name D, X is 
IN (State, Transition)') + self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') + finally: + storages.unset_attribute_storage(self.repo, 'State', 'name') + + def test_source_mapped_attribute_advanced(self): + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + rset = cnx.execute('Any X,D WITH D,X BEING (' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ' UNION ' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ')', {'x': f1.eid}) + self.assertEqual(len(rset), 2) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[1][0], f1.eid) + self.assertEqual(rset[0][1].getvalue(), b'the-data') + self.assertEqual(rset[1][1].getvalue(), b'the-data') + rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', + {'x': f1.eid}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[0][1], len('the-data')) + rset = cnx.execute('Any X,LENGTH(D) WITH D,X BEING (' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ' UNION ' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ')', {'x': f1.eid}) + self.assertEqual(len(rset), 2) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[1][0], f1.eid) + self.assertEqual(rset[0][1], len('the-data')) + self.assertEqual(rset[1][1], len('the-data')) + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D', + {'x': f1.eid}) + self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute') + + + def test_bfss_fs_importing_transparency(self): + with self.admin_access.repo_cnx() as cnx: + cnx.transaction_data['fs_importing'] = True + filepath = osp.abspath(__file__) + f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())), + data_format=u'text/plain', data_name=u'foo') + cw_value = f1.data.getvalue() + fs_value = open(filepath, 'rb').read() + if cw_value != fs_value: + self.fail('cw value %r is different from file content' % cw_value) + + @tag('update') + def test_bfss_update_with_existing_data(self): + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary(b'some data'), + data_format=u'text/plain', data_name=u'foo') + # NOTE: do not use cw_set() which would automatically + # update f1's local dict. We want the pure rql version to work + cnx.execute('SET F data %(d)s WHERE F eid %(f)s', + {'d': Binary(b'some other data'), 'f': f1.eid}) + self.assertEqual(f1.data.getvalue(), b'some other data') + cnx.commit() + f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) + self.assertEqual(f2.data.getvalue(), b'some other data') + + @tag('update', 'extension', 'commit') + def test_bfss_update_with_different_extension_commited(self): + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary(b'some data'), + data_format=u'text/plain', data_name=u'foo.txt') + # NOTE: do not use cw_set() which would automatically + # update f1's local dict. 
We want the pure rql version to work + cnx.commit() + old_path = self.fspath(cnx, f1) + self.assertTrue(osp.isfile(old_path)) + self.assertEqual(osp.splitext(old_path)[1], '.txt') + cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' + 'F data_format %(df)s WHERE F eid %(f)s', + {'d': Binary(b'some other data'), 'f': f1.eid, + 'dn': u'bar.jpg', 'df': u'image/jpeg'}) + cnx.commit() + # the new file exists with correct extension + # the old file is dead + f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) + new_path = self.fspath(cnx, f2) + self.assertFalse(osp.isfile(old_path)) + self.assertTrue(osp.isfile(new_path)) + self.assertEqual(osp.splitext(new_path)[1], '.jpg') + + @tag('update', 'extension', 'rollback') + def test_bfss_update_with_different_extension_rolled_back(self): + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary(b'some data'), + data_format=u'text/plain', data_name=u'foo.txt') + # NOTE: do not use cw_set() which would automatically + # update f1's local dict. We want the pure rql version to work + cnx.commit() + old_path = self.fspath(cnx, f1) + old_data = f1.data.getvalue() + self.assertTrue(osp.isfile(old_path)) + self.assertEqual(osp.splitext(old_path)[1], '.txt') + cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' + 'F data_format %(df)s WHERE F eid %(f)s', + {'d': Binary(b'some other data'), + 'f': f1.eid, + 'dn': u'bar.jpg', + 'df': u'image/jpeg'}) + cnx.rollback() + # the new file exists with correct extension + # the old file is dead + f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', + {'f': f1.eid}).get_entity(0, 0) + new_path = self.fspath(cnx, f2) + new_data = f2.data.getvalue() + self.assertTrue(osp.isfile(new_path)) + self.assertEqual(osp.splitext(new_path)[1], '.txt') + self.assertEqual(old_path, new_path) + self.assertEqual(old_data, new_data) + + @tag('update', 'NULL') + def test_bfss_update_to_None(self): + with self.admin_access.repo_cnx() as cnx: + f = cnx.create_entity('Affaire', opt_attr=Binary(b'toto')) + cnx.commit() + f.cw_set(opt_attr=None) + cnx.commit() + + @tag('fs_importing', 'update') + def test_bfss_update_with_fs_importing(self): + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary(b'some data'), + data_format=u'text/plain', + data_name=u'foo') + old_fspath = self.fspath(cnx, f1) + cnx.transaction_data['fs_importing'] = True + new_fspath = osp.join(self.tempdir, 'newfile.txt') + open(new_fspath, 'w').write('the new data') + cnx.execute('SET F data %(d)s WHERE F eid %(f)s', + {'d': Binary(new_fspath.encode(sys.getfilesystemencoding())), 'f': f1.eid}) + cnx.commit() + self.assertEqual(f1.data.getvalue(), b'the new data') + self.assertEqual(self.fspath(cnx, f1), new_fspath) + self.assertFalse(osp.isfile(old_fspath)) + + @tag('fsimport') + def test_clean(self): + with self.admin_access.repo_cnx() as cnx: + fsimport = storages.fsimport + td = cnx.transaction_data + self.assertNotIn('fs_importing', td) + with fsimport(cnx): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertNotIn('fs_importing', td) + + @tag('fsimport') + def test_true(self): + with self.admin_access.repo_cnx() as cnx: + fsimport = storages.fsimport + td = cnx.transaction_data + td['fs_importing'] = True + with fsimport(cnx): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertTrue(td['fs_importing']) + + @tag('fsimport') + def test_False(self): + with self.admin_access.repo_cnx() as cnx: + 
fsimport = storages.fsimport + td = cnx.transaction_data + td['fs_importing'] = False + with fsimport(cnx): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertFalse(td['fs_importing']) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_tools.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_tools.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,30 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from logilab.common.testlib import TestCase, unittest_main + +class ImportTC(TestCase): + def test(self): + # the minimal test: module is importable... + import cubicweb.server.checkintegrity + import cubicweb.server.serverctl + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_undo.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_undo.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,471 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from six import text_type + +from cubicweb import ValidationError +from cubicweb.devtools.testlib import CubicWebTC +import cubicweb.server.session +from cubicweb.server.session import Connection as OldConnection + +from cubicweb.server.sources.native import UndoTransactionException, _UndoException + +from cubicweb.transaction import NoSuchTransaction + +class UndoableTransactionTC(CubicWebTC): + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.totoeid = self.create_user(cnx, 'toto', + password='toto', + groups=('users',), + commit=False).eid + self.txuuid = cnx.commit() + + def toto(self, cnx): + return cnx.entity_from_eid(self.totoeid) + + def setUp(self): + class Connection(OldConnection): + """Force undo feature to be turned on in all case""" + undo_actions = property(lambda tx: True, lambda x, y:None) + cubicweb.server.session.Connection = Connection + super(UndoableTransactionTC, self).setUp() + + def tearDown(self): + cubicweb.server.session.Connection = OldConnection + super(UndoableTransactionTC, self).tearDown() + + def check_transaction_deleted(self, cnx, txuuid): + # also check transaction actions have been properly deleted + cu = cnx.system_sql( + "SELECT * from tx_entity_actions WHERE tx_uuid='%s'" % txuuid) + self.assertFalse(cu.fetchall()) + cu = cnx.system_sql( + "SELECT * from tx_relation_actions WHERE tx_uuid='%s'" % txuuid) + self.assertFalse(cu.fetchall()) + + def assertUndoTransaction(self, cnx, txuuid, expected_errors=None): + if expected_errors is None : + expected_errors = [] + try: + cnx.undo_transaction(txuuid) + except UndoTransactionException as exn: + errors = exn.errors + else: + errors = [] + self.assertEqual(errors, expected_errors) + + def test_undo_api(self): + self.assertTrue(self.txuuid) + # test transaction api + with self.admin_access.client_cnx() as cnx: + tx_actions = cnx.transaction_actions(self.txuuid) + self.assertEqual(len(tx_actions), 2, tx_actions) + self.assertRaises(NoSuchTransaction, + cnx.transaction_info, 'hop') + self.assertRaises(NoSuchTransaction, + cnx.transaction_actions, 'hop') + self.assertRaises(NoSuchTransaction, + cnx.undo_transaction, 'hop') + txinfo = cnx.transaction_info(self.txuuid) + self.assertTrue(txinfo.datetime) + self.assertEqual(txinfo.user_eid, cnx.user.eid) + self.assertEqual(txinfo.user().login, 'admin') + actions = txinfo.actions_list() + self.assertEqual(len(actions), 2) + actions = txinfo.actions_list(public=False) + self.assertEqual(len(actions), 6) + a1 = actions[0] + self.assertEqual(a1.action, 'C') + self.assertEqual(a1.eid, self.totoeid) + self.assertEqual(a1.etype,'CWUser') + self.assertEqual(a1.ertype, 'CWUser') + self.assertEqual(a1.changes, None) + self.assertEqual(a1.public, True) + self.assertEqual(a1.order, 1) + a4 = actions[3] + self.assertEqual(a4.action, 'A') + self.assertEqual(a4.rtype, 'in_group') + self.assertEqual(a4.ertype, 'in_group') + self.assertEqual(a4.eid_from, self.totoeid) + self.assertEqual(a4.eid_to, self.toto(cnx).in_group[0].eid) + self.assertEqual(a4.order, 4) + for i, rtype in ((1, 'owned_by'), (2, 'owned_by')): + a = actions[i] + self.assertEqual(a.action, 'A') + self.assertEqual(a.eid_from, self.totoeid) + self.assertEqual(a.rtype, rtype) + self.assertEqual(a.order, i+1) + self.assertEqual(set((actions[4].rtype, actions[5].rtype)), + set(('in_state', 'created_by'))) + for i in (4, 5): + a = actions[i] + self.assertEqual(a.action, 'A') + self.assertEqual(a.eid_from, self.totoeid) + self.assertEqual(a.order, i+1) + + # test undoable_transactions 
+ txs = cnx.undoable_transactions() + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, self.txuuid) + # test transaction_info / undoable_transactions security + with self.new_access('anon').client_cnx() as cnx: + self.assertRaises(NoSuchTransaction, + cnx.transaction_info, self.txuuid) + self.assertRaises(NoSuchTransaction, + cnx.transaction_actions, self.txuuid) + self.assertRaises(NoSuchTransaction, + cnx.undo_transaction, self.txuuid) + txs = cnx.undoable_transactions() + self.assertEqual(len(txs), 0) + + def test_undoable_transactions(self): + with self.admin_access.client_cnx() as cnx: + toto = self.toto(cnx) + e = cnx.create_entity('EmailAddress', + address=u'toto@logilab.org', + reverse_use_email=toto) + txuuid1 = cnx.commit() + toto.cw_delete() + txuuid2 = cnx.commit() + undoable_transactions = cnx.undoable_transactions + txs = undoable_transactions(action='D') + self.assertEqual(len(txs), 1, txs) + self.assertEqual(txs[0].uuid, txuuid2) + txs = undoable_transactions(action='C') + self.assertEqual(len(txs), 2, txs) + self.assertEqual(txs[0].uuid, txuuid1) + self.assertEqual(txs[1].uuid, self.txuuid) + txs = undoable_transactions(eid=toto.eid) + self.assertEqual(len(txs), 3) + self.assertEqual(txs[0].uuid, txuuid2) + self.assertEqual(txs[1].uuid, txuuid1) + self.assertEqual(txs[2].uuid, self.txuuid) + txs = undoable_transactions(etype='CWUser') + self.assertEqual(len(txs), 2) + txs = undoable_transactions(etype='CWUser', action='C') + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, self.txuuid) + txs = undoable_transactions(etype='EmailAddress', action='D') + self.assertEqual(len(txs), 0) + txs = undoable_transactions(etype='EmailAddress', action='D', + public=False) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, txuuid2) + txs = undoable_transactions(eid=toto.eid, action='R', public=False) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, txuuid2) + + def test_undo_deletion_base(self): + with self.admin_access.client_cnx() as cnx: + toto = self.toto(cnx) + e = cnx.create_entity('EmailAddress', + address=u'toto@logilab.org', + reverse_use_email=toto) + # entity with inlined relation + p = cnx.create_entity('CWProperty', + pkey=u'ui.default-text-format', + value=u'text/rest', + for_user=toto) + cnx.commit() + txs = cnx.undoable_transactions() + self.assertEqual(len(txs), 2) + toto.cw_delete() + txuuid = cnx.commit() + actions = cnx.transaction_info(txuuid).actions_list() + self.assertEqual(len(actions), 1) + toto.cw_clear_all_caches() + e.cw_clear_all_caches() + self.assertUndoTransaction(cnx, txuuid) + undotxuuid = cnx.commit() + self.assertEqual(undotxuuid, None) # undo not undoable + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) + self.assertTrue(cnx.execute('Any X WHERE X has_text "toto@logilab"')) + self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated') + self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org') + self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user], + [('ui.default-text-format', 'text/rest')]) + self.assertEqual([g.name for g in toto.in_group], + ['users']) + self.assertEqual([et.name for et in toto.related('is', entities=True)], + ['CWUser']) + self.assertEqual([et.name for et in toto.is_instance_of], + ['CWUser']) + # undoing shouldn't be visble in undoable transaction, and the undone + # transaction should be removed + txs = 
cnx.undoable_transactions() + self.assertEqual(len(txs), 2) + self.assertRaises(NoSuchTransaction, + cnx.transaction_info, txuuid) + with self.admin_access.repo_cnx() as cnx: + self.check_transaction_deleted(cnx, txuuid) + # the final test: check we can login with the previously deleted user + with self.new_access('toto').client_cnx(): + pass + + def test_undo_deletion_integrity_1(self): + with self.admin_access.client_cnx() as cnx: + # 'Personne fiche Card with' '??' cardinality + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + c.cw_delete() + txuuid = cnx.commit() + c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') + p.cw_set(fiche=c2) + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + "Can't restore object relation fiche to entity " + "%s which is already linked using this relation." % p.eid]) + cnx.commit() + p.cw_clear_all_caches() + self.assertEqual(p.fiche[0].eid, c2.eid) + # we restored the card + self.assertTrue(cnx.entity_from_eid(c.eid)) + + def test_undo_deletion_integrity_2(self): + with self.admin_access.client_cnx() as cnx: + # test validation error raised if we can't restore a required relation + g = cnx.create_entity('CWGroup', name=u'staff') + cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid}) + self.toto(cnx).cw_set(in_group=g) + cnx.commit() + self.toto(cnx).cw_delete() + txuuid = cnx.commit() + g.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + u"Can't restore relation in_group, object entity " + "%s doesn't exist anymore." % g.eid]) + with self.assertRaises(ValidationError) as cm: + cnx.commit() + cm.exception.translate(text_type) + self.assertEqual(cm.exception.entity, self.totoeid) + self.assertEqual(cm.exception.errors, + {'in_group-subject': u'at least one relation in_group is ' + 'required on CWUser (%s)' % self.totoeid}) + + def test_undo_creation_1(self): + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': c.eid})) + self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': p.eid})) + self.assertFalse(cnx.execute('Any X,Y WHERE X fiche Y')) + with self.admin_access.repo_cnx() as cnx: + for eid in (p.eid, c.eid): + self.assertFalse(cnx.system_sql( + 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall()) + self.assertFalse(cnx.system_sql( + 'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall()) + # added by sql in hooks (except when using dataimport) + self.assertFalse(cnx.system_sql( + 'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall()) + self.assertFalse(cnx.system_sql( + 'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall()) + self.check_transaction_deleted(cnx, txuuid) + + def test_undo_creation_integrity_1(self): + with self.admin_access.client_cnx() as cnx: + tutu = self.create_user(cnx, 'tutu', commit=False) + txuuid = cnx.commit() + email = cnx.create_entity('EmailAddress', address=u'tutu@cubicweb.org') + prop = cnx.create_entity('CWProperty', pkey=u'ui.default-text-format', + value=u'text/html') + tutu.cw_set(use_email=email, reverse_for_user=prop) + cnx.commit() + with self.assertRaises(ValidationError) as cm: + cnx.undo_transaction(txuuid) + self.assertEqual(cm.exception.entity, tutu.eid) 
+ self.assertEqual(cm.exception.errors, + {None: 'some later transaction(s) touch entity, undo them first'}) + + def test_undo_creation_integrity_2(self): + with self.admin_access.client_cnx() as cnx: + g = cnx.create_entity('CWGroup', name=u'staff') + txuuid = cnx.commit() + cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid}) + self.toto(cnx).cw_set(in_group=g) + cnx.commit() + with self.assertRaises(ValidationError) as cm: + cnx.undo_transaction(txuuid) + self.assertEqual(cm.exception.entity, g.eid) + self.assertEqual(cm.exception.errors, + {None: 'some later transaction(s) touch entity, undo them first'}) + # self.assertEqual(errors, + # [u"Can't restore relation in_group, object entity " + # "%s doesn't exist anymore." % g.eid]) + # with self.assertRaises(ValidationError) as cm: cnx.commit() + # self.assertEqual(cm.exception.entity, self.totoeid) + # self.assertEqual(cm.exception.errors, + # {'in_group-subject': u'at least one relation in_group is ' + # 'required on CWUser (%s)' % self.totoeid}) + + # test implicit 'replacement' of an inlined relation + + def test_undo_inline_rel_remove_ok(self): + """Undo remove relation Personne (?) fiche (?) Card + + NB: processed by `_undo_r` as expected""" + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + p.cw_set(fiche=None) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + p.cw_clear_all_caches() + self.assertEqual(p.fiche[0].eid, c.eid) + + def test_undo_inline_rel_remove_ko(self): + """Restore an inlined relation to a deleted entity, with an error. + + NB: processed by `_undo_r` as expected""" + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + p.cw_set(fiche=None) + txuuid = cnx.commit() + c.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + "Can't restore relation fiche, object entity %d doesn't exist anymore." % c.eid]) + cnx.commit() + p.cw_clear_all_caches() + self.assertFalse(p.fiche) + with self.admin_access.repo_cnx() as cnx: + self.assertIsNone(cnx.system_sql( + 'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0]) + + def test_undo_inline_rel_add_ok(self): + """Undo add relation Personne (?) fiche (?) Card + + Caution processed by `_undo_u`, not `_undo_a` !""" + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis') + cnx.commit() + p.cw_set(fiche=c) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + p.cw_clear_all_caches() + self.assertFalse(p.fiche) + + def test_undo_inline_rel_delete_ko(self): + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + txuuid = cnx.commit() + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + integrityerror = self.repo.sources_by_uri['system'].dbhelper.dbapi_module.IntegrityError + with self.assertRaises(integrityerror): + cnx.undo_transaction(txuuid) + + + def test_undo_inline_rel_add_ko(self): + """Undo add relation Personne (?) fiche (?) 
Card + + Caution processed by `_undo_u`, not `_undo_a` !""" + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis') + cnx.commit() + p.cw_set(fiche=c) + txuuid = cnx.commit() + c.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + + def test_undo_inline_rel_replace_ok(self): + """Undo changing relation Personne (?) fiche (?) Card + + Caution processed by `_undo_u` """ + with self.admin_access.client_cnx() as cnx: + c1 = cnx.create_entity('Card', title=u'hop', content=u'hop') + c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c1) + cnx.commit() + p.cw_set(fiche=c2) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + p.cw_clear_all_caches() + self.assertEqual(p.fiche[0].eid, c1.eid) + + def test_undo_inline_rel_replace_ko(self): + """Undo changing relation Personne (?) fiche (?) Card, with an error + + Caution processed by `_undo_u` """ + with self.admin_access.client_cnx() as cnx: + c1 = cnx.create_entity('Card', title=u'hop', content=u'hop') + c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c1) + cnx.commit() + p.cw_set(fiche=c2) + txuuid = cnx.commit() + c1.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + "can't restore entity %s of type Personne, target of fiche (eid %s)" + " does not exist any longer" % (p.eid, c1.eid)]) + cnx.commit() + p.cw_clear_all_caches() + self.assertFalse(p.fiche) + + def test_undo_attr_update_ok(self): + with self.admin_access.client_cnx() as cnx: + p = cnx.create_entity('Personne', nom=u'toto') + cnx.commit() + p.cw_set(nom=u'titi') + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + p.cw_clear_all_caches() + self.assertEqual(p.nom, u'toto') + + def test_undo_attr_update_ko(self): + with self.admin_access.client_cnx() as cnx: + p = cnx.create_entity('Personne', nom=u'toto') + cnx.commit() + p.cw_set(nom=u'titi') + txuuid = cnx.commit() + p.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + u"can't restore state of entity %s, it has been deleted inbetween" % p.eid]) + + +class UndoExceptionInUnicode(CubicWebTC): + + # problem occurs in string manipulation for python < 2.6 + def test___unicode__method(self): + u = _UndoException(u"voilà") + self.assertIsInstance(text_type(u), text_type) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/test/unittest_utils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/test/unittest_utils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,43 @@ +# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.server import utils + +class UtilsTC(TestCase): + def test_crypt(self): + for hash in ( + utils.crypt_password('xxx'), # default sha512 + b'ab$5UsKFxRKKN.d8iBIFBnQ80', # custom md5 + b'ab4Vlm81ZUHlg', # DES + ): + self.assertEqual(utils.crypt_password('xxx', hash), hash) + self.assertEqual(utils.crypt_password(u'xxx', hash), hash) + self.assertEqual(utils.crypt_password(u'xxx', hash.decode('ascii')), hash.decode('ascii')) + self.assertEqual(utils.crypt_password('yyy', hash), b'') + + # accept any password for empty hashes (is it a good idea?) + self.assertEqual(utils.crypt_password('xxx', ''), '') + self.assertEqual(utils.crypt_password('yyy', ''), '') + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/server/utils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/server/utils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,243 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""Some utilities for the CubicWeb server.""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import sys +import logging +from threading import Timer, Thread +from getpass import getpass + +from six import PY2, text_type +from six.moves import input + +from passlib.utils import handlers as uh, to_hash_str +from passlib.context import CryptContext + +from cubicweb.md5crypt import crypt as md5crypt + + +class CustomMD5Crypt(uh.HasSalt, uh.GenericHandler): + name = 'cubicwebmd5crypt' + setting_kwds = ('salt',) + min_salt_size = 0 + max_salt_size = 8 + salt_chars = uh.H64_CHARS + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, u'') + if chk is None: + raise ValueError('missing checksum') + return cls(salt=salt, checksum=chk) + + def to_string(self): + return to_hash_str(u'%s$%s' % (self.salt, self.checksum or u'')) + + # passlib 1.5 wants calc_checksum, 1.6 wants _calc_checksum + def calc_checksum(self, secret): + return md5crypt(secret, self.salt.encode('ascii')).decode('utf-8') + _calc_checksum = calc_checksum + +_CRYPTO_CTX = CryptContext(['sha512_crypt', CustomMD5Crypt, 'des_crypt', 'ldap_salted_sha1'], + deprecated=['cubicwebmd5crypt', 'des_crypt']) +verify_and_update = _CRYPTO_CTX.verify_and_update + +def crypt_password(passwd, salt=None): + """return the encrypted password using the given salt or a generated one + """ + if salt is None: + return _CRYPTO_CTX.encrypt(passwd).encode('ascii') + # empty hash, accept any password for backwards compat + if salt == '': + return salt + try: + if _CRYPTO_CTX.verify(passwd, salt): + return salt + except ValueError: # e.g. couldn't identify hash + pass + # wrong password + return b'' + + +def eschema_eid(cnx, eschema): + """get eid of the CWEType entity for the given yams type. You should use + this because when schema has been loaded from the file-system, not from the + database, (e.g. during tests), eschema.eid is not set. 
+ """ + if eschema.eid is None: + eschema.eid = cnx.execute( + 'Any X WHERE X is CWEType, X name %(name)s', + {'name': text_type(eschema)})[0][0] + return eschema.eid + + +DEFAULT_MSG = 'we need a manager connection on the repository \ +(the server doesn\'t have to run, even should better not)' + +def manager_userpasswd(user=None, msg=DEFAULT_MSG, confirm=False, + passwdmsg='password'): + if not user: + if msg: + print(msg) + while not user: + user = input('login: ') + if PY2: + user = unicode(user, sys.stdin.encoding) + passwd = getpass('%s: ' % passwdmsg) + if confirm: + while True: + passwd2 = getpass('confirm password: ') + if passwd == passwd2: + break + print('password doesn\'t match') + passwd = getpass('password: ') + # XXX decode password using stdin encoding then encode it using appl'encoding + return user, passwd + + +_MARKER = object() +def func_name(func): + name = getattr(func, '__name__', _MARKER) + if name is _MARKER: + name = getattr(func, 'func_name', _MARKER) + if name is _MARKER: + name = repr(func) + return name + +class LoopTask(object): + """threaded task restarting itself once executed""" + def __init__(self, tasks_manager, interval, func, args): + if interval < 0: + raise ValueError('Loop task interval must be >= 0 ' + '(current value: %f for %s)' % \ + (interval, func_name(func))) + self._tasks_manager = tasks_manager + self.interval = interval + def auto_restart_func(self=self, func=func, args=args): + restart = True + try: + func(*args) + except Exception: + logger = logging.getLogger('cubicweb.repository') + logger.exception('Unhandled exception in LoopTask %s', self.name) + raise + except BaseException: + restart = False + finally: + if restart and tasks_manager.running: + self.start() + self.func = auto_restart_func + self.name = func_name(func) + + def __str__(self): + return '%s (%s seconds)' % (self.name, self.interval) + + def start(self): + self._t = Timer(self.interval, self.func) + self._t.setName('%s-%s[%d]' % (self._t.getName(), self.name, self.interval)) + self._t.start() + + def cancel(self): + self._t.cancel() + + def join(self): + if self._t.isAlive(): + self._t.join() + + +class RepoThread(Thread): + """subclass of thread so it auto remove itself from a given list once + executed + """ + def __init__(self, target, running_threads): + def auto_remove_func(self=self, func=target): + try: + func() + except Exception: + logger = logging.getLogger('cubicweb.repository') + logger.exception('Unhandled exception in RepoThread %s', self._name) + raise + finally: + self.running_threads.remove(self) + Thread.__init__(self, target=auto_remove_func) + self.running_threads = running_threads + self._name = func_name(target) + + def start(self): + self.running_threads.append(self) + self.daemon = True + Thread.start(self) + + def getName(self): + return '%s(%s)' % (self._name, Thread.getName(self)) + +class TasksManager(object): + """Object dedicated manage background task""" + + def __init__(self): + self.running = False + self._tasks = [] + self._looping_tasks = [] + + def add_looping_task(self, interval, func, *args): + """register a function to be called every `interval` seconds. + + If interval is negative, no looping task is registered. 
+ """ + if interval < 0: + self.debug('looping task %s ignored due to interval %f < 0', + func_name(func), interval) + return + task = LoopTask(self, interval, func, args) + if self.running: + self._start_task(task) + else: + self._tasks.append(task) + + def _start_task(self, task): + self._looping_tasks.append(task) + self.info('starting task %s with interval %.2fs', task.name, + task.interval) + task.start() + + def start(self): + """Start running looping task""" + assert self.running == False # bw compat purpose maintly + while self._tasks: + task = self._tasks.pop() + self._start_task(task) + self.running = True + + def stop(self): + """Stop all running task. + + returns when all task have been cancel and none are running anymore""" + if self.running: + while self._looping_tasks: + looptask = self._looping_tasks.pop() + self.info('canceling task %s...', looptask.name) + looptask.cancel() + looptask.join() + self.info('task %s finished', looptask.name) + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(TasksManager, getLogger('cubicweb.repository')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/DISTNAME.spec.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/DISTNAME.spec.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,47 @@ +# for el5, force use of python2.6 +%%if 0%%{?el5} +%%define python python26 +%%define __python /usr/bin/python2.6 +%%else +%%define python python +%%define __python /usr/bin/python +%%endif +%%{!?_python_sitelib: %%define _python_sitelib %%(%%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")} + +Name: %(distname)s +Version: 0.1.0 +Release: logilab.1%%{?dist} +Summary: %(shortdesc)s +Group: Applications/Internet +License: %(license)s +Source0: %(distname)s-%%{version}.tar.gz + +BuildArch: noarch +BuildRoot: %%{_tmppath}/%%{name}-%%{version}-%%{release}-buildroot + +BuildRequires: %%{python} %%{python}-setuptools +Requires: cubicweb >= %(version)s +Requires: %%{python}-six >= 1.4.0 + +%%description +%(longdesc)s + +%%prep +%%setup -q -n %(distname)s-%%{version} +%%if 0%%{?el5} +# change the python version in shebangs +find . -name '*.py' -type f -print0 | xargs -0 sed -i '1,3s;^#!.*python.*$;#! 
/usr/bin/python2.6;' +%%endif + +%%install +NO_SETUPTOOLS=1 %%{__python} setup.py --quiet install --no-compile --prefix=%%{_prefix} --root="$RPM_BUILD_ROOT" +# remove generated .egg-info file +rm -rf $RPM_BUILD_ROOT/usr/lib/python* + + +%%clean +rm -rf $RPM_BUILD_ROOT + +%%files +%%defattr(-, root, root) +/* diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/MANIFEST.in --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/MANIFEST.in Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,5 @@ +include *.py +include */*.py +recursive-include data *.gif *.png *.ico *.css *.js +recursive-include i18n *.po +recursive-include wdoc * diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/README.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/README.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,3 @@ +Summary +------- +%(longdesc)s diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/__init__.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/__init__.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +"""cubicweb-%(cubename)s application package + +%(longdesc)s +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/__pkginfo__.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/__pkginfo__.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,49 @@ +# pylint: disable=W0622 +"""%(distname)s application packaging information""" + +modname = '%(cubename)s' +distname = '%(distname)s' + +numversion = (0, 1, 0) +version = '.'.join(str(num) for num in numversion) + +license = '%(license)s' +author = '%(author)s' +author_email = '%(author-email)s' +description = '%(shortdesc)s' +web = 'http://www.cubicweb.org/project/%%s' %% distname + +__depends__ = %(dependencies)s +__recommends__ = {} + +classifiers = [ + 'Environment :: Web Environment', + 'Framework :: CubicWeb', + 'Programming Language :: Python', + 'Programming Language :: JavaScript', + ] + +from os import listdir as _listdir +from os.path import join, isdir +from glob import glob + +THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) + + +def listdir(dirpath): + return [join(dirpath, fname) for fname in _listdir(dirpath) + if fname[0] != '.' 
and not fname.endswith('.pyc') + and not fname.endswith('~') + and not isdir(join(dirpath, fname))] + +data_files = [ + # common files + [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], + ] +# check for possible extended cube layout +for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', + 'wdoc', 'i18n', 'migration'): + if isdir(dname): + data_files.append([join(THIS_CUBE_DIR, dname), listdir(dname)]) +# Note: here, you'll need to add subdirectories if you want +# them to be included in the debian package diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/data/cubes.CUBENAME.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/data/cubes.CUBENAME.css Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +/* cube-specific CSS */ diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/data/cubes.CUBENAME.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/data/cubes.CUBENAME.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +// This contains cube-specific javascript \ No newline at end of file diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/debian/changelog.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/debian/changelog.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +%(distname)s (0.1.0-1) unstable; urgency=low + + * initial release + + -- + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/debian/compat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/debian/compat Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +7 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/debian/control.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/debian/control.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,24 @@ +Source: %(distname)s +Section: web +Priority: optional +Maintainer: %(author)s <%(author-email)s> +Build-Depends: + debhelper (>= 7), + python (>= 2.6.5), +Standards-Version: 3.9.3 +X-Python-Version: >= 2.6 + +Package: %(distname)s +Architecture: all +Depends: + cubicweb-common (>= %(version)s), + python-six (>= 1.4.0), + ${python:Depends}, + ${misc:Depends}, +Description: %(shortdesc)s + CubicWeb is a semantic web application framework. + . + %(longdesc)s + . + This package will install all the components you need to run the + %(distname)s application (cube :).. diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/debian/copyright.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/debian/copyright.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,8 @@ +Upstream Author: + + %(author)s <%(author-email)s> + +Copyright: + +Copyright (c) %(year)s %(author)s. +%(author-web-site)s -- mailto:%(author-email)s diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/debian/rules --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/debian/rules Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +#!/usr/bin/make -f + +export NO_SETUPTOOLS=1 + +%: + dh $@ --with python2 + +override_dh_python2: + dh_python2 -i /usr/share/cubicweb diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/entities.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/entities.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. 
+# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s entity's classes""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/hooks.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/hooks.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s specific hooks and operations""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/i18n/en.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/i18n/en.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n" + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/i18n/es.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/i18n/es.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n" + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/i18n/fr.po --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/i18n/fr.po Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n" + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/migration/postcreate.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/migration/postcreate.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s postcreate script, executed at instance creation time or when +the cube is added to an existing instance. + +You could setup site properties or a workflow here for example. +""" + +# Example of site property change +#set_property('ui.site-title', "") diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/migration/precreate.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/migration/precreate.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s precreate script, executed at instance creation time or when +the cube is added to an existing instance, before the schema is serialized. + +This is typically to create groups referenced by the cube'schema. +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/schema.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/schema.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. 
+# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s schema""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/setup.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/setup.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,206 @@ +#!/usr/bin/env python +# pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611 +# +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with CubicWeb. If not, see . +"""Generic Setup script, takes package info from __pkginfo__.py file +""" +__docformat__ = "restructuredtext en" + +import os +import sys +import shutil +from os.path import exists, join, walk + +try: + if os.environ.get('NO_SETUPTOOLS'): + raise ImportError() # do as there is no setuptools + from setuptools import setup + from setuptools.command import install_lib + USE_SETUPTOOLS = True +except ImportError: + from distutils.core import setup + from distutils.command import install_lib + USE_SETUPTOOLS = False +from distutils.command import install_data + +# import required features +from __pkginfo__ import modname, version, license, description, web, \ + author, author_email, classifiers + +if exists('README'): + long_description = open('README').read() +else: + long_description = '' + +# import optional features +import __pkginfo__ +if USE_SETUPTOOLS: + requires = {} + for entry in ("__depends__",): # "__recommends__"): + requires.update(getattr(__pkginfo__, entry, {})) + install_requires = [("%s %s" % (d, v and v or "")).strip() + for d, v in requires.items()] +else: + install_requires = [] + +distname = getattr(__pkginfo__, 'distname', modname) +scripts = getattr(__pkginfo__, 'scripts', ()) +include_dirs = getattr(__pkginfo__, 'include_dirs', ()) +data_files = getattr(__pkginfo__, 'data_files', None) +ext_modules = getattr(__pkginfo__, 'ext_modules', None) +dependency_links = getattr(__pkginfo__, 'dependency_links', ()) + +BASE_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') +IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~') + + +def ensure_scripts(linux_scripts): + """ + Creates the proper script names required for each platform + (taken from 4Suite) + """ + from distutils import util + if util.get_platform()[:3] == 'win': + scripts_ = [script + '.bat' for script in linux_scripts] + else: + scripts_ = linux_scripts + return scripts_ + + +def export(from_dir, to_dir, + blacklist=BASE_BLACKLIST, + ignore_ext=IGNORED_EXTENSIONS, + verbose=True): + """make a mirror of from_dir in to_dir, omitting directories and files + listed in the black list + """ + def make_mirror(arg, directory, fnames): + """walk handler""" + for norecurs in blacklist: + try: + fnames.remove(norecurs) + except ValueError: + pass + for filename in fnames: + # don't include binary files + if filename[-4:] in ignore_ext: 
+ continue + if filename[-1] == '~': + continue + src = join(directory, filename) + dest = to_dir + src[len(from_dir):] + if verbose: + sys.stderr.write('%s -> %s\n' % (src, dest)) + if os.path.isdir(src): + if not exists(dest): + os.mkdir(dest) + else: + if exists(dest): + os.remove(dest) + shutil.copy2(src, dest) + try: + os.mkdir(to_dir) + except OSError as ex: + # file exists ? + import errno + if ex.errno != errno.EEXIST: + raise + walk(from_dir, make_mirror, None) + + +class MyInstallLib(install_lib.install_lib): + """extend install_lib command to handle package __init__.py and + include_dirs variable if necessary + """ + def run(self): + """overridden from install_lib class""" + install_lib.install_lib.run(self) + # manually install included directories if any + if include_dirs: + base = modname + for directory in include_dirs: + dest = join(self.install_dir, base, directory) + export(directory, dest, verbose=False) + +# re-enable copying data files in sys.prefix +old_install_data = install_data.install_data +if USE_SETUPTOOLS: + # overwrite InstallData to use sys.prefix instead of the egg directory + class MyInstallData(old_install_data): + """A class that manages data files installation""" + def run(self): + _old_install_dir = self.install_dir + if self.install_dir.endswith('egg'): + self.install_dir = sys.prefix + old_install_data.run(self) + self.install_dir = _old_install_dir + try: + # only if easy_install available + import setuptools.command.easy_install # noqa + # monkey patch: Crack SandboxViolation verification + from setuptools.sandbox import DirectorySandbox as DS + old_ok = DS._ok + + def _ok(self, path): + """Return True if ``path`` can be written during installation.""" + out = old_ok(self, path) # here for side effect from setuptools + realpath = os.path.normcase(os.path.realpath(path)) + allowed_path = os.path.normcase(sys.prefix) + if realpath.startswith(allowed_path): + out = True + return out + DS._ok = _ok + except ImportError: + pass + + +def install(**kwargs): + """setup entry point""" + if USE_SETUPTOOLS: + if '--force-manifest' in sys.argv: + sys.argv.remove('--force-manifest') + # install-layout option was introduced in 2.5.3-1~exp1 + elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv: + sys.argv.remove('--install-layout=deb') + cmdclass = {'install_lib': MyInstallLib} + if USE_SETUPTOOLS: + kwargs['install_requires'] = install_requires + kwargs['dependency_links'] = dependency_links + kwargs['zip_safe'] = False + cmdclass['install_data'] = MyInstallData + + return setup(name=distname, + version=version, + license=license, + description=description, + long_description=long_description, + author=author, + author_email=author_email, + url=web, + scripts=ensure_scripts(scripts), + data_files=data_files, + ext_modules=ext_modules, + cmdclass=cmdclass, + classifiers=classifiers, + **kwargs + ) + + +if __name__ == '__main__': + install() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/sobjects.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/sobjects.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. 
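The skeleton setup.py above still drives its export() helper with os.path.walk, which no longer exists in Python 3. A rough os.walk based equivalent, reusing the same blacklist and ignored extensions (error handling and verbose logging omitted, helper name hypothetical), could look like::

    import os
    import shutil

    BASE_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
    IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~')

    def export_tree(from_dir, to_dir):
        """mirror from_dir into to_dir, skipping blacklisted directories
        and files with ignored extensions"""
        for dirpath, dirnames, filenames in os.walk(from_dir):
            # prune blacklisted directories in place so os.walk skips them
            dirnames[:] = [d for d in dirnames if d not in BASE_BLACKLIST]
            dest_dir = os.path.join(to_dir, os.path.relpath(dirpath, from_dir))
            os.makedirs(dest_dir, exist_ok=True)
            for filename in filenames:
                if filename.endswith(IGNORED_EXTENSIONS):
                    continue
                shutil.copy2(os.path.join(dirpath, filename),
                             os.path.join(dest_dir, filename))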
+# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s repository side views, usually for notification""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/test/data/bootstrap_cubes.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/test/data/bootstrap_cubes.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +%(cubename)s diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/test/pytestconf.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/test/pytestconf.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,61 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with CubicWeb. If not, see . +""" + +""" +import os +import sys + +from logilab.common.pytest import PyTester + + +def getlogin(): + """avoid usinng os.getlogin() because of strange tty / stdin problems + (man 3 getlogin) + Another solution would be to use $LOGNAME, $USER or $USERNAME + """ + if sys.platform == 'win32': + return os.environ.get('USERNAME') or 'cubicweb' + import pwd + return pwd.getpwuid(os.getuid())[0] + + +def update_parser(parser): + login = getlogin() + parser.add_option('-r', '--rebuild-database', dest='rebuild_db', + default=False, action="store_true", + help="remove tmpdb and rebuilds the test database") + parser.add_option('-u', '--dbuser', dest='dbuser', action='store', + default=login, help="database user") + parser.add_option('-w', '--dbpassword', dest='dbpassword', action='store', + default=login, help="database user's password") + parser.add_option('-n', '--dbname', dest='dbname', action='store', + default=None, help="database name") + parser.add_option('--euser', dest='euser', action='store', + default=login, help="euser name") + parser.add_option('--epassword', dest='epassword', action='store', + default=login, help="euser's password' name") + return parser + + +class CustomPyTester(PyTester): + def __init__(self, cvg, options): + super(CustomPyTester, self).__init__(cvg, options) + if options.rebuild_db: + os.unlink('tmpdb') + os.unlink('tmpdb-template') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/test/realdb_test_CUBENAME.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/test/realdb_test_CUBENAME.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,53 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
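As the comment in pytestconf.py's getlogin() above points out, the login can also be taken from the environment; the stdlib already implements that fallback chain, so a minimal alternative (assuming getpass is acceptable here) would be::

    import getpass

    # getpass.getuser() tries LOGNAME, USER, LNAME and USERNAME before falling
    # back to the password database, avoiding os.getlogin()'s tty issues
    login = getpass.getuser()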
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with CubicWeb. If not, see . +""" + +""" +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.realdbtest import buildconfig, loadconfig + + +def setUpModule(options): + if options.source: + configcls = loadconfig(options.source) + elif options.dbname is None: + raise Exception('either or options are required') + else: + configcls = buildconfig(options.dbuser, options.dbpassword, + options.dbname, + options.euser, options.epassword) + RealDatabaseTC.configcls = configcls + + +class RealDatabaseTC(CubicWebTC): + configcls = None # set by setUpModule() + + def test_all_primaries(self): + for rset in self.iter_individual_rsets(limit=50): + yield self.view, 'primary', rset, rset.req.reset_headers() + + ## startup views + def test_startup_views(self): + for vid in self.list_startup_views(): + req = self.request() + yield self.view, vid, None, req + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/test/test_CUBENAME.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/test/test_CUBENAME.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,38 @@ +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s automatic tests + + +uncomment code below if you want to activate automatic test for your cube: + +.. sourcecode:: python + + from cubicweb.devtools.testlib import AutomaticWebTest + + class AutomaticWebTest(AutomaticWebTest): + '''provides `to_test_etypes` and/or `list_startup_views` implementation + to limit test scope + ''' + + def to_test_etypes(self): + '''only test views for entities of the returned types''' + return set(('My', 'Cube', 'Entity', 'Types')) + + def list_startup_views(self): + '''only test startup views of the returned identifiers''' + return ('some', 'startup', 'views') +""" + +from cubicweb.devtools import testlib + + +class DefaultTC(testlib.CubicWebTC): + def test_something(self): + self.skipTest('this cube has no test') + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/uiprops.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/uiprops.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +############################################################################### +# +# Put here information about external resources / styles used by your cube, +# or to overides existing UI properties. +# +# Existing properties are available through the `sheet` dictionary available +# in the global namespace. You also have access to a `data` function which +# will return proper url for resources in the 'data' directory. 
+# +# /!\ this file should not be imported /!\ +############################################################################### + +# CSS stylesheets to include in HTML headers +# uncomment the line below to use template specific stylesheet +# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/skeleton/views.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/skeleton/views.py.tmpl Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s views/forms/actions/components for web ui""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,30 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""server side objects""" + +import os.path as osp + +def registration_callback(vreg): + vreg.register_all(globals().values(), __name__) + global URL_MAPPING + URL_MAPPING = {} + if vreg.config.apphome: + url_mapping_file = osp.join(vreg.config.apphome, 'urlmapping.py') + if osp.exists(url_mapping_file): + URL_MAPPING = eval(open(url_mapping_file).read()) + vreg.info('using url mapping %s from %s', URL_MAPPING, url_mapping_file) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/cwxmlparser.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/cwxmlparser.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,500 @@ +# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""datafeed parser for xml generated by cubicweb + +Example of mapping for CWEntityXMLParser:: + + {u'CWUser': { # EntityType + (u'in_group', u'subject', u'link'): [ # (rtype, role, action) + (u'CWGroup', {u'linkattr': u'name'})], # -> rules = [(EntityType, options), ...] + (u'tags', u'object', u'link-or-create'): [ # (...) + (u'Tag', {u'linkattr': u'name'})], # -> ... 
+ (u'use_email', u'subject', u'copy'): [ # (...) + (u'EmailAddress', {})] # -> ... + } + } + +""" + +from datetime import datetime, time +import urllib + +from six import text_type +from six.moves.urllib.parse import urlparse, urlunparse, parse_qs, urlencode + +import pytz +from logilab.common.date import todate, totime +from logilab.common.textutils import splitstrip, text_to_dict +from logilab.common.decorators import classproperty + +from yams.constraints import BASE_CONVERTERS +from yams.schema import role_name as rn + +from cubicweb import ValidationError, RegistryException +from cubicweb.view import Component +from cubicweb.server.sources import datafeed +from cubicweb.server.hook import match_rtype + +# XXX see cubicweb.cwvreg.YAMS_TO_PY +# XXX see cubicweb.web.views.xmlrss.SERIALIZERS +DEFAULT_CONVERTERS = BASE_CONVERTERS.copy() +DEFAULT_CONVERTERS['String'] = text_type +DEFAULT_CONVERTERS['Password'] = lambda x: x.encode('utf8') +def convert_date(ustr): + return todate(datetime.strptime(ustr, '%Y-%m-%d')) +DEFAULT_CONVERTERS['Date'] = convert_date +def convert_datetime(ustr): + if '.' in ustr: # assume %Y-%m-%d %H:%M:%S.mmmmmm + ustr = ustr.split('.', 1)[0] + return datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S') +DEFAULT_CONVERTERS['Datetime'] = convert_datetime +# XXX handle timezone, though this will be enough as TZDatetime are +# serialized without time zone by default (UTC time). See +# cw.web.views.xmlrss.SERIALIZERS. +def convert_tzdatetime(ustr): + date = convert_datetime(ustr) + date = date.replace(tzinfo=pytz.utc) + return date +DEFAULT_CONVERTERS['TZDatetime'] = convert_tzdatetime +def convert_time(ustr): + return totime(datetime.strptime(ustr, '%H:%M:%S')) +DEFAULT_CONVERTERS['Time'] = convert_time +DEFAULT_CONVERTERS['TZTime'] = convert_time +def convert_interval(ustr): + return time(seconds=int(ustr)) +DEFAULT_CONVERTERS['Interval'] = convert_interval + +def extract_typed_attrs(eschema, stringdict, converters=DEFAULT_CONVERTERS): + typeddict = {} + for rschema in eschema.subject_relations(): + if rschema.final and rschema in stringdict: + if rschema in ('eid', 'cwuri', 'cwtype', 'cwsource'): + continue + attrtype = eschema.destination(rschema) + value = stringdict[rschema] + if value is not None: + value = converters[attrtype](value) + typeddict[rschema.type] = value + return typeddict + +def rtype_role_rql(rtype, role): + if role == 'object': + return 'Y %s X WHERE X eid %%(x)s' % rtype + else: + return 'X %s Y WHERE X eid %%(x)s' % rtype + + +class CWEntityXMLParser(datafeed.DataFeedXMLParser): + """datafeed parser for the 'xml' entity view + + Most of the logic is delegated to the following components: + + * an "item builder" component, turning an etree xml node into a specific + python dictionary representing an entity + + * "action" components, selected given an entity, a relation and its role in + the relation, and responsible to link the entity to given related items + (eg dictionary) + + So the parser is only doing the gluing service and the connection to the + source. 
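The DEFAULT_CONVERTERS defined above turn the string payload of the XML export back into typed Python values. Note that convert_interval as written appears to raise TypeError at runtime, since datetime.time() accepts no 'seconds' keyword; a duration (timedelta) is presumably what is meant. A minimal sketch of that intended behaviour, plus the timezone-aware datetime case::

    from datetime import datetime, timedelta

    import pytz

    def convert_tzdatetime(ustr):
        # TZDatetime values are serialized as naive UTC strings
        return datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.utc)

    def convert_interval(ustr):
        # hypothetical fix: an Interval is a duration, not a wall-clock time
        return timedelta(seconds=int(ustr))

    assert convert_tzdatetime('2016-01-16 13:48:51').tzinfo is pytz.utc
    assert convert_interval('90') == timedelta(minutes=1, seconds=30)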
+ """ + __regid__ = 'cw.entityxml' + + def __init__(self, *args, **kwargs): + super(CWEntityXMLParser, self).__init__(*args, **kwargs) + self._parsed_urls = {} + self._processed_entities = set() + + def select_linker(self, action, rtype, role, entity=None): + try: + return self._cw.vreg['components'].select( + 'cw.entityxml.action.%s' % action, self._cw, entity=entity, + rtype=rtype, role=role, parser=self) + except RegistryException: + raise RegistryException('Unknown action %s' % action) + + def list_actions(self): + reg = self._cw.vreg['components'] + return sorted(clss[0].action for rid, clss in reg.items() + if rid.startswith('cw.entityxml.action.')) + + # mapping handling ######################################################### + + def add_schema_config(self, schemacfg, checkonly=False): + """added CWSourceSchemaConfig, modify mapping accordingly""" + _ = self._cw._ + try: + rtype = schemacfg.schema.rtype.name + except AttributeError: + msg = _("entity and relation types can't be mapped, only attributes " + "or relations") + raise ValidationError(schemacfg.eid, {rn('cw_for_schema', 'subject'): msg}) + if schemacfg.options: + options = text_to_dict(schemacfg.options) + else: + options = {} + try: + role = options.pop('role') + if role not in ('subject', 'object'): + raise KeyError + except KeyError: + msg = _('"role=subject" or "role=object" must be specified in options') + raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg}) + try: + action = options.pop('action') + linker = self.select_linker(action, rtype, role) + linker.check_options(options, schemacfg.eid) + except KeyError: + msg = _('"action" must be specified in options; allowed values are ' + '%s') % ', '.join(self.list_actions()) + raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg}) + except RegistryException: + msg = _('allowed values for "action" are %s') % ', '.join(self.list_actions()) + raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg}) + if not checkonly: + if role == 'subject': + etype = schemacfg.schema.stype.name + ttype = schemacfg.schema.otype.name + else: + etype = schemacfg.schema.otype.name + ttype = schemacfg.schema.stype.name + etyperules = self.source.mapping.setdefault(etype, {}) + etyperules.setdefault((rtype, role, action), []).append( + (ttype, options)) + self.source.mapping_idx[schemacfg.eid] = ( + etype, rtype, role, action, ttype) + + def del_schema_config(self, schemacfg, checkonly=False): + """deleted CWSourceSchemaConfig, modify mapping accordingly""" + etype, rtype, role, action, ttype = self.source.mapping_idx[schemacfg.eid] + rules = self.source.mapping[etype][(rtype, role, action)] + rules = [x for x in rules if not x[0] == ttype] + if not rules: + del self.source.mapping[etype][(rtype, role, action)] + + # import handling ########################################################## + + def process(self, url, raise_on_error=False): + """IDataFeedParser main entry point""" + if url.startswith('http'): # XXX similar loose test as in parse of sources.datafeed + url = self.complete_url(url) + super(CWEntityXMLParser, self).process(url, raise_on_error) + + def parse_etree(self, parent): + for node in list(parent): + builder = self._cw.vreg['components'].select( + 'cw.entityxml.item-builder', self._cw, node=node, + parser=self) + yield builder.build_item() + + def process_item(self, item, rels, raise_on_error=False): + """ + item and rels are what's returned by the item builder `build_item` method: + + * `item` is an {attribute: value} dictionary 
+ * `rels` is for relations and structured as + {role: {relation: [(related item, related rels)...]} + """ + entity = self.extid2entity(item['cwuri'].encode('ascii'), item['cwtype'], + cwsource=item['cwsource'], item=item, + raise_on_error=raise_on_error) + if entity is None: + return None + if entity.eid in self._processed_entities: + return entity + self._processed_entities.add(entity.eid) + if not (self.created_during_pull(entity) or self.updated_during_pull(entity)): + attrs = extract_typed_attrs(entity.e_schema, item) + self.update_if_necessary(entity, attrs) + self.process_relations(entity, rels) + return entity + + def process_relations(self, entity, rels): + etype = entity.cw_etype + for (rtype, role, action), rules in self.source.mapping.get(etype, {}).items(): + try: + related_items = rels[role][rtype] + except KeyError: + self.import_log.record_error('relation %s-%s not found in xml export of %s' + % (rtype, role, etype)) + continue + try: + linker = self.select_linker(action, rtype, role, entity) + except RegistryException: + self.import_log.record_error('no linker for action %s' % action) + else: + linker.link_items(related_items, rules) + + def before_entity_copy(self, entity, sourceparams): + """IDataFeedParser callback""" + attrs = extract_typed_attrs(entity.e_schema, sourceparams['item']) + entity.cw_edited.update(attrs) + + def normalize_url(self, url): + """overridden to add vid=xml if vid is not set in the qs""" + url = super(CWEntityXMLParser, self).normalize_url(url) + purl = urlparse(url) + if purl.scheme in ('http', 'https'): + params = parse_qs(purl.query) + if 'vid' not in params: + params['vid'] = ['xml'] + purl = list(purl) + purl[4] = urlencode(params, doseq=True) + return urlunparse(purl) + return url + + def complete_url(self, url, etype=None, known_relations=None): + """append to the url's query string information about relation that should + be included in the resulting xml, according to source mapping. + + If etype is not specified, try to guess it using the last path part of + the url, i.e. the format used by default in cubicweb to map all entities + of a given type as in 'http://mysite.org/EntityType'. + + If `known_relations` is given, it should be a dictionary of already + known relations, so they don't get queried again. 
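For illustration, a trimmed-down standalone version of that URL completion (plain Python 3 urllib here, no registry or schema lookup, rule tuples hard-coded) behaves like this::

    from urllib.parse import urlparse, urlunparse, parse_qs, urlencode

    def complete_url_sketch(url, mapping_rules):
        """add one relation=<rtype>-<role> parameter per mapping rule, plus vid=xml"""
        purl = urlparse(url)
        params = parse_qs(purl.query)
        params.setdefault('vid', ['xml'])
        relations = set(params.get('relation', ()))
        for rtype, role, _action in mapping_rules:
            relations.add('%s-%s' % (rtype, role))
        params['relation'] = sorted(relations)
        purl = list(purl)
        purl[4] = urlencode(params, doseq=True)
        return urlunparse(purl)

    rules = [('in_group', 'subject', 'link'), ('use_email', 'subject', 'copy')]
    print(complete_url_sketch('http://mysite.org/CWUser', rules))
    # -> http://mysite.org/CWUser?vid=xml&relation=in_group-subject&relation=use_email-subject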
+ """ + purl = urlparse(url) + params = parse_qs(purl.query) + if etype is None: + etype = purl.path.split('/')[-1] + try: + etype = self._cw.vreg.case_insensitive_etypes[etype.lower()] + except KeyError: + return url + relations = params['relation'] = set(params.get('relation', ())) + for rtype, role, _ in self.source.mapping.get(etype, ()): + if known_relations and rtype in known_relations.get('role', ()): + continue + relations.add('%s-%s' % (rtype, role)) + purl = list(purl) + purl[4] = urlencode(params, doseq=True) + return urlunparse(purl) + + def complete_item(self, item, rels): + try: + return self._parsed_urls[item['cwuri']] + except KeyError: + itemurl = self.complete_url(item['cwuri'], item['cwtype'], rels) + item_rels = list(self.parse(itemurl)) + assert len(item_rels) == 1, 'url %s expected to bring back one '\ + 'and only one entity, got %s' % (itemurl, len(item_rels)) + self._parsed_urls[item['cwuri']] = item_rels[0] + if rels: + # XXX (do it better) merge relations + new_rels = item_rels[0][1] + new_rels.get('subject', {}).update(rels.get('subject', {})) + new_rels.get('object', {}).update(rels.get('object', {})) + return item_rels[0] + + +class CWEntityXMLItemBuilder(Component): + __regid__ = 'cw.entityxml.item-builder' + + def __init__(self, _cw, parser, node, **kwargs): + super(CWEntityXMLItemBuilder, self).__init__(_cw, **kwargs) + self.parser = parser + self.node = node + + def build_item(self): + """parse a XML document node and return two dictionaries defining (part + of) an entity: + + - {attribute: value} + - {role: {relation: [(related item, related rels)...]} + """ + node = self.node + item = dict(node.attrib.items()) + item['cwtype'] = text_type(node.tag) + item.setdefault('cwsource', None) + try: + item['eid'] = int(item['eid']) + except KeyError: + # cw < 3.11 compat mode XXX + item['eid'] = int(node.find('eid').text) + item['cwuri'] = node.find('cwuri').text + rels = {} + for child in node: + role = child.get('role') + if role: + # relation + related = rels.setdefault(role, {}).setdefault(child.tag, []) + related += self.parser.parse_etree(child) + elif child.text: + # attribute + item[child.tag] = text_type(child.text) + else: + # None attribute (empty tag) + item[child.tag] = None + return item, rels + + +class CWEntityXMLActionCopy(Component): + """implementation of cubicweb entity xml parser's'copy' action + + Takes no option. 
+ """ + __regid__ = 'cw.entityxml.action.copy' + + def __init__(self, _cw, parser, rtype, role, entity=None, **kwargs): + super(CWEntityXMLActionCopy, self).__init__(_cw, **kwargs) + self.parser = parser + self.rtype = rtype + self.role = role + self.entity = entity + + @classproperty + def action(cls): + return cls.__regid__.rsplit('.', 1)[-1] + + def check_options(self, options, eid): + self._check_no_options(options, eid) + + def _check_no_options(self, options, eid, msg=None): + if options: + if msg is None: + msg = self._cw._("'%s' action doesn't take any options") % self.action + raise ValidationError(eid, {rn('options', 'subject'): msg}) + + def link_items(self, others, rules): + assert not any(x[1] for x in rules), "'copy' action takes no option" + ttypes = frozenset([x[0] for x in rules]) + eids = [] # local eids + for item, rels in others: + if item['cwtype'] in ttypes: + item, rels = self.parser.complete_item(item, rels) + other_entity = self.parser.process_item(item, rels) + if other_entity is not None: + eids.append(other_entity.eid) + if eids: + self._set_relation(eids) + else: + self._clear_relation(ttypes) + + def _clear_relation(self, ttypes): + if not self.parser.created_during_pull(self.entity): + if len(ttypes) > 1: + typerestr = ', Y is IN(%s)' % ','.join(ttypes) + else: + typerestr = ', Y is %s' % ','.join(ttypes) + self._cw.execute('DELETE ' + rtype_role_rql(self.rtype, self.role) + typerestr, + {'x': self.entity.eid}) + + def _set_relation(self, eids): + assert eids + rtype = self.rtype + rqlbase = rtype_role_rql(rtype, self.role) + eidstr = ','.join(str(eid) for eid in eids) + self._cw.execute('DELETE %s, NOT Y eid IN (%s)' % (rqlbase, eidstr), + {'x': self.entity.eid}) + if self.role == 'object': + rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rqlbase, eidstr, rtype) + else: + rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rqlbase, eidstr, rtype) + self._cw.execute(rql, {'x': self.entity.eid}) + + +class CWEntityXMLActionLink(CWEntityXMLActionCopy): + """implementation of cubicweb entity xml parser's'link' action + + requires a 'linkattr' option to control search of the linked entity. 
+ """ + __regid__ = 'cw.entityxml.action.link' + + def check_options(self, options, eid): + if not 'linkattr' in options: + msg = self._cw._("'%s' action requires 'linkattr' option") % self.action + raise ValidationError(eid, {rn('options', 'subject'): msg}) + + create_when_not_found = False + + def link_items(self, others, rules): + for ttype, options in rules: + searchattrs = splitstrip(options.get('linkattr', '')) + self._related_link(ttype, others, searchattrs) + + def _related_link(self, ttype, others, searchattrs): + def issubset(x, y): + return all(z in y for z in x) + eids = [] # local eids + log = self.parser.import_log + for item, rels in others: + if item['cwtype'] != ttype: + continue + if not issubset(searchattrs, item): + item, rels = self.parser.complete_item(item, rels) + if not issubset(searchattrs, item): + log.record_error('missing attribute, got %s expected keys %s' + % (item, searchattrs)) + continue + # XXX str() needed with python < 2.6 + kwargs = dict((str(attr), item[attr]) for attr in searchattrs) + targets = self._find_entities(item, kwargs) + if len(targets) == 1: + entity = targets[0] + elif not targets and self.create_when_not_found: + entity = self._cw.create_entity(item['cwtype'], **kwargs) + else: + if len(targets) > 1: + log.record_error('ambiguous link: found %s entity %s with attributes %s' + % (len(targets), item['cwtype'], kwargs)) + else: + log.record_error('can not find %s entity with attributes %s' + % (item['cwtype'], kwargs)) + continue + eids.append(entity.eid) + self.parser.process_relations(entity, rels) + if eids: + self._set_relation(eids) + else: + self._clear_relation((ttype,)) + + def _find_entities(self, item, kwargs): + return tuple(self._cw.find(item['cwtype'], **kwargs).entities()) + + +class CWEntityXMLActionLinkInState(CWEntityXMLActionLink): + """custom implementation of cubicweb entity xml parser's'link' action for + in_state relation + """ + __select__ = match_rtype('in_state') + + def check_options(self, options, eid): + super(CWEntityXMLActionLinkInState, self).check_options(options, eid) + if not 'name' in options['linkattr']: + msg = self._cw._("'%s' action for in_state relation should at least have 'linkattr=name' option") % self.action + raise ValidationError(eid, {rn('options', 'subject'): msg}) + + def _find_entities(self, item, kwargs): + assert 'name' in item # XXX else, complete_item + state_name = item['name'] + wf = self.entity.cw_adapt_to('IWorkflowable').current_workflow + state = wf.state_by_name(state_name) + if state is None: + return () + return (state,) + + +class CWEntityXMLActionLinkOrCreate(CWEntityXMLActionLink): + """implementation of cubicweb entity xml parser's'link-or-create' action + + requires a 'linkattr' option to control search of the linked entity. + """ + __regid__ = 'cw.entityxml.action.link-or-create' + create_when_not_found = True diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/ldapparser.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/ldapparser.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,255 @@ +# copyright 2011-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb ldap feed source + +unlike ldapuser source, this source is copy based and will import ldap content +(beside passwords for authentication) into the system source. +""" +from six.moves import map, filter + +from logilab.common.decorators import cached, cachedproperty +from logilab.common.shellutils import generate_password + +from cubicweb import Binary, ConfigurationError +from cubicweb.server.utils import crypt_password +from cubicweb.server.sources import datafeed +from cubicweb.dataimport import stores, importer + + +class UserMetaGenerator(stores.MetaGenerator): + """Specific metadata generator, used to see newly created user into their initial state. + """ + @cached + def base_etype_dicts(self, entity): + entity, rels = super(UserMetaGenerator, self).base_etype_dicts(entity) + if entity.cw_etype == 'CWUser': + wf_state = self._cnx.execute('Any S WHERE ET default_workflow WF, ET name %(etype)s, ' + 'WF initial_state S', {'etype': entity.cw_etype}).one() + rels['in_state'] = wf_state.eid + return entity, rels + + +class DataFeedLDAPAdapter(datafeed.DataFeedParser): + __regid__ = 'ldapfeed' + # attributes that may appears in source user_attrs dict which are not + # attributes of the cw user + non_attribute_keys = set(('email', 'eid', 'member', 'modification_date')) + + @cachedproperty + def searchfilterstr(self): + """ ldap search string, including user-filter """ + return '(&%s)' % ''.join(self.source.base_filters) + + @cachedproperty + def searchgroupfilterstr(self): + """ ldap search string, including user-filter """ + return '(&%s)' % ''.join(self.source.group_base_filters) + + @cachedproperty + def user_source_entities_by_extid(self): + source = self.source + if source.user_base_dn.strip(): + attrs = list(map(str, source.user_attrs.keys())) + return dict((userdict['dn'].encode('ascii'), userdict) + for userdict in source._search(self._cw, + source.user_base_dn, + source.user_base_scope, + self.searchfilterstr, + attrs)) + return {} + + @cachedproperty + def group_source_entities_by_extid(self): + source = self.source + if source.group_base_dn.strip(): + attrs = list(map(str, ['modifyTimestamp'] + list(source.group_attrs.keys()))) + return dict((groupdict['dn'].encode('ascii'), groupdict) + for groupdict in source._search(self._cw, + source.group_base_dn, + source.group_base_scope, + self.searchgroupfilterstr, + attrs)) + return {} + + def process(self, url, raise_on_error=False): + """IDataFeedParser main entry point""" + self.debug('processing ldapfeed source %s %s', self.source, self.searchfilterstr) + self._group_members = {} + eeimporter = self.build_importer(raise_on_error) + for name in self.source.user_default_groups: + geid = self._get_group(name) + eeimporter.extid2eid[geid] = geid + entities = self.extentities_generator() + set_cwuri = importer.use_extid_as_cwuri(eeimporter.extid2eid) + eeimporter.import_entities(set_cwuri(entities)) + self.stats['created'] = eeimporter.created + self.stats['updated'] = eeimporter.updated + # handle in_group relation + for group, members in self._group_members.items(): + self._cw.execute('DELETE U in_group G WHERE G name %(g)s', {'g': group}) + if members: + members 
= ["'%s'" % e for e in members] + rql = 'SET U in_group G WHERE G name %%(g)s, U login IN (%s)' % ','.join(members) + self._cw.execute(rql, {'g': group}) + # ensure updated users are activated + for eid in eeimporter.updated: + entity = self._cw.entity_from_eid(eid) + if entity.cw_etype == 'CWUser': + self.ensure_activated(entity) + # manually set primary email if necessary, it's not handled automatically since hooks are + # deactivated + self._cw.execute('SET X primary_email E WHERE NOT X primary_email E, X use_email E, ' + 'X cw_source S, S eid %(s)s, X in_state ST, TS name "activated"', + {'s': self.source.eid}) + + def build_importer(self, raise_on_error): + """Instantiate and configure an importer""" + etypes = ('CWUser', 'EmailAddress', 'CWGroup') + extid2eid = dict((self.source.decode_extid(x), y) for x, y in + self._cw.system_sql('select extid, eid from entities where asource = %(s)s', {'s': self.source.uri})) + existing_relations = {} + for rtype in ('in_group', 'use_email', 'owned_by'): + rql = 'Any S,O WHERE S {} O, S cw_source SO, SO eid %(s)s'.format(rtype) + rset = self._cw.execute(rql, {'s': self.source.eid}) + existing_relations[rtype] = set(tuple(x) for x in rset) + return importer.ExtEntitiesImporter(self._cw.vreg.schema, self.build_store(), + extid2eid=extid2eid, + existing_relations=existing_relations, + etypes_order_hint=etypes, + import_log=self.import_log, + raise_on_error=raise_on_error) + + def build_store(self): + """Instantiate and configure a store""" + metagenerator = UserMetaGenerator(self._cw, source=self.source) + return stores.NoHookRQLObjectStore(self._cw, metagenerator) + + def extentities_generator(self): + self.debug('processing ldapfeed source %s %s', self.source, self.searchgroupfilterstr) + # generate users and email addresses + for userdict in self.user_source_entities_by_extid.values(): + attrs = self.ldap2cwattrs(userdict, 'CWUser') + pwd = attrs.get('upassword') + if not pwd: + # generate a dumb password if not fetched from ldap (see + # userPassword) + pwd = crypt_password(generate_password()) + attrs['upassword'] = set([Binary(pwd)]) + extuser = importer.ExtEntity('CWUser', userdict['dn'].encode('ascii'), attrs) + extuser.values['owned_by'] = set([extuser.extid]) + for extemail in self._process_email(extuser, userdict): + yield extemail + groups = list(filter(None, [self._get_group(name) + for name in self.source.user_default_groups])) + if groups: + extuser.values['in_group'] = groups + yield extuser + # generate groups + for groupdict in self.group_source_entities_by_extid.values(): + attrs = self.ldap2cwattrs(groupdict, 'CWGroup') + extgroup = importer.ExtEntity('CWGroup', groupdict['dn'].encode('ascii'), attrs) + yield extgroup + # record group membership for later insertion + members = groupdict.get(self.source.group_rev_attrs['member'], ()) + self._group_members[attrs['name']] = members + + def _process_email(self, extuser, userdict): + try: + emailaddrs = userdict.pop(self.source.user_rev_attrs['email']) + except KeyError: + return # no email for that user, nothing to do + if not isinstance(emailaddrs, list): + emailaddrs = [emailaddrs] + for emailaddr in emailaddrs: + # search for existing email first, may be coming from another source + rset = self._cw.execute('EmailAddress X WHERE X address %(addr)s', + {'addr': emailaddr}) + emailextid = (userdict['dn'] + '@@' + emailaddr).encode('ascii') + if not rset: + # not found, create it. 
first forge an external id + extuser.values.setdefault('use_email', []).append(emailextid) + yield importer.ExtEntity('EmailAddress', emailextid, dict(address=[emailaddr])) + elif self.sourceuris: + # pop from sourceuris anyway, else email may be removed by the + # source once import is finished + self.sourceuris.pop(emailextid, None) + # XXX else check use_email relation? + + def handle_deletion(self, config, cnx, myuris): + if config['delete-entities']: + super(DataFeedLDAPAdapter, self).handle_deletion(config, cnx, myuris) + return + if myuris: + for extid, (eid, etype) in myuris.items(): + if etype != 'CWUser' or not self.is_deleted(extid, etype, eid): + continue + self.info('deactivate user %s', eid) + wf = cnx.entity_from_eid(eid).cw_adapt_to('IWorkflowable') + wf.fire_transition_if_possible('deactivate') + cnx.commit() + + def ensure_activated(self, entity): + if entity.cw_etype == 'CWUser': + wf = entity.cw_adapt_to('IWorkflowable') + if wf.state == 'deactivated': + wf.fire_transition('activate') + self.info('user %s reactivated', entity.login) + + def ldap2cwattrs(self, sdict, etype): + """Transform dictionary of LDAP attributes to CW. + + etype must be CWUser or CWGroup + """ + assert etype in ('CWUser', 'CWGroup'), etype + tdict = {} + if etype == 'CWUser': + items = self.source.user_attrs.items() + elif etype == 'CWGroup': + items = self.source.group_attrs.items() + for sattr, tattr in items: + if tattr not in self.non_attribute_keys: + try: + value = sdict[sattr] + except KeyError: + raise ConfigurationError( + 'source attribute %s has not been found in the source, ' + 'please check the %s-attrs-map field and the permissions of ' + 'the LDAP binding user' % (sattr, etype[2:].lower())) + if not isinstance(value, list): + value = [value] + tdict[tattr] = value + return tdict + + def is_deleted(self, extidplus, etype, eid): + try: + extid = extidplus.rsplit(b'@@', 1)[0] + except ValueError: + # for some reason extids here tend to come in both forms, e.g: + # dn, dn@@Babar + extid = extidplus + return extid not in self.user_source_entities_by_extid + + @cached + def _get_group(self, name): + try: + return self._cw.execute('Any X WHERE X is CWGroup, X name %(name)s', + {'name': name})[0][0] + except IndexError: + self.error('group %r referenced by source configuration %r does not exist', + name, self.source.uri) + return None diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/notification.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/notification.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,322 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""some views to handle notification on data changes""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from itertools import repeat + +from six import text_type + +from logilab.common.textutils import normalize_text +from logilab.common.deprecation import class_renamed, class_moved, deprecated +from logilab.common.registry import yes + +from cubicweb.entity import Entity +from cubicweb.view import Component, EntityView +from cubicweb.server.hook import SendMailOp +from cubicweb.mail import construct_message_id, format_mail +from cubicweb.server.session import Session, InternalManager + + +class RecipientsFinder(Component): + """this component is responsible to find recipients of a notification + + by default user's with their email set are notified if any, else the default + email addresses specified in the configuration are used + """ + __regid__ = 'recipients_finder' + __select__ = yes() + user_rql = ('Any X,E,A WHERE X is CWUser, X in_state S, S name "activated",' + 'X primary_email E, E address A') + + def recipients(self): + mode = self._cw.vreg.config['default-recipients-mode'] + if mode == 'users': + execute = self._cw.execute + dests = [(u.cw_adapt_to('IEmailable').get_email(), + u.property_value('ui.language')) + for u in execute(self.user_rql, build_descr=True).entities()] + elif mode == 'default-dest-addrs': + lang = self._cw.vreg.property_value('ui.language') + dests = zip(self._cw.vreg.config['default-dest-addrs'], repeat(lang)) + else: # mode == 'none' + dests = [] + return dests + + +# abstract or deactivated notification views and mixin ######################## + + +class SkipEmail(Exception): + """raise this if you decide to skip an email during its generation""" + + +class NotificationView(EntityView): + """abstract view implementing the "email" API (eg to simplify sending + notification) + """ + # XXX refactor this class to work with len(rset) > 1 + + msgid_timestamp = True + + # to be defined on concrete sub-classes + content = None # body of the mail + message = None # action verb of the subject + + # this is usually the method to call + def render_and_send(self, **kwargs): + """generate and send email messages for this view""" + # render_emails changes self._cw so cache it here so all mails are sent + # after we commit our transaction. 
+ cnx = self._cw + for msg, recipients in self.render_emails(**kwargs): + SendMailOp(cnx, recipients=recipients, msg=msg) + + def cell_call(self, row, col=0, **kwargs): + self.w(self._cw._(self.content) % self.context(**kwargs)) + + def render_emails(self, **kwargs): + """generate and send emails for this view (one per recipient)""" + self._kwargs = kwargs + recipients = self.recipients() + if not recipients: + self.info('skipping %s notification, no recipients', self.__regid__) + return + if self.cw_rset is not None: + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + # if the view is using timestamp in message ids, no way to reference + # previous email + if not self.msgid_timestamp: + refs = [self.construct_message_id(eid) + for eid in entity.cw_adapt_to('INotifiable').notification_references(self)] + else: + refs = () + msgid = self.construct_message_id(entity.eid) + else: + refs = () + msgid = None + req = self._cw + self.user_data = req.user_data() + for something in recipients: + if isinstance(something, tuple): + emailaddr, lang = something + user = InternalManager(lang=lang) + else: + emailaddr = something.cw_adapt_to('IEmailable').get_email() + user = something + # hi-jack self._cw to get a session for the returned user + session = Session(user, self._cw.repo) + with session.new_cnx() as cnx: + self._cw = cnx + try: + # since the same view (eg self) may be called multiple time and we + # need a fresh stream at each iteration, reset it explicitly + self.w = None + try: + # XXX forcing the row & col here may make the content and + # subject inconsistent because subject will depend on + # self.cw_row & self.cw_col if they are set. + content = self.render(row=0, col=0, **kwargs) + subject = self.subject() + except SkipEmail: + continue + except Exception as ex: + # shouldn't make the whole transaction fail because of rendering + # error (unauthorized or such) XXX check it doesn't actually + # occurs due to rollback on such error + self.exception(str(ex)) + continue + msg = format_mail(self.user_data, [emailaddr], content, subject, + config=self._cw.vreg.config, msgid=msgid, references=refs) + yield msg, [emailaddr] + finally: + self._cw = req + + # recipients handling ###################################################### + + def recipients(self): + """return a list of either 2-uple (email, language) or user entity to + whom this email should be sent + """ + finder = self._cw.vreg['components'].select( + 'recipients_finder', self._cw, rset=self.cw_rset, + row=self.cw_row or 0, col=self.cw_col or 0) + return finder.recipients() + + # email generation helpers ################################################# + + def construct_message_id(self, eid): + return construct_message_id(self._cw.vreg.config.appid, eid, + self.msgid_timestamp) + + def format_field(self, attr, value): + return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} + + def format_section(self, attr, value): + return '%(attr)s\n%(ul)s\n%(value)s\n' % { + 'attr': attr, 'ul': '-'*len(attr), 'value': value} + + def subject(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + subject = self._cw._(self.message) + etype = entity.dc_type() + eid = entity.eid + login = self.user_data['login'] + return self._cw._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals() + + def context(self, **kwargs): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + for key, val in kwargs.items(): + if val and isinstance(val, text_type) and val.strip(): + kwargs[key] = 
self._cw._(val) + kwargs.update({'user': self.user_data['login'], + 'eid': entity.eid, + 'etype': entity.dc_type(), + 'url': entity.absolute_url(__secure__=True), + 'title': entity.dc_long_title(),}) + return kwargs + + +class StatusChangeMixIn(object): + __regid__ = 'notif_status_change' + msgid_timestamp = True + message = _('status changed') + content = _(""" +%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for entity +'%(title)s' + +%(comment)s + +url: %(url)s +""") + + +############################################################################### +# Actual notification views. # +# # +# disable them at the recipients_finder level if you don't want them # +############################################################################### + +# XXX should be based on dc_title/dc_description, no? + +class ContentAddedView(NotificationView): + """abstract class for notification on entity/relation + + all you have to do by default is : + * set id and __select__ attributes to match desired events and entity types + * set a content attribute to define the content of the email (unless you + override call) + """ + __abstract__ = True + __regid__ = 'notif_after_add_entity' + msgid_timestamp = False + message = _('new') + content = """ +%(title)s + +%(content)s + +url: %(url)s +""" + # to be defined on concrete sub-classes + content_attr = None + + def context(self, **kwargs): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + content = entity.printable_value(self.content_attr, format='text/plain') + if content: + contentformat = getattr(entity, self.content_attr + '_format', + 'text/rest') + # XXX don't try to wrap rest until we've a proper transformation (see + # #103822) + if contentformat != 'text/rest': + content = normalize_text(content, 80) + return super(ContentAddedView, self).context(content=content, **kwargs) + + def subject(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return u'%s #%s (%s)' % (self._cw.__('New %s' % entity.e_schema), + entity.eid, self.user_data['login']) + + +def format_value(value): + if isinstance(value, text_type): + return u'"%s"' % value + return value + + +class EntityUpdatedNotificationView(NotificationView): + """abstract class for notification on entity/relation + + all you have to do by default is : + * set id and __select__ attributes to match desired events and entity types + * set a content attribute to define the content of the email (unless you + override call) + """ + __abstract__ = True + __regid__ = 'notif_entity_updated' + msgid_timestamp = True + message = _('updated') + no_detailed_change_attrs = () + content = """ +Properties have been updated by %(user)s: + +%(changes)s + +url: %(url)s +""" + + def context(self, changes=(), **kwargs): + context = super(EntityUpdatedNotificationView, self).context(**kwargs) + _ = self._cw._ + formatted_changes = [] + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + for attr, oldvalue, newvalue in sorted(changes): + # check current user has permission to see the attribute + rschema = self._cw.vreg.schema[attr] + if rschema.final: + rdef = entity.e_schema.rdef(rschema) + if not rdef.has_perm(self._cw, 'read', eid=self.cw_rset[0][0]): + continue + # XXX suppose it's a subject relation... 
+ elif not rschema.has_perm(self._cw, 'read', + fromeid=self.cw_rset[0][0]): + continue + if attr in self.no_detailed_change_attrs: + msg = _('%s updated') % _(attr) + elif oldvalue not in (None, ''): + msg = _('%(attr)s updated from %(oldvalue)s to %(newvalue)s') % { + 'attr': _(attr), + 'oldvalue': format_value(oldvalue), + 'newvalue': format_value(newvalue)} + else: + msg = _('%(attr)s set to %(newvalue)s') % { + 'attr': _(attr), 'newvalue': format_value(newvalue)} + formatted_changes.append('* ' + msg) + if not formatted_changes: + # current user isn't allowed to see changes, skip this notification + raise SkipEmail() + context['changes'] = '\n'.join(formatted_changes) + return context + + def subject(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return u'%s #%s (%s)' % (self._cw.__('Updated %s' % entity.e_schema), + entity.eid, self.user_data['login']) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/services.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/services.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,174 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Define server side service provided by cubicweb""" + +import threading + +from six import text_type + +from yams.schema import role_name + +from cubicweb import ValidationError +from cubicweb.server import Service +from cubicweb.predicates import match_user_groups, match_kwargs + +class StatsService(Service): + """Return a dictionary containing some statistics about the repository + resources usage. + """ + + __regid__ = 'repo_stats' + __select__ = match_user_groups('managers', 'users') + + def call(self): + repo = self._cw.repo # Service are repo side only. 
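Like any Service, repo_stats is reached through call_service on a connection; for instance (a sketch, assuming a repository-side internal connection and the key names filled in by the method below)::

    with repo.internal_cnx() as cnx:
        stats = cnx.call_service('repo_stats')
        print(stats['nb_open_sessions'], stats['sql_cache_hit_percent'])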
+ results = {} + querier = repo.querier + source = repo.system_source + for size, maxsize, hits, misses, title in ( + (len(querier._rql_cache), repo.config['rql-cache-size'], + querier.cache_hit, querier.cache_miss, 'rqlt_st'), + (len(source._cache), repo.config['rql-cache-size'], + source.cache_hit, source.cache_miss, 'sql'), + ): + results['%s_cache_size' % title] = {'size': size, 'maxsize': maxsize} + results['%s_cache_hit' % title] = hits + results['%s_cache_miss' % title] = misses + results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses) + results['type_source_cache_size'] = len(repo._type_source_cache) + results['extid_cache_size'] = len(repo._extid_cache) + results['sql_no_cache'] = repo.system_source.no_cache + results['nb_open_sessions'] = len(repo._sessions) + results['nb_active_threads'] = threading.activeCount() + looping_tasks = repo._tasks_manager._looping_tasks + results['looping_tasks'] = [(t.name, t.interval) for t in looping_tasks] + results['available_cnxsets'] = repo._cnxsets_pool.qsize() + results['threads'] = [t.name for t in threading.enumerate()] + return results + + +class GcStatsService(Service): + """Return a dictionary containing some statistics about the repository + resources usage. + """ + + __regid__ = 'repo_gc_stats' + __select__ = match_user_groups('managers') + + def call(self, nmax=20): + """Return a dictionary containing some statistics about the repository + memory usage. + + This is a public method, not requiring a session id. + + nmax is the max number of (most) referenced object returned as + the 'referenced' result + """ + + from cubicweb._gcdebug import gc_info + from cubicweb.appobject import AppObject + from cubicweb.rset import ResultSet + from cubicweb.web.request import CubicWebRequestBase + from rql.stmts import Union + + lookupclasses = (AppObject, + Union, ResultSet, + CubicWebRequestBase) + try: + from cubicweb.server.session import Session, InternalSession + lookupclasses += (InternalSession, Session) + except ImportError: + pass # no server part installed + + results = {} + counters, ocounters, garbage = gc_info(lookupclasses, + viewreferrersclasses=()) + values = sorted(counters.items(), key=lambda x: x[1], reverse=True) + results['lookupclasses'] = values + values = sorted(ocounters.items(), key=lambda x: x[1], reverse=True)[:nmax] + results['referenced'] = values + results['unreachable'] = garbage + return results + + +class RegisterUserService(Service): + """check if a user with the given login exists, if not create it with the + given password. This service is designed to be used for anonymous + registration on public web sites. 
+ + To use it, do: + with self.appli.repo.internal_cnx() as cnx: + cnx.call_service('register_user', + login=login, + password=password, + **cwuserkwargs) + """ + __regid__ = 'register_user' + __select__ = Service.__select__ & match_kwargs('login', 'password') + default_groups = ('users',) + + def call(self, login, password, email=None, groups=None, **cwuserkwargs): + cnx = self._cw + errmsg = cnx._('the value "%s" is already used, use another one') + + if (cnx.execute('CWUser X WHERE X login %(login)s', {'login': login}, + build_descr=False) + or cnx.execute('CWUser X WHERE X use_email C, C address %(login)s', + {'login': login}, build_descr=False)): + qname = role_name('login', 'subject') + raise ValidationError(None, {qname: errmsg % login}) + + if isinstance(password, text_type): + # password should *always* be utf8 encoded + password = password.encode('UTF8') + cwuserkwargs['login'] = login + cwuserkwargs['upassword'] = password + # we have to create the user + user = cnx.create_entity('CWUser', **cwuserkwargs) + if groups is None: + groups = self.default_groups + assert groups, "CWUsers must belong to at least one CWGroup" + group_names = ', '.join('%r' % group for group in groups) + cnx.execute('SET X in_group G WHERE X eid %%(x)s, G name IN (%s)' % group_names, + {'x': user.eid}) + + if email or '@' in login: + d = {'login': login, 'email': email or login} + if cnx.execute('EmailAddress X WHERE X address %(email)s', d, + build_descr=False): + qname = role_name('address', 'subject') + raise ValidationError(None, {qname: errmsg % d['email']}) + cnx.execute('INSERT EmailAddress X: X address %(email)s, ' + 'U primary_email X, U use_email X ' + 'WHERE U login %(login)s', d, build_descr=False) + + return user + + +class SourceSynchronizationService(Service): + """Force synchronization of a datafeed source""" + __regid__ = 'source-sync' + __select__ = Service.__select__ & match_user_groups('managers') + + def call(self, source_eid): + source_entity = self._cw.entity_from_eid(source_eid) + repo = self._cw.repo # Service are repo side only. + with repo.internal_cnx() as cnx: + source = repo.sources_by_uri[source_entity.name] + source.pull_data(cnx) + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/supervising.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/supervising.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,191 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
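The same call_service mechanism is how a datafeed synchronisation gets triggered programmatically; roughly (a sketch, 'source' being the CWSource entity to pull)::

    with repo.internal_cnx() as cnx:
        cnx.call_service('source-sync', source_eid=source.eid)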
+"""some hooks and views to handle supervising of any data changes""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from cubicweb import UnknownEid +from cubicweb.predicates import none_rset +from cubicweb.schema import display_name +from cubicweb.view import Component +from cubicweb.mail import format_mail +from cubicweb.server.hook import SendMailOp + + +def filter_changes(changes): + """ + * when an entity has been deleted: + * don't show deletion of its relations + * don't show related TrInfo deletion if any + * when an entity has been added don't show owned_by relation addition + * don't show new TrInfo entities if any + """ + # first build an index of changes + index = {} + added, deleted = set(), set() + for change in changes[:]: + event, changedescr = change + if event == 'add_entity': + entity = changedescr.entity + added.add(entity.eid) + if entity.e_schema == 'TrInfo': + changes.remove(change) + event = 'change_state' + change = (event, + (entity.wf_info_for[0], + entity.from_state[0], entity.to_state[0])) + changes.append(change) + elif event == 'delete_entity': + deleted.add(changedescr[0]) + index.setdefault(event, set()).add(change) + for key in ('delete_relation', 'add_relation'): + for change in index.get(key, {}).copy(): + if change[1].rtype == 'in_state': + index[key].remove(change) + # filter changes + for eid in added: + try: + for change in index['add_relation'].copy(): + changedescr = change[1] + # skip meta-relations which are set automatically + # XXX generate list below using rtags (category = 'generated') + if changedescr.rtype in ('created_by', 'owned_by', 'is', 'is_instance_of', + 'from_state', 'to_state', 'by_transition', + 'wf_info_for') \ + and changedescr.eidfrom == eid: + index['add_relation'].remove(change) + except KeyError: + break + for eid in deleted: + try: + for change in index['delete_relation'].copy(): + if change[1].eidfrom == eid: + index['delete_relation'].remove(change) + elif change[1].eidto == eid: + index['delete_relation'].remove(change) + if change[1].rtype == 'wf_info_for': + for change_ in index['delete_entity'].copy(): + if change_[1].eidfrom == change[1].eidfrom: + index['delete_entity'].remove(change_) + except KeyError: + break + for change in changes: + event, changedescr = change + if change in index[event]: + yield change + + +class SupervisionEmailView(Component): + """view implementing the email API for data changes supervision notification + """ + __regid__ = 'supervision_notif' + __select__ = none_rset() + + def recipients(self): + return self._cw.vreg.config['supervising-addrs'] + + def subject(self): + return self._cw._('[%s supervision] changes summary') % self._cw.vreg.config.appid + + def call(self, changes): + user = self._cw.user + self.w(self._cw._('user %s has made the following change(s):\n\n') + % user.login) + for event, changedescr in filter_changes(changes): + self.w(u'* ') + getattr(self, event)(changedescr) + self.w(u'\n\n') + + def _entity_context(self, entity): + return {'eid': entity.eid, + 'etype': entity.dc_type().lower(), + 'title': entity.dc_title()} + + def add_entity(self, changedescr): + msg = self._cw._('added %(etype)s #%(eid)s (%(title)s)') + self.w(u'%s\n' % (msg % self._entity_context(changedescr.entity))) + self.w(u' %s' % changedescr.entity.absolute_url()) + + def update_entity(self, changedescr): + msg = self._cw._('updated %(etype)s #%(eid)s (%(title)s)') + self.w(u'%s\n' % (msg % self._entity_context(changedescr.entity))) + # XXX print changes + self.w(u' %s' % 
changedescr.entity.absolute_url()) + + def delete_entity(self, args): + eid, etype, title = args + msg = self._cw._('deleted %(etype)s #%(eid)s (%(title)s)') + etype = display_name(self._cw, etype).lower() + self.w(msg % locals()) + + def change_state(self, args): + _ = self._cw._ + entity, fromstate, tostate = args + msg = _('changed state of %(etype)s #%(eid)s (%(title)s)') + self.w(u'%s\n' % (msg % self._entity_context(entity))) + self.w(_(' from state %(fromstate)s to state %(tostate)s\n' % + {'fromstate': _(fromstate.name), 'tostate': _(tostate.name)})) + self.w(u' %s' % entity.absolute_url()) + + def _relation_context(self, changedescr): + cnx = self._cw + def describe(eid): + try: + return cnx._(cnx.entity_metas(eid)['type']).lower() + except UnknownEid: + # may occurs when an entity has been deleted from an external + # source and we're cleaning its relation + return cnx._('unknown external entity') + eidfrom, rtype, eidto = changedescr.eidfrom, changedescr.rtype, changedescr.eidto + return {'rtype': cnx._(rtype), + 'eidfrom': eidfrom, + 'frometype': describe(eidfrom), + 'eidto': eidto, + 'toetype': describe(eidto)} + + def add_relation(self, changedescr): + msg = self._cw._('added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%(eidto)s') + self.w(msg % self._relation_context(changedescr)) + + def delete_relation(self, changedescr): + msg = self._cw._('deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%(eidto)s') + self.w(msg % self._relation_context(changedescr)) + + +class SupervisionMailOp(SendMailOp): + """special send email operation which should be done only once for a bunch + of changes + """ + def _get_view(self): + return self.cnx.vreg['components'].select('supervision_notif', self.cnx) + + def _prepare_email(self): + cnx = self.cnx + config = cnx.vreg.config + uinfo = {'email': config['sender-addr'], + 'name': config['sender-name']} + view = self._get_view() + content = view.render(changes=cnx.transaction_data.get('pendingchanges')) + recipients = view.recipients() + msg = format_mail(uinfo, recipients, content, view.subject(), config=config) + self.to_send = [(msg, recipients)] + + def postcommit_event(self): + self._prepare_email() + SendMailOp.postcommit_event(self) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/data/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/data/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +card,comment diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/data/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,28 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
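The supervision view and operation above are driven by configuration only; a sketch of enabling them from repository-side or test code, with an illustrative address (in a deployed instance the same option normally lives in the instance configuration file):

    # the option is read by SupervisionEmailView.recipients(); once it is
    # non-empty, changes recorded under the 'pendingchanges' transaction data
    # are rendered through the 'supervision_notif' component and mailed by a
    # single SupervisionMailOp at commit time
    config = cnx.vreg.config
    config.global_set_option('supervising-addrs', 'supervisors@example.org')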
+from yams.buildobjs import EntityType, RelationDefinition, String, SubjectRelation + +class comments(RelationDefinition): + subject = 'Comment' + object = 'Card' + cardinality='1*' + composite='object' + +class Tag(EntityType): + name = String(unique=True) + tags = SubjectRelation(('CWUser', 'CWGroup', 'EmailAddress')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/data/sobjects/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/data/sobjects/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,23 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from cubicweb.predicates import is_instance +from cubicweb.sobjects.notification import StatusChangeMixIn, NotificationView + +class UserStatusChangeView(StatusChangeMixIn, NotificationView): + __select__ = NotificationView.__select__ & is_instance('CWUser') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +cubicweb-card +cubicweb-comment diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/unittest_cwxmlparser.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/unittest_cwxmlparser.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,338 @@ +# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from datetime import datetime + +from six.moves.urllib.parse import urlsplit, parse_qsl + +import pytz +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser + +orig_parse = CWEntityXMLParser.parse + +def parse(self, url): + try: + url = RELATEDXML[url.split('?')[0]] + except KeyError: + pass + return orig_parse(self, url) + +def setUpModule(): + CWEntityXMLParser.parse = parse + +def tearDownModule(): + CWEntityXMLParser.parse = orig_parse + + +BASEXML = ''.join(u''' + + + sthenault + toto + 2011-01-25 14:14:06 + 2010-01-22 10:27:59 + 2011-01-25 14:14:06 + + + + + + + + + + + + + + + + +'''.splitlines()) + +RELATEDXML = { + 'http://pouet.org/6': u''' + + +
syt@logilab.fr
+ 2010-04-13 14:35:56 + 2010-04-13 14:35:56 + + + +
+
+''', + 'http://pouet.org/7': u''' + + + users + + + + + +''', + 'http://pouet.org/8': u''' + + + unknown + + +''', + 'http://pouet.org/9': u''' + + + hop + + +''', + 'http://pouet.org/10': u''' + + + unknown + + +''', + } + + +OTHERXML = ''.join(u''' + + + sthenault + toto + 2011-01-25 14:14:06 + 2010-01-22 10:27:59 + 2011-01-25 14:14:06 + + + + + +'''.splitlines() +) + + +class CWEntityXMLParserTC(CubicWebTC): + """/!\ this test use a pre-setup database /!\, if you modify above xml, + REMOVE THE DATABASE TEMPLATE else it won't be considered + """ + test_db_id = 'xmlparser' + + def assertURLEquiv(self, first, second): + # ignore ordering differences in query params + parsed_first = urlsplit(first) + parsed_second = urlsplit(second) + self.assertEqual(parsed_first.scheme, parsed_second.scheme) + self.assertEqual(parsed_first.netloc, parsed_second.netloc) + self.assertEqual(parsed_first.path, parsed_second.path) + self.assertEqual(parsed_first.fragment, parsed_second.fragment) + self.assertCountEqual(parse_qsl(parsed_first.query), parse_qsl(parsed_second.query)) + + @classmethod + def pre_setup_database(cls, cnx, config): + myfeed = cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', + parser=u'cw.entityxml', url=BASEXML) + myotherfeed = cnx.create_entity('CWSource', name=u'myotherfeed', type=u'datafeed', + parser=u'cw.entityxml', url=OTHERXML) + cnx.commit() + myfeed.init_mapping([(('CWUser', 'use_email', '*'), + u'role=subject\naction=copy'), + (('CWUser', 'in_group', '*'), + u'role=subject\naction=link\nlinkattr=name'), + (('CWUser', 'in_state', '*'), + u'role=subject\naction=link\nlinkattr=name'), + (('*', 'tags', '*'), + u'role=object\naction=link-or-create\nlinkattr=name'), + ]) + myotherfeed.init_mapping([(('CWUser', 'in_group', '*'), + u'role=subject\naction=link\nlinkattr=name'), + (('CWUser', 'in_state', '*'), + u'role=subject\naction=link\nlinkattr=name'), + ]) + cnx.create_entity('Tag', name=u'hop') + cnx.commit() + + def test_complete_url(self): + dfsource = self.repo.sources_by_uri['myfeed'] + with self.admin_access.repo_cnx() as cnx: + parser = dfsource._get_parser(cnx) + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/CWUser'), + 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser'), + 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'), + 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'), + 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'), + 'http://www.cubicweb.org/?rql=cwuser&relation=hop') + + + def test_actions(self): + dfsource = self.repo.sources_by_uri['myfeed'] + self.assertEqual(dfsource.mapping, + {u'CWUser': { + (u'in_group', u'subject', u'link'): [ + (u'CWGroup', {u'linkattr': u'name'})], + (u'in_state', u'subject', u'link'): [ + (u'State', {u'linkattr': u'name'})], + (u'tags', u'object', u'link-or-create'): [ + (u'Tag', {u'linkattr': u'name'})], + (u'use_email', u'subject', u'copy'): [ + 
(u'EmailAddress', {})] + }, + u'CWGroup': { + (u'tags', u'object', u'link-or-create'): [ + (u'Tag', {u'linkattr': u'name'})], + }, + u'EmailAddress': { + (u'tags', u'object', u'link-or-create'): [ + (u'Tag', {u'linkattr': u'name'})], + }, + }) + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) + self.assertEqual(len(stats['created']), 2) + self.assertEqual(stats['updated'], set()) + + with self.admin_access.web_request() as req: + user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) + self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59, tzinfo=pytz.utc)) + self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)) + self.assertEqual(user.cwuri, 'http://pouet.org/5') + self.assertEqual(user.cw_source[0].name, 'myfeed') + self.assertEqual(user.absolute_url(), 'http://pouet.org/5') + self.assertEqual(len(user.use_email), 1) + # copy action + email = user.use_email[0] + self.assertEqual(email.address, 'syt@logilab.fr') + self.assertEqual(email.cwuri, 'http://pouet.org/6') + self.assertEqual(email.absolute_url(), 'http://pouet.org/6') + self.assertEqual(email.cw_source[0].name, 'myfeed') + self.assertEqual(len(email.reverse_tags), 1) + self.assertEqual(email.reverse_tags[0].name, 'hop') + # link action + self.assertFalse(req.execute('CWGroup X WHERE X name "unknown"')) + groups = sorted([g.name for g in user.in_group]) + self.assertEqual(groups, ['users']) + group = user.in_group[0] + self.assertEqual(len(group.reverse_tags), 1) + self.assertEqual(group.reverse_tags[0].name, 'hop') + # link or create action + tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name) + for t in user.reverse_tags]) + self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'), + ('unknown', 'http://testing.fr/cubicweb/', 'system'))) + ) + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(len(stats['updated']), 0) + self.assertEqual(len(stats['checked']), 2) + self.repo._type_source_cache.clear() + self.repo._extid_cache.clear() + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(len(stats['updated']), 0) + self.assertEqual(len(stats['checked']), 2) + + # test move to system source + cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid}) + cnx.commit() + rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') + self.assertEqual(len(rset), 1) + e = rset.get_entity(0, 0) + self.assertEqual(e.eid, email.eid) + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'EmailAddress', + 'extid': None}) + self.assertEqual(e.cw_source[0].name, 'system') + self.assertEqual(e.reverse_use_email[0].login, 'sthenault') + # test everything is still fine after source synchronization + # clear caches to make sure we look at the moved_entities table + self.repo._type_source_cache.clear() + self.repo._extid_cache.clear() + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(stats['updated'], set((email.eid,))) + rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') + self.assertEqual(len(rset), 1) + e = rset.get_entity(0, 0) + self.assertEqual(e.eid, 
email.eid) + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'EmailAddress', + 'extid': None}) + self.assertEqual(e.cw_source[0].name, 'system') + self.assertEqual(e.reverse_use_email[0].login, 'sthenault') + cnx.commit() + + # test delete entity + e.cw_delete() + cnx.commit() + # test everything is still fine after source synchronization + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') + self.assertEqual(len(rset), 0) + rset = cnx.execute('Any X WHERE X use_email E, X login "sthenault"') + self.assertEqual(len(rset), 0) + + def test_external_entity(self): + dfsource = self.repo.sources_by_uri['myotherfeed'] + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) + self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59, tzinfo=pytz.utc)) + self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)) + self.assertEqual(user.cwuri, 'http://pouet.org/5') + self.assertEqual(user.cw_source[0].name, 'myfeed') + + def test_noerror_missing_fti_attribute(self): + dfsource = self.repo.sources_by_uri['myfeed'] + with self.repo.internal_cnx() as cnx: + parser = dfsource._get_parser(cnx) + dfsource.process_urls(parser, [''' + + + how-to + + +'''], raise_on_error=True) + + def test_noerror_unspecified_date(self): + dfsource = self.repo.sources_by_uri['myfeed'] + with self.repo.internal_cnx() as cnx: + parser = dfsource._get_parser(cnx) + dfsource.process_urls(parser, [''' + + + how-to + how-to + how-to + + + +'''], raise_on_error=True) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/unittest_email.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/unittest_email.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,68 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
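The datafeed wiring exercised by unittest_cwxmlparser.py above boils down to the following sketch (source name, URL and mapping rule are illustrative; repo stands for the running Repository instance):

    with repo.internal_cnx() as cnx:
        # declare a datafeed source fed by another CubicWeb instance
        feed = cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
                                 parser=u'cw.entityxml',
                                 url=u'http://example.org/some-rset')
        cnx.commit()
        # one rule per (etype, rtype, target) triple; action is one of
        # copy / link / link-or-create, linkattr names the matching attribute
        feed.init_mapping([(('CWUser', 'in_group', '*'),
                            u'role=subject\naction=link\nlinkattr=name')])
        cnx.commit()
    # pulling goes through the source object registered on the repository
    dfsource = repo.sources_by_uri['myfeed']
    with repo.internal_cnx() as cnx:
        stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)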
+ +from cubicweb import Unauthorized +from cubicweb.devtools.testlib import CubicWebTC + +class EmailAddressHooksTC(CubicWebTC): + + def test_use_email_set_primary_email(self): + with self.admin_access.client_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"') + self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows, + []) + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], + 'admin@logilab.fr') + # having another email should'nt change anything + cnx.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"') + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], + 'admin@logilab.fr') + + def test_primary_email_set_use_email(self): + with self.admin_access.client_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"') + self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A').rows, + []) + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0], + 'admin@logilab.fr') + + def test_cardinality_check(self): + with self.admin_access.client_cnx() as cnx: + email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] + cnx.commit() + cnx.execute('SET U primary_email E WHERE U login "anon", E address "client@client.com"') + cnx.commit() + rset = cnx.execute('Any X WHERE X use_email E, E eid %(e)s', {'e': email1}) + self.assertFalse(rset.rowcount != 1, rset) + + def test_security_check(self): + with self.admin_access.client_cnx() as cnx: + self.create_user(cnx, 'toto') + email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] + cnx.commit() + with self.new_access('toto').client_cnx() as cnx: + self.assertRaises(Unauthorized, + cnx.execute, 'SET U primary_email E WHERE E eid %(e)s, U login "toto"', + {'e': email1}) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/unittest_notification.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/unittest_notification.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,103 @@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from socket import gethostname + +from logilab.common.testlib import unittest_main, TestCase +from cubicweb.devtools.testlib import CubicWebTC, MAILBOX + +from cubicweb.mail import construct_message_id, parse_message_id + +class MessageIdTC(TestCase): + def test_base(self): + msgid1 = construct_message_id('testapp', 21) + msgid2 = construct_message_id('testapp', 21) + self.assertNotEqual(msgid1, msgid2) + self.assertNotIn('&', msgid1) + self.assertNotIn('=', msgid1) + self.assertNotIn('/', msgid1) + self.assertNotIn('+', msgid1) + values = parse_message_id(msgid1, 'testapp') + self.assertTrue(values) + # parse_message_id should work with or without surrounding <> + self.assertEqual(values, parse_message_id(msgid1[1:-1], 'testapp')) + self.assertEqual(values['eid'], '21') + self.assertIn('timestamp', values) + self.assertEqual(parse_message_id(msgid1[1:-1], 'anotherapp'), None) + + def test_notimestamp(self): + msgid1 = construct_message_id('testapp', 21, False) + msgid2 = construct_message_id('testapp', 21, False) + values = parse_message_id(msgid1, 'testapp') + self.assertEqual(values, {'eid': '21'}) + + def test_parse_message_doesnt_raise(self): + self.assertEqual(parse_message_id('oijioj@bla.bla', 'tesapp'), None) + self.assertEqual(parse_message_id('oijioj@bla', 'tesapp'), None) + self.assertEqual(parse_message_id('oijioj', 'tesapp'), None) + + + def test_nonregr_empty_message_id(self): + for eid in (1, 12, 123, 1234): + msgid1 = construct_message_id('testapp', eid, 12) + self.assertNotEqual(msgid1, '<@testapp.%s>' % gethostname()) + +class NotificationTC(CubicWebTC): + + def test_recipients_finder(self): + with self.admin_access.web_request() as req: + urset = req.execute('CWUser X WHERE X login "admin"') + req.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X ' + 'WHERE U eid %(x)s', {'x': urset[0][0]}) + req.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U ' + 'WHERE U eid %(x)s', {'x': urset[0][0]}) + req.cnx.commit() # commit so that admin get its properties updated + finder = self.vreg['components'].select('recipients_finder', + req, rset=urset) + self.set_option('default-recipients-mode', 'none') + self.assertEqual(finder.recipients(), []) + self.set_option('default-recipients-mode', 'users') + self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) + self.set_option('default-recipients-mode', 'default-dest-addrs') + self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr') + self.assertEqual(list(finder.recipients()), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) + + def test_status_change_view(self): + with self.admin_access.web_request() as req: + u = self.create_user(req, 'toto') + iwfable = u.cw_adapt_to('IWorkflowable') + iwfable.fire_transition('deactivate', comment=u'yeah') + self.assertFalse(MAILBOX) + req.cnx.commit() + self.assertEqual(len(MAILBOX), 1) + email = MAILBOX[0] + self.assertEqual(email.content, + ''' +admin changed status from to for entity +'toto' + +yeah + +url: http://testing.fr/cubicweb/cwuser/toto +''') + self.assertEqual(email.subject, + 'status changed CWUser #%s (admin)' % u.eid) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/unittest_register_user.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/test/unittest_register_user.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,95 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
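For the record, the Message-Id helpers exercised by MessageIdTC above behave roughly as follows (application ids and eid are illustrative):

    from cubicweb.mail import construct_message_id, parse_message_id

    msgid = construct_message_id('myapp', 21)   # opaque '<...>' identifier
    parse_message_id(msgid, 'myapp')            # {'eid': '21', 'timestamp': ...}
    parse_message_id(msgid, 'otherapp')         # None: built for another application
    construct_message_id('myapp', 21, False)    # same, without the timestamp part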
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittest for cubicweb.dbapi""" + +from cubicweb import ValidationError +from cubicweb.web import Unauthorized +from cubicweb.devtools.testlib import CubicWebTC + + +class RegisterUserTC(CubicWebTC): + + def test_register_user_service(self): + acc = self.admin_access + with acc.client_cnx() as cnx: + cnx.call_service('register_user', login=u'foo1', password=u'bar1', + email=u'foo1@bar1.com', firstname=u'Foo1', + surname=u'Bar1') + + acc = self.new_access('anon') + with acc.client_cnx() as cnx: + self.assertRaises(Unauthorized, cnx.call_service, 'register_user', + login=u'foo2', password=u'bar2', + email=u'foo2@bar2.com', firstname=u'Foo2', surname=u'Bar2') + + with self.repo.internal_cnx() as cnx: + cnx.call_service('register_user', login=u'foo3', + password=u'bar3', email=u'foo3@bar3.com', + firstname=u'Foo3', surname=u'Bar3') + # same login + with self.assertRaises(ValidationError): + cnx.call_service('register_user', login=u'foo3', + password=u'bar3') + + def test_register_user_attributes(self): + with self.repo.internal_cnx() as cnx: + cnx.call_service('register_user', login=u'foo3', + password=u'bar3', email=u'foo3@bar3.com', + firstname=u'Foo3', surname=u'Bar3') + cnx.commit() + + with self.admin_access.client_cnx() as cnx: + user = cnx.find('CWUser', login=u'foo3').one() + self.assertEqual(user.firstname, u'Foo3') + self.assertEqual(user.use_email[0].address, u'foo3@bar3.com') + + def test_register_user_groups(self): + with self.repo.internal_cnx() as cnx: + # default + cnx.call_service('register_user', login=u'foo_user', + password=u'bar_user', email=u'foo_user@bar_user.com', + firstname=u'Foo_user', surname=u'Bar_user') + + # group kwarg + cnx.call_service('register_user', login=u'foo_admin', + password=u'bar_admin', email=u'foo_admin@bar_admin.com', + firstname=u'Foo_admin', surname=u'Bar_admin', + groups=('managers', 'users')) + + # class attribute + from cubicweb.sobjects import services + services.RegisterUserService.default_groups = ('guests',) + cnx.call_service('register_user', login=u'foo_guest', + password=u'bar_guest', email=u'foo_guest@bar_guest.com', + firstname=u'Foo_guest', surname=u'Bar_guest') + cnx.commit() + + with self.admin_access.client_cnx() as cnx: + user = cnx.find('CWUser', login=u'foo_user').one() + self.assertEqual([g.name for g in user.in_group], ['users']) + + admin = cnx.find('CWUser', login=u'foo_admin').one() + self.assertEqual(sorted(g.name for g in admin.in_group), ['managers', 'users']) + + guest = cnx.find('CWUser', login=u'foo_guest').one() + self.assertEqual([g.name for g in guest.in_group], ['guests']) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/test/unittest_supervising.py --- /dev/null Thu Jan 01 00:00:00 1970 
+0000 +++ b/cubicweb/sobjects/test/unittest_supervising.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,109 @@ +# -*- coding: iso-8859-1 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +import re + +from logilab.common.testlib import unittest_main +from cubicweb.devtools.testlib import CubicWebTC + +from cubicweb.sobjects.supervising import SendMailOp, SupervisionMailOp + + +class SupervisingTC(CubicWebTC): + + def setup_database(self): + with self.admin_access.client_cnx() as cnx: + cnx.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau") + card = cnx.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau") + cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") + cnx.create_entity('Comment', content=u"Yo !", comments=card) + cnx.commit() + self.vreg.config.global_set_option('supervising-addrs', 'test@logilab.fr') + + + def test_supervision(self): + # do some modification + with self.admin_access.repo_cnx() as cnx: + user = cnx.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' + 'WHERE G name "users"').get_entity(0, 0) + cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}) + cnx.execute('DELETE Card B WHERE B title "une news !"') + cnx.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}) + cnx.execute('SET X content "duh?" WHERE X is Comment') + cnx.execute('DELETE Comment C WHERE C comments Y, Y is Card, Y title "une autre news !"') + # check only one supervision email operation + sentops = [op for op in cnx.pending_operations + if isinstance(op, SupervisionMailOp)] + self.assertEqual(len(sentops), 1) + # check view content + op = sentops[0] + view = sentops[0]._get_view() + self.assertEqual(view.recipients(), ['test@logilab.fr']) + self.assertEqual(view.subject(), '[data supervision] changes summary') + data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip() + data = re.sub('#\d+', '#EID', data) + data = re.sub('/\d+', '/EID', data) + self.assertMultiLineEqual('''user admin has made the following change(s): + +* added cwuser #EID (toto) + http://testing.fr/cubicweb/cwuser/toto + +* added relation in_group from cwuser #EID to cwgroup #EID + +* deleted card #EID (une news !) + +* added relation bookmarked_by from bookmark #EID to cwuser #EID + +* updated comment #EID (duh?) 
+ http://testing.fr/cubicweb/comment/EID + +* deleted comment #EID (duh?)''', + data) + # check prepared email + op._prepare_email() + self.assertEqual(len(op.to_send), 1) + self.assertTrue(op.to_send[0][0]) + self.assertEqual(op.to_send[0][1], ['test@logilab.fr']) + cnx.commit() + # some other changes ####### + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') + sentops = [op for op in cnx.pending_operations + if isinstance(op, SupervisionMailOp)] + self.assertEqual(len(sentops), 1) + # check view content + op = sentops[0] + view = sentops[0]._get_view() + data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip() + data = re.sub('#\d+', '#EID', data) + data = re.sub('/\d+', '/EID', data) + self.assertMultiLineEqual('''user admin has made the following change(s): + +* changed state of cwuser #EID (toto) + from state activated to state deactivated + http://testing.fr/cubicweb/cwuser/toto''', + data) + + def test_nonregr1(self): + with self.admin_access.repo_cnx() as cnx: + # do some unlogged modification + cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': cnx.user.eid}) + cnx.commit() # no crash + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/sobjects/textparsers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/sobjects/textparsers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,90 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Some parsers to detect action to do from text + +Currently only a parser to look for state change instruction is provided. +Take care to security when you're using it, think about the user that +will provide the text to analyze... +""" + +__docformat__ = "restructuredtext en" + +import re + +from cubicweb import UnknownEid +from cubicweb.view import Component + + +class TextAnalyzer(Component): + """analyze and extract information from plain text by calling registered + text parsers + """ + __regid__ = 'textanalyzer' + + def parse(self, caller, text): + for parsercls in self._cw.vreg['components'].get('textparser', ()): + parsercls(self._cw).parse(caller, text) + + +class TextParser(Component): + """base class for text parser, responsible to extract some information + from plain text. When something is done, it usually call the + + .fire_event(something, {event args}) + + method on the caller. + """ + __regid__ = 'textparser' + __abstract__ = True + + def parse(self, caller, text): + raise NotImplementedError + + +class ChangeStateTextParser(TextParser): + """search some text for change state instruction in the form + + :: #? 
+ """ + instr_rgx = re.compile(':(\w+):\s*#?(\d+)', re.U) + + def parse(self, caller, text): + for trname, eid in self.instr_rgx.findall(text): + try: + entity = self._cw.entity_from_eid(int(eid)) + except UnknownEid: + self.error("can't get entity with eid %s", eid) + continue + if not hasattr(entity, 'in_state'): + self.error('bad change state instruction for eid %s', eid) + continue + iworkflowable = entity.cw_adapt_to('IWorkflowable') + if iworkflowable.current_workflow: + tr = iworkflowable.current_workflow.transition_by_name(trname) + else: + tr = None + if tr and tr.may_be_fired(entity.eid): + try: + trinfo = iworkflowable.fire_transition(tr) + caller.fire_event('state-changed', {'trinfo': trinfo, + 'entity': entity}) + except Exception: + self.exception('while changing state of %s', entity) + else: + self.error("can't pass transition %s on entity %s", + trname, entity) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/spa2rql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/spa2rql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,220 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""SPARQL -> RQL translator""" + +from logilab.common import make_domains +from rql import TypeResolverException +from fyzz.yappsparser import parse +from fyzz import ast + +from cubicweb.xy import xy + + +class UnsupportedQuery(Exception): pass + +def order_limit_offset(sparqlst): + addons = '' + if sparqlst.orderby: + sortterms = ', '.join('%s %s' % (var.name.upper(), ascdesc.upper()) + for var, ascdesc in sparqlst.orderby) + addons += ' ORDERBY %s' % sortterms + if sparqlst.limit: + addons += ' LIMIT %s' % sparqlst.limit + if sparqlst.offset: + addons += ' OFFSET %s' % sparqlst.offset + return addons + + +class QueryInfo(object): + """wrapper class containing necessary information to generate a RQL query + from a sparql syntax tree + """ + def __init__(self, sparqlst): + self.sparqlst = sparqlst + if sparqlst.selected == ['*']: + self.selection = [var.upper() for var in sparqlst.variables] + else: + self.selection = [var.name.upper() for var in sparqlst.selected] + self.possible_types = {} + self.infer_types_info = [] + self.union_params = [] + self.restrictions = [] + self.literals = {} + self._litcount = 0 + + def add_literal(self, value): + key = chr(ord('a') + self._litcount) + self._litcount += 1 + self.literals[key] = value + return key + + def set_possible_types(self, var, varpossibletypes): + """set/restrict possible types for the given variable. + + :return: True if something changed, else false. 
+ :raise: TypeResolverException if no more type allowed + """ + varpossibletypes = set(varpossibletypes) + try: + ctypes = self.possible_types[var] + nbctypes = len(ctypes) + ctypes &= varpossibletypes + if not ctypes: + raise TypeResolverException('No possible type') + return len(ctypes) != nbctypes + except KeyError: + self.possible_types[var] = varpossibletypes + return True + + def infer_types(self): + # XXX should use something similar to rql.analyze for proper type inference + modified = True + # loop to infer types until nothing changed + while modified: + modified = False + for yams_predicates, subjvar, obj in self.infer_types_info: + nbchoices = len(yams_predicates) + # get possible types for the subject variable, according to the + # current predicate + svptypes = set(s for s, r, o in yams_predicates) + if not '*' in svptypes: + if self.set_possible_types(subjvar, svptypes): + modified = True + # restrict predicates according to allowed subject var types + if subjvar in self.possible_types: + yams_predicates[:] = [(s, r, o) for s, r, o in yams_predicates + if s == '*' or s in self.possible_types[subjvar]] + if isinstance(obj, ast.SparqlVar): + # make a valid rql var name + objvar = obj.name.upper() + # get possible types for the object variable, according to + # the current predicate + ovptypes = set(o for s, r, o in yams_predicates) + if not '*' in ovptypes: + if self.set_possible_types(objvar, ovptypes): + modified = True + # restrict predicates according to allowed object var types + if objvar in self.possible_types: + yams_predicates[:] = [(s, r, o) for s, r, o in yams_predicates + if o == '*' or o in self.possible_types[objvar]] + # ensure this still make sense + if not yams_predicates: + raise TypeResolverException('No yams predicate') + if len(yams_predicates) != nbchoices: + modified = True + + def build_restrictions(self): + # now, for each predicate + for yams_predicates, subjvar, obj in self.infer_types_info: + rel = yams_predicates[0] + # if there are several yams relation type equivalences, we will have + # to generate several unioned rql queries + for s, r, o in yams_predicates[1:]: + if r != rel[1]: + self.union_params.append((yams_predicates, subjvar, obj)) + break + # else we can simply add it to base rql restrictions + else: + restr = self.build_restriction(subjvar, rel[1], obj) + self.restrictions.append(restr) + + def build_restriction(self, subjvar, rtype, obj): + if isinstance(obj, ast.SparqlLiteral): + key = self.add_literal(obj.value) + objvar = '%%(%s)s' % key + else: + assert isinstance(obj, ast.SparqlVar) + # make a valid rql var name + objvar = obj.name.upper() + # else we can simply add it to base rql restrictions + return '%s %s %s' % (subjvar, rtype, objvar) + + def finalize(self): + """return corresponding rql query (string) / args (dict)""" + for varname, ptypes in self.possible_types.items(): + if len(ptypes) == 1: + self.restrictions.append('%s is %s' % (varname, next(iter(ptypes)))) + unions = [] + for releq, subjvar, obj in self.union_params: + thisunions = [] + for st, rt, ot in releq: + thisunions.append([self.build_restriction(subjvar, rt, obj)]) + if st != '*': + thisunions[-1].append('%s is %s' % (subjvar, st)) + if isinstance(obj, ast.SparqlVar) and ot != '*': + objvar = obj.name.upper() + thisunions[-1].append('%s is %s' % (objvar, objvar)) + if not unions: + unions = thisunions + else: + unions = zip(*make_domains([unions, thisunions])) + selection = 'Any ' + ', '.join(self.selection) + sparqlst = self.sparqlst + if sparqlst.distinct: 
+ selection = 'DISTINCT ' + selection + if unions: + baserql = '%s WHERE %s' % (selection, ', '.join(self.restrictions)) + rqls = ['(%s, %s)' % (baserql, ', '.join(unionrestrs)) + for unionrestrs in unions] + rql = ' UNION '.join(rqls) + if sparqlst.orderby or sparqlst.limit or sparqlst.offset: + rql = '%s%s WITH %s BEING (%s)' % ( + selection, order_limit_offset(sparqlst), + ', '.join(self.selection), rql) + else: + rql = '%s%s WHERE %s' % (selection, order_limit_offset(sparqlst), + ', '.join(self.restrictions)) + return rql, self.literals + + +class Sparql2rqlTranslator(object): + def __init__(self, yschema): + self.yschema = yschema + + def translate(self, sparql): + sparqlst = parse(sparql) + if sparqlst.type != 'select': + raise UnsupportedQuery() + qi = QueryInfo(sparqlst) + for subj, predicate, obj in sparqlst.where: + if not isinstance(subj, ast.SparqlVar): + raise UnsupportedQuery() + # make a valid rql var name + subjvar = subj.name.upper() + if predicate in [('', 'a'), + ('http://www.w3.org/1999/02/22-rdf-syntax-ns#', 'type')]: + # special 'is' relation + if not isinstance(obj, tuple): + raise UnsupportedQuery() + # restrict possible types for the subject variable + qi.set_possible_types( + subjvar, xy.yeq(':'.join(obj), isentity=True)) + else: + # 'regular' relation (eg not 'is') + if not isinstance(predicate, tuple): + raise UnsupportedQuery() + # list of 3-uple + # (yams etype (subject), yams rtype, yams etype (object)) + # where subject / object entity type may '*' if not specified + yams_predicates = xy.yeq(':'.join(predicate)) + qi.infer_types_info.append((yams_predicates, subjvar, obj)) + if not isinstance(obj, (ast.SparqlLiteral, ast.SparqlVar)): + raise UnsupportedQuery() + qi.infer_types() + qi.build_restrictions() + return qi diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/statsd_logger.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/statsd_logger.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,135 @@ +# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +"""Simple statsd_ logger for cubicweb. + +This module is meant to be configured by setting a couple of global variables: + +- ``bucket`` global variable will be used as statsd bucket in every +statsd_ UDP sent packet. 
+ +`- `address`` is a pair (IP, port) specifying the address of the +statsd_ server + + +There are 3 kinds of statds_ message:: + +- ``statsd_c(context, n)`` is a simple function to send statsd_ + counter-type of messages like:: + + .:|c\n + +- ``statsd_g(context, value)`` to send statsd_ gauge-type of messages + like:: + + .:|g\n + +- ``statsd_t(context, ms)`` to send statsd_ time-type of messages + like:: + + .:|ms\n + +There is also a decorator (``statsd_timeit``) that may be used to +measure and send to the statsd_ server the time passed in a function +or a method and the number of calls. It will send a message like:: + + .:|ms\n.:1|c\n + + +.. _statsd: https://github.com/etsy/statsd + +""" + +__docformat__ = "restructuredtext en" + +import time +import socket + +_bucket = 'cubicweb' +_address = None +_socket = None + + +def setup(bucket, address): + """Configure the statsd endpoint + + :param bucket: the name of the statsd bucket that will be used to + build messages. + + :param address: the UDP endpoint of the statsd server. Must a + couple (ip, port). + """ + global _bucket, _address, _socket + packed = None + for family in (socket.AF_INET6, socket.AF_INET): + try: + packed = socket.inet_pton(family, address[0]) + break + except socket.error: + continue + if packed is None: + return + _bucket, _address = bucket, address + _socket = socket.socket(family, socket.SOCK_DGRAM) + + +def statsd_c(context, n=1): + if _address is not None: + _socket.sendto('{0}.{1}:{2}|c\n'.format(_bucket, context, n), _address) + + +def statsd_g(context, value): + if _address is not None: + _socket.sendto('{0}.{1}:{2}|g\n'.format(_bucket, context, value), _address) + + +def statsd_t(context, value): + if _address is not None: + _socket.sendto('{0}.{1}:{2:.4f}|ms\n'.format(_bucket, context, value), _address) + + +class statsd_timeit(object): + __slots__ = ('callable',) + + def __init__(self, callableobj): + self.callable = callableobj + + @property + def __doc__(self): + return self.callable.__doc__ + @property + def __name__(self): + return self.callable.__name__ + + def __call__(self, *args, **kw): + if _address is None: + return self.callable(*args, **kw) + t0 = time.time() + try: + return self.callable(*args, **kw) + finally: + dt = 1000*(time.time()-t0) + msg = '{0}.{1}:{2:.4f}|ms\n{0}.{1}:1|c\n'.format(_bucket, self.__name__, dt) + _socket.sendto(msg, _address) + + def __get__(self, obj, objtype): + """Support instance methods.""" + if obj is None: # class method or some already wrapped method + return self + import functools + return functools.partial(self.__call__, obj) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/tags.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/tags.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,61 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""helper classes to generate simple (X)HTML tags"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb.uilib import simple_sgml_tag, sgml_attributes
+
+class tag(object):
+    def __init__(self, name, escapecontent=True):
+        self.name = name
+        self.escapecontent = escapecontent
+
+    def __call__(self, __content=None, **attrs):
+        attrs.setdefault('escapecontent', self.escapecontent)
+        return simple_sgml_tag(self.name, __content, **attrs)
+
+button = tag('button')
+input = tag('input')
+textarea = tag('textarea')
+a = tag('a')
+span = tag('span')
+div = tag('div', False)
+img = tag('img')
+label = tag('label')
+option = tag('option')
+h1 = tag('h1')
+h2 = tag('h2')
+h3 = tag('h3')
+h4 = tag('h4')
+h5 = tag('h5')
+tr = tag('tr')
+th = tag('th')
+td = tag('td')
+iframe = tag('iframe')
+
+def select(name, id=None, multiple=False, options=[], **attrs):
+    if multiple:
+        attrs['multiple'] = 'multiple'
+    if id:
+        attrs['id'] = id
+    attrs['name'] = name
+    html = [u'<select %s>' % sgml_attributes(attrs)]
+    html += options
+    html.append(u'</select>')
+    return u'\n'.join(html)
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/bootstrap_cubes
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,1 @@
+card, tag, localperms
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/comment/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/cubes/comment/__init__.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,17 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/comment/__pkginfo__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/cubes/comment/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,25 @@
+# pylint: disable=W0622
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
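Two of the new top-level modules above are small self-contained utility APIs; illustrative sketches follow (bucket name, daemon address and tag contents are made up). First cubicweb.statsd_logger:

    from cubicweb import statsd_logger

    # until setup() is called the helpers are no-ops (_address stays None)
    statsd_logger.setup('myapp', ('127.0.0.1', 8125))

    statsd_logger.statsd_c('edits')           # counter: myapp.edits:1|c
    statsd_logger.statsd_g('sessions', 42)    # gauge:   myapp.sessions:42|g
    statsd_logger.statsd_t('render', 12.5)    # timing:  myapp.render:12.5000|ms

    @statsd_logger.statsd_timeit
    def expensive():
        """timed and counted under myapp.expensive"""

then cubicweb.tags, whose callables return unicode (X)HTML snippets:

    from cubicweb import tags

    tags.a(u'CubicWeb', href=u'http://www.cubicweb.org/')
    # -> roughly u'<a href="http://www.cubicweb.org/">CubicWeb</a>'
    tags.select(u'priority', id=u'priority',
                options=[tags.option(u'low'), tags.option(u'high')])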
+"""cubicweb-comment packaging information""" + +distname = "cubicweb-comment" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/email/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/email/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/email/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/email/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,30 @@ +# pylint: disable=W0622 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-email packaging information""" + +distname = "cubicweb-email" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) + + +__depends__ = {'cubicweb': None, + 'cubicweb-file': None} +__recommends__ = {'cubicweb-comment': None} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/email/entities.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/email/entities.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +"test" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/email/hooks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/email/hooks.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +"test" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/email/views/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/email/views/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +"test" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/file/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/file/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/file/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/file/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,25 @@ +# pylint: disable=W0622 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-file packaging information""" + +distname = "cubicweb-file" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/file/entities/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/file/entities/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +"test" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/file/hooks/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/file/hooks/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +"test" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/file/views.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/file/views.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +"test" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/forge/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/forge/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/forge/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/forge/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,32 @@ +# pylint: disable=W0622 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-forge packaging information""" + +distname = "cubicweb-forge" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) + + +__depends__ = {'cubicweb': None, + 'cubicweb-file': None, + 'cubicweb-email': None, + 'cubicweb-comment': None, + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/mycube/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/mycube/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,20 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""mycube's __init__ + +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/cubes/mycube/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/cubes/mycube/__pkginfo__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +distname = 'cubicweb-mycube' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/entities.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/entities.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,42 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from cubicweb.entities import AnyEntity, fetch_config, adapters +from cubicweb.predicates import is_instance + + +class Societe(AnyEntity): + __regid__ = 'Societe' + fetch_attrs = ('nom',) + +class Personne(Societe): + """customized class forne Person entities""" + __regid__ = 'Personne' + fetch_attrs, cw_fetch_order = fetch_config(['nom', 'prenom']) + rest_attr = 'nom' + +class Ami(Societe): + __regid__ = 'Ami' + rest_attr = 'nom' + +class Note(AnyEntity): + __regid__ = 'Note' + + +class FakeFileIDownloadableAdapter(adapters.IDownloadableAdapter): + __select__ = is_instance('FakeFile') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/erqlexpr_on_ertype.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/erqlexpr_on_ertype.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,46 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+""" + +""" +from yams.buildobjs import EntityType, RelationType, SubjectRelation +from cubicweb.schema import ERQLExpression + +class ToTo(EntityType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + toto = SubjectRelation('TuTu') + +class TuTu(EntityType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + +class toto(RelationType): + __permissions__ = { + 'read': ('managers', ), + 'add': ('managers', ERQLExpression('S bla Y'),), + 'delete': ('managers',), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/lowered_etype.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/lowered_etype.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,5 @@ + +from yams.buildobjs import EntityType + +class my_etype(EntityType): + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/migration/0.0.3_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/migration/0.0.3_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +coucou diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/migration/0.0.4_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/migration/0.0.4_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +coucou diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/migration/0.1.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/migration/0.1.0_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +coucou diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/migration/0.1.0_common.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/migration/0.1.0_common.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,20 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""common to all configuration + +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/migration/0.1.0_repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/migration/0.1.0_repository.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,20 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""repository specific + +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/migration/0.1.2_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/migration/0.1.2_Any.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +coucou diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rewrite/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rewrite/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rewrite/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rewrite/bootstrap_cubes Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +card,localperms diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rewrite/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rewrite/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,124 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+from yams.buildobjs import (EntityType, RelationDefinition, String, SubjectRelation, + ComputedRelation, Int) +from cubicweb.schema import ERQLExpression + + +class Person(EntityType): + name = String() + + +class Affaire(EntityType): + __permissions__ = { + 'read': ('managers', + ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), + 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), + 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), + 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), + } + ref = String(fulltextindexed=True, indexed=True, maxsize=16) + documented_by = SubjectRelation('Card', cardinality='1*') + concerne = SubjectRelation(('Societe', 'Note'), cardinality='1*') + + +class Societe(EntityType): + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), + 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), + 'add': ('managers', 'users',) + } + nom = String() + + +class Division(Societe): + __specializes_schema__ = True + + +class Note(EntityType): + pass + + +class require_permission(RelationDefinition): + subject = ('Card', 'Note') + object = 'CWPermission' + + +class require_state(RelationDefinition): + subject = 'CWPermission' + object = 'State' + + +class inlined_card(RelationDefinition): + subject = 'Affaire' + object = 'Card' + inlined = True + cardinality = '?*' + +class inlined_note(RelationDefinition): + subject = 'Card' + object = 'Note' + inlined = True + cardinality = '?*' + +class inlined_affaire(RelationDefinition): + subject = 'Note' + object = 'Affaire' + inlined = True + cardinality = '?*' + +class responsable(RelationDefinition): + subject = 'Societe' + object = 'CWUser' + inlined = True + cardinality = '1*' + +class Contribution(EntityType): + code = Int() + +class ArtWork(EntityType): + name = String() + +class Role(EntityType): + name = String() + +class contributor(RelationDefinition): + subject = 'Contribution' + object = 'Person' + cardinality = '1*' + inlined = True + +class manifestation(RelationDefinition): + subject = 'Contribution' + object = 'ArtWork' + +class role(RelationDefinition): + subject = 'Contribution' + object = 'Role' + +class illustrator_of(ComputedRelation): + rule = ('C is Contribution, C contributor S, C manifestation O, ' + 'C role R, R name "illustrator"') + +class participated_in(ComputedRelation): + rule = 'S contributor O' + +class match(RelationDefinition): + subject = 'ArtWork' + object = 'Note' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rqlexpr_on_computedrel.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rqlexpr_on_computedrel.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,16 @@ +from yams.buildobjs import ComputedRelation, EntityType, RelationDefinition +from cubicweb.schema import RRQLExpression + +class Subject(EntityType): + pass + +class Object(EntityType): + pass + +class relation(RelationDefinition): + subject = 'Subject' + object = 'Object' + +class computed(ComputedRelation): + rule = 'S relation O' + __permissions__ = {'read': (RRQLExpression('S is ET'),)} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rqlexpr_on_ertype_read.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rqlexpr_on_ertype_read.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,46 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from yams.buildobjs import EntityType, RelationType, SubjectRelation +from cubicweb.schema import RRQLExpression + +class ToTo(EntityType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + toto = SubjectRelation('TuTu') + +class TuTu(EntityType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + +class toto(RelationType): + __permissions__ = { + 'read': ('managers', RRQLExpression('S bla Y'), ), + 'add': ('managers',), + 'delete': ('managers',), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rrqlexpr_on_attr.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rrqlexpr_on_attr.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,36 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from yams.buildobjs import EntityType, RelationType, String +from cubicweb.schema import RRQLExpression + +class ToTo(EntityType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + attr = String() + +class attr(RelationType): + __permissions__ = { + 'read': ('managers', ), + 'update': ('managers', RRQLExpression('S bla Y'),), + 'add': ('managers', RRQLExpression('S bla Y'),), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/rrqlexpr_on_eetype.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/rrqlexpr_on_eetype.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,31 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from yams.buildobjs import EntityType, String +from cubicweb.schema import RRQLExpression + +class ToTo(EntityType): + __permissions__ = { + 'read': ('managers', RRQLExpression('S bla Y'),), + 'add': ('managers',), + 'update': ('managers',), + 'delete': ('managers',), + } + attr = String() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,113 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from yams.buildobjs import (EntityType, String, RichString, Bytes, + SubjectRelation, RelationDefinition) + +from cubicweb.schema import (WorkflowableEntityType, + RQLConstraint, RQLVocabularyConstraint) + + +from cubicweb import _ + + +class Personne(EntityType): + nom = String(required=True) + prenom = String() + type = String() + travaille = SubjectRelation('Societe') + evaluee = SubjectRelation(('Note', 'Personne')) + connait = SubjectRelation( + 'Personne', symmetric=True, + constraints=[ + RQLConstraint('NOT S identity O'), + # conflicting constraints, see cw_unrelated_rql tests in + # unittest_entity.py + RQLVocabularyConstraint('NOT (S connait P, P nom "toto")'), + RQLVocabularyConstraint('S travaille P, P nom "tutu"')]) + actionnaire = SubjectRelation('Societe', cardinality='??', + constraints=[RQLConstraint('NOT EXISTS(O contrat_exclusif S)')]) + dirige = SubjectRelation('Societe', cardinality='??', + constraints=[RQLConstraint('S actionnaire O')]) + associe = SubjectRelation('Personne', cardinality='?*', + constraints=[RQLConstraint('S actionnaire SOC, O actionnaire SOC')]) + +class Ami(EntityType): + """A Person, for which surname is not required""" + prenom = String() + nom = String() + +class Societe(EntityType): + nom = String() + evaluee = SubjectRelation('Note') + fournit = SubjectRelation(('Service', 'Produit'), cardinality='1*') + contrat_exclusif = SubjectRelation('Personne', cardinality='??') + +class Service(EntityType): + fabrique_par = SubjectRelation('Personne', cardinality='1*') + + +class Produit(EntityType): + fabrique_par = SubjectRelation('Usine', cardinality='1*', inlined=True) + + +class Usine(EntityType): + lieu = String(required=True) + + +class Note(EntityType): + type = String() + ecrit_par = SubjectRelation('Personne') + + +class SubNote(Note): + __specializes_schema__ = True + description = String() + + +class 
tags(RelationDefinition): + subject = 'Tag' + object = ('Personne', 'Note') + + +class evaluee(RelationDefinition): + subject = 'CWUser' + object = 'Note' + + +class StateFull(WorkflowableEntityType): + name = String() + + +class Reference(EntityType): + nom = String(unique=True) + ean = String(unique=True, required=True) + + +class FakeFile(EntityType): + title = String(fulltextindexed=True, maxsize=256) + data = Bytes(required=True, fulltextindexed=True, description=_('file to upload')) + data_format = String(required=True, maxsize=128, + description=_('MIME type of the file. Should be dynamically set at upload time.')) + data_encoding = String(maxsize=32, + description=_('encoding of the file when it applies (e.g. text). ' + 'Should be dynamically set at upload time.')) + data_name = String(required=True, fulltextindexed=True, + description=_('name of the file. Should be dynamically set at upload time.')) + description = RichString(fulltextindexed=True, internationalizable=True, + default_format='text/rest') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/scripts/script1.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/scripts/script1.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +from os.path import join +assert __file__.endswith(join('scripts', 'script1.py')), __file__ +assert '__main__' == __name__, __name__ +assert [] == __args__, __args__ diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/scripts/script2.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/scripts/script2.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +from os.path import join +assert __file__.endswith(join('scripts', 'script2.py')), __file__ +assert '__main__' == __name__, __name__ +assert ['-v'] == __args__, __args__ diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/scripts/script3.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/scripts/script3.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,4 @@ +from os.path import join +assert __file__.endswith(join('scripts', 'script3.py')), __file__ +assert '__main__' == __name__, __name__ +assert ['-vd', '-f', 'FILE.TXT'] == __args__, __args__ diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/server_migration/2.10.2_Any.sql diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/server_migration/2.5.0_Any.sql diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/server_migration/2.6.0_Any.sql diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/server_migration/bootstrapmigration_repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/server_migration/bootstrapmigration_repository.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,20 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""allways executed before all others in server migration + +""" diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/uppered_rtype.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/uppered_rtype.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ + +from yams.buildobjs import RelationDefinition + +class ARelation(RelationDefinition): + subject = 'CWUser' + object = 'CWGroup' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data/views.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data/views.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,33 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from cubicweb.web.views import xmlrss +xmlrss.RSSIconBox.visible = True + + +from cubicweb.predicates import match_user_groups +from cubicweb.server import Service + + +class TestService(Service): + __regid__ = 'test_service' + __select__ = Service.__select__ & match_user_groups('managers') + passed_here = [] + + def call(self, msg): + self.passed_here.append(msg) + return 'babar' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/data_schemareader/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/data_schemareader/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +from cubicweb.schemas.base import in_group, CWSourceSchemaConfig +# copy __permissions__ to avoid modifying a shared dictionary +in_group.__permissions__ = in_group.__permissions__.copy() +in_group.__permissions__['read'] = ('managers',) + +cw_for_source = CWSourceSchemaConfig.get_relation('cw_for_source') +cw_for_source.__permissions__ = {'read': ('managers', 'users'), + 'add': ('managers',), + 'delete': ('managers',)} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +Pygments +#fyzz XXX pip install fails +cubicweb-card +cubicweb-file +cubicweb-localperms +cubicweb-tag diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_binary.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_binary.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,54 @@ +from six import PY2 + +from unittest import TestCase +from tempfile import NamedTemporaryFile +import os.path as osp + +from logilab.common.shellutils import tempdir +from cubicweb import Binary + + +class BinaryTC(TestCase): + def test_init(self): + Binary() + Binary(b'toto') + Binary(bytearray(b'toto')) + if PY2: + Binary(buffer('toto')) + else: + Binary(memoryview(b'toto')) + with self.assertRaises((AssertionError, TypeError)): + # TypeError is raised by BytesIO if python runs with -O + Binary(u'toto') + + def test_write(self): + b = Binary() + b.write(b'toto') + b.write(bytearray(b'toto')) + if PY2: + 
b.write(buffer('toto')) + else: + b.write(memoryview(b'toto')) + with self.assertRaises((AssertionError, TypeError)): + # TypeError is raised by BytesIO if python runs with -O + b.write(u'toto') + + def test_gzpickle_roundtrip(self): + old = (u'foo', b'bar', 42, {}) + new = Binary.zpickle(old).unzpickle() + self.assertEqual(old, new) + self.assertIsNot(old, new) + + def test_from_file_to_file(self): + with tempdir() as dpath: + fpath = osp.join(dpath, 'binary.bin') + with open(fpath, 'wb') as fobj: + Binary(b'binaryblob').to_file(fobj) + + bobj = Binary.from_file(fpath) + self.assertEqual(bobj.getvalue(), b'binaryblob') + + +if __name__ == '__main__': + from unittest import main + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_cwconfig.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_cwconfig.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,222 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb.cwconfig unit tests""" + +import sys +import os +import tempfile +from os.path import dirname, join, abspath + +from logilab.common.modutils import cleanup_sys_modules +from logilab.common.testlib import (TestCase, unittest_main, + with_tempdir) +from logilab.common.changelog import Version + +from cubicweb.devtools import ApptestConfiguration +from cubicweb.cwconfig import _find_prefix + +def unabsolutize(path): + parts = path.split(os.sep) + for i, part in reversed(tuple(enumerate(parts))): + if part.startswith('cubicweb') or part == 'cubes': + return '/'.join(parts[i+1:]) + raise Exception('duh? 
%s' % path) + +CUSTOM_CUBES_DIR = abspath(join(dirname(__file__), 'data', 'cubes')) + + +class CubicWebConfigurationTC(TestCase): + def setUp(self): + cleanup_sys_modules([CUSTOM_CUBES_DIR, ApptestConfiguration.CUBES_DIR]) + self.config = ApptestConfiguration('data', apphome=self.datadir) + self.config._cubes = ('email', 'file') + + def tearDown(self): + ApptestConfiguration.CUBES_PATH = [] + + def test_reorder_cubes(self): + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() + # forge depends on email and file and comment + # email depends on file + self.assertEqual(self.config.reorder_cubes(['file', 'email', 'forge']), + ('forge', 'email', 'file')) + self.assertEqual(self.config.reorder_cubes(['email', 'file', 'forge']), + ('forge', 'email', 'file')) + self.assertEqual(self.config.reorder_cubes(['email', 'forge', 'file']), + ('forge', 'email', 'file')) + self.assertEqual(self.config.reorder_cubes(['file', 'forge', 'email']), + ('forge', 'email', 'file')) + self.assertEqual(self.config.reorder_cubes(['forge', 'file', 'email']), + ('forge', 'email', 'file')) + self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file')), + ('forge', 'email', 'file')) + + def test_reorder_cubes_recommends(self): + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() + from cubes.comment import __pkginfo__ as comment_pkginfo + comment_pkginfo.__recommends_cubes__ = {'file': None} + try: + # email recommends comment + # comment recommends file + self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file', 'comment')), + ('forge', 'email', 'comment', 'file')) + self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'comment', 'file')), + ('forge', 'email', 'comment', 'file')) + self.assertEqual(self.config.reorder_cubes(('forge', 'comment', 'email', 'file')), + ('forge', 'email', 'comment', 'file')) + self.assertEqual(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')), + ('forge', 'email', 'comment', 'file')) + finally: + comment_pkginfo.__recommends_cubes__ = {} + + def test_expand_cubes(self): + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() + self.assertEqual(self.config.expand_cubes(('email', 'comment')), + ['email', 'comment', 'file']) + + def test_appobjects_path(self): + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() + path = [unabsolutize(p) for p in self.config.appobjects_path()] + self.assertEqual(path[0], 'entities') + self.assertCountEqual(path[1:4], ['web/views', 'sobjects', 'hooks']) + self.assertEqual(path[4], 'file/entities') + self.assertCountEqual(path[5:7], ['file/views.py', 'file/hooks']) + self.assertEqual(path[7], 'email/entities.py') + self.assertCountEqual(path[8:10], ['email/views', 'email/hooks.py']) + self.assertEqual(path[10:], ['test/data/entities.py', 'test/data/views.py']) + + def test_cubes_path(self): + # make sure we don't import the email cube, but the stdlib email package + import email + self.assertNotEqual(dirname(email.__file__), self.config.CUBES_DIR) + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.assertEqual(self.config.cubes_search_path(), + [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR, + self.config.CUBES_DIR, 'unexistant'] + # filter out unexistant and duplicates + self.assertEqual(self.config.cubes_search_path(), + [CUSTOM_CUBES_DIR, + self.config.CUBES_DIR]) + self.assertIn('mycube', self.config.available_cubes()) 
+ # test cubes python path + self.config.adjust_sys_path() + import cubes + self.assertEqual(cubes.__path__, self.config.cubes_search_path()) + # this import should succeed once path is adjusted + from cubes import mycube + self.assertEqual(mycube.__path__, [join(CUSTOM_CUBES_DIR, 'mycube')]) + # file cube should be overriden by the one found in data/cubes + sys.modules.pop('cubes.file', None) + del cubes.file + from cubes import file + self.assertEqual(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')]) + + +class FindPrefixTC(TestCase): + def make_dirs(self, *args): + path = join(tempfile.tempdir, *args) + if not os.path.exists(path): + os.makedirs(path) + return path + + def make_file(self, *args): + self.make_dirs(*args[: -1]) + file_path = join(tempfile.tempdir, *args) + file_obj = open(file_path, 'w') + file_obj.write('""" None """') + file_obj.close() + return file_path + + @with_tempdir + def test_samedir(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.assertEqual(_find_prefix(prefix), prefix) + + @with_tempdir + def test_samedir_filepath(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + dir_path = self.make_dirs('bob') + self.assertEqual(_find_prefix(dir_path), prefix) + + @with_tempdir + def test_file_in_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob', 'toto.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_file_in_deeper_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_multiple_candidate_prefix(self): + self.make_dirs('share', 'cubicweb') + prefix = self.make_dirs('bob') + self.make_dirs('bob', 'share', 'cubicweb') + file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_sister_candidate_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.make_dirs('bob', 'share', 'cubicweb') + file_path = self.make_file('bell', 'toto.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_multiple_parent_candidate_prefix(self): + self.make_dirs('share', 'cubicweb') + prefix = self.make_dirs('share', 'cubicweb', 'bob') + self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb') + file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_upper_candidate_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.make_dirs('bell','bob', 'share', 'cubicweb') + file_path = self.make_file('bell', 'toto.py') + self.assertEqual(_find_prefix(file_path), prefix) + + @with_tempdir + def test_no_prefix(self): + prefix = tempfile.tempdir + self.assertEqual(_find_prefix(prefix), sys.prefix) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_cwctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_cwctl.py Sat Jan 16 13:48:51 2016 +0100 @@ 
-0,0 +1,68 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +import sys +import os +from os.path import join +from io import StringIO, BytesIO + +from six import PY2 + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.migractions import ServerMigrationHelper + +CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? + + +class CubicWebCtlTC(TestCase): + def setUp(self): + self.stream = BytesIO() if PY2 else StringIO() + sys.stdout = self.stream + def tearDown(self): + sys.stdout = sys.__stdout__ + + def test_list(self): + from cubicweb.cwctl import ListCommand + ListCommand(None).run([]) + + +class CubicWebShellTC(CubicWebTC): + + def test_process_script_args_context(self): + repo = self.repo + with self.admin_access.repo_cnx() as cnx: + mih = ServerMigrationHelper(None, repo=repo, cnx=cnx, + interactive=False, + # hack so it don't try to load fs schema + schema=1) + scripts = {'script1.py': list(), + 'script2.py': ['-v'], + 'script3.py': ['-vd', '-f', 'FILE.TXT'], + } + mih.cmd_process_script(join(self.datadir, 'scripts', 'script1.py'), + funcname=None) + for script, args in scripts.items(): + scriptname = os.path.join(self.datadir, 'scripts', script) + self.assertTrue(os.path.exists(scriptname)) + mih.cmd_process_script(scriptname, None, scriptargs=args) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_entity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_entity.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,848 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for cubicweb.web.views.entities module""" + +from datetime import datetime + +from six import text_type + +from logilab.common import tempattr +from logilab.common.decorators import clear_cache + +from cubicweb import Binary +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.mttransforms import HAS_TAL +from cubicweb.entity import can_use_rest_path +from cubicweb.entities import fetch_config +from cubicweb.uilib import soup2xhtml +from cubicweb.schema import RRQLExpression + +class EntityTC(CubicWebTC): + + def setUp(self): + super(EntityTC, self).setUp() + self.backup_dict = {} + for cls in self.vreg['etypes'].iter_classes(): + self.backup_dict[cls] = (cls.fetch_attrs, cls.cw_fetch_order) + + def tearDown(self): + super(EntityTC, self).tearDown() + for cls in self.vreg['etypes'].iter_classes(): + cls.fetch_attrs, cls.cw_fetch_order = self.backup_dict[cls] + + def test_no_prefill_related_cache_bug(self): + with self.admin_access.repo_cnx() as cnx: + usine = cnx.create_entity('Usine', lieu=u'Montbeliard') + produit = cnx.create_entity('Produit') + # usine was prefilled in glob_add_entity + # let's simulate produit creation without prefill + produit._cw_related_cache.clear() + # use add_relations + cnx.add_relations([('fabrique_par', [(produit.eid, usine.eid)])]) + self.assertEqual(1, len(usine.reverse_fabrique_par)) + self.assertEqual(1, len(produit.fabrique_par)) + + def test_boolean_value(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + self.assertTrue(e) + + def test_yams_inheritance(self): + from entities import Note + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('SubNote')(req) + self.assertIsInstance(e, Note) + e2 = self.vreg['etypes'].etype_class('SubNote')(req) + self.assertIs(e.__class__, e2.__class__) + + def test_has_eid(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + self.assertEqual(e.eid, None) + self.assertEqual(e.has_eid(), False) + e.eid = 'X' + self.assertEqual(e.has_eid(), False) + e.eid = 0 + self.assertEqual(e.has_eid(), True) + e.eid = 2 + self.assertEqual(e.has_eid(), True) + + def test_copy(self): + with self.admin_access.web_request() as req: + req.create_entity('Tag', name=u'x') + p = req.create_entity('Personne', nom=u'toto') + oe = req.create_entity('Note', type=u'x') + req.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', + {'t': oe.eid, 'u': p.eid}) + req.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) + e = req.create_entity('Note', type=u'z') + e.copy_relations(oe.eid) + self.assertEqual(len(e.ecrit_par), 1) + self.assertEqual(e.ecrit_par[0].eid, p.eid) + self.assertEqual(len(e.reverse_tags), 1) + # check meta-relations are not copied, set on commit + self.assertEqual(len(e.created_by), 0) + + def test_copy_with_nonmeta_composite_inlined(self): + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'toto') + oe = req.create_entity('Note', type=u'x') + self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' + req.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', + {'t': oe.eid, 'u': p.eid}) + e = req.create_entity('Note', type=u'z') + e.copy_relations(oe.eid) + self.assertFalse(e.ecrit_par) + self.assertTrue(oe.ecrit_par) + + def test_copy_with_composite(self): + with self.admin_access.web_request() as req: + adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X 
WHERE U login "admin"')[0][0] + e = req.execute('Any X WHERE X eid %(x)s', {'x': req.user.eid}).get_entity(0, 0) + self.assertEqual(e.use_email[0].address, "toto@logilab.org") + self.assertEqual(e.use_email[0].eid, adeleid) + usereid = req.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' + 'WHERE G name "users"')[0][0] + e = req.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) + e.copy_relations(req.user.eid) + self.assertFalse(e.use_email) + self.assertFalse(e.primary_email) + + def test_copy_with_non_initial_state(self): + with self.admin_access.web_request() as req: + user = req.execute('INSERT CWUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"', + {'pwd': 'toto'}).get_entity(0, 0) + req.cnx.commit() + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') + req.cnx.commit() + eid2 = req.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] + e = req.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) + e.copy_relations(user.eid) + req.cnx.commit() + e.cw_clear_relation_cache('in_state', 'subject') + self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') + + def test_related_cache_both(self): + with self.admin_access.web_request() as req: + user = req.execute('Any X WHERE X eid %(x)s', {'x':req.user.eid}).get_entity(0, 0) + adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] + req.cnx.commit() + self.assertEqual(user._cw_related_cache, {}) + email = user.primary_email[0] + self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject']) + self.assertEqual(list(email._cw_related_cache), ['primary_email_object']) + groups = user.in_group + self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) + for group in groups: + self.assertNotIn('in_group_subject', group._cw_related_cache) + user.cw_clear_all_caches() + user.related('in_group', entities=True) + self.assertIn('in_group_subject', user._cw_related_cache) + user.cw_clear_all_caches() + user.related('in_group', targettypes=('CWGroup',), entities=True) + self.assertNotIn('in_group_subject', user._cw_related_cache) + + def test_related_limit(self): + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + for tag in u'abcd': + req.create_entity('Tag', name=tag) + req.execute('SET X tags Y WHERE X is Tag, Y is Personne') + self.assertEqual(len(p.related('tags', 'object', limit=2)), 2) + self.assertEqual(len(p.related('tags', 'object')), 4) + p.cw_clear_all_caches() + self.assertEqual(len(p.related('tags', 'object', entities=True, limit=2)), 2) + self.assertEqual(len(p.related('tags', 'object', entities=True)), 4) + + def test_related_targettypes(self): + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'Loxodonta', prenom=u'Babar') + n = req.create_entity('Note', type=u'scratch', ecrit_par=p) + t = req.create_entity('Tag', name=u'a tag', tags=(p, n)) + req.cnx.commit() + with self.admin_access.web_request() as req: + t = req.entity_from_eid(t.eid) + self.assertEqual(2, t.related('tags').rowcount) + self.assertEqual(1, t.related('tags', targettypes=('Personne',)).rowcount) + self.assertEqual(1, t.related('tags', targettypes=('Note',)).rowcount) + + def test_cw_instantiate_relation(self): + with self.admin_access.web_request() as req: + p1 = req.create_entity('Personne', nom=u'di') + 
p2 = req.create_entity('Personne', nom=u'mascio') + t = req.create_entity('Tag', name=u't0', tags=[]) + self.assertCountEqual(t.tags, []) + t = req.create_entity('Tag', name=u't1', tags=p1) + self.assertCountEqual(t.tags, [p1]) + t = req.create_entity('Tag', name=u't2', tags=p1.eid) + self.assertCountEqual(t.tags, [p1]) + t = req.create_entity('Tag', name=u't3', tags=[p1, p2.eid]) + self.assertCountEqual(t.tags, [p1, p2]) + + def test_cw_instantiate_reverse_relation(self): + with self.admin_access.web_request() as req: + t1 = req.create_entity('Tag', name=u't1') + t2 = req.create_entity('Tag', name=u't2') + p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1) + self.assertCountEqual(p.reverse_tags, [t1]) + p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1.eid) + self.assertCountEqual(p.reverse_tags, [t1]) + p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=[t1, t2.eid]) + self.assertCountEqual(p.reverse_tags, [t1, t2]) + + def test_fetch_rql(self): + Personne = self.vreg['etypes'].etype_class('Personne') + Societe = self.vreg['etypes'].etype_class('Societe') + Note = self.vreg['etypes'].etype_class('Note') + peschema = Personne.e_schema + seschema = Societe.e_schema + torestore = [] + for rdef, card in [(peschema.subjrels['travaille'].rdef(peschema, seschema), '1*'), + (peschema.subjrels['connait'].rdef(peschema, peschema), '11'), + (peschema.subjrels['evaluee'].rdef(peschema, Note.e_schema), '1*'), + (seschema.subjrels['evaluee'].rdef(seschema, Note.e_schema), '1*')]: + cm = tempattr(rdef, 'cardinality', card) + cm.__enter__() + torestore.append(cm) + try: + with self.admin_access.web_request() as req: + user = req.user + # testing basic fetch_attrs attribute + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC ORDERBY AB ' + 'WHERE X is_instance_of Personne, X modification_date AA, X nom AB, X prenom AC') + # testing unknown attributes + Personne.fetch_attrs = ('bloug', 'beep') + self.assertEqual(Personne.fetch_rql(user), 'Any X WHERE X is_instance_of Personne') + # testing one non final relation + Personne.fetch_attrs = ('nom', 'prenom', 'travaille') + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD ORDERBY AA ' + 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') + # testing two non final relations + Personne.fetch_attrs = ('nom', 'prenom', 'travaille', 'evaluee') + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD,AE ORDERBY AB ' + 'WHERE X is_instance_of Personne, X evaluee AA?, X nom AB, X prenom AC, X travaille AD?, ' + 'AD nom AE') + # testing one non final relation with recursion + Personne.fetch_attrs = ('nom', 'prenom', 'travaille') + Societe.fetch_attrs = ('nom', 'evaluee') + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ' + 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, ' + 'AC evaluee AD?, AD modification_date AE, AC nom AF') + # testing symmetric relation + Personne.fetch_attrs = ('nom', 'connait') + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AB ' + 'WHERE X is_instance_of Personne, X connait AA?, X nom AB') + # testing optional relation + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '?*' + Personne.fetch_attrs = ('nom', 'prenom', 'travaille') + Societe.fetch_attrs = ('nom',) + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') + # testing 
relation with cardinality > 1 + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '**' + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB') + # XXX test unauthorized attribute + finally: + # fetch_attrs restored by generic tearDown + for cm in torestore: + cm.__exit__(None, None, None) + + def test_related_rql_base(self): + Personne = self.vreg['etypes'].etype_class('Personne') + Note = self.vreg['etypes'].etype_class('Note') + SubNote = self.vreg['etypes'].etype_class('SubNote') + self.assertTrue(issubclass(self.vreg['etypes'].etype_class('SubNote'), Note)) + Personne.fetch_attrs, Personne.cw_fetch_order = fetch_config(('nom', 'type')) + Note.fetch_attrs, Note.cw_fetch_order = fetch_config(('type',)) + SubNote.fetch_attrs, SubNote.cw_fetch_order = fetch_config(('type',)) + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'pouet') + self.assertEqual(p.cw_related_rql('evaluee'), + 'Any X,AA,AB ORDERBY AB WHERE E eid %(x)s, E evaluee X, ' + 'X modification_date AA, X type AB') + n = req.create_entity('Note') + self.assertEqual(n.cw_related_rql('evaluee', role='object', + targettypes=('Societe', 'Personne')), + "Any X,AA ORDERBY AB DESC WHERE E eid %(x)s, X evaluee E, " + "X is IN(Personne, Societe), X nom AA, " + "X modification_date AB") + Personne.fetch_attrs, Personne.cw_fetch_order = fetch_config(('nom', )) + # XXX + self.assertEqual(p.cw_related_rql('evaluee'), + 'Any X,AA ORDERBY AA DESC ' + 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') + + tag = self.vreg['etypes'].etype_class('Tag')(req) + self.assertEqual(tag.cw_related_rql('tags', 'subject'), + 'Any X,AA ORDERBY AA DESC ' + 'WHERE E eid %(x)s, E tags X, X modification_date AA') + self.assertEqual(tag.cw_related_rql('tags', 'subject', ('Personne',)), + 'Any X,AA,AB ORDERBY AB ' + 'WHERE E eid %(x)s, E tags X, X is Personne, X modification_date AA, ' + 'X nom AB') + + def test_related_rql_ambiguous_cant_use_fetch_order(self): + with self.admin_access.web_request() as req: + tag = self.vreg['etypes'].etype_class('Tag')(req) + for ttype in self.schema['tags'].objects(): + self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) + self.assertEqual(tag.cw_related_rql('tags', 'subject'), + 'Any X,AA ORDERBY AA DESC ' + 'WHERE E eid %(x)s, E tags X, X modification_date AA') + + def test_related_rql_fetch_ambiguous_rtype(self): + etvreg = self.vreg['etypes'] + soc_etype = etvreg.etype_class('Societe') + with self.admin_access.web_request() as req: + soc = soc_etype(req) + soc_etype.fetch_attrs = ('fournit',) + etvreg.etype_class('Service').fetch_attrs = ('fabrique_par',) + etvreg.etype_class('Produit').fetch_attrs = ('fabrique_par',) + etvreg.etype_class('Usine').fetch_attrs = ('lieu',) + etvreg.etype_class('Personne').fetch_attrs = ('nom',) + self.assertEqual(soc.cw_related_rql('fournit', 'subject'), + 'Any X,A WHERE E eid %(x)s, E fournit X, X fabrique_par A') + + def test_unrelated_rql_security_1_manager(self): + with self.admin_access.web_request() as req: + user = req.user + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, + 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is_instance_of EmailAddress, O address AA, O alias AB, ' + 'O modification_date AC') + + def test_unrelated_rql_security_1_user(self): + with self.admin_access.web_request() as req: + self.create_user(req, 'toto') + with 
self.new_access('toto').web_request() as req: + user = req.user # XXX + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, + 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is_instance_of EmailAddress, O address AA, O alias AB, O modification_date AC') + user = req.execute('Any X WHERE X login "admin"').get_entity(0, 0) + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' + 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') + + def test_unrelated_rql_security_1_anon(self): + with self.new_access('anon').web_request() as req: + user = req.user + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' + 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') + + def test_unrelated_rql_security_2(self): + with self.admin_access.web_request() as req: + email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' + 'WHERE NOT S use_email O, O eid %(x)s, S is_instance_of CWUser, ' + 'S firstname AA, S login AB, S modification_date AC, S surname AD') + req.cnx.commit() + rperms = self.schema['EmailAddress'].permissions['read'] + clear_cache(self.schema['EmailAddress'], 'get_groups') + clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') + self.schema['EmailAddress'].permissions['read'] = ('managers', 'users', 'guests',) + try: + with self.new_access('anon').web_request() as req: + email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' + 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, ' + 'S firstname AA, S login AB, S modification_date AC, S surname AD, ' + 'AE eid %(AF)s, EXISTS(S identity AE, NOT AE in_group AG, AG name "guests", AG is CWGroup)') + finally: + clear_cache(self.schema['EmailAddress'], 'get_groups') + clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') + self.schema['EmailAddress'].permissions['read'] = rperms + + def test_cw_linkable_rql(self): + with self.admin_access.web_request() as req: + email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + rql = email.cw_linkable_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' + 'WHERE O eid %(x)s, S is_instance_of CWUser, ' + 'S firstname AA, S login AB, S modification_date AC, S surname AD') + + def test_unrelated_rql_security_nonexistant(self): + with self.new_access('anon').web_request() as req: + email = self.vreg['etypes'].etype_class('EmailAddress')(req) + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' + 'WHERE S is CWUser, ' + 'S firstname AA, S login AB, S modification_date AC, S surname AD, ' + 'AE eid %(AF)s, EXISTS(S identity AE, NOT AE in_group AG, AG name "guests", AG is CWGroup)') + + def 
test_unrelated_rql_constraints_creation_subject(self): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] + self.assertEqual( + rql, 'Any O,AA,AB,AC ORDERBY AA DESC WHERE ' + 'O is_instance_of Personne, O modification_date AA, O nom AB, O prenom AC') + + def test_unrelated_rql_constraints_creation_object(self): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] + self.assertEqual( + rql, 'Any S,AA,AB,AC ORDERBY AA DESC WHERE ' + 'S is Personne, S modification_date AA, S nom AB, S prenom AC, ' + 'NOT (S connait AD, AD nom "toto"), AD is Personne, ' + 'EXISTS(S travaille AE, AE nom "tutu")') + + def test_unrelated_rql_security_rel_perms(self): + '''check `connait` add permission has no effect for a new entity on the + unrelated rql''' + rdef = self.schema['Personne'].rdef('connait') + perm_rrqle = RRQLExpression('U has_update_permission S') + with self.temporary_permissions((rdef, {'add': (perm_rrqle,)})): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AA DESC WHERE ' + 'O is_instance_of Personne, O modification_date AA, O nom AB, ' + 'O prenom AC') + + def test_unrelated_rql_constraints_edition_subject(self): + with self.admin_access.web_request() as req: + person = req.create_entity('Personne', nom=u'sylvain') + rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] + self.assertEqual( + rql, 'Any O,AA,AB,AC ORDERBY AA DESC WHERE ' + 'NOT S connait O, S eid %(x)s, O is Personne, ' + 'O modification_date AA, O nom AB, O prenom AC, ' + 'NOT S identity O') + + def test_unrelated_rql_constraints_edition_object(self): + with self.admin_access.web_request() as req: + person = req.create_entity('Personne', nom=u'sylvain') + rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] + self.assertEqual( + rql, 'Any S,AA,AB,AC ORDERBY AA DESC WHERE ' + 'NOT S connait O, O eid %(x)s, S is Personne, ' + 'S modification_date AA, S nom AB, S prenom AC, ' + 'NOT S identity O, NOT (S connait AD, AD nom "toto"), ' + 'EXISTS(S travaille AE, AE nom "tutu")') + + def test_unrelated_rql_s_linkto_s(self): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + soc = req.create_entity('Societe', nom=u'logilab') + lt_infos = {('actionnaire', 'subject'): [soc.eid]} + rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', + lt_infos=lt_infos) + self.assertEqual(u'Any O ORDERBY O WHERE O is Personne, ' + u'EXISTS(AA eid %(SOC)s, O actionnaire AA)', rql) + self.assertEqual({'SOC': soc.eid}, args) + + def test_unrelated_rql_s_linkto_o(self): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Societe').fetch_attrs = () + soc = req.create_entity('Societe', nom=u'logilab') + lt_infos = {('contrat_exclusif', 'object'): [soc.eid]} + rql, args = person.cw_unrelated_rql('actionnaire', 'Societe', 'subject', + lt_infos=lt_infos) + self.assertEqual(u'Any O ORDERBY O WHERE NOT A actionnaire O, ' + u'O is_instance_of Societe, NOT EXISTS(O eid %(O)s), ' + u'A is Personne', 
rql) + self.assertEqual({'O': soc.eid}, args) + + def test_unrelated_rql_o_linkto_s(self): + with self.admin_access.web_request() as req: + soc = self.vreg['etypes'].etype_class('Societe')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + person = req.create_entity('Personne', nom=u'florent') + lt_infos = {('contrat_exclusif', 'subject'): [person.eid]} + rql, args = soc.cw_unrelated_rql('actionnaire', 'Personne', 'object', + lt_infos=lt_infos) + self.assertEqual(u'Any S ORDERBY S WHERE NOT S actionnaire A, ' + u'S is_instance_of Personne, NOT EXISTS(S eid %(S)s), ' + u'A is Societe', rql) + self.assertEqual({'S': person.eid}, args) + + def test_unrelated_rql_o_linkto_o(self): + with self.admin_access.web_request() as req: + soc = self.vreg['etypes'].etype_class('Societe')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + person = req.create_entity('Personne', nom=u'florent') + lt_infos = {('actionnaire', 'object'): [person.eid]} + rql, args = soc.cw_unrelated_rql('dirige', 'Personne', 'object', + lt_infos=lt_infos) + self.assertEqual(u'Any S ORDERBY S WHERE NOT S dirige A, ' + u'S is_instance_of Personne, EXISTS(S eid %(S)s), ' + u'A is Societe', rql) + self.assertEqual({'S': person.eid}, args) + + def test_unrelated_rql_s_linkto_s_no_info(self): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + soc = req.create_entity('Societe', nom=u'logilab') + rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject') + self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) + self.assertEqual({}, args) + + def test_unrelated_rql_s_linkto_s_unused_info(self): + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + other_p = req.create_entity('Personne', nom=u'titi') + lt_infos = {('dirige', 'subject'): [other_p.eid]} + rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', + lt_infos=lt_infos) + self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) + + def test_unrelated_base(self): + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + e = req.create_entity('Tag', name=u'x') + related = [r.eid for r in e.tags] + self.assertEqual(related, []) + unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] + self.assertIn(p.eid, unrelated) + req.execute('SET X tags Y WHERE X is Tag, Y is Personne') + e = req.execute('Any X WHERE X is Tag').get_entity(0, 0) + unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] + self.assertNotIn(p.eid, unrelated) + + def test_unrelated_limit(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Tag', name=u'x') + req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + req.create_entity('Personne', nom=u'thenault', prenom=u'sylvain') + self.assertEqual(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), + 1) + + def test_unrelated_security(self): + rperms = self.schema['EmailAddress'].permissions['read'] + clear_cache(self.schema['EmailAddress'], 'get_groups') + clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') + self.schema['EmailAddress'].permissions['read'] = ('managers', 'users', 'guests',) + try: + with self.admin_access.web_request() as req: + email = req.execute('INSERT EmailAddress X: X 
address "hop"').get_entity(0, 0) + rset = email.unrelated('use_email', 'CWUser', 'object') + self.assertEqual([x.login for x in rset.entities()], [u'admin', u'anon']) + user = req.user + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], [u'hop']) + self.create_user(req, 'toto') + with self.new_access('toto').web_request() as req: + email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) + rset = email.unrelated('use_email', 'CWUser', 'object') + self.assertEqual([x.login for x in rset.entities()], ['toto']) + user = req.user + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], ['hop']) + user = req.execute('Any X WHERE X login "admin"').get_entity(0, 0) + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], []) + with self.new_access('anon').web_request() as req: + email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) + rset = email.unrelated('use_email', 'CWUser', 'object') + self.assertEqual([x.login for x in rset.entities()], []) + user = req.user + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], []) + finally: + clear_cache(self.schema['EmailAddress'], 'get_groups') + clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') + self.schema['EmailAddress'].permissions['read'] = rperms + + def test_unrelated_new_entity(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + unrelated = [r[0] for r in e.unrelated('in_group', 'CWGroup', 'subject')] + # should be default groups but owners, i.e. managers, users, guests + self.assertEqual(len(unrelated), 3) + + def test_markdown_printable_value_string(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Card', title=u'rest markdown', + content=u'This is [an example](http://example.com/ "Title") inline link`', + content_format=u'text/markdown') + self.assertEqual( + u'

<p>This is <a href="http://example.com/" title="Title">an example</a> inline link`</p>
', + e.printable_value('content')) + + def test_printable_value_string(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Card', title=u'rest test', + content=u'du :eid:`1:*ReST*`', + content_format=u'text/rest') + self.assertEqual(e.printable_value('content'), + '

du *ReST*

') + e.cw_attr_cache['content'] = 'du html users' + e.cw_attr_cache['content_format'] = 'text/html' + self.assertEqual(e.printable_value('content'), + 'du html users') + e.cw_attr_cache['content'] = 'du *texte*' + e.cw_attr_cache['content_format'] = 'text/plain' + self.assertEqual(e.printable_value('content').replace("\n", ""), + '

<p>du *texte*</p>
') + e.cw_attr_cache['title'] = 'zou' + e.cw_attr_cache['content'] = '''\ +a title +======= +du :eid:`1:*ReST*`''' + e.cw_attr_cache['content_format'] = 'text/rest' + self.assertEqual(e.printable_value('content', format='text/plain'), + e.cw_attr_cache['content']) + + e.cw_attr_cache['content'] = u'yo (zou éà ;)' + e.cw_attr_cache['content_format'] = 'text/html' + self.assertEqual(e.printable_value('content', format='text/plain').strip(), + u'**yo (zou éà ;)**') + if HAS_TAL: + e.cw_attr_cache['content'] = '

<h1 tal:content="self/title">titre</h1>
' + e.cw_attr_cache['content_format'] = 'text/cubicweb-page-template' + self.assertEqual(e.printable_value('content'), + '<h1>zou</h1>
') + + + def test_printable_value_bytes(self): + with self.admin_access.web_request() as req: + e = req.create_entity('FakeFile', data=Binary(b'lambda x: 1'), data_format=u'text/x-python', + data_encoding=u'ascii', data_name=u'toto.py') + from cubicweb import mttransforms + if mttransforms.HAS_PYGMENTS_TRANSFORMS: + import pygments + if tuple(int(i) for i in pygments.__version__.split('.')[:2]) >= (1, 3): + self.assertEqual(e.printable_value('data'), + '''
lambda x: 1
+
''') + else: + self.assertEqual(e.printable_value('data'), + '''
lambda x: 1
+
''') + else: + self.assertEqual(e.printable_value('data'), + '''
+    lambda x: 1
+
''') + + e = req.create_entity('FakeFile', + data=Binary(u'*héhéhé*'.encode('utf-8')), + data_format=u'text/rest', + data_encoding=u'utf-8', data_name=u'toto.txt') + self.assertEqual(e.printable_value('data'), + u'

<p><em>héhéhé</em></p>
') + + def test_printable_value_bad_html(self): + """make sure we don't crash if we try to render invalid XHTML strings""" + with self.admin_access.web_request() as req: + e = req.create_entity('Card', title=u'bad html', content=u'
R&D
', + content_format=u'text/html') + tidy = lambda x: x.replace('\n', '') + self.assertEqual(tidy(e.printable_value('content')), + '
R&D
') + e.cw_attr_cache['content'] = u'yo !! R&D
pas fermé' + self.assertEqual(tidy(e.printable_value('content')), + u'yo !! R&D
pas fermé
') + e.cw_attr_cache['content'] = u'R&D' + self.assertEqual(tidy(e.printable_value('content')), u'R&D') + e.cw_attr_cache['content'] = u'R&D;' + self.assertEqual(tidy(e.printable_value('content')), u'R&D;') + e.cw_attr_cache['content'] = u'yo !! R&D
pas fermé' + self.assertEqual(tidy(e.printable_value('content')), + u'yo !! R&D
pas fermé
') + e.cw_attr_cache['content'] = u'été
été' + self.assertEqual(tidy(e.printable_value('content')), + u'été
été
') + e.cw_attr_cache['content'] = u'C'est un exemple sérieux' + self.assertEqual(tidy(e.printable_value('content')), + u"C'est un exemple sérieux") + # make sure valid xhtml is left untouched + e.cw_attr_cache['content'] = u'
R&D
' + self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content']) + e.cw_attr_cache['content'] = u'
été
' + self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content']) + e.cw_attr_cache['content'] = u'été' + self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content']) + e.cw_attr_cache['content'] = u'hop\r\nhop\nhip\rmomo' + self.assertEqual(e.printable_value('content'), u'hop\nhop\nhip\nmomo') + + def test_printable_value_bad_html_ms(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Card', title=u'bad html', content=u'
R&D
', + content_format=u'text/html') + tidy = lambda x: x.replace('\n', '') + e.cw_attr_cache['content'] = u'
ms orifice produces weird html
' + # Caution! current implementation of soup2xhtml strips first div element + content = soup2xhtml(e.printable_value('content'), 'utf-8') + self.assertMultiLineEqual(content, u'
ms orifice produces weird html
') + + def test_fulltextindex(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('FakeFile')(req) + e.cw_attr_cache['description'] = 'du html' + e.cw_attr_cache['description_format'] = 'text/html' + e.cw_attr_cache['data'] = Binary(b'some data') + e.cw_attr_cache['data_name'] = 'an html file' + e.cw_attr_cache['data_format'] = 'text/html' + e.cw_attr_cache['data_encoding'] = 'ascii' + e._cw.transaction_data.clear() + words = e.cw_adapt_to('IFTIndexable').get_words() + words['C'].sort() + self.assertEqual({'C': sorted(['an', 'html', 'file', 'du', 'html', 'some', 'data'])}, + words) + + + def test_nonregr_relation_cache(self): + with self.admin_access.web_request() as req: + p1 = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + p2 = req.create_entity('Personne', nom=u'toto') + req.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"') + self.assertEqual(p1.evaluee[0].nom, "toto") + self.assertFalse(p1.reverse_evaluee) + + def test_complete_relation(self): + with self.admin_access.repo_cnx() as cnx: + eid = cnx.execute( + 'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 ' + 'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0] + trinfo = cnx.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) + trinfo.complete() + self.assertIsInstance(trinfo.cw_attr_cache['creation_date'], datetime) + self.assertTrue(trinfo.cw_relation_cached('from_state', 'subject')) + self.assertTrue(trinfo.cw_relation_cached('to_state', 'subject')) + self.assertTrue(trinfo.cw_relation_cached('wf_info_for', 'subject')) + self.assertEqual(trinfo.by_transition, ()) + + def test_request_cache(self): + with self.admin_access.web_request() as req: + user = req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + state = user.in_state[0] + samestate = req.execute('State X WHERE X name "activated"').get_entity(0, 0) + self.assertIs(state, samestate) + + def test_rest_path(self): + with self.admin_access.web_request() as req: + note = req.create_entity('Note', type=u'z') + self.assertEqual(note.rest_path(), 'note/%s' % note.eid) + # unique attr + tag = req.create_entity('Tag', name=u'x') + self.assertEqual(tag.rest_path(), 'tag/x') + # test explicit rest_attr + person = req.create_entity('Personne', prenom=u'john', nom=u'doe') + self.assertEqual(person.rest_path(), 'personne/doe') + # ambiguity test + person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe') + person.cw_clear_all_caches() + self.assertEqual(person.rest_path(), text_type(person.eid)) + self.assertEqual(person2.rest_path(), text_type(person2.eid)) + # unique attr with None value (nom in this case) + friend = req.create_entity('Ami', prenom=u'bob') + self.assertEqual(friend.rest_path(), text_type(friend.eid)) + # 'ref' below is created without the unique but not required + # attribute, make sur that the unique _and_ required 'ean' is used + # as the rest attribute + ref = req.create_entity('Reference', ean=u'42-1337-42') + self.assertEqual(ref.rest_path(), 'reference/42-1337-42') + + def test_can_use_rest_path(self): + self.assertTrue(can_use_rest_path(u'zobi')) + # don't use rest if we have /, ? 
or & in the path (breaks mod_proxy) + self.assertFalse(can_use_rest_path(u'zo/bi')) + self.assertFalse(can_use_rest_path(u'zo&bi')) + self.assertFalse(can_use_rest_path(u'zo?bi')) + + def test_cw_set_attributes(self): + with self.admin_access.web_request() as req: + person = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + self.assertEqual(person.prenom, u'adrien') + self.assertEqual(person.nom, u'di mascio') + person.cw_set(prenom=u'sylvain', nom=u'thénault') + person = req.execute('Personne P').get_entity(0, 0) # XXX retreival needed ? + self.assertEqual(person.prenom, u'sylvain') + self.assertEqual(person.nom, u'thénault') + + def test_cw_set_relations(self): + with self.admin_access.web_request() as req: + person = req.create_entity('Personne', nom=u'chauvat', prenom=u'nicolas') + note = req.create_entity('Note', type=u'x') + note.cw_set(ecrit_par=person) + note = req.create_entity('Note', type=u'y') + note.cw_set(ecrit_par=person.eid) + self.assertEqual(len(person.reverse_ecrit_par), 2) + + def test_metainformation_and_external_absolute_url(self): + with self.admin_access.web_request() as req: + note = req.create_entity('Note', type=u'z') + metainf = note.cw_metainformation() + self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system', + 'use-cwuri-as-url': False}, + 'type': u'Note', 'extid': None}) + self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) + metainf['source'] = metainf['source'].copy() + metainf['source']['base-url'] = 'http://cubicweb2.com/' + metainf['extid'] = 1234 + self.assertEqual(note.absolute_url(), 'http://cubicweb2.com/note/1234') + + def test_absolute_url_empty_field(self): + with self.admin_access.web_request() as req: + card = req.create_entity('Card', wikiid=u'', title=u'test') + self.assertEqual(card.absolute_url(), + 'http://testing.fr/cubicweb/%s' % card.eid) + + def test_create_and_compare_entity(self): + access = self.admin_access + with access.web_request() as req: + p1 = req.create_entity('Personne', nom=u'fayolle', prenom=u'alexandre') + p2 = req.create_entity('Personne', nom=u'campeas', prenom=u'aurelien') + note = req.create_entity('Note', type=u'z') + p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien', + connait=p1, evaluee=[p1, p2], + reverse_ecrit_par=note) + self.assertEqual(p.nom, 'di mascio') + self.assertEqual([c.nom for c in p.connait], ['fayolle']) + self.assertEqual(sorted([c.nom for c in p.evaluee]), ['campeas', 'fayolle']) + self.assertEqual([c.type for c in p.reverse_ecrit_par], ['z']) + req.cnx.commit() + with access.web_request() as req: + auc = req.execute('Personne P WHERE P prenom "aurelien"').get_entity(0,0) + persons = set() + persons.add(p1) + persons.add(p2) + persons.add(auc) + self.assertEqual(2, len(persons)) + self.assertNotEqual(p1, p2) + self.assertEqual(p2, auc) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_mail.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_mail.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for module cubicweb.mail + +""" + +import os +import re +import sys + +from logilab.common.testlib import unittest_main +from logilab.common.umessage import message_from_string + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.mail import format_mail + + +def getlogin(): + """avoid using os.getlogin() because of strange tty / stdin problems + (man 3 getlogin) + Another solution would be to use $LOGNAME, $USER or $USERNAME + """ + if sys.platform != 'win32': + import pwd + return pwd.getpwuid(os.getuid())[0] + else: + return os.environ.get('USERNAME') + + +class EmailTC(CubicWebTC): + + def test_format_mail(self): + self.set_option('sender-addr', 'bim@boum.fr') + self.set_option('sender-name', 'BimBam') + + mail = format_mail({'name': 'oim', 'email': 'oim@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou', u'bïjour', + config=self.config) + result = mail.as_string() + result = re.sub('^Date: .*$', 'Date: now', result, flags=re.MULTILINE) + self.assertMultiLineEqual(result, """\ +MIME-Version: 1.0 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: base64 +Subject: =?utf-8?q?b=C3=AFjour?= +From: =?utf-8?q?oim?= +Reply-to: =?utf-8?q?oim?= , =?utf-8?q?BimBam?= +X-CW: data +To: test@logilab.fr +Date: now + +dW4gcGV0aXQgY8O2dWNvdQ== +""") + msg = message_from_string(mail.as_string()) + self.assertEqual(msg.get('subject'), u'bïjour') + self.assertEqual(msg.get('from'), u'oim ') + self.assertEqual(msg.get('to'), u'test@logilab.fr') + self.assertEqual(msg.get('reply-to'), u'oim , BimBam ') + self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou') + + + def test_format_mail_euro(self): + mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') + result = mail.as_string() + result = re.sub('^Date: .*$', 'Date: now', result, flags=re.MULTILINE) + self.assertMultiLineEqual(result, """\ +MIME-Version: 1.0 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: base64 +Subject: =?utf-8?b?YsOvam91ciDigqw=?= +From: =?utf-8?q?o=C3=AEm?= +Reply-to: =?utf-8?q?o=C3=AEm?= +To: test@logilab.fr +Date: now + +dW4gcGV0aXQgY8O2dWNvdSDigqw= +""") + msg = message_from_string(mail.as_string()) + self.assertEqual(msg.get('subject'), u'bïjour €') + self.assertEqual(msg.get('from'), u'oîm ') + self.assertEqual(msg.get('to'), u'test@logilab.fr') + self.assertEqual(msg.get('reply-to'), u'oîm ') + self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou €') + + + def test_format_mail_from_reply_to(self): + # no sender-name, sender-addr in the configuration + self.set_option('sender-name', '') + self.set_option('sender-addr', '') + msg = format_mail({'name': u'', 'email': u''}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + self.assertEqual(msg.get('from'), u'') + self.assertEqual(msg.get('reply-to'), None) + msg = 
format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEqual(msg.get('from'), u'tutu ') + self.assertEqual(msg.get('reply-to'), u'tutu ') + msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') + msg = message_from_string(msg.as_string()) + self.assertEqual(msg.get('from'), u'tutu ') + self.assertEqual(msg.get('reply-to'), u'tutu ') + # set sender name and address as expected + self.set_option('sender-name', 'cubicweb-test') + self.set_option('sender-addr', 'cubicweb-test@logilab.fr') + # anonymous notification: no name and no email specified + msg = format_mail({'name': u'', 'email': u''}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEqual(msg.get('from'), u'cubicweb-test ') + self.assertEqual(msg.get('reply-to'), u'cubicweb-test ') + # anonymous notification: only email specified + msg = format_mail({'email': u'tutu@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEqual(msg.get('from'), u'cubicweb-test ') + self.assertEqual(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') + # anonymous notification: only name specified + msg = format_mail({'name': u'tutu'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEqual(msg.get('from'), u'tutu ') + self.assertEqual(msg.get('reply-to'), u'tutu ') + + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_migration.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_migration.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,116 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""cubicweb.migration unit tests""" + +from os.path import abspath, dirname, join +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.devtools import TestServerConfiguration +from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.migration import MigrationHelper, filter_scripts, version_strictly_lower +from cubicweb.server.migractions import ServerMigrationHelper + + +class Schema(dict): + def has_entity(self, e_type): + return e_type in self + +SMIGRDIR = join(dirname(__file__), 'data', 'server_migration') + '/' +TMIGRDIR = join(dirname(__file__), 'data', 'migration') + '/' + +class MigrTestConfig(TestServerConfiguration): + verbosity = 0 + def migration_scripts_dir(cls): + return SMIGRDIR + + def cube_migration_scripts_dir(cls, cube): + return TMIGRDIR + +class MigrationToolsTC(TestCase): + def setUp(self): + self.config = MigrTestConfig('data') + from yams.schema import Schema + self.config.load_schema = lambda expand_cubes=False: Schema('test') + self.config.__class__.cubicweb_appobject_path = frozenset() + self.config.__class__.cube_appobject_path = frozenset() + + def test_filter_scripts_base(self): + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)), + []) + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)), + [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')]) + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)), + [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)), + [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'), + ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)), + []) + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)), + [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'), + ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')]) + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)), + [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) + + self.assertListEqual(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)), + [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')]) + self.assertListEqual(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)), + [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'), + ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')]) + + def test_filter_scripts_for_mode(self): + config = CubicWebConfiguration('data') + config.verbosity = 0 + config = self.config + config.__class__.name = 'repository' + self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) + config.__class__.name = 'all-in-one' + self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) + config.__class__.name = 'repository' + + def test_version_strictly_lower(self): + self.assertTrue(version_strictly_lower(None, '1.0.0')) + self.assertFalse(version_strictly_lower('1.0.0', None)) + + +from cubicweb.devtools import ApptestConfiguration, get_test_db_handler + +class BaseCreationTC(TestCase): + + def test_db_creation(self): + """make sure database can be created""" + config = ApptestConfiguration('data', apphome=self.datadir) + source = config.system_source_config + self.assertEqual(source['db-driver'], 'sqlite') + handler = get_test_db_handler(config) + handler.init_test_database() + 
handler.build_db_cache() + repo, cnx = handler.get_repo_and_cnx() + with cnx: + self.assertEqual(cnx.execute('Any SN WHERE X is CWUser, X login "admin", X in_state S, S name SN').rows, + [['activated']]) + repo.shutdown() + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_predicates.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_predicates.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,524 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for selectors mechanism""" + +from operator import eq, lt, le, gt +from contextlib import contextmanager + +from six.moves import range + +from logilab.common.testlib import TestCase, unittest_main +from logilab.common.decorators import clear_cache + +from cubicweb import Binary +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.predicates import (is_instance, adaptable, match_kwargs, match_user_groups, + multi_lines_rset, score_entity, is_in_state, + rql_condition, relation_possible, match_form_params, + paginated_rset) +from cubicweb.selectors import on_transition # XXX on_transition is deprecated +from cubicweb.view import EntityAdapter +from cubicweb.web import action + + + +class ImplementsTC(CubicWebTC): + def test_etype_priority(self): + with self.admin_access.web_request() as req: + f = req.create_entity('FakeFile', data_name=u'hop.txt', data=Binary(b'hop'), + data_format=u'text/plain') + rset = f.as_rset() + anyscore = is_instance('Any')(f.__class__, req, rset=rset) + idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset) + self.assertTrue(idownscore > anyscore, (idownscore, anyscore)) + filescore = is_instance('FakeFile')(f.__class__, req, rset=rset) + self.assertTrue(filescore > idownscore, (filescore, idownscore)) + + def test_etype_inheritance_no_yams_inheritance(self): + cls = self.vreg['etypes'].etype_class('Personne') + with self.admin_access.web_request() as req: + self.assertFalse(is_instance('Societe').score_class(cls, req)) + + def test_yams_inheritance(self): + cls = self.vreg['etypes'].etype_class('Transition') + with self.admin_access.web_request() as req: + self.assertEqual(is_instance('BaseTransition').score_class(cls, req), + 3) + + def test_outer_join(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any U,B WHERE B? 
bookmarked_by U, U login "anon"') + self.assertEqual(is_instance('Bookmark')(None, req, rset=rset, row=0, col=1), + 0) + + +class WorkflowSelectorTC(CubicWebTC): + + def setUp(self): + super(WorkflowSelectorTC, self).setUp() + # enable debug mode to state/transition validation on the fly + self.vreg.config.debugmode = True + + def tearDown(self): + self.vreg.config.debugmode = False + super(WorkflowSelectorTC, self).tearDown() + + def setup_database(self): + with self.admin_access.shell() as shell: + wf = shell.add_workflow("wf_test", 'StateFull', default=True) + created = wf.add_state('created', initial=True) + validated = wf.add_state('validated') + abandoned = wf.add_state('abandoned') + wf.add_transition('validate', created, validated, ('managers',)) + wf.add_transition('forsake', (created, validated,), abandoned, ('managers',)) + + @contextmanager + def statefull_stuff(self): + with self.admin_access.web_request() as req: + wf_entity = req.create_entity('StateFull', name=u'') + rset = wf_entity.as_rset() + adapter = wf_entity.cw_adapt_to('IWorkflowable') + req.cnx.commit() + self.assertEqual(adapter.state, 'created') + yield req, wf_entity, rset, adapter + + def test_is_in_state(self): + with self.statefull_stuff() as (req, wf_entity, rset, adapter): + for state in ('created', 'validated', 'abandoned'): + selector = is_in_state(state) + self.assertEqual(selector(None, req, rset=rset), + state=="created") + + adapter.fire_transition('validate') + req.cnx.commit(); wf_entity.cw_clear_all_caches() + self.assertEqual(adapter.state, 'validated') + + clear_cache(rset, 'get_entity') + + selector = is_in_state('created') + self.assertEqual(selector(None, req, rset=rset), 0) + selector = is_in_state('validated') + self.assertEqual(selector(None, req, rset=rset), 1) + selector = is_in_state('validated', 'abandoned') + self.assertEqual(selector(None, req, rset=rset), 1) + selector = is_in_state('abandoned') + self.assertEqual(selector(None, req, rset=rset), 0) + + adapter.fire_transition('forsake') + req.cnx.commit(); wf_entity.cw_clear_all_caches() + self.assertEqual(adapter.state, 'abandoned') + + clear_cache(rset, 'get_entity') + + selector = is_in_state('created') + self.assertEqual(selector(None, req, rset=rset), 0) + selector = is_in_state('validated') + self.assertEqual(selector(None, req, rset=rset), 0) + selector = is_in_state('validated', 'abandoned') + self.assertEqual(selector(None, req, rset=rset), 1) + self.assertEqual(adapter.state, 'abandoned') + self.assertEqual(selector(None, req, rset=rset), 1) + + def test_is_in_state_unvalid_names(self): + with self.statefull_stuff() as (req, wf_entity, rset, adapter): + selector = is_in_state("unknown") + with self.assertRaises(ValueError) as cm: + selector(None, req, rset=rset) + self.assertEqual(str(cm.exception), + "wf_test: unknown state(s): unknown") + selector = is_in_state("weird", "unknown", "created", "weird") + with self.assertRaises(ValueError) as cm: + selector(None, req, rset=rset) + self.assertEqual(str(cm.exception), + "wf_test: unknown state(s): unknown,weird") + + def test_on_transition(self): + with self.statefull_stuff() as (req, wf_entity, rset, adapter): + for transition in ('validate', 'forsake'): + selector = on_transition(transition) + self.assertEqual(selector(None, req, rset=rset), 0) + + adapter.fire_transition('validate') + req.cnx.commit(); wf_entity.cw_clear_all_caches() + self.assertEqual(adapter.state, 'validated') + + clear_cache(rset, 'get_entity') + + selector = on_transition("validate") + 
self.assertEqual(selector(None, req, rset=rset), 1) + selector = on_transition("validate", "forsake") + self.assertEqual(selector(None, req, rset=rset), 1) + selector = on_transition("forsake") + self.assertEqual(selector(None, req, rset=rset), 0) + + adapter.fire_transition('forsake') + req.cnx.commit(); wf_entity.cw_clear_all_caches() + self.assertEqual(adapter.state, 'abandoned') + + clear_cache(rset, 'get_entity') + + selector = on_transition("validate") + self.assertEqual(selector(None, req, rset=rset), 0) + selector = on_transition("validate", "forsake") + self.assertEqual(selector(None, req, rset=rset), 1) + selector = on_transition("forsake") + self.assertEqual(selector(None, req, rset=rset), 1) + + def test_on_transition_unvalid_names(self): + with self.statefull_stuff() as (req, wf_entity, rset, adapter): + selector = on_transition("unknown") + with self.assertRaises(ValueError) as cm: + selector(None, req, rset=rset) + self.assertEqual(str(cm.exception), + "wf_test: unknown transition(s): unknown") + selector = on_transition("weird", "unknown", "validate", "weird") + with self.assertRaises(ValueError) as cm: + selector(None, req, rset=rset) + self.assertEqual(str(cm.exception), + "wf_test: unknown transition(s): unknown,weird") + + def test_on_transition_with_no_effect(self): + """selector will not be triggered with `change_state()`""" + with self.statefull_stuff() as (req, wf_entity, rset, adapter): + adapter.change_state('validated') + req.cnx.commit(); wf_entity.cw_clear_all_caches() + self.assertEqual(adapter.state, 'validated') + + selector = on_transition("validate") + self.assertEqual(selector(None, req, rset=rset), 0) + selector = on_transition("validate", "forsake") + self.assertEqual(selector(None, req, rset=rset), 0) + selector = on_transition("forsake") + self.assertEqual(selector(None, req, rset=rset), 0) + + +class RelationPossibleTC(CubicWebTC): + + def test_rqlst_1(self): + with self.admin_access.web_request() as req: + selector = relation_possible('in_group') + select = self.vreg.parse(req, 'Any X WHERE X is CWUser').children[0] + score = selector(None, req, rset=1, + select=select, filtered_variable=select.defined_vars['X']) + self.assertEqual(score, 1) + + def test_rqlst_2(self): + with self.admin_access.web_request() as req: + selector = relation_possible('in_group') + select = self.vreg.parse(req, 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, ' + 'Y creation_date YD, Y is CWGroup ' + 'HAVING DAY(XD)=DAY(YD)').children[0] + score = selector(None, req, rset=1, + select=select, filtered_variable=select.defined_vars['X']) + self.assertEqual(score, 1) + + def test_ambiguous(self): + # Ambiguous relations are : + # (Service, fabrique_par, Personne) and (Produit, fabrique_par, Usine) + # There used to be a crash here with a bad rdef choice in the strict + # checking case. 
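+        # Reading of this test (not an API statement): an Usine can only be the
+        # object of (Produit, fabrique_par, Usine); no fabrique_par rdef links
+        # Personne and Usine, so the predicate below must score 0 on the rset.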
+ selector = relation_possible('fabrique_par', role='object', + target_etype='Personne', strict=True) + with self.admin_access.web_request() as req: + usine = req.create_entity('Usine', lieu=u'here') + score = selector(None, req, rset=usine.as_rset()) + self.assertEqual(0, score) + + +class MatchUserGroupsTC(CubicWebTC): + def test_owners_group(self): + """tests usage of 'owners' group with match_user_group""" + class SomeAction(action.Action): + __regid__ = 'yo' + category = 'foo' + __select__ = match_user_groups('owners') + self.vreg._loadedmods[__name__] = {} + self.vreg.register(SomeAction) + SomeAction.__registered__(self.vreg['actions']) + self.assertTrue(SomeAction in self.vreg['actions']['yo'], self.vreg['actions']) + try: + with self.admin_access.web_request() as req: + self.create_user(req, 'john') + # login as a simple user + john_access = self.new_access('john') + with john_access.web_request() as req: + # it should not be possible to use SomeAction not owned objects + rset = req.execute('Any G WHERE G is CWGroup, G name "managers"') + self.assertFalse('yo' in dict(self.pactions(req, rset))) + # insert a new card, and check that we can use SomeAction on our object + req.execute('INSERT Card C: C title "zoubidou"') + req.cnx.commit() + with john_access.web_request() as req: + rset = req.execute('Card C WHERE C title "zoubidou"') + self.assertTrue('yo' in dict(self.pactions(req, rset)), self.pactions(req, rset)) + # make sure even managers can't use the action + with self.admin_access.web_request() as req: + rset = req.execute('Card C WHERE C title "zoubidou"') + self.assertFalse('yo' in dict(self.pactions(req, rset))) + finally: + del self.vreg[SomeAction.__registry__][SomeAction.__regid__] + + +class MultiLinesRsetTC(CubicWebTC): + def setup_database(self): + with self.admin_access.web_request() as req: + req.execute('INSERT CWGroup G: G name "group1"') + req.execute('INSERT CWGroup G: G name "group2"') + req.cnx.commit() + + def test_default_op_in_selector(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any G WHERE G is CWGroup') + expected = len(rset) + selector = multi_lines_rset(expected) + self.assertEqual(selector(None, req, rset=rset), 1) + self.assertEqual(selector(None, req, None), 0) + selector = multi_lines_rset(expected + 1) + self.assertEqual(selector(None, req, rset=rset), 0) + self.assertEqual(selector(None, req, None), 0) + selector = multi_lines_rset(expected - 1) + self.assertEqual(selector(None, req, rset=rset), 0) + self.assertEqual(selector(None, req, None), 0) + + def test_without_rset(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any G WHERE G is CWGroup') + expected = len(rset) + selector = multi_lines_rset(expected) + self.assertEqual(selector(None, req, None), 0) + selector = multi_lines_rset(expected + 1) + self.assertEqual(selector(None, req, None), 0) + selector = multi_lines_rset(expected - 1) + self.assertEqual(selector(None, req, None), 0) + + def test_with_operators(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any G WHERE G is CWGroup') + expected = len(rset) + + # Format 'expected', 'operator', 'assert' + testdata = (( expected, eq, 1), + ( expected+1, eq, 0), + ( expected-1, eq, 0), + ( expected, le, 1), + ( expected+1, le, 1), + ( expected-1, le, 0), + ( expected-1, gt, 1), + ( expected, gt, 0), + ( expected+1, gt, 0), + ( expected+1, lt, 1), + ( expected, lt, 0), + ( expected-1, lt, 0)) + + for (expected, operator, assertion) in testdata: + selector = 
multi_lines_rset(expected, operator) + yield self.assertEqual, selector(None, req, rset=rset), assertion + + +class MatchKwargsTC(TestCase): + + def test_match_kwargs_default(self): + selector = match_kwargs( set( ('a', 'b') ) ) + self.assertEqual(selector(None, None, a=1, b=2), 2) + self.assertEqual(selector(None, None, a=1), 0) + self.assertEqual(selector(None, None, c=1), 0) + self.assertEqual(selector(None, None, a=1, c=1), 0) + + def test_match_kwargs_any(self): + selector = match_kwargs( set( ('a', 'b') ), mode='any') + self.assertEqual(selector(None, None, a=1, b=2), 2) + self.assertEqual(selector(None, None, a=1), 1) + self.assertEqual(selector(None, None, c=1), 0) + self.assertEqual(selector(None, None, a=1, c=1), 1) + + +class ScoreEntityTC(CubicWebTC): + + def test_intscore_entity_selector(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any E WHERE E eid 1') + selector = score_entity(lambda x: None) + self.assertEqual(selector(None, req, rset=rset), 0) + selector = score_entity(lambda x: "something") + self.assertEqual(selector(None, req, rset=rset), 1) + selector = score_entity(lambda x: object) + self.assertEqual(selector(None, req, rset=rset), 1) + rset = req.execute('Any G LIMIT 2 WHERE G is CWGroup') + selector = score_entity(lambda x: 10) + self.assertEqual(selector(None, req, rset=rset), 20) + selector = score_entity(lambda x: 10, mode='any') + self.assertEqual(selector(None, req, rset=rset), 10) + + def test_rql_condition_entity(self): + with self.admin_access.web_request() as req: + selector = rql_condition('X identity U') + rset = req.user.as_rset() + self.assertEqual(selector(None, req, rset=rset), 1) + self.assertEqual(selector(None, req, entity=req.user), 1) + self.assertEqual(selector(None, req), 0) + + def test_rql_condition_user(self): + with self.admin_access.web_request() as req: + selector = rql_condition('U login "admin"', user_condition=True) + self.assertEqual(selector(None, req), 1) + selector = rql_condition('U login "toto"', user_condition=True) + self.assertEqual(selector(None, req), 0) + + +class AdaptablePredicateTC(CubicWebTC): + + def test_multiple_entity_types_rset(self): + class CWUserIWhatever(EntityAdapter): + __regid__ = 'IWhatever' + __select__ = is_instance('CWUser') + class CWGroupIWhatever(EntityAdapter): + __regid__ = 'IWhatever' + __select__ = is_instance('CWGroup') + with self.temporary_appobjects(CWUserIWhatever, CWGroupIWhatever): + with self.admin_access.web_request() as req: + selector = adaptable('IWhatever') + rset = req.execute('Any X WHERE X is IN(CWGroup, CWUser)') + self.assertTrue(selector(None, req, rset=rset)) + + +class MatchFormParamsTC(CubicWebTC): + """tests for match_form_params predicate""" + + def test_keyonly_match(self): + """test standard usage: ``match_form_params('param1', 'param2')`` + + ``param1`` and ``param2`` must be specified in request's form. 
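+
+        Hypothetical usage sketch (illustration only, not part of this test):
+        such a predicate is normally combined into an appobject's selector,
+        e.g.::
+
+            class MyView(View):  # hypothetical view class
+                __regid__ = 'myview'
+                __select__ = match_form_params('vid', 'subvid')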
+ """ + web_request = self.admin_access.web_request + vid_selector = match_form_params('vid') + vid_subvid_selector = match_form_params('vid', 'subvid') + # no parameter => KO,KO + with web_request() as req: + self.assertEqual(vid_selector(None, req), 0) + self.assertEqual(vid_subvid_selector(None, req), 0) + # one expected parameter found => OK,KO + with web_request(vid='foo') as req: + self.assertEqual(vid_selector(None, req), 1) + self.assertEqual(vid_subvid_selector(None, req), 0) + # all expected parameters found => OK,OK + with web_request(vid='foo', subvid='bar') as req: + self.assertEqual(vid_selector(None, req), 1) + self.assertEqual(vid_subvid_selector(None, req), 2) + + def test_keyvalue_match_one_parameter(self): + """test dict usage: ``match_form_params(param1=value1)`` + + ``param1`` must be specified in the request's form and its value + must be ``value1``. + """ + web_request = self.admin_access.web_request + # test both positional and named parameters + vid_selector = match_form_params(vid='foo') + # no parameter => should fail + with web_request() as req: + self.assertEqual(vid_selector(None, req), 0) + # expected parameter found with expected value => OK + with web_request(vid='foo', subvid='bar') as req: + self.assertEqual(vid_selector(None, req), 1) + # expected parameter found but value is incorrect => KO + with web_request(vid='bar') as req: + self.assertEqual(vid_selector(None, req), 0) + + def test_keyvalue_match_two_parameters(self): + """test dict usage: ``match_form_params(param1=value1, param2=value2)`` + + ``param1`` and ``param2`` must be specified in the request's form and + their respective value must be ``value1`` and ``value2``. + """ + web_request = self.admin_access.web_request + vid_subvid_selector = match_form_params(vid='list', subvid='tsearch') + # missing one expected parameter => KO + with web_request(vid='list') as req: + self.assertEqual(vid_subvid_selector(None, req), 0) + # expected parameters found but values are incorrect => KO + with web_request(vid='list', subvid='foo') as req: + self.assertEqual(vid_subvid_selector(None, req), 0) + # expected parameters found and values are correct => OK + with web_request(vid='list', subvid='tsearch') as req: + self.assertEqual(vid_subvid_selector(None, req), 2) + + def test_keyvalue_multiple_match(self): + """test dict usage with multiple values + + i.e. as in ``match_form_params(param1=('value1', 'value2'))`` + + ``param1`` must be specified in the request's form and its value + must be either ``value1`` or ``value2``. 
+ """ + web_request = self.admin_access.web_request + vid_subvid_selector = match_form_params(vid='list', subvid=('tsearch', 'listitem')) + # expected parameters found and values correct => OK + with web_request(vid='list', subvid='tsearch') as req: + self.assertEqual(vid_subvid_selector(None, req), 2) + with web_request(vid='list', subvid='listitem') as req: + self.assertEqual(vid_subvid_selector(None, req), 2) + # expected parameters found but values are incorrect => OK + with web_request(vid='list', subvid='foo') as req: + self.assertEqual(vid_subvid_selector(None, req), 0) + + def test_invalid_calls(self): + """checks invalid calls raise a ValueError""" + # mixing named and positional arguments should fail + with self.assertRaises(ValueError) as cm: + match_form_params('list', x='1', y='2') + self.assertEqual(str(cm.exception), + "match_form_params() can't be called with both " + "positional and named arguments") + # using a dict as first and unique argument should fail + with self.assertRaises(ValueError) as cm: + match_form_params({'x': 1}) + self.assertEqual(str(cm.exception), + "match_form_params() positional arguments must be strings") + + +class PaginatedTC(CubicWebTC): + """tests for paginated_rset predicate""" + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + for i in range(30): + cnx.create_entity('CWGroup', name=u"group%d" % i) + cnx.commit() + + def test_paginated_rset(self): + default_nb_pages = 1 + web_request = self.admin_access.web_request + with web_request() as req: + rset = req.execute('Any G WHERE G is CWGroup') + self.assertEqual(len(rset), 34) + with web_request(vid='list', page_size='10') as req: + self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages) + with web_request(vid='list', page_size='20') as req: + self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages) + with web_request(vid='list', page_size='50') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + with web_request(vid='list', page_size='10/') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + with web_request(vid='list', page_size='.1') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + with web_request(vid='list', page_size='not_an_int') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_repoapi.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_repoapi.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,89 @@ +# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unittest for cubicweb.repoapi""" + + +from cubicweb.devtools.testlib import CubicWebTC + +from cubicweb import ProgrammingError +from cubicweb.repoapi import Connection, connect, anonymous_cnx + + +class REPOAPITC(CubicWebTC): + + def test_cnx_basic_usage(self): + """Test that a client connection can be used to access the database""" + with self.admin_access.client_cnx() as cltcnx: + # (1) some RQL request + rset = cltcnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + # (2) ORM usage + random_user = rset.get_entity(0, 0) + # (3) Write operation + random_user.cw_set(surname=u'babar') + # (4) commit + cltcnx.commit() + rset = cltcnx.execute('''Any X WHERE X is CWUser, + X surname "babar" + ''') + self.assertTrue(rset) + # prepare test for implicit rollback + random_user = rset.get_entity(0, 0) + random_user.cw_set(surname=u'celestine') + # implicit rollback on exit + with self.admin_access.client_cnx() as cltcnx: + rset = cltcnx.execute('''Any X WHERE X is CWUser, + X surname "babar" + ''') + self.assertTrue(rset) + + def test_cnx_life_cycle(self): + """Check that ClientConnection requires explicit open and close + """ + access = self.admin_access + cltcnx = Connection(access._session) + # connection not open yet + with self.assertRaises(ProgrammingError): + cltcnx.execute('Any X WHERE X is CWUser') + # connection open and working + with cltcnx: + cltcnx.execute('Any X WHERE X is CWUser') + # connection closed + with self.assertRaises(ProgrammingError): + cltcnx.execute('Any X WHERE X is CWUser') + + def test_connect(self): + """check that repoapi.connect works and returns a usable connection""" + cnx = connect(self.repo, login='admin', password='gingkow') + self.assertEqual('admin', cnx.user.login) + with cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_anonymous_connect(self): + """check that you can get anonymous connection when the data exist""" + cnx = anonymous_cnx(self.repo) + self.assertEqual('anon', cnx.user.login) + with cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_req.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_req.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,153 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from logilab.common.testlib import TestCase, unittest_main +from cubicweb import ObjectNotFound +from cubicweb.req import RequestSessionBase, FindEntityError +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb import Unauthorized + +class RequestTC(TestCase): + def test_rebuild_url(self): + rebuild_url = RequestSessionBase(None).rebuild_url + self.assertEqual(rebuild_url('http://logilab.fr?__message=pouet', __message='hop'), + 'http://logilab.fr?__message=hop') + self.assertEqual(rebuild_url('http://logilab.fr', __message='hop'), + 'http://logilab.fr?__message=hop') + self.assertEqual(rebuild_url('http://logilab.fr?vid=index', __message='hop'), + 'http://logilab.fr?__message=hop&vid=index') + + def test_build_url(self): + req = RequestSessionBase(None) + req.from_controller = lambda : 'view' + req.relative_path = lambda includeparams=True: None + req.base_url = lambda secure=None: 'http://testing.fr/cubicweb/' + self.assertEqual(req.build_url(), u'http://testing.fr/cubicweb/view') + self.assertEqual(req.build_url(None), u'http://testing.fr/cubicweb/view') + self.assertEqual(req.build_url('one'), u'http://testing.fr/cubicweb/one') + self.assertEqual(req.build_url(param='ok'), u'http://testing.fr/cubicweb/view?param=ok') + self.assertRaises(AssertionError, req.build_url, 'one', 'two not allowed') + self.assertRaises(AssertionError, req.build_url, 'view', test=None) + + def test_ensure_no_rql(self): + req = RequestSessionBase(None) + self.assertEqual(req.ensure_ro_rql('Any X WHERE X is CWUser'), None) + self.assertEqual(req.ensure_ro_rql(' Any X WHERE X is CWUser '), None) + self.assertRaises(Unauthorized, req.ensure_ro_rql, 'SET X login "toto" WHERE X is CWUser') + self.assertRaises(Unauthorized, req.ensure_ro_rql, ' SET X login "toto" WHERE X is CWUser ') + + +class RequestCWTC(CubicWebTC): + + def test_base_url(self): + base_url = self.config['base-url'] + with self.admin_access.repo_cnx() as session: + self.assertEqual(session.base_url(), base_url) + assert 'https-url' not in self.config + self.assertEqual(session.base_url(secure=True), base_url) + secure_base_url = base_url.replace('http', 'https') + self.config.global_set_option('https-url', secure_base_url) + self.assertEqual(session.base_url(secure=True), secure_base_url) + + def test_view_catch_ex(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X WHERE X login "hop"') + self.assertEqual(req.view('oneline', rset, 'null'), '') + self.assertRaises(ObjectNotFound, req.view, 'onelinee', rset, 'null') + + def test_find_one_entity(self): + with self.admin_access.web_request() as req: + req.create_entity( + 'CWUser', login=u'cdevienne', upassword=u'cdevienne', + surname=u'de Vienne', firstname=u'Christophe', + in_group=req.find('CWGroup', name=u'users').one()) + + req.create_entity( + 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', + firstname=u'adrien', + in_group=req.find('CWGroup', name=u'users').one()) + + u = req.find_one_entity('CWUser', login=u'cdevienne') + self.assertEqual(u.firstname, u"Christophe") + + with self.assertRaises(FindEntityError): + req.find_one_entity('CWUser', login=u'patanok') + + with self.assertRaises(FindEntityError): + req.find_one_entity('CWUser') + + def test_find_entities(self): + with self.admin_access.web_request() as req: + req.create_entity( + 'CWUser', login=u'cdevienne', upassword=u'cdevienne', + surname=u'de Vienne', firstname=u'Christophe', + in_group=req.find('CWGroup', name=u'users').one()) + + req.create_entity( + 'CWUser', 
login=u'adim', upassword='adim', surname=u'di mascio', + firstname=u'adrien', + in_group=req.find('CWGroup', name=u'users').one()) + + l = list(req.find_entities('CWUser', login=u'cdevienne')) + self.assertEqual(1, len(l)) + self.assertEqual(l[0].firstname, u"Christophe") + + l = list(req.find_entities('CWUser', login=u'patanok')) + self.assertEqual(0, len(l)) + + l = list(req.find_entities('CWUser')) + self.assertEqual(4, len(l)) + + def test_find(self): + with self.admin_access.web_request() as req: + req.create_entity( + 'CWUser', login=u'cdevienne', upassword=u'cdevienne', + surname=u'de Vienne', firstname=u'Christophe', + in_group=req.find('CWGroup', name=u'users').one()) + + req.create_entity( + 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', + firstname=u'adrien', + in_group=req.find('CWGroup', name=u'users').one()) + + u = req.find('CWUser', login=u'cdevienne').one() + self.assertEqual(u.firstname, u"Christophe") + + users = list(req.find('CWUser').entities()) + self.assertEqual(len(users), 4) + + groups = list( + req.find('CWGroup', reverse_in_group=u).entities()) + self.assertEqual(len(groups), 1) + self.assertEqual(groups[0].name, u'users') + + users = req.find('CWUser', in_group=groups[0]).entities() + users = list(users) + self.assertEqual(len(users), 2) + + with self.assertRaises(AssertionError): + req.find('CWUser', chapeau=u"melon") + + with self.assertRaises(AssertionError): + req.find('CWUser', reverse_buddy=users[0]) + + with self.assertRaises(NotImplementedError): + req.find('CWUser', in_group=[1, 2]) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_rqlrewrite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_rqlrewrite.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,816 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
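unittest_rqlrewrite.py, added below, checks how security snippets (ERQL/RRQL expressions) are injected into parsed RQL trees. Its tests all follow the same pattern, built on the module-local rewrite() helper defined at the top of the file; the constraint string in this sketch is illustrative only:

    rqlst = parse(u'Card C')
    rewrite(rqlst, {('C', 'X'): ('X owned_by U',)}, {})
    print(rqlst.as_string())   # original query with an EXISTS(...) clause added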
+ +from six import string_types + +from logilab.common.testlib import unittest_main, TestCase +from logilab.common.testlib import mock_object +from yams import BadSchemaDefinition +from yams.buildobjs import RelationDefinition +from rql import parse, nodes, RQLHelper + +from cubicweb import Unauthorized, rqlrewrite +from cubicweb.schema import RRQLExpression, ERQLExpression +from cubicweb.devtools import repotest, TestServerConfiguration, BaseApptestConfiguration + + +def setUpModule(*args): + global rqlhelper, schema + config = TestServerConfiguration(RQLRewriteTC.datapath('rewrite')) + config.bootstrap_cubes() + schema = config.load_schema() + schema.add_relation_def(RelationDefinition(subject='Card', name='in_state', + object='State', cardinality='1*')) + rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid', + 'has_text': 'fti'}) + repotest.do_monkey_patch() + +def tearDownModule(*args): + repotest.undo_monkey_patch() + global rqlhelper, schema + del rqlhelper, schema + +def eid_func_map(eid): + return {1: 'CWUser', + 2: 'Card', + 3: 'Affaire'}[eid] + +def _prepare_rewriter(rewriter_cls, kwargs): + class FakeVReg: + schema = schema + @staticmethod + def solutions(sqlcursor, rqlst, kwargs): + rqlhelper.compute_solutions(rqlst, {'eid': eid_func_map}, kwargs=kwargs) + class rqlhelper: + @staticmethod + def annotate(rqlst): + rqlhelper.annotate(rqlst) + @staticmethod + def simplify(mainrqlst, needcopy=False): + rqlhelper.simplify(rqlst, needcopy) + return rewriter_cls(mock_object(vreg=FakeVReg, user=(mock_object(eid=1)))) + +def rewrite(rqlst, snippets_map, kwargs, existingvars=None): + rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs) + snippets = [] + for v, exprs in sorted(snippets_map.items()): + rqlexprs = [isinstance(snippet, string_types) + and mock_object(snippet_rqlst=parse(u'Any X WHERE '+snippet).children[0], + expression=u'Any X WHERE '+snippet) + or snippet + for snippet in exprs] + snippets.append((dict([v]), rqlexprs)) + rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, kwargs=kwargs) + rewriter.rewrite(rqlst.children[0], snippets, kwargs, existingvars) + test_vrefs(rqlst.children[0]) + return rewriter.rewritten + +def test_vrefs(node): + vrefmaps = {} + selects = [] + for vref in node.iget_nodes(nodes.VariableRef): + stmt = vref.stmt + try: + vrefmaps[stmt].setdefault(vref.name, set()).add(vref) + except KeyError: + vrefmaps[stmt] = {vref.name: set( (vref,) )} + selects.append(stmt) + assert node in selects, (node, selects) + for stmt in selects: + for var in stmt.defined_vars.values(): + assert var.stinfo['references'] + vrefmap = vrefmaps[stmt] + assert not (var.stinfo['references'] ^ vrefmap[var.name]), (node.as_string(), var, var.stinfo['references'], vrefmap[var.name]) + + +class RQLRewriteTC(TestCase): + """a faire: + + * optimisation: detecter les relations utilisees dans les rqlexpressions qui + sont presentes dans la requete de depart pour les reutiliser si possible + + * "has__permission" ? 
+ """ + + def test_base_var(self): + constraint = ('X in_state S, U in_group G, P require_state S,' + 'P name "read", P require_group G') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) + self.assertEqual(rqlst.as_string(), + u'Any C WHERE C is Card, B eid %(D)s, ' + 'EXISTS(C in_state A, B in_group E, F require_state A, ' + 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission)') + + def test_multiple_var(self): + card_constraint = ('X in_state S, U in_group G, P require_state S,' + 'P name "read", P require_group G') + affaire_constraints = ('X ref LIKE "PUBLIC%"', 'U in_group G, G name "public"') + kwargs = {'u':2} + rqlst = parse(u'Any S WHERE S documented_by C, C eid %(u)s') + rewrite(rqlst, {('C', 'X'): (card_constraint,), ('S', 'X'): affaire_constraints}, + kwargs) + self.assertMultiLineEqual( + rqlst.as_string(), + u'Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, ' + 'EXISTS(C in_state A, B in_group E, F require_state A, ' + 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission), ' + '(EXISTS(S ref LIKE "PUBLIC%")) OR (EXISTS(B in_group G, G name "public", G is CWGroup)), ' + 'S is Affaire') + self.assertIn('D', kwargs) + + def test_or(self): + constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")' + rqlst = parse(u'Any S WHERE S owned_by C, C eid %(u)s, S is in (CWUser, CWGroup)') + rewrite(rqlst, {('C', 'X'): (constraint,)}, {'u':1}) + self.assertEqual(rqlst.as_string(), + 'Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, ' + 'EXISTS((C identity A) OR (C in_state D, E identity A, ' + 'E in_state D, D name "subscribed"), D is State, E is CWUser)') + + def test_simplified_rqlst(self): + constraint = ('X in_state S, U in_group G, P require_state S,' + 'P name "read", P require_group G') + rqlst = parse(u'Any 2') # this is the simplified rql st for Any X WHERE X eid 12 + rewrite(rqlst, {('2', 'X'): (constraint,)}, {}) + self.assertEqual(rqlst.as_string(), + u'Any 2 WHERE B eid %(C)s, ' + 'EXISTS(2 in_state A, B in_group D, E require_state A, ' + 'E name "read", E require_group D, A is State, D is CWGroup, E is CWPermission)') + + def test_optional_var_1(self): + constraint = ('X in_state S, U in_group G, P require_state S,' + 'P name "read", P require_group G') + rqlst = parse(u'Any A,C WHERE A documented_by C?') + rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) + self.assertEqual(rqlst.as_string(), + u'Any A,C WHERE A documented_by C?, A is Affaire ' + 'WITH C BEING ' + '(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name "read", ' + 'G require_group F), D eid %(A)s, C is Card)') + + def test_optional_var_2(self): + constraint = ('X in_state S, U in_group G, P require_state S,' + 'P name "read", P require_group G') + rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T') + rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) + self.assertEqual(rqlst.as_string(), + u'Any A,C,T WHERE A documented_by C?, A is Affaire ' + 'WITH C,T BEING ' + '(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, ' + 'G require_state B, G name "read", G require_group F), ' + 'D eid %(A)s, C is Card)') + + def test_optional_var_3(self): + constraint1 = ('X in_state S, U in_group G, P require_state S,' + 'P name "read", P require_group G') + constraint2 = 'X in_state S, S name "public"' + rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T') + rewrite(rqlst, {('C', 'X'): (constraint1, constraint2)}, 
{}) + self.assertEqual(rqlst.as_string(), + u'Any A,C,T WHERE A documented_by C?, A is Affaire ' + 'WITH C,T BEING (Any C,T WHERE C title T, ' + '(EXISTS(C in_state B, D in_group F, G require_state B, G name "read", G require_group F)) ' + 'OR (EXISTS(C in_state E, E name "public")), ' + 'D eid %(A)s, C is Card)') + + def test_optional_var_4(self): + constraint1 = 'A created_by U, X documented_by A' + constraint2 = 'A created_by U, X concerne A' + constraint3 = 'X created_by U' + rqlst = parse(u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y') + rewrite(rqlst, {('LA', 'X'): (constraint1, constraint2), + ('X', 'X'): (constraint3,), + ('Y', 'X'): (constraint3,)}, {}) + self.assertEqual(rqlst.as_string(), + u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y, B eid %(C)s, ' + 'EXISTS(X created_by B), EXISTS(Y created_by B), ' + 'X is Card, Y is IN(Division, Note, Societe) ' + 'WITH LA BEING (Any LA WHERE (EXISTS(A created_by B, LA documented_by A)) OR (EXISTS(E created_by B, LA concerne E)), ' + 'B eid %(D)s, LA is Affaire)') + + + def test_ambiguous_optional_same_exprs(self): + """See #3013535""" + # see test of the same name in RewriteFullTC: original problem is + # unreproducible here because it actually lies in + # RQLRewriter.insert_local_checks + rqlst = parse(u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD') + rewrite(rqlst, {('X', 'X'): ('X created_by U',),}, {'a': 3}) + self.assertEqual(rqlst.as_string(), + u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s WITH X,CD BEING (Any X,CD WHERE X creation_date CD, EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))') + + def test_optional_var_inlined(self): + c1 = ('X require_permission P') + c2 = ('X inlined_card O, O require_permission P') + rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R') + rewrite(rqlst, {('C', 'X'): (c1,), + ('A', 'X'): (c2,), + }, {}) + # XXX suboptimal + self.assertEqual(rqlst.as_string(), + "Any C,A,R WITH A,C,R BEING " + "(Any A,C,R WHERE A? inlined_card C, A ref R, " + "(A is NULL) OR (EXISTS(A inlined_card B, B require_permission D, " + "B is Card, D is CWPermission)), " + "A is Affaire, C is Card, EXISTS(C require_permission E, E is CWPermission))") + + # def test_optional_var_inlined_has_perm(self): + # c1 = ('X require_permission P') + # c2 = ('X inlined_card O, U has_read_permission O') + # rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R') + # rewrite(rqlst, {('C', 'X'): (c1,), + # ('A', 'X'): (c2,), + # }, {}) + # self.assertEqual(rqlst.as_string(), + # "") + + def test_optional_var_inlined_imbricated_error(self): + c1 = ('X require_permission P') + c2 = ('X inlined_card O, O require_permission P') + rqlst = parse(u'Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? 
inlined_card C, A2 ref R2') + self.assertRaises(BadSchemaDefinition, + rewrite, rqlst, {('C', 'X'): (c1,), + ('A', 'X'): (c2,), + ('A2', 'X'): (c2,), + }, {}) + + def test_optional_var_inlined_linked(self): + c1 = ('X require_permission P') + c2 = ('X inlined_card O, O require_permission P') + rqlst = parse(u'Any A,W WHERE A inlined_card C?, C inlined_note N, ' + 'N inlined_affaire W') + rewrite(rqlst, {('C', 'X'): (c1,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any A,W WHERE A inlined_card C?, A is Affaire ' + 'WITH C,N,W BEING (Any C,N,W WHERE C inlined_note N, ' + 'N inlined_affaire W, EXISTS(C require_permission B), ' + 'C is Card, N is Note, W is Affaire)') + + def test_relation_optimization_1_lhs(self): + # since Card in_state State as monovalued cardinality, the in_state + # relation used in the rql expression can be ignored and S replaced by + # the variable from the incoming query + snippet = ('X in_state S, S name "hop"') + rqlst = parse(u'Card C WHERE C in_state STATE') + rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C in_state STATE, C is Card, ' + 'EXISTS(STATE name "hop"), STATE is State') + + def test_relation_optimization_1_rhs(self): + snippet = ('TW subworkflow_exit X, TW name "hop"') + rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT') + rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, ' + 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint') + + def test_relation_optimization_2_lhs(self): + # optional relation can be shared if also optional in the snippet + snippet = ('X in_state S?, S name "hop"') + rqlst = parse(u'Card C WHERE C in_state STATE?') + rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C in_state STATE?, C is Card, ' + 'EXISTS(STATE name "hop"), STATE is State') + def test_relation_optimization_2_rhs(self): + snippet = ('TW? subworkflow_exit X, TW name "hop"') + rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') + rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, ' + 'EXISTS(C name "hop"), C is WorkflowTransition') + + def test_relation_optimization_3_lhs(self): + # optional relation in the snippet but not in the orig tree can be shared + snippet = ('X in_state S?, S name "hop"') + rqlst = parse(u'Card C WHERE C in_state STATE') + rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C in_state STATE, C is Card, ' + 'EXISTS(STATE name "hop"), STATE is State') + + def test_relation_optimization_3_rhs(self): + snippet = ('TW? 
subworkflow_exit X, TW name "hop"') + rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT') + rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, ' + 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint') + + def test_relation_non_optimization_1_lhs(self): + # but optional relation in the orig tree but not in the snippet can't be shared + snippet = ('X in_state S, S name "hop"') + rqlst = parse(u'Card C WHERE C in_state STATE?') + rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C in_state STATE?, C is Card, ' + 'EXISTS(C in_state A, A name "hop", A is State), STATE is State') + + def test_relation_non_optimization_1_rhs(self): + snippet = ('TW subworkflow_exit X, TW name "hop"') + rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') + rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, ' + 'EXISTS(A subworkflow_exit EXIT, A name "hop", A is WorkflowTransition), ' + 'C is WorkflowTransition') + + def test_relation_non_optimization_2(self): + """See #3024730""" + # 'X inlined_note N' must not be shared with 'C inlined_note N' + # previously inserted, else this may introduce duplicated results, as N + # will then be shared by multiple EXISTS and so at SQL generation time, + # the table will be in the FROM clause of the outermost query + rqlst = parse(u'Any A,C WHERE A inlined_card C') + rewrite(rqlst, {('A', 'X'): ('X inlined_card C, C inlined_note N, N owned_by U',), + ('C', 'X'): ('X inlined_note N, N owned_by U',)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any A,C WHERE A inlined_card C, D eid %(E)s, ' + 'EXISTS(C inlined_note B, B owned_by D, B is Note), ' + 'EXISTS(C inlined_note F, F owned_by D, F is Note), ' + 'A is Affaire, C is Card') + + def test_unsupported_constraint_1(self): + # CWUser doesn't have require_permission + trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') + rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U') + self.assertRaises(Unauthorized, rewrite, rqlst, {('T', 'X'): (trinfo_constraint,)}, {}) + + def test_unsupported_constraint_2(self): + trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') + rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U') + rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X wf_info_for Y, Y in_group G, G name "managers"')}, {}) + self.assertEqual(rqlst.as_string(), + u'Any U,T WHERE U is CWUser, T wf_info_for U, ' + 'EXISTS(U in_group B, B name "managers", B is CWGroup), T is TrInfo') + + def test_unsupported_constraint_3(self): + self.skipTest('raise unauthorized for now') + trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') + rqlst = parse(u'Any T WHERE T wf_info_for X') + rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X in_group G, G name "managers"')}, {}) + self.assertEqual(rqlst.as_string(), + u'XXX dunno what should be generated') + + def test_add_ambiguity_exists(self): + constraint = ('X concerne Y') + rqlst = parse(u'Affaire X') + rewrite(rqlst, {('X', 'X'): (constraint,)}, {}) + self.assertEqual(rqlst.as_string(), + u"Any X WHERE X is Affaire, ((EXISTS(X concerne A, A is Division)) OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))") + + def test_add_ambiguity_outerjoin(self): + constraint = ('X concerne Y') + 
rqlst = parse(u'Any X,C WHERE X? documented_by C') + rewrite(rqlst, {('X', 'X'): (constraint,)}, {}) + # ambiguity are kept in the sub-query, no need to be resolved using OR + self.assertEqual(rqlst.as_string(), + u"Any X,C WHERE X? documented_by C, C is Card WITH X BEING (Any X WHERE EXISTS(X concerne A), X is Affaire)") + + + def test_rrqlexpr_nonexistant_subject_1(self): + constraint = RRQLExpression('S owned_by U') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') + self.assertEqual(rqlst.as_string(), + u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)") + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') + self.assertEqual(rqlst.as_string(), + u"Any C WHERE C is Card") + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU') + self.assertEqual(rqlst.as_string(), + u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)") + + def test_rrqlexpr_nonexistant_subject_2(self): + constraint = RRQLExpression('S owned_by U, O owned_by U, O is Card') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C is Card, B eid %(D)s, EXISTS(A owned_by B, A is Card)') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU') + self.assertEqual(rqlst.as_string(), + 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A, D owned_by A, D is Card)') + + def test_rrqlexpr_nonexistant_subject_3(self): + constraint = RRQLExpression('U in_group G, G name "users"') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') + self.assertEqual(rqlst.as_string(), + u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)') + + def test_rrqlexpr_nonexistant_subject_4(self): + constraint = RRQLExpression('U in_group G, G name "users", S owned_by U') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') + self.assertEqual(rqlst.as_string(), + u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", C owned_by A, D is CWGroup)') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') + self.assertEqual(rqlst.as_string(), + u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)') + + def test_rrqlexpr_nonexistant_subject_5(self): + constraint = RRQLExpression('S owned_by Z, O owned_by Z, O is Card') + rqlst = parse(u'Card C') + rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'S') + self.assertEqual(rqlst.as_string(), + u"Any C WHERE C is Card, EXISTS(C owned_by A, A is CWUser)") + + def test_rqlexpr_not_relation_1_1(self): + constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') + rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)') + rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X') + self.assertEqual(rqlst.as_string(), + u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire') + + def test_rqlexpr_not_relation_1_2(self): + constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') + rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)') + rewrite(rqlst, {('A', 'X'): (constraint,)}, {}, 'X') + self.assertEqual(rqlst.as_string(), + u'Any A WHERE NOT EXISTS(A 
documented_by C, C is Card), A is Affaire, EXISTS(A owned_by B, B login "hop", B is CWUser)') + + def test_rqlexpr_not_relation_2(self): + constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') + rqlst = rqlhelper.parse(u'Affaire A WHERE NOT A documented_by C', annotate=False) + rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X') + self.assertEqual(rqlst.as_string(), + u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire') + + def test_rqlexpr_multiexpr_outerjoin(self): + c1 = ERQLExpression('X owned_by Z, Z login "hop"', 'X') + c2 = ERQLExpression('X owned_by Z, Z login "hip"', 'X') + c3 = ERQLExpression('X owned_by Z, Z login "momo"', 'X') + rqlst = rqlhelper.parse(u'Any A WHERE A documented_by C?', annotate=False) + rewrite(rqlst, {('C', 'X'): (c1, c2, c3)}, {}, 'X') + self.assertEqual(rqlst.as_string(), + u'Any A WHERE A documented_by C?, A is Affaire ' + 'WITH C BEING (Any C WHERE ((EXISTS(C owned_by B, B login "hop")) ' + 'OR (EXISTS(C owned_by D, D login "momo"))) ' + 'OR (EXISTS(C owned_by A, A login "hip")), C is Card)') + + def test_multiple_erql_one_bad(self): + #: reproduce bug #2236985 + #: (rqlrewrite fails to remove rewritten entry for unsupported constraint and then crash) + #: + #: This check a very rare code path triggered by the four condition below + + # 1. c_ok introduce an ambiguity + c_ok = ERQLExpression('X concerne R') + # 2. c_bad is just plain wrong and won't be kept + # 3. but it declare a new variable + # 4. this variable require a rewrite + c_bad = ERQLExpression('X documented_by R, A in_state R') + + rqlst = parse(u'Any A, R WHERE A ref R, S is Affaire') + rewrite(rqlst, {('A', 'X'): (c_ok, c_bad)}, {}) + + def test_nonregr_is_instance_of(self): + user_expr = ERQLExpression('NOT X in_group AF, AF name "guests"') + rqlst = parse(u'Any O WHERE S use_email O, S is CWUser, O is_instance_of EmailAddress') + rewrite(rqlst, {('S', 'X'): (user_expr,)}, {}) + self.assertEqual(rqlst.as_string(), + 'Any O WHERE S use_email O, S is CWUser, O is EmailAddress, ' + 'EXISTS(NOT S in_group A, A name "guests", A is CWGroup)') + +from cubicweb.devtools.testlib import CubicWebTC +from logilab.common.decorators import classproperty + +class RewriteFullTC(CubicWebTC): + @classproperty + def config(cls): + return BaseApptestConfiguration(apphome=cls.datapath('rewrite')) + + def process(self, rql, args=None): + if args is None: + args = {} + querier = self.repo.querier + union = querier.parse(rql) + with self.admin_access.repo_cnx() as cnx: + querier.solutions(cnx, union, args) + querier._annotate(union) + plan = querier.plan_factory(union, args, cnx) + plan.preprocess(union) + return union + + def test_ambiguous_optional_same_exprs(self): + """See #3013535""" + edef1 = self.schema['Societe'] + edef2 = self.schema['Division'] + edef3 = self.schema['Note'] + with self.temporary_permissions((edef1, {'read': (ERQLExpression('X owned_by U'),)}), + (edef2, {'read': (ERQLExpression('X owned_by U'),)}), + (edef3, {'read': (ERQLExpression('X owned_by U'),)})): + union = self.process('Any A,AR,X,CD WHERE A concerne X?, A ref AR, X creation_date CD') + self.assertEqual('Any A,AR,X,CD WHERE A concerne X?, A ref AR, A is Affaire ' + 'WITH X,CD BEING (Any X,CD WHERE X creation_date CD, ' + 'EXISTS(X owned_by %(A)s), X is IN(Division, Note, Societe))', + union.as_string()) + + def test_ambiguous_optional_diff_exprs(self): + """See #3013554""" + self.skipTest('bad request generated (may generate duplicated results)') + 
edef1 = self.schema['Societe'] + edef2 = self.schema['Division'] + edef3 = self.schema['Note'] + with self.temporary_permissions((edef1, {'read': (ERQLExpression('X created_by U'),)}), + (edef2, {'read': ('users',)}), + (edef3, {'read': (ERQLExpression('X owned_by U'),)})): + union = self.process('Any A,AR,X,CD WHERE A concerne X?, A ref AR, X creation_date CD') + self.assertEqual(union.as_string(), 'not generated today') + + + def test_xxxx(self): + edef1 = self.schema['Societe'] + edef2 = self.schema['Division'] + read_expr = ERQLExpression('X responsable E, U has_read_permission E') + with self.temporary_permissions((edef1, {'read': (read_expr,)}), + (edef2, {'read': (read_expr,)})): + union = self.process('Any X,AA,AC,AD ORDERBY AD DESC ' + 'WHERE X responsable E, X nom AA, ' + 'X responsable AC?, AC modification_date AD') + self.assertEqual('Any X,AA,AC,AD ORDERBY AD DESC ' + 'WHERE X responsable E, X nom AA, ' + 'X responsable AC?, AC modification_date AD, ' + 'AC is CWUser, E is CWUser, X is IN(Division, Societe)', + union.as_string()) + + def test_question_mark_attribute_snippet(self): + # see #3661918 + from cubicweb.rqlrewrite import RQLRewriter + from logilab.common.decorators import monkeypatch + repotest.undo_monkey_patch() + orig_insert_snippets = RQLRewriter.insert_snippets + # patch insert_snippets and not rewrite, insert_snippets is already + # monkey patches (see above setupModule/repotest) + @monkeypatch(RQLRewriter) + def insert_snippets(self, snippets, varexistsmap=None): + # crash occurs if snippets are processed in a specific order, force + # destiny + if snippets[0][0] != {u'N': 'X'}: + snippets = list(reversed(snippets)) + return orig_insert_snippets(self, snippets, varexistsmap) + try: + with self.temporary_permissions( + (self.schema['Affaire'], + {'read': (ERQLExpression('X ref "blah"'), )}), + (self.schema['Note'], + {'read': (ERQLExpression( + 'EXISTS(X inlined_affaire Z), EXISTS(Z owned_by U)'), )}), + ): + union = self.process( + 'Any A,COUNT(N) GROUPBY A ' + 'WHERE A is Affaire, N? inlined_affaire A') + self.assertEqual('Any A,COUNT(N) GROUPBY A WHERE A is Affaire ' + 'WITH N,A BEING (Any N,A WHERE N? 
inlined_affaire A, ' + '(N is NULL) OR (EXISTS(EXISTS(N inlined_affaire B), ' + 'EXISTS(B owned_by %(E)s), B is Affaire)), ' + 'A is Affaire, N is Note, EXISTS(A ref "blah"))', + union.as_string()) + finally: + RQLRewriter.insert_snippets = orig_insert_snippets + + +class RQLRelationRewriterTC(TestCase): + # XXX valid rules: S and O specified, not in a SET, INSERT, DELETE scope + # valid uses: no outer join + + # Basic tests + def test_base_rule(self): + rules = {'participated_in': 'S contributor O'} + rqlst = rqlhelper.parse(u'Any X WHERE X participated_in S') + rule_rewrite(rqlst, rules) + self.assertEqual('Any X WHERE X contributor S', + rqlst.as_string()) + + def test_complex_rule_1(self): + rules = {'illustrator_of': ('C is Contribution, C contributor S, ' + 'C manifestation O, C role R, ' + 'R name "illustrator"')} + rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE C is Contribution, ' + 'C contributor A, C manifestation B, ' + 'C role D, D name "illustrator"', + rqlst.as_string()) + + def test_complex_rule_2(self): + rules = {'illustrator_of': ('C is Contribution, C contributor S, ' + 'C manifestation O, C role R, ' + 'R name "illustrator"')} + rqlst = rqlhelper.parse(u'Any A WHERE EXISTS(A illustrator_of B)') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A WHERE EXISTS(C is Contribution, ' + 'C contributor A, C manifestation B, ' + 'C role D, D name "illustrator")', + rqlst.as_string()) + + + def test_rewrite2(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B, C require_permission R, S' + 'require_state O') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, ' + 'D is Contribution, D contributor A, D manifestation B, D role E, ' + 'E name "illustrator"', + rqlst.as_string()) + + def test_rewrite3(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'Any A,B WHERE E require_permission T, A illustrator_of B') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE E require_permission T, ' + 'C is Contribution, C contributor A, C manifestation B, ' + 'C role D, D name "illustrator"', + rqlst.as_string()) + + def test_rewrite4(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE C require_permission R, ' + 'D is Contribution, D contributor A, D manifestation B, ' + 'D role E, E name "illustrator"', + rqlst.as_string()) + + def test_rewrite5(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B, ' + 'S require_state O') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, ' + 'D is Contribution, D contributor A, D manifestation B, D role E, ' + 'E name "illustrator"', + rqlst.as_string()) + + # Tests for the with clause + def test_rewrite_with(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + 
rqlst = rqlhelper.parse(u'Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WITH A,B BEING ' + '(Any X,Y WHERE A is Contribution, A contributor X, ' + 'A manifestation Y, A role B, B name "illustrator")', + rqlst.as_string()) + + def test_rewrite_with2(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE T require_permission C ' + 'WITH A,B BEING (Any X,Y WHERE A is Contribution, ' + 'A contributor X, A manifestation Y, A role B, B name "illustrator")', + rqlst.as_string()) + + def test_rewrite_with3(self): + rules = {'participated_in': 'S contributor O'} + rqlst = rqlhelper.parse(u'Any A,B WHERE A participated_in B ' + 'WITH A, B BEING(Any X,Y WHERE X contributor Y)') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE A contributor B WITH A,B BEING ' + '(Any X,Y WHERE X contributor Y)', + rqlst.as_string()) + + def test_rewrite_with4(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B ' + 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE C is Contribution, ' + 'C contributor A, C manifestation B, C role D, ' + 'D name "illustrator" WITH A,B BEING ' + '(Any X,Y WHERE A is Contribution, A contributor X, ' + 'A manifestation Y, A role B, B name "illustrator")', + rqlst.as_string()) + + # Tests for the union + def test_rewrite_union(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B) UNION' + '(Any X,Y WHERE X is CWUser, Z manifestation Y)') + rule_rewrite(rqlst, rules) + self.assertEqual('(Any A,B WHERE C is Contribution, ' + 'C contributor A, C manifestation B, C role D, ' + 'D name "illustrator") UNION (Any X,Y WHERE X is CWUser, Z manifestation Y)', + rqlst.as_string()) + + def test_rewrite_union2(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'(Any Y WHERE Y match W) UNION ' + '(Any A WHERE A illustrator_of B) UNION ' + '(Any Y WHERE Y is ArtWork)') + rule_rewrite(rqlst, rules) + self.assertEqual('(Any Y WHERE Y match W) ' + 'UNION (Any A WHERE C is Contribution, C contributor A, ' + 'C manifestation B, C role D, D name "illustrator") ' + 'UNION (Any Y WHERE Y is ArtWork)', + rqlst.as_string()) + + # Tests for the exists clause + def test_rewrite_exists(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, ' + 'EXISTS(B is ArtWork))') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE EXISTS(B is ArtWork), ' + 'C is Contribution, C contributor A, C manifestation B, C role D, ' + 'D name "illustrator"', + rqlst.as_string()) + + def test_rewrite_exists2(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'(Any A,B WHERE B contributor A, EXISTS(A 
illustrator_of W))') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE B contributor A, ' + 'EXISTS(C is Contribution, C contributor A, C manifestation W, ' + 'C role D, D name "illustrator")', + rqlst.as_string()) + + def test_rewrite_exists3(self): + rules = {'illustrator_of': 'C is Contribution, C contributor S, ' + 'C manifestation O, C role R, R name "illustrator"'} + rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))') + rule_rewrite(rqlst, rules) + self.assertEqual('Any A,B WHERE EXISTS(C is Contribution, C contributor A, ' + 'C manifestation W, C role D, D name "illustrator"), ' + 'E is Contribution, E contributor A, E manifestation B, E role F, ' + 'F name "illustrator"', + rqlst.as_string()) + + # Test for GROUPBY + def test_rewrite_groupby(self): + rules = {'participated_in': 'S contributor O'} + rqlst = rqlhelper.parse(u'Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA') + rule_rewrite(rqlst, rules) + self.assertEqual('Any SUM(SA) GROUPBY S WHERE P manifestation SA, P contributor S', + rqlst.as_string()) + + +class RQLRelationRewriterTC(CubicWebTC): + + appid = 'data/rewrite' + + def test_base_rule(self): + with self.admin_access.client_cnx() as cnx: + art = cnx.create_entity('ArtWork', name=u'Les travailleurs de la Mer') + role = cnx.create_entity('Role', name=u'illustrator') + vic = cnx.create_entity('Person', name=u'Victor Hugo') + contrib = cnx.create_entity('Contribution', code=96, contributor=vic, + manifestation=art, role=role) + rset = cnx.execute('Any X WHERE X illustrator_of S') + self.assertEqual([u'Victor Hugo'], + [result.name for result in rset.entities()]) + rset = cnx.execute('Any S WHERE X illustrator_of S, X eid %(x)s', + {'x': vic.eid}) + self.assertEqual([u'Les travailleurs de la Mer'], + [result.name for result in rset.entities()]) + + +def rule_rewrite(rqlst, kwargs=None): + rewriter = _prepare_rewriter(rqlrewrite.RQLRelationRewriter, kwargs) + rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, + kwargs=kwargs) + rewriter.rewrite(rqlst) + for select in rqlst.children: + test_vrefs(select) + return rewriter.rewritten + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_rset.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_rset.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,594 @@ +# coding: utf-8 +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
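Finally, unittest_rset.py below tests the ResultSet container. The recurring pattern is a result set built from raw rows, the originating RQL and a per-row type description, then used through the ORM accessors; eids and logins in this sketch are made up:

    rset = ResultSet([[12, 'adim'], [13, 'syt']],
                     'Any U,L WHERE U is CWUser, U login L',
                     description=[['CWUser', 'String'], ['CWUser', 'String']])
    rset.req = req           # as in the tests, a request (and its vreg) is
    rset.vreg = req.vreg     # needed before entities can be built from rows
    user = rset.get_entity(0, 0)   # ORM access to row 0, column 0
    first_two = rset.limit(2)      # new ResultSet restricted to the first two rows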
+"""unit tests for module cubicweb.utils""" + +from six import string_types +from six.moves import cPickle as pickle +from six.moves.urllib.parse import urlsplit + +from rql import parse + +from logilab.common.testlib import TestCase, unittest_main, mock_object + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.rset import NotAnEntity, ResultSet, attr_desc_iterator +from cubicweb import NoResultError, MultipleResultsError + + +def pprelcachedict(d): + res = {} + for k, (rset, related) in d.items(): + res[k] = sorted(v.eid for v in related) + return sorted(res.items()) + + +class AttrDescIteratorTC(TestCase): + """TestCase for cubicweb.rset.attr_desc_iterator""" + + def test_relations_description(self): + """tests relations_description() function""" + queries = { + 'Any U,L,M where U is CWUser, U login L, U mail M' : [(1, 'login', 'subject'), (2, 'mail', 'subject')], + 'Any U,L,M where U is CWUser, L is Foo, U mail M' : [(2, 'mail', 'subject')], + 'Any C,P where C is Company, C employs P' : [(1, 'employs', 'subject')], + 'Any C,P where C is Company, P employed_by P' : [], + 'Any C where C is Company, C employs P' : [], + } + for rql, relations in queries.items(): + result = list(attr_desc_iterator(parse(rql).children[0], 0, 0)) + self.assertEqual((rql, result), (rql, relations)) + + def test_relations_description_indexed(self): + """tests relations_description() function""" + queries = { + 'Any C,U,P,L,M where C is Company, C employs P, U is CWUser, U login L, U mail M' : + {0: [(2,'employs', 'subject')], 1: [(3,'login', 'subject'), (4,'mail', 'subject')]}, + } + for rql, results in queries.items(): + for idx, relations in results.items(): + result = list(attr_desc_iterator(parse(rql).children[0], idx, idx)) + self.assertEqual(result, relations) + + def test_subquery_callfunc(self): + rql = ('Any A,B,C,COUNT(D) GROUPBY A,B,C WITH A,B,C,D BEING ' + '(Any YEAR(CD), MONTH(CD), S, X WHERE X is CWUser, X creation_date CD, X in_state S)') + rqlst = parse(rql) + select, col = rqlst.locate_subquery(2, 'CWUser', None) + result = list(attr_desc_iterator(select, col, 2)) + self.assertEqual(result, []) + + def test_subquery_callfunc_2(self): + rql = ('Any X,S,L WHERE X in_state S WITH X, L BEING (Any X,MAX(L) GROUPBY X WHERE X is CWUser, T wf_info_for X, T creation_date L)') + rqlst = parse(rql) + select, col = rqlst.locate_subquery(0, 'CWUser', None) + result = list(attr_desc_iterator(select, col, 0)) + self.assertEqual(result, [(1, 'in_state', 'subject')]) + + +class ResultSetTC(CubicWebTC): + + def setUp(self): + super(ResultSetTC, self).setUp() + self.rset = ResultSet([[12, 'adim'], [13, 'syt']], + 'Any U,L where U is CWUser, U login L', + description=[['CWUser', 'String'], ['Bar', 'String']]) + self.rset.req = mock_object(vreg=self.vreg) + + def compare_urls(self, url1, url2): + info1 = urlsplit(url1) + info2 = urlsplit(url2) + self.assertEqual(info1[:3], info2[:3]) + if info1[3] != info2[3]: + params1 = dict(pair.split('=') for pair in info1[3].split('&')) + params2 = dict(pair.split('=') for pair in info1[3].split('&')) + self.assertDictEqual(params1, params2) + + def test_pickle(self): + del self.rset.req + rs2 = pickle.loads(pickle.dumps(self.rset)) + self.assertEqual(self.rset.rows, rs2.rows) + self.assertEqual(self.rset.rowcount, rs2.rowcount) + self.assertEqual(self.rset.rql, rs2.rql) + self.assertEqual(self.rset.description, rs2.description) + + def test_build_url(self): + with self.admin_access.web_request() as req: + baseurl = req.base_url() + 
self.compare_urls(req.build_url('view', vid='foo', rql='yo'), + '%sview?vid=foo&rql=yo' % baseurl) + self.compare_urls(req.build_url('view', _restpath='task/title/go'), + '%stask/title/go' % baseurl) + #self.compare_urls(req.build_url('view', _restpath='/task/title/go'), + # '%stask/title/go' % baseurl) + # empty _restpath should not crash + self.compare_urls(req.build_url('view', _restpath=''), baseurl) + self.assertNotIn('https', req.build_url('view', vid='foo', rql='yo', + __secure__=True)) + try: + self.config.global_set_option('https-url', 'https://testing.fr/') + self.assertTrue('https', req.build_url('view', vid='foo', rql='yo', + __secure__=True)) + self.compare_urls(req.build_url('view', vid='foo', rql='yo', + __secure__=True), + '%sview?vid=foo&rql=yo' % req.base_url(secure=True)) + finally: + self.config.global_set_option('https-url', None) + + + def test_build(self): + """test basic build of a ResultSet""" + rs = ResultSet([1,2,3], 'CWGroup X', description=['CWGroup', 'CWGroup', 'CWGroup']) + self.assertEqual(rs.rowcount, 3) + self.assertEqual(rs.rows, [1,2,3]) + self.assertEqual(rs.description, ['CWGroup', 'CWGroup', 'CWGroup']) + + + def test_limit(self): + rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], + 'Any U,L where U is CWUser, U login L', + description=[['CWUser', 'String']] * 3) + with self.admin_access.web_request() as req: + rs.req = req + rs.vreg = self.vreg + self.assertEqual(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']]) + rs2 = rs.limit(2, offset=1) + self.assertEqual(rs2.rows, [[13000, 'syt'], [14000, 'nico']]) + self.assertEqual(rs2.get_entity(0, 0).cw_row, 0) + self.assertEqual(rs.limit(2, offset=2).rows, [[14000, 'nico']]) + self.assertEqual(rs.limit(2, offset=3).rows, []) + + def test_limit_2(self): + with self.admin_access.web_request() as req: + # drop user from cache for the sake of this test + req.drop_entity_cache(req.user.eid) + rs = req.execute('Any E,U WHERE E is CWEType, E created_by U') + # get entity on row 9. This will fill its created_by relation cache, + # with cwuser on row 9 as well + e1 = rs.get_entity(9, 0) + # get entity on row 10. 
This will fill its created_by relation cache, + # with cwuser built on row 9 + e2 = rs.get_entity(10, 0) + # limit result set from row 10 + rs.limit(1, 10, inplace=True) + # get back eid + e = rs.get_entity(0, 0) + self.assertTrue(e2 is e) + # rs.limit has properly removed cwuser for request cache, but it's + # still referenced by e/e2 relation cache + u = e.created_by[0] + # now ensure this doesn't trigger IndexError because cwuser.cw_row is 9 + # while now rset has only one row + u.cw_rset[u.cw_row] + + def test_filter(self): + rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], + 'Any U,L where U is CWUser, U login L', + description=[['CWUser', 'String']] * 3) + with self.admin_access.web_request() as req: + rs.req = req + rs.vreg = self.vreg + def test_filter(entity): + return entity.login != 'nico' + + rs2 = rs.filtered_rset(test_filter) + self.assertEqual(len(rs2), 2) + self.assertEqual([login for _, login in rs2], ['adim', 'syt']) + self.assertEqual(rs2.description, rs.description[1:]) + + def test_transform(self): + rs = ResultSet([[12, 'adim'], [13, 'syt'], [14, 'nico']], + 'Any U,L where U is CWUser, U login L', + description=[['CWUser', 'String']] * 3) + with self.admin_access.web_request() as req: + rs.req = req + def test_transform(row, desc): + return row[1:], desc[1:] + rs2 = rs.transformed_rset(test_transform) + + self.assertEqual(len(rs2), 3) + self.assertEqual(list(rs2), [['adim'],['syt'],['nico']]) + + def test_sort(self): + rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], + 'Any U,L where U is CWUser, U login L', + description=[['CWUser', 'String']] * 3) + with self.admin_access.web_request() as req: + rs.req = req + rs.vreg = self.vreg + + rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login']) + self.assertEqual(len(rs2), 3) + self.assertEqual([login for _, login in rs2], ['adim', 'nico', 'syt']) + # make sure rs is unchanged + self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) + + rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'], reverse=True) + self.assertEqual(len(rs2), 3) + self.assertEqual([login for _, login in rs2], ['syt', 'nico', 'adim']) + # make sure rs is unchanged + self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) + + rs3 = rs.sorted_rset(lambda row: row[1], col=-1) + self.assertEqual(len(rs3), 3) + self.assertEqual([login for _, login in rs3], ['adim', 'nico', 'syt']) + # make sure rs is unchanged + self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) + + def test_split(self): + rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'], + [12000, 'adim', u'Jardiner facile'], + [13000, 'syt', u'Le carrelage en 42 leçons'], + [14000, 'nico', u'La tarte tatin en 15 minutes'], + [14000, 'nico', u"L'épluchage du castor commun"]], + 'Any U, L, T WHERE U is CWUser, U login L,'\ + 'D created_by U, D title T', + description=[['CWUser', 'String', 'String']] * 5) + with self.admin_access.web_request() as req: + rs.req = req + rs.vreg = self.vreg + rsets = rs.split_rset(lambda e:e.cw_attr_cache['login']) + self.assertEqual(len(rsets), 3) + self.assertEqual([login for _, login,_ in rsets[0]], ['adim', 'adim']) + self.assertEqual([login for _, login,_ in rsets[1]], ['syt']) + self.assertEqual([login for _, login,_ in rsets[2]], ['nico', 'nico']) + # make sure rs is unchanged + self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) + + rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'], return_dict=True) + self.assertEqual(len(rsets), 
3) + self.assertEqual([login for _, login,_ in rsets['nico']], ['nico', 'nico']) + self.assertEqual([login for _, login,_ in rsets['adim']], ['adim', 'adim']) + self.assertEqual([login for _, login,_ in rsets['syt']], ['syt']) + # make sure rs is unchanged + self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) + + rsets = rs.split_rset(lambda s: s.count('d'), col=2) + self.assertEqual(len(rsets), 2) + self.assertEqual([title for _, _, title in rsets[0]], + [u"Adim chez les pinguins", + u"Jardiner facile", + u"L'épluchage du castor commun",]) + self.assertEqual([title for _, _, title in rsets[1]], + [u"Le carrelage en 42 leçons", + u"La tarte tatin en 15 minutes",]) + # make sure rs is unchanged + self.assertEqual([title for _, _, title in rs], + [u'Adim chez les pinguins', + u'Jardiner facile', + u'Le carrelage en 42 leçons', + u'La tarte tatin en 15 minutes', + u"L'épluchage du castor commun"]) + + def test_cached_syntax_tree(self): + """make sure syntax tree is cached""" + rqlst1 = self.rset.syntax_tree() + rqlst2 = self.rset.syntax_tree() + self.assertIs(rqlst1, rqlst2) + + def test_get_entity_simple(self): + with self.admin_access.web_request() as req: + req.create_entity('CWUser', login=u'adim', upassword='adim', + surname=u'di mascio', firstname=u'adrien') + req.drop_entity_cache() + e = req.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0) + self.assertEqual(e.cw_attr_cache['surname'], 'di mascio') + self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') + self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'creation_date') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) + e.complete() + self.assertEqual(e.cw_attr_cache['firstname'], 'adrien') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) + + def test_get_entity_advanced(self): + with self.admin_access.web_request() as req: + req.create_entity('Bookmark', title=u'zou', path=u'/view') + req.drop_entity_cache() + req.execute('SET X bookmarked_by Y WHERE X is Bookmark, Y login "anon"') + rset = req.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN') + + e = rset.get_entity(0, 0) + self.assertEqual(e.cw_row, 0) + self.assertEqual(e.cw_col, 0) + self.assertEqual(e.cw_attr_cache['title'], 'zou') + self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'path') + self.assertEqual(e.view('text'), 'zou') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) + + e = rset.get_entity(0, 1) + self.assertEqual(e.cw_row, 0) + self.assertEqual(e.cw_col, 1) + self.assertEqual(e.cw_attr_cache['login'], 'anon') + self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') + self.assertEqual(pprelcachedict(e._cw_related_cache), + []) + e.complete() + self.assertEqual(e.cw_attr_cache['firstname'], None) + self.assertEqual(e.view('text'), 'anon') + self.assertEqual(pprelcachedict(e._cw_related_cache), + []) + + self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) + self.assertRaises(NotAnEntity, rset.get_entity, 0, 3) + + def test_get_entity_relation_cache_compt(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X,S WHERE X in_state S, X login "anon"') + e = rset.get_entity(0, 0) + seid = req.execute('State X WHERE X name "activated"')[0][0] + # for_user / in_group are prefetched in CWUser __init__, in_state should + # be filed from our query rset + self.assertEqual(pprelcachedict(e._cw_related_cache), + [('in_state_subject', [seid])]) + + def 
test_get_entity_advanced_prefilled_cache(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'zou', path=u'path') + req.cnx.commit() + rset = req.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, ' + 'X title XT, S name SN, U login UL, X eid %s' % e.eid) + e = rset.get_entity(0, 0) + self.assertEqual(e.cw_attr_cache['title'], 'zou') + self.assertEqual(pprelcachedict(e._cw_related_cache), + [('created_by_subject', [req.user.eid])]) + # first level of recursion + u = e.created_by[0] + self.assertEqual(u.cw_attr_cache['login'], 'admin') + self.assertRaises(KeyError, u.cw_attr_cache.__getitem__, 'firstname') + # second level of recursion + s = u.in_state[0] + self.assertEqual(s.cw_attr_cache['name'], 'activated') + self.assertRaises(KeyError, s.cw_attr_cache.__getitem__, 'description') + + + def test_get_entity_cache_with_left_outer_join(self): + with self.admin_access.web_request() as req: + eid = req.execute('INSERT CWUser E: E login "joe", E upassword "joe", E in_group G ' + 'WHERE G name "users"')[0][0] + rset = req.execute('Any X,E WHERE X eid %(x)s, X primary_email E?', {'x': eid}) + e = rset.get_entity(0, 0) + # if any of the assertion below fails with a KeyError, the relation is not cached + # related entities should be an empty list + self.assertEqual(e._cw_related_cache['primary_email_subject'][True], ()) + # related rset should be an empty rset + cached = e._cw_related_cache['primary_email_subject'][False] + self.assertIsInstance(cached, ResultSet) + self.assertEqual(cached.rowcount, 0) + + + def test_get_entity_union(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'manger', path=u'path') + req.drop_entity_cache() + rset = req.execute('Any X,N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X is Bookmark, X title N)' + ' UNION ' + ' (Any X,N WHERE X is CWGroup, X name N))') + expected = (('CWGroup', 'guests'), ('CWGroup', 'managers'), + ('Bookmark', 'manger'), ('CWGroup', 'owners'), + ('CWGroup', 'users')) + for entity in rset.entities(): # test get_entity for each row actually + etype, n = expected[entity.cw_row] + self.assertEqual(entity.cw_etype, etype) + attr = etype == 'Bookmark' and 'title' or 'name' + self.assertEqual(entity.cw_attr_cache[attr], n) + + def test_one(self): + with self.admin_access.web_request() as req: + req.create_entity('CWUser', login=u'cdevienne', + upassword=u'cdevienne', + surname=u'de Vienne', + firstname=u'Christophe') + e = req.execute('Any X WHERE X login "cdevienne"').one() + + self.assertEqual(e.surname, u'de Vienne') + + e = req.execute( + 'Any X, N WHERE X login "cdevienne", X surname N').one() + self.assertEqual(e.surname, u'de Vienne') + + e = req.execute( + 'Any N, X WHERE X login "cdevienne", X surname N').one(col=1) + self.assertEqual(e.surname, u'de Vienne') + + def test_one_no_rows(self): + with self.admin_access.web_request() as req: + with self.assertRaises(NoResultError): + req.execute('Any X WHERE X login "patanok"').one() + + def test_one_multiple_rows(self): + with self.admin_access.web_request() as req: + req.create_entity( + 'CWUser', login=u'cdevienne', upassword=u'cdevienne', + surname=u'de Vienne', firstname=u'Christophe') + + req.create_entity( + 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', + firstname=u'adrien') + + with self.assertRaises(MultipleResultsError): + req.execute('Any X WHERE X is CWUser').one() + + def test_related_entity_optional(self): + with self.admin_access.web_request() as req: + e = 
req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any B,U,L WHERE B bookmarked_by U?, U login L') + entity, rtype = rset.related_entity(0, 2) + self.assertEqual(entity, None) + self.assertEqual(rtype, None) + + def test_related_entity_union_subquery_1(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any X,N WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 1) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') + entity, rtype = rset.related_entity(1, 1) + self.assertEqual(entity.cw_etype, 'CWGroup') + self.assertEqual(rtype, 'name') + self.assertEqual(entity.name, 'guests') + + def test_related_entity_union_subquery_2(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING ' + '((Any X,N WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any X,N WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 1) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') + + def test_related_entity_union_subquery_3(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,N ORDERBY N WITH N,X BEING ' + '((Any N,X WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any N,X WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 1) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') + + def test_related_entity_union_subquery_4(self): + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,X, N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any X,N WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 2) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') + + def test_related_entity_trap_subquery(self): + with self.admin_access.web_request() as req: + req.create_entity('Bookmark', title=u'test bookmark', path=u'') + req.execute('SET B bookmarked_by U WHERE U login "admin"') + rset = req.execute('Any B,T,L WHERE B bookmarked_by U, U login L ' + 'WITH B,T BEING (Any B,T WHERE B is Bookmark, B title T)') + rset.related_entity(0, 2) + + def test_related_entity_subquery_outerjoin(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X,S,L WHERE X in_state S ' + 'WITH X, L BEING (Any X,MAX(L) GROUPBY X ' + 'WHERE X is CWUser, T? 
wf_info_for X, T creation_date L)') + self.assertEqual(len(rset), 2) + rset.related_entity(0, 1) + rset.related_entity(0, 2) + + def test_entities(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any U,G WHERE U in_group G') + # make sure we have at least one element + self.assertTrue(rset) + self.assertEqual(set(e.e_schema.type for e in rset.entities(0)), + set(['CWUser',])) + self.assertEqual(set(e.e_schema.type for e in rset.entities(1)), + set(['CWGroup',])) + + def test_iter_rows_with_entities(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any U,UN,G,GN WHERE U in_group G, U login UN, G name GN') + # make sure we have at least one element + self.assertTrue(rset) + out = list(rset.iter_rows_with_entities())[0] + self.assertEqual( out[0].login, out[1] ) + self.assertEqual( out[2].name, out[3] ) + + def test_printable_rql(self): + with self.admin_access.web_request() as req: + rset = req.execute(u'CWEType X WHERE X final FALSE') + self.assertEqual(rset.printable_rql(), + 'Any X WHERE X final FALSE, X is CWEType') + + def test_searched_text(self): + with self.admin_access.web_request() as req: + rset = req.execute(u'Any X WHERE X has_text "foobar"') + self.assertEqual(rset.searched_text(), 'foobar') + rset = req.execute(u'Any X WHERE X has_text %(text)s', {'text' : 'foo'}) + self.assertEqual(rset.searched_text(), 'foo') + + def test_union_limited_rql(self): + with self.admin_access.web_request() as req: + rset = req.execute('(Any X,N WHERE X is Bookmark, X title N)' + ' UNION ' + '(Any X,N WHERE X is CWGroup, X name N)') + rset.limit(2, 10, inplace=True) + self.assertEqual(rset.limited_rql(), + 'Any A,B LIMIT 2 OFFSET 10 ' + 'WITH A,B BEING (' + '(Any X,N WHERE X is Bookmark, X title N) ' + 'UNION ' + '(Any X,N WHERE X is CWGroup, X name N)' + ')') + + def test_count_users_by_date(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') + self.assertEqual(rset.related_entity(0,0), (None, None)) + + def test_str(self): + with self.admin_access.web_request() as req: + rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(str(rset), string_types) + self.assertEqual(len(str(rset).splitlines()), 1) + + def test_repr(self): + with self.admin_access.web_request() as req: + rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(repr(rset), string_types) + self.assertTrue(len(repr(rset).splitlines()) > 1) + + rset = req.execute('(Any X WHERE X is CWGroup, X name "managers")') + self.assertIsInstance(str(rset), string_types) + self.assertEqual(len(str(rset).splitlines()), 1) + + def test_slice(self): + rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'], + [12000, 'adim', u'Jardiner facile'], + [13000, 'syt', u'Le carrelage en 42 leçons'], + [14000, 'nico', u'La tarte tatin en 15 minutes'], + [14000, 'nico', u"L'épluchage du castor commun"]], + 'Any U, L, T WHERE U is CWUser, U login L,'\ + 'D created_by U, D title T', + description=[['CWUser', 'String', 'String']] * 5) + self.assertEqual(rs[1::2], + [[12000, 'adim', u'Jardiner facile'], + [14000, 'nico', u'La tarte tatin en 15 minutes']]) + + def test_nonregr_symmetric_relation(self): + # see https://www.cubicweb.org/ticket/4739253 + with self.admin_access.client_cnx() as cnx: + p1 = cnx.create_entity('Personne', nom=u'sylvain') + cnx.create_entity('Personne', nom=u'denis', connait=p1) + cnx.commit() + rset = cnx.execute('Any X,Y WHERE X 
connait Y') + rset.get_entity(0, 1) # used to raise KeyError + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_rtags.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_rtags.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,95 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" + +""" +from logilab.common.testlib import TestCase, unittest_main +from cubicweb.rtags import RelationTags, RelationTagsSet, RelationTagsDict + +class RelationTagsTC(TestCase): + + def test_rtags_expansion(self): + rtags = RelationTags() + rtags.tag_subject_of(('Societe', 'travaille', '*'), 'primary') + rtags.tag_subject_of(('*', 'evaluee', '*'), 'secondary') + rtags.tag_object_of(('*', 'tags', '*'), 'generated') + self.assertEqual(rtags.get('Note', 'evaluee', '*', 'subject'), + 'secondary') + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), + 'primary') + self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'), + None) + self.assertEqual(rtags.get('Note', 'tags', '*', 'subject'), + None) + self.assertEqual(rtags.get('*', 'tags', 'Note', 'object'), + 'generated') + self.assertEqual(rtags.get('Tag', 'tags', '*', 'object'), + 'generated') + +# self.assertEqual(rtags.rtag('evaluee', 'Note', 'subject'), set(('secondary', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Note', 'subject'), False) +# self.assertEqual(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Personne', 'subject'), False) +# self.assertEqual(rtags.rtag('ecrit_par', 'Note', 'object'), set(('inlineview', 'link'))) +# self.assertEqual(rtags.is_inlined('ecrit_par', 'Note', 'object'), True) +# class Personne2(Personne): +# id = 'Personne' +# __rtags__ = { +# ('evaluee', 'Note', 'subject') : set(('inlineview',)), +# } +# self.vreg.register(Personne2) +# rtags = Personne2.rtags +# self.assertEqual(rtags.rtag('evaluee', 'Note', 'subject'), set(('inlineview', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Note', 'subject'), True) +# self.assertEqual(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Personne', 'subject'), False) + + + def test_rtagset_expansion(self): + rtags = RelationTagsSet() + rtags.tag_subject_of(('Societe', 'travaille', '*'), 'primary') + rtags.tag_subject_of(('*', 'travaille', '*'), 'secondary') + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), + set(('primary', 'secondary'))) + self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'), + set(('secondary',))) + self.assertEqual(rtags.get('Note', 'tags', "*", 'subject'), + set()) + + def test_rtagdict_expansion(self): + rtags = RelationTagsDict() + 
rtags.tag_subject_of(('Societe', 'travaille', '*'), + {'key1': 'val1', 'key2': 'val1'}) + rtags.tag_subject_of(('*', 'travaille', '*'), + {'key1': 'val0', 'key3': 'val0'}) + rtags.tag_subject_of(('Societe', 'travaille', '*'), + {'key2': 'val2'}) + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), + {'key1': 'val1', 'key2': 'val2', 'key3': 'val0'}) + self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'), + {'key1': 'val0', 'key3': 'val0'}) + self.assertEqual(rtags.get('Note', 'tags', "*", 'subject'), + {}) + + rtags.setdefault(('Societe', 'travaille', '*', 'subject'), 'key1', 'val4') + rtags.setdefault(('Societe', 'travaille', '*', 'subject'), 'key4', 'val4') + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), + {'key1': 'val1', 'key2': 'val2', 'key3': 'val0', 'key4': 'val4'}) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,563 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for module cubicweb.schema""" + +import sys +from os.path import join, isabs, basename, dirname + +from logilab.common.testlib import TestCase, unittest_main + +from rql import RQLSyntaxError + +from yams import ValidationError, BadSchemaDefinition +from yams.constraints import SizeConstraint, StaticVocabularyConstraint +from yams.buildobjs import (RelationDefinition, EntityType, RelationType, + Int, String, SubjectRelation, ComputedRelation) +from yams.reader import fill_schema + +from cubicweb.schema import ( + CubicWebSchema, CubicWebEntitySchema, CubicWebSchemaLoader, + RQLConstraint, RQLUniqueConstraint, RQLVocabularyConstraint, + RQLExpression, ERQLExpression, RRQLExpression, + normalize_expression, order_eschemas, guess_rrqlexpr_mainvars, + build_schema_from_namespace) +from cubicweb.devtools import TestServerConfiguration as TestConfiguration +from cubicweb.devtools.testlib import CubicWebTC + +DATADIR = join(dirname(__file__), 'data') + +# build a dummy schema ######################################################## + + +PERSONNE_PERMISSIONS = { + 'read': ('managers', 'users', 'guests'), + 'update': ('managers', 'owners'), + 'add': ('managers', ERQLExpression('X travaille S, S owned_by U')), + 'delete': ('managers', 'owners',), + } + +CONCERNE_PERMISSIONS = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('O owned_by U')), + } + +schema = CubicWebSchema('Test Schema') +enote = schema.add_entity_type(EntityType('Note')) +eaffaire = schema.add_entity_type(EntityType('Affaire')) +eperson = schema.add_entity_type(EntityType('Personne', __permissions__=PERSONNE_PERMISSIONS)) +esociete = schema.add_entity_type(EntityType('Societe')) + +RELS = ( + # attribute relations + ('Note date String'), + ('Note type String'), + ('Affaire sujet String'), + ('Affaire ref String'), + ('Personne nom String'), + ('Personne prenom String'), + ('Personne sexe String'), + ('Personne tel Int'), + ('Personne fax Int'), + ('Personne datenaiss Date'), + ('Personne promo String'), + # real relations + ('Personne travaille Societe'), + ('Personne evaluee Note'), + ('Societe evaluee Note'), + ('Personne concerne Affaire'), + ('Personne concerne Societe'), + ('Affaire concerne Societe'), + ) +done = {} +for rel in RELS: + _from, _type, _to = rel.split() + if not _type.lower() in done: + schema.add_relation_type(RelationType(_type)) + done[_type.lower()] = True + if _type == 'concerne': + schema.add_relation_def(RelationDefinition(_from, _type, _to, + __permissions__=CONCERNE_PERMISSIONS)) + else: + schema.add_relation_def(RelationDefinition(_from, _type, _to)) + +class CubicWebSchemaTC(TestCase): + + def test_rql_constraints_inheritance(self): + # isinstance(cstr, RQLVocabularyConstraint) + # -> expected to return RQLVocabularyConstraint and RQLConstraint + # instances but not RQLUniqueConstraint + # + # isinstance(cstr, RQLConstraint) + # -> expected to return RQLConstraint instances but not + # RQLVocabularyConstraint and RQLUniqueConstraint + self.assertFalse(issubclass(RQLUniqueConstraint, RQLVocabularyConstraint)) + self.assertFalse(issubclass(RQLUniqueConstraint, RQLConstraint)) + + def test_entity_perms(self): + self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests'))) + self.assertEqual(eperson.get_groups('update'), set(('managers', 'owners',))) + self.assertEqual(eperson.get_groups('delete'), set(('managers', 'owners'))) + 
self.assertEqual(eperson.get_groups('add'), set(('managers',))) + self.assertEqual([str(e) for e in eperson.get_rqlexprs('add')], + ['Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s']) + eperson.set_action_permissions('read', ('managers',)) + self.assertEqual(eperson.get_groups('read'), set(('managers',))) + + def test_relation_perms(self): + rconcerne = schema.rschema('concerne').rdef('Personne', 'Societe') + self.assertEqual(rconcerne.get_groups('read'), set(('managers', 'users', 'guests'))) + self.assertEqual(rconcerne.get_groups('delete'), set(('managers',))) + self.assertEqual(rconcerne.get_groups('add'), set(('managers', ))) + rconcerne.set_action_permissions('read', ('managers',)) + self.assertEqual(rconcerne.get_groups('read'), set(('managers',))) + self.assertEqual([str(e) for e in rconcerne.get_rqlexprs('add')], + ['Any S,U WHERE U has_update_permission S, S eid %(s)s, U eid %(u)s']) + + def test_erqlexpression(self): + self.assertRaises(RQLSyntaxError, ERQLExpression, '1') + expr = ERQLExpression('X travaille S, S owned_by U') + self.assertEqual(str(expr), 'Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s') + expr = ERQLExpression('X foo S, S bar U, X baz XE, S quux SE HAVING XE > SE') + self.assertEqual(str(expr), 'Any X WHERE X foo S, S bar U, X baz XE, S quux SE, X eid %(x)s, U eid %(u)s HAVING XE > SE') + + def test_rrqlexpression(self): + self.assertRaises(Exception, RRQLExpression, '1') + self.assertRaises(RQLSyntaxError, RRQLExpression, 'O X Y') + expr = RRQLExpression('U has_update_permission O') + self.assertEqual(str(expr), 'Any O,U WHERE U has_update_permission O, O eid %(o)s, U eid %(u)s') + +loader = CubicWebSchemaLoader() +config = TestConfiguration('data', apphome=DATADIR) +config.bootstrap_cubes() + +class SchemaReaderClassTest(TestCase): + + def test_order_eschemas(self): + schema = loader.load(config) + self.assertEqual(order_eschemas([schema['Note'], schema['SubNote']]), + [schema['Note'], schema['SubNote']]) + self.assertEqual(order_eschemas([schema['SubNote'], schema['Note']]), + [schema['Note'], schema['SubNote']]) + + def test_knownValues_load_schema(self): + schema = loader.load(config) + self.assertIsInstance(schema, CubicWebSchema) + self.assertEqual(schema.name, 'data') + entities = sorted([str(e) for e in schema.entities()]) + expected_entities = ['Ami', 'BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card', + 'Date', 'Datetime', 'Decimal', + 'CWCache', 'CWComputedRType', 'CWConstraint', + 'CWConstraintType', 'CWDataImport', 'CWEType', + 'CWAttribute', 'CWGroup', 'EmailAddress', + 'CWRelation', 'CWPermission', 'CWProperty', 'CWRType', + 'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig', + 'CWUniqueTogetherConstraint', 'CWUser', + 'ExternalUri', 'FakeFile', 'Float', 'Int', 'Interval', 'Note', + 'Password', 'Personne', 'Produit', + 'RQLExpression', 'Reference', + 'Service', 'Societe', 'State', 'StateFull', 'String', 'SubNote', 'SubWorkflowExitPoint', + 'Tag', 'TZDatetime', 'TZTime', 'Time', 'Transition', 'TrInfo', + 'Usine', + 'Workflow', 'WorkflowTransition'] + self.assertListEqual(sorted(expected_entities), entities) + relations = sorted([str(r) for r in schema.relations()]) + expected_relations = ['actionnaire', 'add_permission', 'address', 'alias', 'allowed_transition', 'associe', + 'bookmarked_by', 'by_transition', + + 'cardinality', 'comment', 'comment_format', + 'composite', 'condition', 'config', 'connait', + 'constrained_by', 'constraint_of', + 'content', 'content_format', 
'contrat_exclusif', + 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', + 'cwuri', 'cw_for_source', 'cw_import_of', 'cw_host_config_of', 'cw_schema', 'cw_source', + + 'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission', + 'description', 'description_format', 'destination_state', 'dirige', + + 'ean', 'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype', 'extra_props', + + 'fabrique_par', 'final', 'firstname', 'for_user', 'formula', 'fournit', + 'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed', + + 'has_group_permission', 'has_text', + 'identity', 'in_group', 'in_state', 'in_synchronization', 'indexed', + 'initial_state', 'inlined', 'internationalizable', 'is', 'is_instance_of', + + 'label', 'last_login_time', 'latest_retrieval', 'lieu', 'log', 'login', + + 'mainvars', 'match_host', 'modification_date', + + 'name', 'nom', + + 'options', 'ordernum', 'owned_by', + + 'parser', 'path', 'pkey', 'prefered_form', 'prenom', 'primary_email', + + 'read_permission', 'relation_type', 'relations', 'require_group', 'rule', + + 'specializes', 'start_timestamp', 'state_of', 'status', 'subworkflow', 'subworkflow_exit', 'subworkflow_state', 'surname', 'symmetric', 'synopsis', + + 'tags', 'timestamp', 'title', 'to_entity', 'to_state', 'transition_of', 'travaille', 'type', + + 'upassword', 'update_permission', 'url', 'uri', 'use_email', + + 'value', + + 'wf_info_for', 'wikiid', 'workflow_of', 'tr_count'] + + self.assertListEqual(sorted(expected_relations), relations) + + eschema = schema.eschema('CWUser') + rels = sorted(str(r) for r in eschema.subject_relations()) + self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow', + 'cw_source', 'cwuri', 'eid', + 'evaluee', 'firstname', 'has_group_permission', + 'has_text', 'identity', + 'in_group', 'in_state', 'is', + 'is_instance_of', 'last_login_time', + 'login', 'modification_date', 'owned_by', + 'primary_email', 'surname', 'upassword', + 'use_email']) + rels = sorted(r.type for r in eschema.object_relations()) + self.assertListEqual(rels, ['bookmarked_by', 'created_by', 'for_user', + 'identity', 'owned_by', 'wf_info_for']) + rschema = schema.rschema('relation_type') + properties = rschema.rdef('CWAttribute', 'CWRType') + self.assertEqual(properties.cardinality, '1*') + constraints = properties.constraints + self.assertEqual(len(constraints), 1, constraints) + constraint = constraints[0] + self.assertTrue(isinstance(constraint, RQLConstraint)) + self.assertEqual(constraint.expression, 'O final TRUE') + + def test_fulltext_container(self): + schema = loader.load(config) + self.assertIn('has_text', schema['CWUser'].subject_relations()) + self.assertNotIn('has_text', schema['EmailAddress'].subject_relations()) + + def test_permission_settings(self): + schema = loader.load(config) + aschema = schema['TrInfo'].rdef('comment') + self.assertEqual(aschema.get_groups('read'), + set(('managers', 'users', 'guests'))) + self.assertEqual(aschema.get_rqlexprs('read'), + ()) + self.assertEqual(aschema.get_groups('update'), + set(('managers',))) + self.assertEqual([x.expression for x in aschema.get_rqlexprs('update')], + ['U has_update_permission X']) + + def test_nonregr_allowed_type_names(self): + schema = CubicWebSchema('Test Schema') + schema.add_entity_type(EntityType('NaN')) + + def test_relation_perm_overriding(self): + loader = CubicWebSchemaLoader() + config = TestConfiguration('data', apphome=join(dirname(__file__), 'data_schemareader')) + 
config.bootstrap_cubes() + schema = loader.load(config) + rdef = next(iter(schema['in_group'].rdefs.values())) + self.assertEqual(rdef.permissions, + {'read': ('managers',), + 'add': ('managers',), + 'delete': ('managers',)}) + rdef = next(iter(schema['cw_for_source'].rdefs.values())) + self.assertEqual(rdef.permissions, + {'read': ('managers', 'users'), + 'add': ('managers',), + 'delete': ('managers',)}) + + def test_computed_attribute(self): + """Check schema finalization for computed attributes.""" + class Person(EntityType): + salary = Int() + + class works_for(RelationDefinition): + subject = 'Person' + object = 'Company' + cardinality = '?*' + + class Company(EntityType): + total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE ' + 'P works_for X, P salary SA') + good_schema = build_schema_from_namespace(vars().items()) + rdef = good_schema['Company'].rdef('total_salary') + # ensure 'X is Company' is added to the rqlst to avoid ambiguities, see #4901163 + self.assertEqual(str(rdef.formula_select), + 'Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA, X is Company') + # check relation definition permissions + self.assertEqual(rdef.permissions, + {'add': (), 'update': (), + 'read': ('managers', 'users', 'guests')}) + + class Company(EntityType): + total_salary = String(formula='Any SUM(SA) GROUPBY X WHERE ' + 'P works_for X, P salary SA') + + with self.assertRaises(BadSchemaDefinition) as exc: + bad_schema = build_schema_from_namespace(vars().items()) + + self.assertEqual(str(exc.exception), + 'computed attribute total_salary on Company: ' + 'computed attribute type (Int) mismatch with ' + 'specified type (String)') + + +class SchemaReaderComputedRelationAndAttributesTest(TestCase): + + def test_infer_computed_relation(self): + class Person(EntityType): + name = String() + + class Company(EntityType): + name = String() + + class Service(EntityType): + name = String() + + class works_for(RelationDefinition): + subject = 'Person' + object = 'Company' + + class produce(RelationDefinition): + subject = ('Person', 'Company') + object = 'Service' + + class achete(RelationDefinition): + subject = 'Person' + object = 'Service' + + class produces_and_buys(ComputedRelation): + rule = 'S produce O, S achete O' + + class produces_and_buys2(ComputedRelation): + rule = 'S works_for SO, SO produce O' + + class reproduce(ComputedRelation): + rule = 'S produce O' + + schema = build_schema_from_namespace(vars().items()) + + # check object/subject type + self.assertEqual([('Person','Service')], + list(schema['produces_and_buys'].rdefs.keys())) + self.assertEqual([('Person','Service')], + list(schema['produces_and_buys2'].rdefs.keys())) + self.assertCountEqual([('Company', 'Service'), ('Person', 'Service')], + list(schema['reproduce'].rdefs.keys())) + # check relation definitions are marked infered + rdef = schema['produces_and_buys'].rdefs[('Person','Service')] + self.assertTrue(rdef.infered) + # and have no add/delete permissions + self.assertEqual(rdef.permissions, + {'add': (), + 'delete': (), + 'read': ('managers', 'users', 'guests')}) + + class autoname(ComputedRelation): + rule = 'S produce X, X name O' + + with self.assertRaises(BadSchemaDefinition) as cm: + build_schema_from_namespace(vars().items()) + self.assertEqual(str(cm.exception), 'computed relations cannot be final') + + +class BadSchemaTC(TestCase): + def setUp(self): + self.loader = CubicWebSchemaLoader() + self.loader.defined = {} + self.loader.loaded_files = [] + self.loader.post_build_callbacks = [] + + def _test(self, 
schemafile, msg): + self.loader.handle_file(join(DATADIR, schemafile)) + sch = self.loader.schemacls('toto') + with self.assertRaises(BadSchemaDefinition) as cm: + fill_schema(sch, self.loader.defined, False) + self.assertEqual(str(cm.exception), msg) + + def test_lowered_etype(self): + self._test('lowered_etype.py', + "'my_etype' is not a valid name for an entity type. It should " + "start with an upper cased letter and be followed by at least " + "a lower cased letter") + + def test_uppered_rtype(self): + self._test('uppered_rtype.py', + "'ARelation' is not a valid name for a relation type. It should be lower cased") + + def test_rrqlexpr_on_etype(self): + self._test('rrqlexpr_on_eetype.py', + "can't use RRQLExpression on ToTo, use an ERQLExpression") + + def test_erqlexpr_on_rtype(self): + self._test('erqlexpr_on_ertype.py', + "can't use ERQLExpression on relation ToTo toto TuTu, use a RRQLExpression") + + def test_rqlexpr_on_rtype_read(self): + self._test('rqlexpr_on_ertype_read.py', + "can't use rql expression for read permission of relation ToTo toto TuTu") + + def test_rrqlexpr_on_attr(self): + self._test('rrqlexpr_on_attr.py', + "can't use RRQLExpression on attribute ToTo.attr[String], use an ERQLExpression") + + def test_rqlexpr_on_computedrel(self): + self._test('rqlexpr_on_computedrel.py', + "can't use rql expression for read permission of relation Subject computed Object") + + +class NormalizeExpressionTC(TestCase): + + def test(self): + self.assertEqual(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), + 'X bla Y, Y blur Z, Z zigoulou X') + self.assertEqual(normalize_expression('X bla Y, Y name "x,y"'), + 'X bla Y, Y name "x,y"') + + +class RQLExpressionTC(TestCase): + def test_comparison(self): + self.assertEqual(ERQLExpression('X is CWUser', 'X', 0), + ERQLExpression('X is CWUser', 'X', 0)) + self.assertNotEqual(ERQLExpression('X is CWUser', 'X', 0), + ERQLExpression('X is CWGroup', 'X', 0)) + + +class GuessRrqlExprMainVarsTC(TestCase): + def test_exists(self): + mainvars = guess_rrqlexpr_mainvars(normalize_expression('NOT EXISTS(O team_competition C, C level < 3, C concerns S)')) + self.assertEqual(mainvars, set(['S', 'O'])) + + +class RQLConstraintTC(CubicWebTC): + def test_user_constraint(self): + cstr = RQLConstraint('U identity O') + with self.admin_access.repo_cnx() as cnx: + anoneid = cnx.execute('Any X WHERE X login "anon"')[0][0] + self.assertRaises(ValidationError, + cstr.repo_check, cnx, 1, 'rel', anoneid) + self.assertEqual(cstr.repo_check(cnx, 1, cnx.user.eid), + None) # no validation error, constraint checked + + +class WorkflowShemaTC(CubicWebTC): + def test_trinfo_default_format(self): + with self.admin_access.web_request() as req: + tr = req.user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') + self.assertEqual(tr.comment_format, 'text/plain') + + +class CompositeSchemaTC(CubicWebTC): + composites = { + 'BaseTransition': [('condition', 'BaseTransition', 'RQLExpression', 'subject')], + 'CWAttribute': [('add_permission', 'CWAttribute', 'RQLExpression', 'subject'), + ('constrained_by', 'CWAttribute', 'CWConstraint', 'subject'), + ('read_permission', 'CWAttribute', 'RQLExpression', 'subject'), + ('update_permission', 'CWAttribute', 'RQLExpression', 'subject')], + 'CWEType': [('add_permission', 'CWEType', 'RQLExpression', 'subject'), + ('constraint_of', 'CWUniqueTogetherConstraint', 'CWEType', 'object'), + ('cw_schema', 'CWSourceSchemaConfig', 'CWEType', 'object'), + ('delete_permission', 'CWEType', 'RQLExpression', 'subject'), + 
('from_entity', 'CWAttribute', 'CWEType', 'object'), + ('from_entity', 'CWRelation', 'CWEType', 'object'), + ('read_permission', 'CWEType', 'RQLExpression', 'subject'), + ('to_entity', 'CWAttribute', 'CWEType', 'object'), + ('to_entity', 'CWRelation', 'CWEType', 'object'), + ('update_permission', 'CWEType', 'RQLExpression', 'subject')], + 'CWRType': [('cw_schema', 'CWSourceSchemaConfig', 'CWRType', 'object'), + ('relation_type', 'CWAttribute', 'CWRType', 'object'), + ('relation_type', 'CWRelation', 'CWRType', 'object')], + 'CWRelation': [('add_permission', 'CWRelation', 'RQLExpression', 'subject'), + ('constrained_by', 'CWRelation', 'CWConstraint', 'subject'), + ('cw_schema', 'CWSourceSchemaConfig', 'CWRelation', 'object'), + ('delete_permission', 'CWRelation', 'RQLExpression', 'subject'), + ('read_permission', 'CWRelation', 'RQLExpression', 'subject')], + 'CWComputedRType': [('read_permission', 'CWComputedRType', 'RQLExpression', 'subject')], + 'CWSource': [('cw_for_source', 'CWSourceSchemaConfig', 'CWSource', 'object'), + ('cw_host_config_of', 'CWSourceHostConfig', 'CWSource', 'object'), + ('cw_import_of', 'CWDataImport', 'CWSource', 'object'), + ('cw_source', 'Ami', 'CWSource', 'object'), + ('cw_source', 'BaseTransition', 'CWSource', 'object'), + ('cw_source', 'Bookmark', 'CWSource', 'object'), + ('cw_source', 'CWAttribute', 'CWSource', 'object'), + ('cw_source', 'CWCache', 'CWSource', 'object'), + ('cw_source', 'CWComputedRType', 'CWSource', 'object'), + ('cw_source', 'CWConstraint', 'CWSource', 'object'), + ('cw_source', 'CWConstraintType', 'CWSource', 'object'), + ('cw_source', 'CWDataImport', 'CWSource', 'object'), + ('cw_source', 'CWEType', 'CWSource', 'object'), + ('cw_source', 'CWGroup', 'CWSource', 'object'), + ('cw_source', 'CWPermission', 'CWSource', 'object'), + ('cw_source', 'CWProperty', 'CWSource', 'object'), + ('cw_source', 'CWRType', 'CWSource', 'object'), + ('cw_source', 'CWRelation', 'CWSource', 'object'), + ('cw_source', 'CWSource', 'CWSource', 'object'), + ('cw_source', 'CWSourceHostConfig', 'CWSource', 'object'), + ('cw_source', 'CWSourceSchemaConfig', 'CWSource', 'object'), + ('cw_source', 'CWUniqueTogetherConstraint', 'CWSource', 'object'), + ('cw_source', 'CWUser', 'CWSource', 'object'), + ('cw_source', 'Card', 'CWSource', 'object'), + ('cw_source', 'EmailAddress', 'CWSource', 'object'), + ('cw_source', 'ExternalUri', 'CWSource', 'object'), + ('cw_source', 'FakeFile', 'CWSource', 'object'), + ('cw_source', 'Note', 'CWSource', 'object'), + ('cw_source', 'Personne', 'CWSource', 'object'), + ('cw_source', 'Produit', 'CWSource', 'object'), + ('cw_source', 'RQLExpression', 'CWSource', 'object'), + ('cw_source', 'Reference', 'CWSource', 'object'), + ('cw_source', 'Service', 'CWSource', 'object'), + ('cw_source', 'Societe', 'CWSource', 'object'), + ('cw_source', 'State', 'CWSource', 'object'), + ('cw_source', 'StateFull', 'CWSource', 'object'), + ('cw_source', 'SubNote', 'CWSource', 'object'), + ('cw_source', 'SubWorkflowExitPoint', 'CWSource', 'object'), + ('cw_source', 'Tag', 'CWSource', 'object'), + ('cw_source', 'TrInfo', 'CWSource', 'object'), + ('cw_source', 'Transition', 'CWSource', 'object'), + ('cw_source', 'Usine', 'CWSource', 'object'), + ('cw_source', 'Workflow', 'CWSource', 'object'), + ('cw_source', 'WorkflowTransition', 'CWSource', 'object')], + 'CWUser': [('for_user', 'CWProperty', 'CWUser', 'object'), + ('use_email', 'CWUser', 'EmailAddress', 'subject'), + ('wf_info_for', 'TrInfo', 'CWUser', 'object')], + 'StateFull': [('wf_info_for', 'TrInfo', 
'StateFull', 'object')], + 'Transition': [('condition', 'Transition', 'RQLExpression', 'subject')], + 'Workflow': [('state_of', 'State', 'Workflow', 'object'), + ('transition_of', 'BaseTransition', 'Workflow', 'object'), + ('transition_of', 'Transition', 'Workflow', 'object'), + ('transition_of', 'WorkflowTransition', 'Workflow', 'object')], + 'WorkflowTransition': [('condition', 'WorkflowTransition', 'RQLExpression', 'subject'), + ('subworkflow_exit', 'WorkflowTransition', 'SubWorkflowExitPoint', 'subject')] + } + + def test_composite_entities(self): + schema = self.vreg.schema + self.assertEqual(sorted(self.composites), + [eschema.type for eschema in sorted(schema.entities()) + if eschema.is_composite]) + for etype in self.composites: + self.set_description('composite rdefs for %s' % etype) + yield self.assertEqual, self.composites[etype], \ + sorted([(r.rtype.type, r.subject.type, r.object.type, role) + for r, role in schema[etype].composite_rdef_roles]) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_spa2rql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_spa2rql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,224 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +import unittest + +from logilab.common.testlib import TestCase, unittest_main +from cubicweb.devtools import TestServerConfiguration +from cubicweb.xy import xy + +SKIPCAUSE = None +try: + from cubicweb.spa2rql import Sparql2rqlTranslator +except ImportError as exc: + SKIPCAUSE = str(exc) + +xy.add_equivalence('Project', 'doap:Project') +xy.add_equivalence('Project creation_date', 'doap:Project doap:created') +xy.add_equivalence('Project name', 'doap:Project doap:name') +xy.add_equivalence('Project name', 'doap:Project dc:title') + + +config = TestServerConfiguration('data') +config.bootstrap_cubes() +schema = config.load_schema() + + +@unittest.skipIf(SKIPCAUSE, SKIPCAUSE) +class XYTC(TestCase): + def setUp(self): + self.tr = Sparql2rqlTranslator(schema) + + def _test(self, sparql, rql, args={}): + qi = self.tr.translate(sparql) + self.assertEqual(qi.finalize(), (rql, args)) + + def XXX_test_base_01(self): + self._test('SELECT * WHERE { }', 'Any X') + + + def test_base_is(self): + self._test(''' + PREFIX doap: + SELECT ?project + WHERE { + ?project a doap:Project; + }''', 'Any PROJECT WHERE PROJECT is Project') + + def test_base_rdftype(self): + self._test(''' + PREFIX doap: + PREFIX rdf: + SELECT ?project + WHERE { + ?project rdf:type doap:Project. + }''', 'Any PROJECT WHERE PROJECT is Project') + + def test_base_attr_sel(self): + self._test(''' + PREFIX doap: + SELECT ?created + WHERE { + ?project a doap:Project; + doap:created ?created. 
+ }''', 'Any CREATED WHERE PROJECT creation_date CREATED, PROJECT is Project') + + + def test_base_attr_sel_distinct(self): + self._test(''' + PREFIX doap: + SELECT DISTINCT ?name + WHERE { + ?project a doap:Project; + doap:name ?name. + }''', 'DISTINCT Any NAME WHERE PROJECT name NAME, PROJECT is Project') + + + def test_base_attr_sel_reduced(self): + self._test(''' + PREFIX doap: + SELECT REDUCED ?name + WHERE { + ?project a doap:Project; + doap:name ?name. + }''', 'Any NAME WHERE PROJECT name NAME, PROJECT is Project') + + + def test_base_attr_sel_limit_offset(self): + self._test(''' + PREFIX doap: + SELECT ?name + WHERE { + ?project a doap:Project; + doap:name ?name. + } + LIMIT 20''', 'Any NAME LIMIT 20 WHERE PROJECT name NAME, PROJECT is Project') + self._test(''' + PREFIX doap: + SELECT ?name + WHERE { + ?project a doap:Project; + doap:name ?name. + } + LIMIT 20 OFFSET 10''', 'Any NAME LIMIT 20 OFFSET 10 WHERE PROJECT name NAME, PROJECT is Project') + + + def test_base_attr_sel_orderby(self): + self._test(''' + PREFIX doap: + SELECT ?name + WHERE { + ?project a doap:Project; + doap:name ?name; + doap:created ?created. + } + ORDER BY ?name DESC(?created)''', 'Any NAME ORDERBY NAME ASC, CREATED DESC WHERE PROJECT name NAME, PROJECT creation_date CREATED, PROJECT is Project') + + + def test_base_any_attr_sel(self): + self._test(''' + PREFIX dc: + SELECT ?x ?cd + WHERE { + ?x dc:date ?cd; + }''', 'Any X, CD WHERE X creation_date CD') + + + def test_base_any_attr_sel_amb(self): + xy.add_equivalence('Version publication_date', 'doap:Version dc:date') + try: + self._test(''' + PREFIX dc: + SELECT ?x ?cd + WHERE { + ?x dc:date ?cd; + }''', '(Any X, CD WHERE , X creation_date CD) UNION (Any X, CD WHERE , X publication_date CD, X is Version)') + finally: + xy.remove_equivalence('Version publication_date', 'doap:Version dc:date') + + + def test_base_any_attr_sel_amb_limit_offset(self): + xy.add_equivalence('Version publication_date', 'doap:Version dc:date') + try: + self._test(''' + PREFIX dc: + SELECT ?x ?cd + WHERE { + ?x dc:date ?cd; + } + LIMIT 20 OFFSET 10''', 'Any X, CD LIMIT 20 OFFSET 10 WITH X, CD BEING ((Any X, CD WHERE , X creation_date CD) UNION (Any X, CD WHERE , X publication_date CD, X is Version))') + finally: + xy.remove_equivalence('Version publication_date', 'doap:Version dc:date') + + + def test_base_any_attr_sel_amb_orderby(self): + xy.add_equivalence('Version publication_date', 'doap:Version dc:date') + try: + self._test(''' + PREFIX dc: + SELECT ?x ?cd + WHERE { + ?x dc:date ?cd; + } + ORDER BY DESC(?cd)''', 'Any X, CD ORDERBY CD DESC WITH X, CD BEING ((Any X, CD WHERE , X creation_date CD) UNION (Any X, CD WHERE , X publication_date CD, X is Version))') + finally: + xy.remove_equivalence('Version publication_date', 'doap:Version dc:date') + + + def test_restr_attr(self): + self._test(''' + PREFIX doap: + SELECT ?project + WHERE { + ?project a doap:Project; + doap:name "cubicweb". + }''', 'Any PROJECT WHERE PROJECT name %(a)s, PROJECT is Project', {'a': 'cubicweb'}) + + def test_dctitle_both_project_cwuser(self): + self._test(''' + PREFIX doap: + PREFIX dc: + SELECT ?project ?title + WHERE { + ?project a doap:Project; + dc:title ?title. 
+ }''', 'Any PROJECT, TITLE WHERE PROJECT name TITLE, PROJECT is Project') + +# # Two elements in the group +# PREFIX : +# SELECT * +# { :p :q :r OPTIONAL { :a :b :c } +# :p :q :r OPTIONAL { :a :b :c } +# } + +# PREFIX : +# SELECT * +# { +# { ?s ?p ?o } UNION { ?a ?b ?c } +# } + +# PREFIX dob: +# PREFIX time: +# PREFIX dc: +# SELECT ?desc +# WHERE { +# dob:1D a time:ProperInterval; +# dc:description ?desc. +# } + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_toolsutils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_toolsutils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,57 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.toolsutils import RQLExecuteMatcher + + +class RQLExecuteMatcherTests(TestCase): + def matched_query(self, text): + match = RQLExecuteMatcher.match(text) + if match is None: + return None + return match['rql_query'] + + def test_unknown_function_dont_match(self): + self.assertIsNone(self.matched_query('foo')) + self.assertIsNone(self.matched_query('rql(')) + self.assertIsNone(self.matched_query('hell("")')) + self.assertIsNone(self.matched_query('eval("rql(\'bla\'')) + + def test_rql_other_parameters_dont_match(self): + self.assertIsNone(self.matched_query('rql("Any X WHERE X eid %(x)s")')) + self.assertIsNone(self.matched_query('rql("Any X WHERE X eid %(x)s", {')) + self.assertIsNone(self.matched_query('session.execute("Any X WHERE X eid %(x)s")')) + self.assertIsNone(self.matched_query('session.execute("Any X WHERE X eid %(x)s", {')) + + def test_rql_function_match(self): + for func_expr in ('rql', 'session.execute'): + query = self.matched_query('%s("Any X WHERE X is ' % func_expr) + self.assertEqual(query, 'Any X WHERE X is ') + + def test_offseted_rql_function_match(self): + """check indentation is allowed""" + for func_expr in (' rql', ' session.execute'): + query = self.matched_query('%s("Any X WHERE X is ' % func_expr) + self.assertEqual(query, 'Any X WHERE X is ') + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_uilib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_uilib.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittests for cubicweb.uilib""" + +__docformat__ = "restructuredtext en" + + +import pkg_resources + +try: + from unittest import skipIf +except ImportError: + from unittest2 import skipIf + +from logilab.common.testlib import DocTest, TestCase, unittest_main + +from cubicweb import uilib, utils as cwutils + +lxml_version = pkg_resources.get_distribution('lxml').version.split('.') + +class UILIBTC(TestCase): + + def test_remove_tags(self): + """make sure remove_tags remove all tags""" + data = [ + ('

<h1>Hello</h1>', 'Hello'), + ('<h1>Hello <a href="foo/bar"><b>spam</b></a></h1>', 'Hello spam'), + ('<br>Hello<img src="doh.png"/>', 'Hello'), + ('<p></p>
', ''), + ] + for text, expected in data: + got = uilib.remove_html_tags(text) + self.assertEqual(got, expected) + + def test_fallback_safe_cut(self): + self.assertEqual(uilib.fallback_safe_cut(u'ab cd', 4), u'ab c...') + self.assertEqual(uilib.fallback_safe_cut(u'ab cd', 5), u'ab cd') + self.assertEqual(uilib.fallback_safe_cut(u'ab &d', 4), u'ab &...') + self.assertEqual(uilib.fallback_safe_cut(u'ab &d ef', 5), u'ab &d...') + self.assertEqual(uilib.fallback_safe_cut(u'ab ìd', 4), u'ab ì...') + self.assertEqual(uilib.fallback_safe_cut(u'& &d ef', 4), u'& &d...') + + def test_lxml_safe_cut(self): + self.assertEqual(uilib.safe_cut(u'aaa
aaad
ef', 4), u'

aaa

a...
') + self.assertEqual(uilib.safe_cut(u'aaa
aaad
ef', 7), u'

aaa

aaad
...') + self.assertEqual(uilib.safe_cut(u'aaa
aaad
', 7), u'

aaa

aaad
') + # Missing ellipsis due to space management but we don't care + self.assertEqual(uilib.safe_cut(u'ab &d', 4), u'

ab &...

') + + def test_cut(self): + """tests uilib.cut() behaviour""" + data = [ + ('hello', 'hello'), + ('hello world', 'hello wo...'), + ("hellO' world", "hellO..."), + ] + for text, expected in data: + got = uilib.cut(text, 8) + self.assertEqual(got, expected) + + def test_text_cut(self): + """tests uilib.text_cut() behaviour with no text""" + data = [('',''), + ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod +tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, +quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo +consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse +cillum dolore eu fugiat nulla pariatur.""", + "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ +tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, \ +quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo \ +consequat."), + ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod +tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, +quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo +consequat Duis aute irure dolor in reprehenderit in voluptate velit esse +cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non +proident, sunt in culpa qui officia deserunt mollit anim id est laborum +""", + "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ +tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, \ +quis nostrud exercitation ullamco laboris nisi"), + ] + for text, expected in data: + got = uilib.text_cut(text, 30) + self.assertEqual(got, expected) + + def test_soup2xhtml_0(self): + self.assertEqual(uilib.soup2xhtml('hop\r\nhop', 'ascii'), + 'hop\nhop') + + def test_soup2xhtml_1_1(self): + self.assertEqual(uilib.soup2xhtml('hop', 'ascii'), + 'hop') + self.assertEqual(uilib.soup2xhtml('hop
', 'ascii'), + 'hop
') + self.assertEqual(uilib.soup2xhtml('hop
', 'ascii'), + 'hop
') + self.assertEqual(uilib.soup2xhtml('
hop', 'ascii'), + '
hop
') + self.assertEqual(uilib.soup2xhtml('hop
hop', 'ascii'), + 'hop
hop
') + + def test_soup2xhtml_1_2(self): + self.assertEqual(uilib.soup2xhtml('hop
', 'ascii'), + 'hop ') + self.assertEqual(uilib.soup2xhtml('
hop', 'ascii'), + '
hop') + self.assertEqual(uilib.soup2xhtml('hop
hop', 'ascii'), + '
hop
hop') + + def test_soup2xhtml_2_1(self): + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), + 'hop ') + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), + ' hop') + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), + 'hop hop') + + def test_soup2xhtml_2_2a(self): + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), + 'hop ') + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), + ' hop') + + @skipIf(lxml_version < ['2', '2'], 'expected behaviour on recent version of lxml only') + def test_soup2xhtml_2_2b(self): + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), + 'hop hop') + + def test_soup2xhtml_3_1(self): + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), + 'hop ') + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), + ' hop') + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), + 'hop hop') + + def test_soup2xhtml_3_2(self): + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), + 'hop ') + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), + ' hop') + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), + 'hop hop') + + def test_soup2xhtml_3_3(self): + self.assertEqual(uilib.soup2xhtml(' hop ', 'ascii'), + ' hop ') + + def test_js(self): + self.assertEqual(str(uilib.js.pouet(1, "2")), + 'pouet(1,"2")') + self.assertEqual(str(uilib.js.cw.pouet(1, "2")), + 'cw.pouet(1,"2")') + self.assertEqual(str(uilib.js.cw.pouet(1, "2").pouet(None)), + 'cw.pouet(1,"2").pouet(null)') + self.assertEqual(str(uilib.js.cw.pouet(1, cwutils.JSString("$")).pouet(None)), + 'cw.pouet(1,$).pouet(null)') + self.assertEqual(str(uilib.js.cw.pouet(1, {'callback': cwutils.JSString("cw.cb")}).pouet(None)), + 'cw.pouet(1,{callback: cw.cb}).pouet(null)') + + + def test_embedded_css(self): + incoming = u"""voir le ticket

text

""" + expected = 'voir le ticket

text

' + self.assertMultiLineEqual(uilib.soup2xhtml(incoming, 'ascii'), expected) + + def test_unknown_namespace(self): + incoming = '''\ +\ +\ +
XXXXXXX
''' + expected = '''\ +\ +\ +
XXXXXXX
''' + self.assertMultiLineEqual(uilib.soup2xhtml(incoming, 'ascii'), expected) + + +class DocTest(DocTest): + module = uilib + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_utils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_utils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,319 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unit tests for module cubicweb.utils""" + +import re +import decimal +import datetime + +from six.moves import range + +from logilab.common.testlib import TestCase, DocTest, unittest_main + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.utils import (make_uid, UStringIO, RepeatList, HTMLHead, + QueryCache, parse_repo_uri) +from cubicweb.entity import Entity + +try: + from cubicweb.utils import CubicWebJsonEncoder, json +except ImportError: + json = None + +class MakeUidTC(TestCase): + def test_1(self): + self.assertNotEqual(make_uid('xyz'), make_uid('abcd')) + self.assertNotEqual(make_uid('xyz'), make_uid('xyz')) + + def test_2(self): + d = set() + while len(d)<10000: + uid = make_uid('xyz') + if uid in d: + self.fail(len(d)) + if re.match('\d', uid): + self.fail('make_uid must not return something begining with ' + 'some numeric character, got %s' % uid) + d.add(uid) + + +class TestParseRepoUri(TestCase): + + def test_parse_repo_uri(self): + self.assertEqual(('inmemory', None, 'myapp'), + parse_repo_uri('myapp')) + self.assertEqual(('inmemory', None, 'myapp'), + parse_repo_uri('inmemory://myapp')) + with self.assertRaises(NotImplementedError): + parse_repo_uri('foo://bar') + + + +class TestQueryCache(TestCase): + def test_querycache(self): + c = QueryCache(ceiling=20) + # write only + for x in range(10): + c[x] = x + self.assertEqual(c._usage_report(), + {'transientcount': 0, + 'itemcount': 10, + 'permanentcount': 0}) + c = QueryCache(ceiling=10) + # we should also get a warning + for x in range(20): + c[x] = x + self.assertEqual(c._usage_report(), + {'transientcount': 0, + 'itemcount': 10, + 'permanentcount': 0}) + # write + reads + c = QueryCache(ceiling=20) + for n in range(4): + for x in range(10): + c[x] = x + c[x] + self.assertEqual(c._usage_report(), + {'transientcount': 10, + 'itemcount': 10, + 'permanentcount': 0}) + c = QueryCache(ceiling=20) + for n in range(17): + for x in range(10): + c[x] = x + c[x] + self.assertEqual(c._usage_report(), + {'transientcount': 0, + 'itemcount': 10, + 'permanentcount': 10}) + c = QueryCache(ceiling=20) + for n in range(17): + for x in range(10): + c[x] = x + if n % 2: + c[x] + if x % 2: + c[x] + self.assertEqual(c._usage_report(), + {'transientcount': 5, + 'itemcount': 10, + 'permanentcount': 5}) + +class UStringIOTC(TestCase): + def test_boolean_value(self): + 
self.assertTrue(UStringIO()) + + +class RepeatListTC(TestCase): + + def test_base(self): + l = RepeatList(3, (1, 3)) + self.assertEqual(l[0], (1, 3)) + self.assertEqual(l[2], (1, 3)) + self.assertEqual(l[-1], (1, 3)) + self.assertEqual(len(l), 3) + # XXX + self.assertEqual(l[4], (1, 3)) + + self.assertFalse(RepeatList(0, None)) + + def test_slice(self): + l = RepeatList(3, (1, 3)) + self.assertEqual(l[0:1], [(1, 3)]) + self.assertEqual(l[0:4], [(1, 3)]*3) + self.assertEqual(l[:], [(1, 3)]*3) + + def test_iter(self): + self.assertEqual(list(RepeatList(3, (1, 3))), + [(1, 3)]*3) + + def test_add(self): + l = RepeatList(3, (1, 3)) + self.assertEqual(l + [(1, 4)], [(1, 3)]*3 + [(1, 4)]) + self.assertEqual([(1, 4)] + l, [(1, 4)] + [(1, 3)]*3) + self.assertEqual(l + RepeatList(2, (2, 3)), [(1, 3)]*3 + [(2, 3)]*2) + + x = l + RepeatList(2, (1, 3)) + self.assertIsInstance(x, RepeatList) + self.assertEqual(len(x), 5) + self.assertEqual(x[0], (1, 3)) + + x = l + [(1, 3)] * 2 + self.assertEqual(x, [(1, 3)] * 5) + + def test_eq(self): + self.assertEqual(RepeatList(3, (1, 3)), + [(1, 3)]*3) + + def test_pop(self): + l = RepeatList(3, (1, 3)) + l.pop(2) + self.assertEqual(l, [(1, 3)]*2) + + +class JSONEncoderTC(TestCase): + def setUp(self): + if json is None: + self.skipTest('json not available') + + def encode(self, value): + return json.dumps(value, cls=CubicWebJsonEncoder) + + def test_encoding_dates(self): + self.assertEqual(self.encode(datetime.datetime(2009, 9, 9, 20, 30)), + '"2009/09/09 20:30:00"') + self.assertEqual(self.encode(datetime.date(2009, 9, 9)), + '"2009/09/09"') + self.assertEqual(self.encode(datetime.time(20, 30)), + '"20:30:00"') + + def test_encoding_decimal(self): + self.assertEqual(self.encode(decimal.Decimal('1.2')), '1.2') + + def test_encoding_bare_entity(self): + e = Entity(None) + e.cw_attr_cache['pouet'] = 'hop' + e.eid = 2 + self.assertEqual(json.loads(self.encode(e)), + {'pouet': 'hop', 'eid': 2}) + + def test_encoding_entity_in_list(self): + e = Entity(None) + e.cw_attr_cache['pouet'] = 'hop' + e.eid = 2 + self.assertEqual(json.loads(self.encode([e])), + [{'pouet': 'hop', 'eid': 2}]) + + def test_encoding_unknown_stuff(self): + self.assertEqual(self.encode(TestCase), 'null') + +class HTMLHeadTC(CubicWebTC): + + def htmlhead(self, datadir_url): + with self.admin_access.web_request() as req: + base_url = u'http://test.fr/data/' + req.datadir_url = base_url + head = HTMLHead(req) + return head + + def test_concat_urls(self): + base_url = u'http://test.fr/data/' + head = self.htmlhead(base_url) + urls = [base_url + u'bob1.js', + base_url + u'bob2.js', + base_url + u'bob3.js'] + result = head.concat_urls(urls) + expected = u'http://test.fr/data/??bob1.js,bob2.js,bob3.js' + self.assertEqual(result, expected) + + def test_group_urls(self): + base_url = u'http://test.fr/data/' + head = self.htmlhead(base_url) + urls_spec = [(base_url + u'bob0.js', None), + (base_url + u'bob1.js', None), + (u'http://ext.com/bob2.js', None), + (u'http://ext.com/bob3.js', None), + (base_url + u'bob4.css', 'all'), + (base_url + u'bob5.css', 'all'), + (base_url + u'bob6.css', 'print'), + (base_url + u'bob7.css', 'print'), + (base_url + u'bob8.css', ('all', u'[if IE 8]')), + (base_url + u'bob9.css', ('print', u'[if IE 8]')) + ] + result = head.group_urls(urls_spec) + expected = [(base_url + u'??bob0.js,bob1.js', None), + (u'http://ext.com/bob2.js', None), + (u'http://ext.com/bob3.js', None), + (base_url + u'??bob4.css,bob5.css', 'all'), + (base_url + u'??bob6.css,bob7.css', 'print'), + (base_url + 
u'bob8.css', ('all', u'[if IE 8]')), + (base_url + u'bob9.css', ('print', u'[if IE 8]')) + ] + self.assertEqual(list(result), expected) + + def test_getvalue_with_concat(self): + self.config.global_set_option('concat-resources', True) + base_url = u'http://test.fr/data/' + head = self.htmlhead(base_url) + head.add_js(base_url + u'bob0.js') + head.add_js(base_url + u'bob1.js') + head.add_js(u'http://ext.com/bob2.js') + head.add_js(u'http://ext.com/bob3.js') + head.add_css(base_url + u'bob4.css') + head.add_css(base_url + u'bob5.css') + head.add_css(base_url + u'bob6.css', 'print') + head.add_css(base_url + u'bob7.css', 'print') + head.add_ie_css(base_url + u'bob8.css') + head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]') + result = head.getvalue() + expected = u""" + + + + + + + +""" + self.assertEqual(result, expected) + + def test_getvalue_without_concat(self): + self.config.global_set_option('concat-resources', False) + try: + base_url = u'http://test.fr/data/' + head = self.htmlhead(base_url) + head.add_js(base_url + u'bob0.js') + head.add_js(base_url + u'bob1.js') + head.add_js(u'http://ext.com/bob2.js') + head.add_js(u'http://ext.com/bob3.js') + head.add_css(base_url + u'bob4.css') + head.add_css(base_url + u'bob5.css') + head.add_css(base_url + u'bob6.css', 'print') + head.add_css(base_url + u'bob7.css', 'print') + head.add_ie_css(base_url + u'bob8.css') + head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]') + result = head.getvalue() + expected = u""" + + + + + + + + + + +""" + self.assertEqual(result, expected) + finally: + self.config.global_set_option('concat-resources', True) + +class DocTest(DocTest): + from cubicweb import utils as module + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/test/unittest_vregistry.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/test/unittest_vregistry.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,90 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +from logilab.common.testlib import unittest_main, TestCase + +from os.path import join + +from cubicweb import CW_SOFTWARE_ROOT as BASE +from cubicweb.appobject import AppObject +from cubicweb.cwvreg import CWRegistryStore, UnknownProperty +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.view import EntityAdapter + +from cubes.card.entities import Card + +class YesSchema: + def __contains__(self, something): + return True + +WEBVIEWSDIR = join(BASE, 'web', 'views') + +class VRegistryTC(TestCase): + + def setUp(self): + config = TestServerConfiguration('data') + self.vreg = CWRegistryStore(config) + config.bootstrap_cubes() + self.vreg.schema = config.load_schema() + + def test_load_interface_based_vojects(self): + self.vreg.init_registration([WEBVIEWSDIR]) + self.vreg.load_file(join(BASE, 'entities', '__init__.py'), 'cubicweb.entities.__init__') + self.vreg.load_file(join(WEBVIEWSDIR, 'idownloadable.py'), 'cubicweb.web.views.idownloadable') + self.vreg.load_file(join(WEBVIEWSDIR, 'primary.py'), 'cubicweb.web.views.primary') + self.assertEqual(len(self.vreg['views']['primary']), 2) + self.vreg.initialization_completed() + self.assertEqual(len(self.vreg['views']['primary']), 1) + + + def test_load_subinterface_based_appobjects(self): + self.vreg.register_objects([join(BASE, 'web', 'views', 'idownloadable.py')]) + # check downloadlink was kicked + self.assertFalse(self.vreg['views'].get('downloadlink')) + # we've to emulate register_objects to add custom MyCard objects + path = [join(BASE, 'entities', '__init__.py'), + join(BASE, 'entities', 'adapters.py'), + join(BASE, 'web', 'views', 'idownloadable.py')] + filemods = self.vreg.init_registration(path, None) + for filepath, modname in filemods: + self.vreg.load_file(filepath, modname) + class CardIDownloadableAdapter(EntityAdapter): + __regid__ = 'IDownloadable' + self.vreg._loadedmods[__name__] = {} + self.vreg.register(CardIDownloadableAdapter) + self.vreg.initialization_completed() + # check progressbar isn't kicked + self.assertEqual(len(self.vreg['views']['downloadlink']), 1) + + def test_properties(self): + self.vreg.reset() + self.assertNotIn('system.version.cubicweb', self.vreg['propertydefs']) + self.assertTrue(self.vreg.property_info('system.version.cubicweb')) + self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key') + + +class CWVregTC(CubicWebTC): + + def test_property_default_overriding(self): + # see data/views.py + from cubicweb.web.views.xmlrss import RSSIconBox + self.assertEqual(self.vreg.property_info(RSSIconBox._cwpropkey('visible'))['default'], True) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/toolsutils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/toolsutils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,415 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""some utilities for cubicweb command line tools""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +# XXX move most of this in logilab.common (shellutils ?) + +import io +import os, sys +import subprocess +from os import listdir, makedirs, environ, chmod, walk, remove +from os.path import exists, join, abspath, normpath +import re +from rlcompleter import Completer +try: + import readline +except ImportError: # readline not available, no completion + pass +try: + from os import symlink +except ImportError: + def symlink(*args): + raise NotImplementedError + +from six import add_metaclass + +from logilab.common.clcommands import Command as BaseCommand +from logilab.common.shellutils import ASK + +from cubicweb import warning # pylint: disable=E0611 +from cubicweb import ConfigurationError, ExecutionError + +def underline_title(title, car='-'): + return title+'\n'+(car*len(title)) + +def iter_dir(directory, condition_file=None, ignore=()): + """iterate on a directory""" + for sub in listdir(directory): + if sub in ('CVS', '.svn', '.hg'): + continue + if condition_file is not None and \ + not exists(join(directory, sub, condition_file)): + continue + if sub in ignore: + continue + yield sub + +def create_dir(directory): + """create a directory if it doesn't exist yet""" + try: + makedirs(directory) + print('-> created directory %s' % directory) + except OSError as ex: + import errno + if ex.errno != errno.EEXIST: + raise + print('-> no need to create existing directory %s' % directory) + +def create_symlink(source, target): + """create a symbolic link""" + if exists(target): + remove(target) + symlink(source, target) + print('[symlink] %s <-- %s' % (target, source)) + +def create_copy(source, target): + import shutil + print('[copy] %s <-- %s' % (target, source)) + shutil.copy2(source, target) + +def rm(whatever): + import shutil + shutil.rmtree(whatever) + print('-> removed %s' % whatever) + +def show_diffs(appl_file, ref_file, askconfirm=True): + """interactivly replace the old file with the new file according to + user decision + """ + import shutil + pipe = subprocess.Popen(['diff', '-u', appl_file, ref_file], stdout=subprocess.PIPE) + diffs = pipe.stdout.read() + if diffs: + if askconfirm: + print() + print(diffs) + action = ASK.ask('Replace ?', ('Y', 'n', 'q'), 'Y').lower() + else: + action = 'y' + if action == 'y': + try: + shutil.copyfile(ref_file, appl_file) + except IOError: + os.system('chmod a+w %s' % appl_file) + shutil.copyfile(ref_file, appl_file) + print('replaced') + elif action == 'q': + sys.exit(0) + else: + copy_file = appl_file + '.default' + copy = open(copy_file, 'w') + copy.write(open(ref_file).read()) + copy.close() + print('keep current version, the new file has been written to', copy_file) + else: + print('no diff between %s and %s' % (appl_file, ref_file)) + +SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py') +def copy_skeleton(skeldir, targetdir, context, + exclude=SKEL_EXCLUDE, askconfirm=False): + import shutil + from fnmatch import fnmatch + skeldir = normpath(skeldir) + targetdir = normpath(targetdir) + for dirpath, dirnames, filenames in walk(skeldir): + tdirpath = dirpath.replace(skeldir, targetdir) + create_dir(tdirpath) + for fname in filenames: + if any(fnmatch(fname, pat) for pat in exclude): + continue + fpath = join(dirpath, 
fname) + if 'CUBENAME' in fname: + tfpath = join(tdirpath, fname.replace('CUBENAME', context['cubename'])) + elif 'DISTNAME' in fname: + tfpath = join(tdirpath, fname.replace('DISTNAME', context['distname'])) + else: + tfpath = join(tdirpath, fname) + if fname.endswith('.tmpl'): + tfpath = tfpath[:-5] + if not askconfirm or not exists(tfpath) or \ + ASK.confirm('%s exists, overwrite?' % tfpath): + fill_templated_file(fpath, tfpath, context) + print('[generate] %s <-- %s' % (tfpath, fpath)) + elif exists(tfpath): + show_diffs(tfpath, fpath, askconfirm) + else: + shutil.copyfile(fpath, tfpath) + +def fill_templated_file(fpath, tfpath, context): + with io.open(fpath, encoding='ascii') as fobj: + template = fobj.read() + with io.open(tfpath, 'w', encoding='ascii') as fobj: + fobj.write(template % context) + +def restrict_perms_to_user(filepath, log=None): + """set -rw------- permission on the given file""" + if log: + log('set permissions to 0600 for %s', filepath) + else: + print('-> set permissions to 0600 for %s' % filepath) + chmod(filepath, 0o600) + +def read_config(config_file, raise_if_unreadable=False): + """read some simple configuration from `config_file` and return it as a + dictionary. If `raise_if_unreadable` is false (the default), an empty + dictionary will be returned if the file is inexistant or unreadable, else + :exc:`ExecutionError` will be raised. + """ + from logilab.common.fileutils import lines + config = current = {} + try: + for line in lines(config_file, comments='#'): + try: + option, value = line.split('=', 1) + except ValueError: + option = line.strip().lower() + if option[0] == '[': + # start a section + section = option[1:-1] + assert section not in config, \ + 'Section %s is defined more than once' % section + config[section] = current = {} + continue + sys.stderr.write('ignoring malformed line\n%r\n' % line) + continue + option = option.strip().replace(' ', '_') + value = value.strip() + current[option] = value or None + except IOError as ex: + if raise_if_unreadable: + raise ExecutionError('%s. Are you logged with the correct user ' + 'to use this instance?' 
% ex) + else: + warning('missing or non readable configuration file %s (%s)', + config_file, ex) + return config + + +_HDLRS = {} + +class metacmdhandler(type): + def __new__(mcs, name, bases, classdict): + cls = super(metacmdhandler, mcs).__new__(mcs, name, bases, classdict) + if getattr(cls, 'cfgname', None) and getattr(cls, 'cmdname', None): + _HDLRS.setdefault(cls.cmdname, []).append(cls) + return cls + + +@add_metaclass(metacmdhandler) +class CommandHandler(object): + """configuration specific helper for cubicweb-ctl commands""" + def __init__(self, config): + self.config = config + + +class Command(BaseCommand): + """base class for cubicweb-ctl commands""" + + def config_helper(self, config, required=True, cmdname=None): + if cmdname is None: + cmdname = self.name + for helpercls in _HDLRS.get(cmdname, ()): + if helpercls.cfgname == config.name: + return helpercls(config) + if config.name == 'all-in-one': + for helpercls in _HDLRS.get(cmdname, ()): + if helpercls.cfgname == 'repository': + return helpercls(config) + if required: + msg = 'No helper for command %s using %s configuration' % ( + cmdname, config.name) + raise ConfigurationError(msg) + + def fail(self, reason): + print("command failed:", reason) + sys.exit(1) + + +CONNECT_OPTIONS = ( + ("user", + {'short': 'u', 'type' : 'string', 'metavar': '', + 'help': 'connect as instead of being prompted to give it.', + } + ), + ("password", + {'short': 'p', 'type' : 'password', 'metavar': '', + 'help': 'automatically give for authentication instead of \ +being prompted to give it.', + }), + ("host", + {'short': 'H', 'type' : 'string', 'metavar': '', + 'default': None, + 'help': 'specify the name server\'s host name. Will be detected by \ +broadcast if not provided.', + }), + ) + +## cwshell helpers ############################################################# + +class AbstractMatcher(object): + """Abstract class for CWShellCompleter's matchers. + + A matcher should implement a ``possible_matches`` method. This + method has to return the list of possible completions for user's input. + Because of the python / readline interaction, each completion should + be a superset of the user's input. + + NOTE: readline tokenizes user's input and only passes last token to + completers. + """ + + def possible_matches(self, text): + """return possible completions for user's input. + + Parameters: + text: the user's input + + Return: + a list of completions. Each completion includes the original input. + """ + raise NotImplementedError() + + +class RQLExecuteMatcher(AbstractMatcher): + """Custom matcher for rql queries. + + If user's input starts with ``rql(`` or ``session.execute(`` and + the corresponding rql query is incomplete, suggest some valid completions. + """ + query_match_rgx = re.compile( + r'(?P\s*(?:rql)' # match rql, possibly indented + r'|' # or + r'\s*(?:\w+\.execute))' # match .execute, possibly indented + # end of + r'\(' # followed by a parenthesis + r'(?P["\'])' # a quote or double quote + r'(?P.*)') # and some content + + def __init__(self, local_ctx, req): + self.local_ctx = local_ctx + self.req = req + self.schema = req.vreg.schema + self.rsb = req.vreg['components'].select('rql.suggestions', req) + + @staticmethod + def match(text): + """check if ``text`` looks like a call to ``rql`` or ``session.execute`` + + Parameters: + text: the user's input + + Returns: + None if it doesn't match, the query structure otherwise. 
+ """ + query_match = RQLExecuteMatcher.query_match_rgx.match(text) + if query_match is None: + return None + parameters_text = query_match.group('parameters') + quote_delim = query_match.group('quote_delim') + # first parameter is fully specified, no completion needed + if re.match(r"(.*?)%s" % quote_delim, parameters_text) is not None: + return None + func_prefix = query_match.group('func_prefix') + return { + # user's input + 'text': text, + # rql( or session.execute( + 'func_prefix': func_prefix, + # offset of rql query + 'rql_offset': len(func_prefix) + 2, + # incomplete rql query + 'rql_query': parameters_text, + } + + def possible_matches(self, text): + """call ``rql.suggestions`` component to complete user's input. + """ + # readline will only send last token, but we need the entire user's input + user_input = readline.get_line_buffer() + query_struct = self.match(user_input) + if query_struct is None: + return [] + else: + # we must only send completions of the last token => compute where it + # starts relatively to the rql query itself. + completion_offset = readline.get_begidx() - query_struct['rql_offset'] + rql_query = query_struct['rql_query'] + return [suggestion[completion_offset:] + for suggestion in self.rsb.build_suggestions(rql_query)] + + +class DefaultMatcher(AbstractMatcher): + """Default matcher: delegate to standard's `rlcompleter.Completer`` class + """ + def __init__(self, local_ctx): + self.completer = Completer(local_ctx) + + def possible_matches(self, text): + if "." in text: + return self.completer.attr_matches(text) + else: + return self.completer.global_matches(text) + + +class CWShellCompleter(object): + """Custom auto-completion helper for cubicweb-ctl shell. + + ``CWShellCompleter`` provides a ``complete`` method suitable for + ``readline.set_completer``. + + Attributes: + matchers: the list of ``AbstractMatcher`` instances that will suggest + possible completions + + The completion process is the following: + + - readline calls the ``complete`` method with user's input, + - the ``complete`` method asks for each known matchers if + it can suggest completions for user's input. + """ + + def __init__(self, local_ctx): + # list of matchers to ask for possible matches on completion + self.matchers = [DefaultMatcher(local_ctx)] + self.matchers.insert(0, RQLExecuteMatcher(local_ctx, local_ctx['session'])) + + def complete(self, text, state): + """readline's completer method + + cf http://docs.python.org/2/library/readline.html#readline.set_completer + for more details. + + Implementation inspired by `rlcompleter.Completer` + """ + if state == 0: + # reset self.matches + self.matches = [] + for matcher in self.matchers: + matches = matcher.possible_matches(text) + if matches: + self.matches = matches + break + else: + return None # no matcher able to handle `text` + try: + return self.matches[state] + except IndexError: + return None diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/transaction.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/transaction.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,127 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" undoable transaction objects. """ +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from cubicweb import RepositoryError + + +ACTION_LABELS = { + 'C': _('entity creation'), + 'U': _('entity update'), + 'D': _('entity deletion'), + 'A': _('relation add'), + 'R': _('relation removal'), + } + + +class NoSuchTransaction(RepositoryError): + # Used by CubicWebException + msg = _("there is no transaction #%s") + + def __init__(self, txuuid): + super(NoSuchTransaction, self).__init__(txuuid) + self.txuuid = txuuid + +class Transaction(object): + """an undoable transaction""" + + def __init__(self, cnx, uuid, time, ueid): + self.cnx = cnx + self.uuid = uuid + self.datetime = time + self.user_eid = ueid + + def _execute(self, *args, **kwargs): + """execute a query using either the req or the cnx""" + return self.cnx.execute(*args, **kwargs) + + + def __repr__(self): + return '' % ( + self.uuid, self.user_eid, self.datetime) + + def user(self): + """return the user entity which has done the transaction, + none if not found. + """ + return self.cnx.find('CWUser', eid=self.user_eid).one() + + def actions_list(self, public=True): + """return an ordered list of action effectued during that transaction + + if public is true, return only 'public' action, eg not ones triggered + under the cover by hooks. + """ + return self.cnx.transaction_actions(self.uuid, public) + + +class AbstractAction(object): + + def __init__(self, action, public, order): + self.action = action + self.public = public + self.order = order + + @property + def label(self): + return ACTION_LABELS[self.action] + + @property + def ertype(self): + """ Return the entity or relation type this action is related to""" + raise NotImplementedError(self) + + +class EntityAction(AbstractAction): + + def __init__(self, action, public, order, etype, eid, changes): + super(EntityAction, self).__init__(action, public, order) + self.etype = etype + self.eid = eid + self.changes = changes + + def __repr__(self): + return '<%s: %s %s (%s)>' % ( + self.label, self.eid, self.changes, + self.public and 'dbapi' or 'hook') + + @property + def ertype(self): + """ Return the entity or relation type this action is related to""" + return self.etype + + +class RelationAction(AbstractAction): + + def __init__(self, action, public, order, rtype, eidfrom, eidto): + super(RelationAction, self).__init__(action, public, order) + self.rtype = rtype + self.eid_from = eidfrom + self.eid_to = eidto + + def __repr__(self): + return '<%s: %s %s %s (%s)>' % ( + self.label, self.eid_from, self.rtype, self.eid_to, + self.public and 'dbapi' or 'hook') + + @property + def ertype(self): + """ Return the entity or relation type this action is related to""" + return self.rtype diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/uilib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/uilib.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,589 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""user interface libraries + +contains some functions designed to help implementation of cubicweb user +interface. +""" + +__docformat__ = "restructuredtext en" + +import csv +import re +from io import StringIO + +from six import PY2, PY3, text_type, binary_type, string_types, integer_types + +from logilab.mtconverter import xml_escape, html_unescape +from logilab.common.date import ustrftime +from logilab.common.deprecation import deprecated + +from cubicweb import _ +from cubicweb.utils import js_dumps + + +def rql_for_eid(eid): + """return the rql query necessary to fetch entity with the given eid. This + function should only be used to generate link with rql inside, not to give + to cursor.execute (in which case you won't benefit from rql cache). + + :Parameters: + - `eid`: the eid of the entity we should search + :rtype: str + :return: the rql query + """ + return 'Any X WHERE X eid %s' % eid + +def eid_param(name, eid): + assert name is not None + assert eid is not None + return '%s:%s' % (name, eid) + +def print_bytes(value, req, props, displaytime=True): + return u'' + +def print_string(value, req, props, displaytime=True): + # don't translate empty value if you don't want strange results + if props is not None and value and props.get('internationalizable'): + return req._(value) + return value + +def print_int(value, req, props, displaytime=True): + return text_type(value) + +def print_date(value, req, props, displaytime=True): + return ustrftime(value, req.property_value('ui.date-format')) + +def print_time(value, req, props, displaytime=True): + return ustrftime(value, req.property_value('ui.time-format')) + +def print_tztime(value, req, props, displaytime=True): + return ustrftime(value, req.property_value('ui.time-format')) + u' UTC' + +def print_datetime(value, req, props, displaytime=True): + if displaytime: + return ustrftime(value, req.property_value('ui.datetime-format')) + return ustrftime(value, req.property_value('ui.date-format')) + +def print_tzdatetime(value, req, props, displaytime=True): + if displaytime: + return ustrftime(value, req.property_value('ui.datetime-format')) + u' UTC' + return ustrftime(value, req.property_value('ui.date-format')) + +_('%d years') +_('%d months') +_('%d weeks') +_('%d days') +_('%d hours') +_('%d minutes') +_('%d seconds') + +def print_timedelta(value, req, props, displaytime=True): + if isinstance(value, integer_types): + # `date - date`, unlike `datetime - datetime` gives an int + # (number of days), not a timedelta + # XXX should rql be fixed to return Int instead of Interval in + # that case? that would be probably the proper fix but we + # loose information on the way... 
+ value = timedelta(days=value) + if value.days > 730 or value.days < -730: # 2 years + return req._('%d years') % (value.days // 365) + elif value.days > 60 or value.days < -60: # 2 months + return req._('%d months') % (value.days // 30) + elif value.days > 14 or value.days < -14: # 2 weeks + return req._('%d weeks') % (value.days // 7) + elif value.days > 2 or value.days < -2: + return req._('%d days') % int(value.days) + else: + minus = 1 if value.days >= 0 else -1 + if value.seconds > 3600: + return req._('%d hours') % (int(value.seconds // 3600) * minus) + elif value.seconds >= 120: + return req._('%d minutes') % (int(value.seconds // 60) * minus) + else: + return req._('%d seconds') % (int(value.seconds) * minus) + +def print_boolean(value, req, props, displaytime=True): + if value: + return req._('yes') + return req._('no') + +def print_float(value, req, props, displaytime=True): + return text_type(req.property_value('ui.float-format') % value) # XXX cast needed ? + +PRINTERS = { + 'Bytes': print_bytes, + 'String': print_string, + 'Int': print_int, + 'BigInt': print_int, + 'Date': print_date, + 'Time': print_time, + 'TZTime': print_tztime, + 'Datetime': print_datetime, + 'TZDatetime': print_tzdatetime, + 'Boolean': print_boolean, + 'Float': print_float, + 'Decimal': print_float, + 'Interval': print_timedelta, + } + +@deprecated('[3.14] use req.printable_value(attrtype, value, ...)') +def printable_value(req, attrtype, value, props=None, displaytime=True): + return req.printable_value(attrtype, value, props, displaytime) + +def css_em_num_value(vreg, propname, default): + """ we try to read an 'em' css property + if we get another unit we're out of luck and resort to the given default + (hence, it is strongly advised not to specify but ems for this css prop) + """ + propvalue = vreg.config.uiprops[propname].lower().strip() + if propvalue.endswith('em'): + try: + return float(propvalue[:-2]) + except Exception: + vreg.warning('css property %s looks malformed (%r)', + propname, propvalue) + else: + vreg.warning('css property %s should use em (currently is %r)', + propname, propvalue) + return default + +# text publishing ############################################################# + +from cubicweb.ext.markdown import markdown_publish # pylint: disable=W0611 + +try: + from cubicweb.ext.rest import rest_publish # pylint: disable=W0611 +except ImportError: + def rest_publish(entity, data): + """default behaviour if docutils was not found""" + return xml_escape(data) + + +TAG_PROG = re.compile(r'', re.U) +def remove_html_tags(text): + """Removes HTML tags from text + + >>> remove_html_tags('hi world') + 'hi world' + >>> + """ + return TAG_PROG.sub('', text) + + +REF_PROG = re.compile(r"([^<]*)", re.U) +def _subst_rql(view, obj): + delim, rql, descr = obj.groups() + return u'%s' % (view._cw.build_url(rql=rql), descr) + +def html_publish(view, text): + """replace links by """ + if not text: + return u'' + return REF_PROG.sub(lambda obj, view=view:_subst_rql(view, obj), text) + +# fallback implementation, nicer one defined below if lxml> 2.0 is available +def safe_cut(text, length): + """returns a string of length based on , removing any html + tags from given text if cut is necessary.""" + if text is None: + return u'' + noenttext = html_unescape(text) + text_nohtml = remove_html_tags(noenttext) + # try to keep html tags if text is short enough + if len(text_nohtml) <= length: + return text + # else if un-tagged text is too long, cut it + return xml_escape(text_nohtml[:length] + u'...') + 
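For illustration, the fallback `safe_cut` above keeps the original markup when the tag-stripped text already fits in `length`, and otherwise strips tags before cutting; a minimal doctest-style sketch of the expected behaviour (assuming this fallback is the implementation in use, not the lxml-based variant defined further down):

>>> safe_cut(u'<b>hi</b> world', 20)   # un-tagged text fits: markup kept
u'<b>hi</b> world'
>>> safe_cut(u'<p>a rather long paragraph of text</p>', 13)
u'a rather long...'
>>> safe_cut(None, 13)
u''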
+fallback_safe_cut = safe_cut + +REM_ROOT_HTML_TAGS = re.compile('', re.U) + +from lxml import etree, html +from lxml.html import clean, defs + +ALLOWED_TAGS = (defs.general_block_tags | defs.list_tags | defs.table_tags | + defs.phrase_tags | defs.font_style_tags | + set(('span', 'a', 'br', 'img', 'map', 'area', 'sub', 'sup', 'canvas')) + ) + +CLEANER = clean.Cleaner(allow_tags=ALLOWED_TAGS, remove_unknown_tags=False, + style=True, safe_attrs_only=True, + add_nofollow=False, + ) + +def soup2xhtml(data, encoding): + """tidy html soup by allowing some element tags and return the result + """ + # remove spurious and tags, then normalize line break + # (see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1) + data = REM_ROOT_HTML_TAGS.sub('', u'\n'.join(data.splitlines())) + xmltree = etree.HTML(CLEANER.clean_html('
<div>%s</div>' % data))
+    # NOTE: lxml 2.0 does support encoding='unicode', but last time I (syt)
+    # tried I got weird results (lxml 2.2.8)
+    body = etree.tostring(xmltree[0], encoding=encoding)
+    # remove <body> and </body> and decode to unicode
+    snippet = body[6:-7].decode(encoding)
+    # take care to bad xhtml (for instance starting with </div>) which
+    # may mess with the <div> we added below. Only remove it if it's
+    # still there...
+    if snippet.startswith('<div>') and snippet.endswith('</div>'):
+        snippet = snippet[5:-6]
+    return snippet
+
+    # lxml.Cleaner envelops text elements by internal logic (not accessible)
+    # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
+    # TODO drop attributes in elements
+    # TODO add policy configuration (content only, embedded content, ...)
+    # XXX this is buggy for "<div>text1</div><div>text2</div>"...
+    # XXX drop these two snippets action and follow the lxml behaviour
+    # XXX (tests need to be updated)
+    # if snippet.startswith('<div>') and snippet.endswith('</div>'):
+    #     snippet = snippet[5:-6]
+    # if snippet.startswith('<p>') and snippet.endswith('</p>'):
+    #     snippet = snippet[3:-4]
+    return snippet.decode(encoding)
+
+if hasattr(etree.HTML('<div>test</div>
'), 'iter'): # XXX still necessary? + # pylint: disable=E0102 + def safe_cut(text, length): + """returns an html document of length based on , + and cut is necessary. + """ + if text is None: + return u'' + dom = etree.HTML(text) + curlength = 0 + add_ellipsis = False + for element in dom.iter(): + if curlength >= length: + parent = element.getparent() + parent.remove(element) + if curlength == length and (element.text or element.tail): + add_ellipsis = True + else: + if element.text is not None: + element.text = cut(element.text, length - curlength) + curlength += len(element.text) + if element.tail is not None: + if curlength < length: + element.tail = cut(element.tail, length - curlength) + curlength += len(element.tail) + elif curlength == length: + element.tail = '...' + else: + element.tail = '' + text = etree.tounicode(dom[0])[6:-7] # remove wrapping + if add_ellipsis: + return text + u'...' + return text + +def text_cut(text, nbwords=30, gotoperiod=True): + """from the given plain text, return a text with at least words, + trying to go to the end of the current sentence. + + :param nbwords: the minimum number of words required + :param gotoperiod: specifies if the function should try to go to + the first period after the cut (i.e. finish + the sentence if possible) + + Note that spaces are normalized. + """ + if text is None: + return u'' + words = text.split() + text = u' '.join(words) # normalize spaces + textlength = minlength = len(' '.join(words[:nbwords])) + if gotoperiod: + textlength = text.find('.', minlength) + 1 + if textlength == 0: # no period found + textlength = minlength + return text[:textlength] + +def cut(text, length): + """returns a string of a maximum length based on + (approximatively, since if text has been cut, '...' is added to the end of the string, + resulting in a string of len + 3) + """ + if text is None: + return u'' + if len(text) <= length: + return text + # else if un-tagged text is too long, cut it + return text[:length] + u'...' + + + +# HTML generation helper functions ############################################ + +class _JSId(object): + def __init__(self, id, parent=None): + self.id = id + self.parent = parent + def __unicode__(self): + if self.parent: + return u'%s.%s' % (self.parent, self.id) + return text_type(self.id) + __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') + def __getattr__(self, attr): + return _JSId(attr, self) + def __call__(self, *args): + return _JSCallArgs(args, self) + +class _JSCallArgs(_JSId): + def __init__(self, args, parent=None): + assert isinstance(args, tuple) + self.args = args + self.parent = parent + def __unicode__(self): + args = [] + for arg in self.args: + args.append(js_dumps(arg)) + if self.parent: + return u'%s(%s)' % (self.parent, ','.join(args)) + return ','.join(args) + __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') + +class _JS(object): + def __getattr__(self, attr): + return _JSId(attr) + +js = _JS() +js.__doc__ = """\ +magic object to return strings suitable to call some javascript function with +the given arguments (which should be correctly typed). 
+ +>>> str(js.pouet(1, "2")) +'pouet(1,"2")' +>>> str(js.cw.pouet(1, "2")) +'cw.pouet(1,"2")' +>>> str(js.cw.pouet(1, "2").pouet(None)) +'cw.pouet(1,"2").pouet(null)' +>>> str(js.cw.pouet(1, JSString("$")).pouet(None)) +'cw.pouet(1,$).pouet(null)' +>>> str(js.cw.pouet(1, {'callback': JSString("cw.cb")}).pouet(None)) +'cw.pouet(1,{callback: cw.cb}).pouet(null)' +""" + +def domid(string): + """return a valid DOM id from a string (should also be usable in jQuery + search expression...) + """ + return string.replace('.', '_').replace('-', '_') + +HTML4_EMPTY_TAGS = frozenset(('base', 'meta', 'link', 'hr', 'br', 'param', + 'img', 'area', 'input', 'col')) + +def sgml_attributes(attrs): + return u' '.join(u'%s="%s"' % (attr, xml_escape(text_type(value))) + for attr, value in sorted(attrs.items()) + if value is not None) + +def simple_sgml_tag(tag, content=None, escapecontent=True, **attrs): + """generation of a simple sgml tag (eg without children tags) easier + + content and attri butes will be escaped + """ + value = u'<%s' % tag + if attrs: + try: + attrs['class'] = attrs.pop('klass') + except KeyError: + pass + value += u' ' + sgml_attributes(attrs) + if content: + if escapecontent: + content = xml_escape(text_type(content)) + value += u'>%s' % (content, tag) + else: + if tag in HTML4_EMPTY_TAGS: + value += u' />' + else: + value += u'>' % tag + return value + +def tooltipize(text, tooltip, url=None): + """make an HTML tooltip""" + url = url or '#' + return u'
%s' % (url, tooltip, text) + +def toggle_action(nodeid): + """builds a HTML link that uses the js toggleVisibility function""" + return u"javascript: toggleVisibility('%s')" % nodeid + +def toggle_link(nodeid, label): + """builds a HTML link that uses the js toggleVisibility function""" + return u'%s' % (toggle_action(nodeid), label) + + +def ureport_as_html(layout): + from logilab.common.ureports import HTMLWriter + formater = HTMLWriter(True) + stream = StringIO() #UStringIO() don't want unicode assertion + formater.format(layout, stream) + res = stream.getvalue() + if isinstance(res, binary_type): + res = res.decode('UTF8') + return res + +# traceback formatting ######################################################## + +import traceback + +def exc_message(ex, encoding): + if PY3: + excmsg = str(ex) + else: + try: + excmsg = unicode(ex) + except Exception: + try: + excmsg = unicode(str(ex), encoding, 'replace') + except Exception: + excmsg = unicode(repr(ex), encoding, 'replace') + exctype = ex.__class__.__name__ + return u'%s: %s' % (exctype, excmsg) + + +def rest_traceback(info, exception): + """return a unicode ReST formated traceback""" + res = [u'Traceback\n---------\n::\n'] + for stackentry in traceback.extract_tb(info[2]): + res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3])) + if stackentry[3]: + data = xml_escape(stackentry[3]) + if PY2: + data = data.decode('utf-8', 'replace') + res.append(u'\t %s' % data) + res.append(u'\n') + try: + res.append(u'\t Error: %s\n' % exception) + except Exception: + pass + return u'\n'.join(res) + + +def html_traceback(info, exception, title='', + encoding='ISO-8859-1', body=''): + """ return an html formatted traceback from python exception infos. + """ + tcbk = info[2] + stacktb = traceback.extract_tb(tcbk) + strings = [] + if body: + strings.append(u'
<div class="error_body">')
+        # FIXME
+        strings.append(body)
+        strings.append(u'</div>')
+    if title:
+        strings.append(u'<h1 class="error">%s</h1>'% xml_escape(title))
+    try:
+        strings.append(u'<p class="error">%s</p>' % xml_escape(str(exception)).replace("\n","<br/>"))
+    except UnicodeError:
+        pass
+    strings.append(u'<div class="error_traceback">')
+    for index, stackentry in enumerate(stacktb):
+        strings.append(u'<b>File</b> <b class="file">%s</b>, <b>line</b> '
+                       u'<b class="line">%s</b>, <b>function</b> '
+                       u'<b class="function">%s</b>:<br/>'%(
+            xml_escape(stackentry[0]), stackentry[1], xml_escape(stackentry[2])))
+        if stackentry[3]:
+            string = xml_escape(stackentry[3])
+            if PY2:
+                string = string.decode('utf-8', 'replace')
+            strings.append(u'&nbsp;&nbsp;%s<br/>\n' % (string))
+        # add locals info for each entry
+        try:
+            local_context = tcbk.tb_frame.f_locals
+            html_info = []
+            chars = 0
+            for name, value in local_context.items():
+                value = xml_escape(repr(value))
+                info = u'<span class="name">%s</span>=%s, ' % (name, value)
+                line_length = len(name) + len(value)
+                chars += line_length
+                # 150 is the result of *years* of research ;-) (CSS might be helpful here)
+                if chars > 150:
+                    info = u'<br/>' + info
+                    chars = line_length
+                html_info.append(info)
+            boxid = 'ctxlevel%d' % index
+            strings.append(u'[%s]' % toggle_link(boxid, '+'))
+            strings.append(u'<div id="%s" class="pycontext hidden">%s</div>' %
+                           (boxid, ''.join(html_info)))
+            tcbk = tcbk.tb_next
+        except Exception:
+            pass # doesn't really matter if we have no context info
+    strings.append(u'</div>
') + return '\n'.join(strings) + +# csv files / unicode support ################################################# + +class UnicodeCSVWriter: + """proxies calls to csv.writer.writerow to be able to deal with unicode + + Under Python 3, this code no longer encodes anything.""" + + def __init__(self, wfunc, encoding, **kwargs): + self.writer = csv.writer(self, **kwargs) + self.wfunc = wfunc + self.encoding = encoding + + def write(self, data): + self.wfunc(data) + + def writerow(self, row): + if PY3: + self.writer.writerow(row) + return + csvrow = [] + for elt in row: + if isinstance(elt, text_type): + csvrow.append(elt.encode(self.encoding)) + else: + csvrow.append(str(elt)) + self.writer.writerow(csvrow) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +# some decorators ############################################################# + +class limitsize(object): + def __init__(self, maxsize): + self.maxsize = maxsize + + def __call__(self, function): + def newfunc(*args, **kwargs): + ret = function(*args, **kwargs) + if isinstance(ret, string_types): + return ret[:self.maxsize] + return ret + return newfunc + + +def htmlescape(function): + def newfunc(*args, **kwargs): + ret = function(*args, **kwargs) + assert isinstance(ret, string_types) + return xml_escape(ret) + return newfunc diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/utils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/utils.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,716 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Some utilities for CubicWeb server/clients.""" + +from __future__ import division + +__docformat__ = "restructuredtext en" + +import decimal +import datetime +import random +import re +import json + +from operator import itemgetter +from inspect import getargspec +from itertools import repeat +from uuid import uuid4 +from warnings import warn +from threading import Lock +from logging import getLogger + +from six import text_type +from six.moves.urllib.parse import urlparse + +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import deprecated +from logilab.common.date import ustrftime + +_MARKER = object() + +# initialize random seed from current time +random.seed() + +def admincnx(appid): + from cubicweb.cwconfig import CubicWebConfiguration + from cubicweb.server.repository import Repository + from cubicweb.server.utils import TasksManager + config = CubicWebConfiguration.config_for(appid) + + login = config.default_admin_config['login'] + password = config.default_admin_config['password'] + + repo = Repository(config, TasksManager()) + session = repo.new_session(login, password=password) + return session.new_cnx() + + +def make_uid(key=None): + """Return a unique identifier string. 
+ + if specified, `key` is used to prefix the generated uid so it can be used + for instance as a DOM id or as sql table name. + + See uuid.uuid4 documentation for the shape of the generated identifier, but + this is basically a 32 bits hexadecimal string. + """ + if key is None: + return uuid4().hex + return str(key) + uuid4().hex + + +def support_args(callable, *argnames): + """return true if the callable support given argument names""" + if isinstance(callable, type): + callable = callable.__init__ + argspec = getargspec(callable) + if argspec[2]: + return True + for argname in argnames: + if argname not in argspec[0]: + return False + return True + + +class wrap_on_write(object): + """ Sometimes it is convenient to NOT write some container element + if it happens that there is nothing to be written within, + but this cannot be known beforehand. + Hence one can do this: + + .. sourcecode:: python + + with wrap_on_write(w, '
<div class="foo">', '</div>
') as wow: + component.render_stuff(wow) + """ + def __init__(self, w, tag, closetag=None): + self.written = False + self.tag = text_type(tag) + self.closetag = closetag + self.w = w + + def __enter__(self): + return self + + def __call__(self, data): + if self.written is False: + self.w(self.tag) + self.written = True + self.w(data) + + def __exit__(self, exctype, value, traceback): + if self.written is True: + if self.closetag: + self.w(text_type(self.closetag)) + else: + self.w(self.tag.replace('<', '' % ( + id(self), self._item, self._size) + def __len__(self): + return self._size + def __iter__(self): + return repeat(self._item, self._size) + def __getitem__(self, index): + if isinstance(index, slice): + # XXX could be more efficient, but do we bother? + return ([self._item] * self._size)[index] + return self._item + def __delitem__(self, idc): + assert self._size > 0 + self._size -= 1 + def __add__(self, other): + if isinstance(other, RepeatList): + if other._item == self._item: + return RepeatList(self._size + other._size, self._item) + return ([self._item] * self._size) + other[:] + return ([self._item] * self._size) + other + def __radd__(self, other): + if isinstance(other, RepeatList): + if other._item == self._item: + return RepeatList(self._size + other._size, self._item) + return other[:] + ([self._item] * self._size) + return other[:] + ([self._item] * self._size) + def __eq__(self, other): + if isinstance(other, RepeatList): + return other._size == self._size and other._item == self._item + return self[:] == other + def __ne__(self, other): + return not (self == other) + def __hash__(self): + raise NotImplementedError + def pop(self, i): + self._size -= 1 + + +class UStringIO(list): + """a file wrapper which automatically encode unicode string to an encoding + specifed in the constructor + """ + + def __init__(self, tracewrites=False, *args, **kwargs): + self.tracewrites = tracewrites + super(UStringIO, self).__init__(*args, **kwargs) + + def __bool__(self): + return True + + __nonzero__ = __bool__ + + def write(self, value): + assert isinstance(value, text_type), u"unicode required not %s : %s"\ + % (type(value).__name__, repr(value)) + if self.tracewrites: + from traceback import format_stack + stack = format_stack(None)[:-1] + escaped_stack = xml_escape(json_dumps(u'\n'.join(stack))) + escaped_html = xml_escape(value).replace('\n', '
\n') + tpl = u'%s' + value = tpl % (escaped_stack, escaped_html) + self.append(value) + + def getvalue(self): + return u''.join(self) + + def __repr__(self): + return '<%s at %#x>' % (self.__class__.__name__, id(self)) + + +class HTMLHead(UStringIO): + """wraps HTML header's stream + + Request objects use a HTMLHead instance to ease adding of + javascripts and stylesheets + """ + js_unload_code = u'''if (typeof(pageDataUnloaded) == 'undefined') { + jQuery(window).unload(unloadPageData); + pageDataUnloaded = true; +}''' + script_opening = u'' + + def __init__(self, req, *args, **kwargs): + super(HTMLHead, self).__init__(*args, **kwargs) + self.jsvars = [] + self.jsfiles = [] + self.cssfiles = [] + self.ie_cssfiles = [] + self.post_inlined_scripts = [] + self.pagedata_unload = False + self._cw = req + self.datadir_url = req.datadir_url + + def add_raw(self, rawheader): + self.write(rawheader) + + def define_var(self, var, value, override=True): + """adds a javascript var declaration / assginment in the header + + :param var: the variable name + :param value: the variable value (as a raw python value, + it will be jsonized later) + :param override: if False, don't set the variable value if the variable + is already defined. Default is True. + """ + self.jsvars.append( (var, value, override) ) + + def add_post_inline_script(self, content): + self.post_inlined_scripts.append(content) + + def add_onload(self, jscode): + self.add_post_inline_script(u"""$(cw).one('server-response', function(event) { +%s});""" % jscode) + + + def add_js(self, jsfile): + """adds `jsfile` to the list of javascripts used in the webpage + + This function checks if the file has already been added + :param jsfile: the script's URL + """ + if jsfile not in self.jsfiles: + self.jsfiles.append(jsfile) + + def add_css(self, cssfile, media='all'): + """adds `cssfile` to the list of javascripts used in the webpage + + This function checks if the file has already been added + :param cssfile: the stylesheet's URL + """ + if (cssfile, media) not in self.cssfiles: + self.cssfiles.append( (cssfile, media) ) + + def add_ie_css(self, cssfile, media='all', iespec=u'[if lt IE 8]'): + """registers some IE specific CSS""" + if (cssfile, media, iespec) not in self.ie_cssfiles: + self.ie_cssfiles.append( (cssfile, media, iespec) ) + + def add_unload_pagedata(self): + """registers onunload callback to clean page data on server""" + if not self.pagedata_unload: + self.post_inlined_scripts.append(self.js_unload_code) + self.pagedata_unload = True + + def concat_urls(self, urls): + """concatenates urls into one url usable by Apache mod_concat + + This method returns the url without modifying it if there is only + one element in the list + :param urls: list of local urls/filenames to concatenate + """ + if len(urls) == 1: + return urls[0] + len_prefix = len(self.datadir_url) + concated = u','.join(url[len_prefix:] for url in urls) + return (u'%s??%s' % (self.datadir_url, concated)) + + def group_urls(self, urls_spec): + """parses urls_spec in order to generate concatenated urls + for js and css includes + + This method checks if the file is local and if it shares options + with direct neighbors + :param urls_spec: entire list of urls/filenames to inspect + """ + concatable = [] + prev_islocal = False + prev_key = None + for url, key in urls_spec: + islocal = url.startswith(self.datadir_url) + if concatable and (islocal != prev_islocal or key != prev_key): + yield (self.concat_urls(concatable), prev_key) + del concatable[:] + if not islocal: + 
yield (url, key) + else: + concatable.append(url) + prev_islocal = islocal + prev_key = key + if concatable: + yield (self.concat_urls(concatable), prev_key) + + + def getvalue(self, skiphead=False): + """reimplement getvalue to provide a consistent (and somewhat browser + optimzed cf. http://stevesouders.com/cuzillion) order in external + resources declaration + """ + w = self.write + # 1/ variable declaration if any + if self.jsvars: + if skiphead: + w(u'') + else: + w(self.script_opening) + for var, value, override in self.jsvars: + vardecl = u'%s = %s;' % (var, json.dumps(value)) + if not override: + vardecl = (u'if (typeof %s == "undefined") {%s}' % + (var, vardecl)) + w(vardecl + u'\n') + if skiphead: + w(u'') + else: + w(self.script_closing) + # 2/ css files + ie_cssfiles = ((x, (y, z)) for x, y, z in self.ie_cssfiles) + if self.datadir_url and self._cw.vreg.config['concat-resources']: + cssfiles = self.group_urls(self.cssfiles) + ie_cssfiles = self.group_urls(ie_cssfiles) + jsfiles = (x for x, _ in self.group_urls((x, None) for x in self.jsfiles)) + else: + cssfiles = self.cssfiles + jsfiles = self.jsfiles + for cssfile, media in cssfiles: + w(u'\n' % + (media, xml_escape(cssfile))) + # 3/ ie css if necessary + if self.ie_cssfiles: # use self.ie_cssfiles because `ie_cssfiles` is a genexp + for cssfile, (media, iespec) in ie_cssfiles: + w(u' \n') + # 4/ js files + for jsfile in jsfiles: + if skiphead: + # Don't insert \n' % + xml_escape(jsfile)) + # 5/ post inlined scripts (i.e. scripts depending on other JS files) + if self.post_inlined_scripts: + if skiphead: + for script in self.post_inlined_scripts: + w(u'') + w(xml_escape(script)) + w(u'') + else: + w(self.script_opening) + w(u'\n\n'.join(self.post_inlined_scripts)) + w(self.script_closing) + # at the start of this function, the parent UStringIO may already have + # data in it, so we can't w(u'\n') at the top. Instead, we create + # a temporary UStringIO to get the same debugging output formatting + # if debugging is enabled. + headtag = UStringIO(tracewrites=self.tracewrites) + if not skiphead: + headtag.write(u'\n') + w(u'\n') + return headtag.getvalue() + super(HTMLHead, self).getvalue() + + +class HTMLStream(object): + """represents a HTML page. + + This is used my main templates so that HTML headers can be added + at any time during the page generation. + + HTMLStream uses the (U)StringIO interface to be compliant with + existing code. 
+ """ + + def __init__(self, req): + self.tracehtml = req.tracehtml + # stream for + self.head = req.html_headers + # main stream + self.body = UStringIO(tracewrites=req.tracehtml) + # this method will be assigned to self.w in views + self.write = self.body.write + self.doctype = u'' + self._htmlattrs = [('lang', req.lang)] + # keep main_stream's reference on req for easier text/html demoting + req.main_stream = self + + @deprecated('[3.17] there are no namespaces in html, xhtml is not served any longer') + def add_namespace(self, prefix, uri): + pass + + @deprecated('[3.17] there are no namespaces in html, xhtml is not served any longer') + def set_namespaces(self, namespaces): + pass + + def add_htmlattr(self, attrname, attrvalue): + self._htmlattrs.append( (attrname, attrvalue) ) + + def set_htmlattrs(self, attrs): + self._htmlattrs = attrs + + def set_doctype(self, doctype, reset_xmldecl=None): + self.doctype = doctype + if reset_xmldecl is not None: + warn('[3.17] xhtml is no more supported', + DeprecationWarning, stacklevel=2) + + @property + def htmltag(self): + attrs = ' '.join('%s="%s"' % (attr, xml_escape(value)) + for attr, value in self._htmlattrs) + if attrs: + return '' % attrs + return '' + + def getvalue(self): + """writes HTML headers, closes tag and writes HTML body""" + if self.tracehtml: + css = u'\n'.join((u'span {', + u' font-family: monospace;', + u' word-break: break-all;', + u' word-wrap: break-word;', + u'}', + u'span:hover {', + u' color: red;', + u' text-decoration: underline;', + u'}')) + style = u'\n' % css + return (u'\n' + + u'\n\n%s\n\n' % style + + u'\n' + + u'' + xml_escape(self.doctype) + u'
' + + u'' + xml_escape(self.htmltag) + u'
' + + self.head.getvalue() + + self.body.getvalue() + + u'' + xml_escape(u'') + u'' + + u'\n') + return u'%s\n%s\n%s\n%s\n' % (self.doctype, + self.htmltag, + self.head.getvalue(), + self.body.getvalue()) + + +class CubicWebJsonEncoder(json.JSONEncoder): + """define a json encoder to be able to encode yams std types""" + + def default(self, obj): + if hasattr(obj, '__json_encode__'): + return obj.__json_encode__() + if isinstance(obj, datetime.datetime): + return ustrftime(obj, '%Y/%m/%d %H:%M:%S') + elif isinstance(obj, datetime.date): + return ustrftime(obj, '%Y/%m/%d') + elif isinstance(obj, datetime.time): + return obj.strftime('%H:%M:%S') + elif isinstance(obj, datetime.timedelta): + return (obj.days * 24 * 60 * 60) + obj.seconds + elif isinstance(obj, decimal.Decimal): + return float(obj) + try: + return json.JSONEncoder.default(self, obj) + except TypeError: + # we never ever want to fail because of an unknown type, + # just return None in those cases. + return None + +def json_dumps(value, **kwargs): + return json.dumps(value, cls=CubicWebJsonEncoder, **kwargs) + + +class JSString(str): + """use this string sub class in values given to :func:`js_dumps` to + insert raw javascript chain in some JSON string + """ + +def _dict2js(d, predictable=False): + if predictable: + it = sorted(d.items()) + else: + it = d.items() + res = [key + ': ' + js_dumps(val, predictable) + for key, val in it] + return '{%s}' % ', '.join(res) + +def _list2js(l, predictable=False): + return '[%s]' % ', '.join([js_dumps(val, predictable) for val in l]) + +def js_dumps(something, predictable=False): + """similar as :func:`json_dumps`, except values which are instances of + :class:`JSString` are expected to be valid javascript and will be output + as is + + >>> js_dumps({'hop': JSString('$.hop'), 'bar': None}, predictable=True) + '{bar: null, hop: $.hop}' + >>> js_dumps({'hop': '$.hop'}) + '{hop: "$.hop"}' + >>> js_dumps({'hip': {'hop': JSString('momo')}}) + '{hip: {hop: momo}}' + """ + if isinstance(something, dict): + return _dict2js(something, predictable) + if isinstance(something, list): + return _list2js(something, predictable) + if isinstance(something, JSString): + return something + return json_dumps(something, sort_keys=predictable) + +PERCENT_IN_URLQUOTE_RE = re.compile(r'%(?=[0-9a-fA-F]{2})') +def js_href(javascript_code): + """Generate a "javascript: ..." string for an href attribute. + + Some % which may be interpreted in a href context will be escaped. + + In an href attribute, url-quotes-looking fragments are interpreted before + being given to the javascript engine. Valid url quotes are in the form + ``%xx`` with xx being a byte in hexadecimal form. This means that ``%toto`` + will be unaltered but ``%babar`` will be mangled because ``ba`` is the + hexadecimal representation of 186. 
+ + >>> js_href('alert("babar");') + 'javascript: alert("babar");' + >>> js_href('alert("%babar");') + 'javascript: alert("%25babar");' + >>> js_href('alert("%toto %babar");') + 'javascript: alert("%toto %25babar");' + >>> js_href('alert("%1337%");') + 'javascript: alert("%251337%");' + """ + return 'javascript: ' + PERCENT_IN_URLQUOTE_RE.sub(r'%25', javascript_code) + + +def parse_repo_uri(uri): + """ transform a command line uri into a (protocol, hostport, appid), e.g: + -> 'inmemory', None, '' + inmemory:// -> 'inmemory', None, '' + """ + parseduri = urlparse(uri) + scheme = parseduri.scheme + if scheme == '': + return ('inmemory', None, parseduri.path) + if scheme == 'inmemory': + return (scheme, None, parseduri.netloc) + raise NotImplementedError('URI protocol not implemented for `%s`' % uri) + + + +logger = getLogger('cubicweb.utils') + +class QueryCache(object): + """ a minimalist dict-like object to be used by the querier + and native source (replaces lgc.cache for this very usage) + + To be efficient it must be properly used. The usage patterns are + quite specific to its current clients. + + The ceiling value should be sufficiently high, else it will be + ruthlessly inefficient (there will be warnings when this happens). + A good (high enough) value can only be set on a per-application + value. A default, reasonnably high value is provided but tuning + e.g `rql-cache-size` can certainly help. + + There are two kinds of elements to put in this cache: + * frequently used elements + * occasional elements + + The former should finish in the _permanent structure after some + warmup. + + Occasional elements can be buggy requests (server-side) or + end-user (web-ui provided) requests. These have to be cleaned up + when they fill the cache, without evicting the useful, frequently + used entries. 
+ """ + # quite arbitrary, but we want to never + # immortalize some use-a-little query + _maxlevel = 15 + + def __init__(self, ceiling=3000): + self._max = ceiling + # keys belonging forever to this cache + self._permanent = set() + # mapping of key (that can get wiped) to getitem count + self._transient = {} + self._data = {} + self._lock = Lock() + + def __len__(self): + with self._lock: + return len(self._data) + + def __getitem__(self, k): + with self._lock: + if k in self._permanent: + return self._data[k] + v = self._transient.get(k, _MARKER) + if v is _MARKER: + self._transient[k] = 1 + return self._data[k] + if v > self._maxlevel: + self._permanent.add(k) + self._transient.pop(k, None) + else: + self._transient[k] += 1 + return self._data[k] + + def __setitem__(self, k, v): + with self._lock: + if len(self._data) >= self._max: + self._try_to_make_room() + self._data[k] = v + + def pop(self, key, default=_MARKER): + with self._lock: + try: + if default is _MARKER: + return self._data.pop(key) + return self._data.pop(key, default) + finally: + if key in self._permanent: + self._permanent.remove(key) + else: + self._transient.pop(key, None) + + def clear(self): + with self._lock: + self._clear() + + def _clear(self): + self._permanent = set() + self._transient = {} + self._data = {} + + def _try_to_make_room(self): + current_size = len(self._data) + items = sorted(self._transient.items(), key=itemgetter(1)) + level = 0 + for k, v in items: + self._data.pop(k, None) + self._transient.pop(k, None) + if v > level: + datalen = len(self._data) + if datalen == 0: + return + if (current_size - datalen) / datalen > .1: + break + level = v + else: + # we removed cruft but everything is permanent + if len(self._data) >= self._max: + logger.warning('Cache %s is full.' % id(self)) + self._clear() + + def _usage_report(self): + with self._lock: + return {'itemcount': len(self._data), + 'transientcount': len(self._transient), + 'permanentcount': len(self._permanent)} + + def popitem(self): + raise NotImplementedError() + + def setdefault(self, key, default=None): + raise NotImplementedError() + + def update(self, other): + raise NotImplementedError() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/view.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/view.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,534 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""abstract views and templates classes for CubicWeb web client""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from io import BytesIO +from warnings import warn +from functools import partial + +from six.moves import range + +from logilab.common.deprecation import deprecated +from logilab.common.registry import yes +from logilab.mtconverter import xml_escape + +from rql import nodes + +from cubicweb import NotAnEntity +from cubicweb.predicates import non_final_entity, nonempty_rset, none_rset +from cubicweb.appobject import AppObject +from cubicweb.utils import UStringIO, HTMLStream +from cubicweb.uilib import domid, js +from cubicweb.schema import display_name + +# robots control +NOINDEX = u'' +NOFOLLOW = u'' + +TRANSITIONAL_DOCTYPE_NOEXT = u'\n' +TRANSITIONAL_DOCTYPE = TRANSITIONAL_DOCTYPE_NOEXT # bw compat + +STRICT_DOCTYPE_NOEXT = u'\n' +STRICT_DOCTYPE = STRICT_DOCTYPE_NOEXT # bw compat + +# base view object ############################################################ + +class View(AppObject): + """This class is an abstraction of a view class, used as a base class for + every renderable object such as views, templates and other user interface + components. + + A `View` is instantiated to render a result set or part of a result + set. `View` subclasses may be parametrized using the following class + attributes: + + :py:attr:`templatable` indicates if the view may be embedded in a main + template or if it has to be rendered standalone (i.e. pure XML views must + not be embedded in the main template of HTML pages) + :py:attr:`content_type` if the view is not templatable, it should set the + `content_type` class attribute to the correct MIME type (text/xhtml being + the default) + :py:attr:`category` this attribute may be used in the interface to regroup + related objects (view kinds) together + + :py:attr:`paginable` + + :py:attr:`binary` + + + A view writes to its output stream thanks to its attribute `w` (the + append method of an `UStreamIO`, except for binary views). + + At instantiation time, the standard `_cw`, and `cw_rset` attributes are + added and the `w` attribute will be set at rendering time to a write + function to use. + """ + __registry__ = 'views' + + templatable = True + # content_type = 'application/xhtml+xml' # text/xhtml' + binary = False + add_to_breadcrumbs = True + category = 'view' + paginable = True + + def __init__(self, req=None, rset=None, **kwargs): + super(View, self).__init__(req, rset=rset, **kwargs) + self.w = None + + @property + def content_type(self): + return self._cw.html_content_type() + + def set_stream(self, w=None): + if self.w is not None: + return + if w is None: + if self.binary: + self._stream = stream = BytesIO() + else: + self._stream = stream = UStringIO() + w = stream.write + else: + stream = None + self.w = w + return stream + + # main view interface ##################################################### + + def render(self, w=None, **context): + """called to render a view object for a result set. 
+ + This method is a dispatched to an actual method selected + according to optional row and col parameters, which are locating + a particular row or cell in the result set: + + * if row is specified, `cell_call` is called + * if none of them is supplied, the view is considered to apply on + the whole result set (which may be None in this case), `call` is + called + """ + # XXX use .cw_row/.cw_col + row = context.get('row') + if row is not None: + context.setdefault('col', 0) + view_func = self.cell_call + else: + view_func = self.call + stream = self.set_stream(w) + try: + view_func(**context) + except Exception: + self.debug('view call %s failed (context=%s)', view_func, context) + raise + # return stream content if we have created it + if stream is not None: + return self._stream.getvalue() + + def tal_render(self, template, variables): + """render a precompiled page template with variables in the given + dictionary as context + """ + from cubicweb.ext.tal import CubicWebContext + context = CubicWebContext() + context.update({'self': self, 'rset': self.cw_rset, '_' : self._cw._, + 'req': self._cw, 'user': self._cw.user}) + context.update(variables) + output = UStringIO() + template.expand(context, output) + return output.getvalue() + + # should default .call() method add a
<div class="section"> around each + add_div_section = True + + def call(self, **kwargs): + """the view is called for an entire result set, by default loop + over rows of the result set and call the same view on the + particular row + + Views applicable on None result sets have to override this method + """ + rset = self.cw_rset + if rset is None: + raise NotImplementedError("%r an rset is required" % self) + wrap = self.templatable and len(rset) > 1 and self.add_div_section + # avoid re-selection if rset of size 1, we already have the most + # specific view + if rset.rowcount != 1: + kwargs.setdefault('initargs', self.cw_extra_kwargs) + for i in range(len(rset)): + if wrap: + self.w(u'<div class="section">
') + self.wview(self.__regid__, rset, row=i, **kwargs) + if wrap: + self.w(u"</div>
") + else: + if wrap: + self.w(u'<div class="section">
') + kwargs.setdefault('col', 0) + self.cell_call(row=0, **kwargs) + if wrap: + self.w(u"</div>
") + + def cell_call(self, row, col, **kwargs): + """the view is called for a particular result set cell""" + raise NotImplementedError(repr(self)) + + def linkable(self): + """return True if the view may be linked in a menu + + by default views without title are not meant to be displayed + """ + if not getattr(self, 'title', None): + return False + return True + + def is_primary(self): + return self.cw_extra_kwargs.get('is_primary', self.__regid__ == 'primary') + + def url(self): + """return the url associated with this view. Should not be + necessary for non linkable views, but a default implementation + is provided anyway. + """ + rset = self.cw_rset + if rset is None: + return self._cw.build_url('view', vid=self.__regid__) + coltypes = rset.column_types(0) + if len(coltypes) == 1: + etype = next(iter(coltypes)) + if not self._cw.vreg.schema.eschema(etype).final: + if len(rset) == 1: + entity = rset.get_entity(0, 0) + return entity.absolute_url(vid=self.__regid__) + # don't want to generate / url if there is some restriction + # on something else than the entity type + restr = rset.syntax_tree().children[0].where + # XXX norestriction is not correct here. For instance, in cases like + # "Any P,N WHERE P is Project, P name N" norestriction should equal + # True + norestriction = (isinstance(restr, nodes.Relation) and + restr.is_types_restriction()) + if norestriction: + return self._cw.build_url(etype.lower(), vid=self.__regid__) + return self._cw.build_url('view', rql=rset.printable_rql(), vid=self.__regid__) + + def set_request_content_type(self): + """set the content type returned by this view""" + self._cw.set_content_type(self.content_type) + + # view utilities ########################################################## + + def wview(self, __vid, rset=None, __fallback_vid=None, **kwargs): + """shortcut to self.view method automatically passing self.w as argument + """ + self._cw.view(__vid, rset, __fallback_vid, w=self.w, **kwargs) + + def whead(self, data): + self._cw.html_headers.write(data) + + def wdata(self, data): + """simple helper that escapes `data` and writes into `self.w`""" + self.w(xml_escape(data)) + + def html_headers(self): + """return a list of html headers (eg something to be inserted between + and of the returned page + + by default return a meta tag to disable robot indexation of the page + """ + return [NOINDEX] + + def page_title(self): + """returns a title according to the result set - used for the + title in the HTML header + """ + vtitle = self._cw.form.get('vtitle') + if vtitle: + return self._cw._(vtitle) + # class defined title will only be used if the resulting title doesn't + # seem clear enough + vtitle = getattr(self, 'title', None) or u'' + if vtitle: + vtitle = self._cw._(vtitle) + rset = self.cw_rset + if rset and rset.rowcount: + if rset.rowcount == 1: + try: + entity = rset.complete_entity(0, 0) + # use long_title to get context information if any + clabel = entity.dc_long_title() + except NotAnEntity: + clabel = display_name(self._cw, rset.description[0][0]) + clabel = u'%s (%s)' % (clabel, vtitle) + else : + etypes = rset.column_types(0) + if len(etypes) == 1: + etype = next(iter(etypes)) + clabel = display_name(self._cw, etype, 'plural') + else : + clabel = u'#[*] (%s)' % vtitle + else: + clabel = vtitle + return u'%s (%s)' % (clabel, self._cw.property_value('ui.site-title')) + + def field(self, label, value, row=True, show_label=True, w=None, tr=True, + table=False): + """read-only field""" + if w is None: + w = self.w + if table: + w(u'') + 
else: + w(u'
') + if show_label and label: + if tr: + label = display_name(self._cw, label) + if table: + w(u'%s' % label) + else: + w(u'%s ' % label) + if table: + if not (show_label and label): + w(u'%s' % value) + else: + w(u'%s' % value) + else: + w(u'%s
' % value) + + + +# concrete views base classes ################################################# + +class EntityView(View): + """base class for views applying on an entity (i.e. uniform result set)""" + __select__ = non_final_entity() + category = _('entityview') + + def call(self, **kwargs): + if self.cw_rset is None: + # * cw_extra_kwargs is the place where extra selection arguments are + # stored + # * when calling req.view('somevid', entity=entity), 'entity' ends + # up in cw_extra_kwargs and kwargs + # + # handle that to avoid a TypeError with a sanity check + # + # Notice that could probably be avoided by handling entity_call in + # .render + entity = self.cw_extra_kwargs.pop('entity') + if 'entity' in kwargs: + assert kwargs.pop('entity') is entity + self.entity_call(entity, **kwargs) + else: + super(EntityView, self).call(**kwargs) + + def cell_call(self, row, col, **kwargs): + self.entity_call(self.cw_rset.get_entity(row, col), **kwargs) + + def entity_call(self, entity, **kwargs): + raise NotImplementedError('%r %r' % (self.__regid__, self.__class__)) + + +class StartupView(View): + """base class for views which doesn't need a particular result set to be + displayed (so they can always be displayed!) + """ + __select__ = none_rset() + + category = _('startupview') + + def html_headers(self): + """return a list of html headers (eg something to be inserted between + and of the returned page + + by default startup views are indexed + """ + return [] + + +class EntityStartupView(EntityView): + """base class for entity views which may also be applied to None + result set (usually a default rql is provided by the view class) + """ + __select__ = none_rset() | non_final_entity() + + default_rql = None + + def __init__(self, req, rset=None, **kwargs): + super(EntityStartupView, self).__init__(req, rset=rset, **kwargs) + if rset is None: + # this instance is not in the "entityview" category + self.category = 'startupview' + + def startup_rql(self): + """return some rql to be executed if the result set is None""" + return self.default_rql + + def no_entities(self, **kwargs): + """override to display something when no entities were found""" + pass + + def call(self, **kwargs): + """override call to execute rql returned by the .startup_rql method if + necessary + """ + rset = self.cw_rset + if rset is None: + rset = self.cw_rset = self._cw.execute(self.startup_rql()) + if rset: + for i in range(len(rset)): + self.wview(self.__regid__, rset, row=i, **kwargs) + else: + self.no_entities(**kwargs) + + +class AnyRsetView(View): + """base class for views applying on any non empty result sets""" + __select__ = nonempty_rset() + + category = _('anyrsetview') + + def columns_labels(self, mainindex=0, tr=True): + """compute the label of the rset colums + + The logic is based on :meth:`~rql.stmts.Union.get_description`. + + :param mainindex: The index of the main variable. This is an hint to get + more accurate label for various situation + :type mainindex: int + + :param tr: Should the label be translated ? 
+ :type tr: boolean + """ + if tr: + translate = partial(display_name, self._cw) + else: + translate = lambda val, *args,**kwargs: val + # XXX [0] because of missing Union support + rql_syntax_tree = self.cw_rset.syntax_tree() + rqlstdescr = rql_syntax_tree.get_description(mainindex, translate)[0] + labels = [] + for colidx, label in enumerate(rqlstdescr): + labels.append(self.column_label(colidx, label, translate)) + return labels + + def column_label(self, colidx, default, translate_func=None): + """return the label of a specified columns index + + Overwrite me if you need to compute specific label. + + :param colidx: The index of the column the call computes a label for. + :type colidx: int + + :param default: Default value. If ``"Any"`` the default value will be + recomputed as coma separated list for all possible + etypes name. + :type colidx: string + + :param translate_func: A function used to translate name. + :type colidx: function + """ + label = default + if label == 'Any': + etypes = self.cw_rset.column_types(colidx) + if translate_func is not None: + etypes = map(translate_func, etypes) + label = u','.join(etypes) + return label + + + +# concrete template base classes ############################################## + +class MainTemplate(View): + """main template are primary access point to render a full HTML page. + There is usually at least a regular main template and a simple fallback + one to display error if the first one failed + """ + + doctype = '' + + def set_stream(self, w=None): + if self.w is not None: + return + if w is None: + if self.binary: + self._stream = stream = BytesIO() + else: + self._stream = stream = HTMLStream(self._cw) + w = stream.write + else: + stream = None + self.w = w + return stream + + def write_doctype(self, xmldecl=True): + assert isinstance(self._stream, HTMLStream) + self._stream.doctype = self.doctype + if not xmldecl: + self._stream.xmldecl = u'' + + def linkable(self): + return False + +# concrete component base classes ############################################# + +class ReloadableMixIn(object): + """simple mixin for reloadable parts of UI""" + + @property + def domid(self): + return domid(self.__regid__) + + +class Component(ReloadableMixIn, View): + """base class for components""" + __registry__ = 'components' + __select__ = yes() + + # XXX huummm, much probably useless (should be...) + htmlclass = 'mainRelated' + @property + def cssclass(self): + return '%s %s' % (self.htmlclass, domid(self.__regid__)) + + # XXX should rely on ReloadableMixIn.domid + @property + def domid(self): + return '%sComponent' % domid(self.__regid__) + + +class Adapter(AppObject): + """base class for adapters""" + __registry__ = 'adapters' + + +class EntityAdapter(Adapter): + """base class for entity adapters (eg adapt an entity to an interface)""" + def __init__(self, _cw, **kwargs): + try: + self.entity = kwargs.pop('entity') + except KeyError: + self.entity = kwargs['rset'].get_entity(kwargs.get('row') or 0, + kwargs.get('col') or 0) + Adapter.__init__(self, _cw, **kwargs) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/vregistry.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/vregistry.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,23 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +from warnings import warn +from logilab.common.deprecation import class_moved +warn('[3.15] moved to logilab.common.registry', DeprecationWarning, stacklevel=2) +from logilab.common.registry import * + +VRegistry = class_moved(RegistryStore, old_name='VRegistry', message='[3.15] VRegistry moved to logilab.common.registry as RegistryStore') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,61 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""CubicWeb web client core. You'll need a apache-modpython or twisted +publisher to get a full CubicWeb web application +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves.urllib.parse import quote as urlquote +from logilab.common.deprecation import deprecated + +from cubicweb.web._exceptions import * +from cubicweb.utils import json_dumps +from cubicweb.uilib import eid_param + +assert json_dumps is not None, 'no json module installed' + +INTERNAL_FIELD_VALUE = '__cubicweb_internal_field__' + + +class stdmsgs(object): + """standard ui message (in a class for bw compat)""" + BUTTON_OK = (_('button_ok'), 'OK_ICON') + BUTTON_APPLY = (_('button_apply'), 'APPLY_ICON') + BUTTON_CANCEL = (_('button_cancel'), 'CANCEL_ICON') + BUTTON_DELETE = (_('button_delete'), 'TRASH_ICON') + YES = (_('yes'), None) + NO = (_('no'), None) + + +from logging import getLogger +LOGGER = getLogger('cubicweb.web') + +# XXX deprecated +FACETTES = set() + + +def jsonize(function): + def newfunc(*args, **kwargs): + value = function(*args, **kwargs) + try: + return json_dumps(value) + except TypeError: + return json_dumps(repr(value)) + return newfunc diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/_exceptions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/_exceptions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,108 @@ +# pylint: disable=W0401,W0614 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""exceptions used in the core of the CubicWeb web application""" + +__docformat__ = "restructuredtext en" + +from six.moves import http_client + +from cubicweb._exceptions import * +from cubicweb.utils import json_dumps + + +class DirectResponse(Exception): + """Used to supply a twitted HTTP Response directly""" + def __init__(self, response): + self.response = response + +class InvalidSession(CubicWebException): + """raised when a session id is found but associated session is not found or + invalid""" + +# Publish related exception + +class PublishException(CubicWebException): + """base class for publishing related exception""" + + def __init__(self, *args, **kwargs): + self.status = kwargs.pop('status', http_client.OK) + super(PublishException, self).__init__(*args, **kwargs) + +class LogOut(PublishException): + """raised to ask for deauthentication of a logged in user""" + def __init__(self, url=None): + super(LogOut, self).__init__() + self.url = url + +class Redirect(PublishException): + """raised to redirect the http request""" + def __init__(self, location, status=http_client.SEE_OTHER): + super(Redirect, self).__init__(status=status) + self.location = location + +class StatusResponse(PublishException): + + def __init__(self, status, content=''): + super(StatusResponse, self).__init__(status=status) + self.content = content + + def __repr__(self): + return '%s(%r, %r)' % (self.__class__.__name__, self.status, self.content) + +# Publish related error + +class RequestError(PublishException): + """raised when a request can't be served because of a bad input""" + + def __init__(self, *args, **kwargs): + kwargs.setdefault('status', http_client.BAD_REQUEST) + super(RequestError, self).__init__(*args, **kwargs) + + +class NothingToEdit(RequestError): + """raised when an edit request doesn't specify any eid to edit""" + + def __init__(self, *args, **kwargs): + kwargs.setdefault('status', http_client.BAD_REQUEST) + super(NothingToEdit, self).__init__(*args, **kwargs) + +class ProcessFormError(RequestError): + """raised when posted data can't be processed by the corresponding field + """ + def __init__(self, *args, **kwargs): + kwargs.setdefault('status', http_client.BAD_REQUEST) + super(ProcessFormError, self).__init__(*args, **kwargs) + +class NotFound(RequestError): + """raised when something was not found. 
In most case, + a 404 error should be returned""" + + def __init__(self, *args, **kwargs): + kwargs.setdefault('status', http_client.NOT_FOUND) + super(NotFound, self).__init__(*args, **kwargs) + +class RemoteCallFailed(RequestError): + """raised when a json remote call fails + """ + def __init__(self, reason='', status=http_client.INTERNAL_SERVER_ERROR): + super(RemoteCallFailed, self).__init__(reason, status=status) + self.reason = reason + + def dumps(self): + return json_dumps({'reason': self.reason}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/action.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/action.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,113 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""abstract action classes for CubicWeb web client + +Actions are typically displayed in an action box, but can also be used +in other parts of the interface (the user menu, the footer, etc.). The +'order', 'category' and 'title' class attributes control how the action will +be displayed. The 'submenu' attribute is only used for actions in the +action box. + +The most important method from a developper point of view in the +:meth:'Action.url' method, which returns a URL on which the navigation +should be directed to perform the action. The common way of +writing that method is to simply return a URL to the current rset with a +special view (with `self._cw.build_url(...)` for instance) + +Many examples are available in :mod:`cubicweb.web.views.actions`. +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from cubicweb import target +from cubicweb.predicates import (partial_relation_possible, match_search_state, + one_line_rset) +from cubicweb.appobject import AppObject + + +class Action(AppObject): + """abstract action. Handle the .search_states attribute to match + request search state. 
+ """ + __registry__ = 'actions' + __select__ = match_search_state('normal') + order = 99 + category = 'moreactions' + # actions in category 'moreactions' can specify a sub-menu in which they should be filed + submenu = None + + def actual_actions(self): + yield self + + def fill_menu(self, box, menu): + """add action(s) to the given submenu of the given box""" + for action in self.actual_actions(): + menu.append(box.action_link(action)) + + def html_class(self): + if self._cw.selected(self.url()): + return 'selected' + + def build_action(self, title, url, **kwargs): + return UnregisteredAction(self._cw, title, url, **kwargs) + + def url(self): + """return the url associated with this action""" + raise NotImplementedError + + +class UnregisteredAction(Action): + """non registered action, used to build boxes""" + category = None + id = None + + def __init__(self, req, title, url, **kwargs): + Action.__init__(self, req) + self.title = req._(title) + self._url = url + self.__dict__.update(kwargs) + + def url(self): + return self._url + + +class LinkToEntityAction(Action): + """base class for actions consisting to create a new object with an initial + relation set to an entity. + + Additionally to EntityAction behaviour, this class is parametrized using + .rtype, .role and .target_etype attributes to check if the action apply and + if the logged user has access to it (see + :class:`~cubicweb.selectors.partial_relation_possible` selector + documentation for more information). + """ + __select__ = (match_search_state('normal') & one_line_rset() + & partial_relation_possible(action='add', strict=True)) + + submenu = 'addrelated' + # to be defined in concrete classes + target_etype = rtype = None + + def url(self): + ttype = self.target_etype + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + linkto = '%s:%s:%s' % (self.rtype, entity.eid, target(self)) + return self._cw.vreg["etypes"].etype_class(ttype).cw_create_url(self._cw, + __redirectpath=entity.rest_path(), __linkto=linkto, + __redirectvid=self._cw.form.get('__redirectvid', '')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/application.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/application.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,532 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""CubicWeb web client application object""" + +__docformat__ = "restructuredtext en" + +import sys +from time import clock, time +from contextlib import contextmanager +from warnings import warn +import json + +from six import text_type, binary_type +from six.moves import http_client + +from logilab.common.deprecation import deprecated + +from rql import BadRQLQuery + +from cubicweb import set_log_methods, cwvreg +from cubicweb import ( + ValidationError, Unauthorized, Forbidden, + AuthenticationError, NoSelectableObject, + CW_EVENT_MANAGER) +from cubicweb.repoapi import anonymous_cnx +from cubicweb.web import LOGGER, component, cors +from cubicweb.web import ( + StatusResponse, DirectResponse, Redirect, NotFound, LogOut, + RemoteCallFailed, InvalidSession, RequestError, PublishException) + +from cubicweb.web.request import CubicWebRequestBase + +# make session manager available through a global variable so the debug view can +# print information about web session +SESSION_MANAGER = None + + +@contextmanager +def anonymized_request(req): + orig_cnx = req.cnx + anon_cnx = anonymous_cnx(orig_cnx.session.repo) + req.set_cnx(anon_cnx) + try: + with anon_cnx: + yield req + finally: + req.set_cnx(orig_cnx) + + + +class CookieSessionHandler(object): + """a session handler using a cookie to store the session identifier""" + + def __init__(self, appli): + self.repo = appli.repo + self.vreg = appli.vreg + self.session_manager = self.vreg['sessions'].select('sessionmanager', + repo=self.repo) + global SESSION_MANAGER + SESSION_MANAGER = self.session_manager + if self.vreg.config.mode != 'test': + # don't try to reset session manager during test, this leads to + # weird failures when running multiple tests + CW_EVENT_MANAGER.bind('after-registry-reload', + self.reset_session_manager) + + def reset_session_manager(self): + data = self.session_manager.dump_data() + self.session_manager = self.vreg['sessions'].select('sessionmanager', + repo=self.repo) + self.session_manager.restore_data(data) + global SESSION_MANAGER + SESSION_MANAGER = self.session_manager + + @property + def clean_sessions_interval(self): + return self.session_manager.clean_sessions_interval + + def clean_sessions(self): + """cleanup sessions which has not been unused since a given amount of + time + """ + self.session_manager.clean_sessions() + + def session_cookie(self, req): + """return a string giving the name of the cookie used to store the + session identifier. + """ + if req.https: + return '__%s_https_session' % self.vreg.config.appid + return '__%s_session' % self.vreg.config.appid + + def get_session(self, req): + """Return a session object corresponding to credentials held by the req + + Session id is searched from : + - # form variable + - cookie + + If no session id is found, try opening a new session with credentials + found in the request. + + Raises AuthenticationError if no session can be found or created. 
+ """ + cookie = req.get_cookie() + sessioncookie = self.session_cookie(req) + try: + sessionid = str(cookie[sessioncookie].value) + session = self.get_session_by_id(req, sessionid) + except (KeyError, InvalidSession): # no valid session cookie + session = self.open_session(req) + return session + + def get_session_by_id(self, req, sessionid): + session = self.session_manager.get_session(req, sessionid) + session.mtime = time() + return session + + def open_session(self, req): + session = self.session_manager.open_session(req) + sessioncookie = self.session_cookie(req) + secure = req.https and req.base_url().startswith('https://') + req.set_cookie(sessioncookie, session.sessionid, + maxage=None, secure=secure, httponly=True) + if not session.anonymous_session: + self.session_manager.postlogin(req, session) + return session + + def logout(self, req, goto_url): + """logout from the instance by cleaning the session and raising + `AuthenticationError` + """ + self.session_manager.close_session(req.session) + req.remove_cookie(self.session_cookie(req)) + raise LogOut(url=goto_url) + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +class CubicWebPublisher(object): + """the publisher is a singleton hold by the web frontend, and is responsible + to publish HTTP request. + + The http server will call its main entry point ``application.handle_request``. + + .. automethod:: cubicweb.web.application.CubicWebPublisher.main_handle_request + + You have to provide both a repository and web-server config at + initialization. In all in one instance both config will be the same. + """ + + def __init__(self, repo, config, session_handler_fact=CookieSessionHandler): + self.info('starting web instance from %s', config.apphome) + self.repo = repo + self.vreg = repo.vreg + # get instance's schema + if not self.vreg.initialized: + config.init_cubes(self.repo.get_cubes()) + self.vreg.init_properties(self.repo.properties()) + self.vreg.set_schema(self.repo.get_schema()) + # set the correct publish method + if config['query-log-file']: + from threading import Lock + self._query_log = open(config['query-log-file'], 'a') + self.handle_request = self.log_handle_request + self._logfile_lock = Lock() + else: + self._query_log = None + self.handle_request = self.main_handle_request + # instantiate session and url resolving helpers + self.session_handler = session_handler_fact(self) + self.set_urlresolver() + CW_EVENT_MANAGER.bind('after-registry-reload', self.set_urlresolver) + + def set_urlresolver(self): + self.url_resolver = self.vreg['components'].select('urlpublisher', + vreg=self.vreg) + + def get_session(self, req): + """Return a session object corresponding to credentials held by the req + + May raise AuthenticationError. 
+ """ + return self.session_handler.get_session(req) + + # publish methods ######################################################### + + def log_handle_request(self, req, path): + """wrapper around _publish to log all queries executed for a given + accessed path + """ + def wrap_set_cnx(func): + def wrap_execute(cnx): + orig_execute = cnx.execute + def execute(rql, kwargs=None, build_descr=True): + tstart, cstart = time(), clock() + rset = orig_execute(rql, kwargs, build_descr=build_descr) + cnx.executed_queries.append((rql, kwargs, time() - tstart, clock() - cstart)) + return rset + return execute + def set_cnx(cnx): + func(cnx) + cnx.execute = wrap_execute(cnx) + cnx.executed_queries = [] + return set_cnx + req.set_cnx = wrap_set_cnx(req.set_cnx) + try: + return self.main_handle_request(req, path) + finally: + cnx = req.cnx + if cnx: + with self._logfile_lock: + try: + result = ['\n'+'*'*80] + result.append(req.url()) + result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q + for q in cnx.executed_queries] + cnx.executed_queries = [] + self._query_log.write('\n'.join(result).encode(req.encoding)) + self._query_log.flush() + except Exception: + self.exception('error while logging queries') + + + + def main_handle_request(self, req, path): + """Process an http request + + Arguments are: + - a Request object + - path of the request object + + It returns the content of the http response. HTTP header and status are + set on the Request object. + """ + if not isinstance(req, CubicWebRequestBase): + warn('[3.15] Application entry point arguments are now (req, path) ' + 'not (path, req)', DeprecationWarning, 2) + req, path = path, req + if req.authmode == 'http': + # activate realm-based auth + realm = self.vreg.config['realm'] + req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False) + content = b'' + try: + try: + session = self.get_session(req) + from cubicweb import repoapi + cnx = repoapi.Connection(session) + req.set_cnx(cnx) + except AuthenticationError: + # Keep the dummy session set at initialisation. + # such session with work to an some extend but raise an + # AuthenticationError on any database access. + import contextlib + @contextlib.contextmanager + def dummy(): + yield + cnx = dummy() + # XXX We want to clean up this approach in the future. But + # several cubes like registration or forgotten password rely on + # this principle. + + # nested try to allow LogOut to delegate logic to AuthenticationError + # handler + try: + ### Try to generate the actual request content + with cnx: + content = self.core_handle(req, path) + # Handle user log-out + except LogOut as ex: + # When authentification is handled by cookie the code that + # raised LogOut must has invalidated the cookie. We can just + # reload the original url without authentification + if self.vreg.config['auth-mode'] == 'cookie' and ex.url: + req.headers_out.setHeader('location', str(ex.url)) + if ex.status is not None: + req.status_out = http_client.SEE_OTHER + # When the authentification is handled by http we must + # explicitly ask for authentification to flush current http + # authentification information + else: + # Render "logged out" content. + # assignement to ``content`` prevent standard + # AuthenticationError code to overwrite it. 
+ content = self.loggedout_content(req) + # let the explicitly reset http credential + raise AuthenticationError() + except Redirect as ex: + # authentication needs redirection (eg openid) + content = self.redirect_handler(req, ex) + # Wrong, absent or Reseted credential + except AuthenticationError: + # If there is an https url configured and + # the request does not use https, redirect to login form + https_url = self.vreg.config['https-url'] + if https_url and req.base_url() != https_url: + req.status_out = http_client.SEE_OTHER + req.headers_out.setHeader('location', https_url + 'login') + else: + # We assume here that in http auth mode the user *May* provide + # Authentification Credential if asked kindly. + if self.vreg.config['auth-mode'] == 'http': + req.status_out = http_client.UNAUTHORIZED + # In the other case (coky auth) we assume that there is no way + # for the user to provide them... + # XXX But WHY ? + else: + req.status_out = http_client.FORBIDDEN + # If previous error handling already generated a custom content + # do not overwrite it. This is used by LogOut Except + # XXX ensure we don't actually serve content + if not content: + content = self.need_login_content(req) + assert isinstance(content, binary_type) + return content + + + def core_handle(self, req, path): + """method called by the main publisher to process + + should return a string containing the resulting page or raise a + `NotFound` exception + + :type path: str + :param path: the path part of the url to publish + + :type req: `web.Request` + :param req: the request object + + :rtype: str + :return: the result of the pusblished url + """ + # don't log form values they may contains sensitive information + self.debug('publish "%s" (%s, form params: %s)', + path, req.session.sessionid, list(req.form)) + # remove user callbacks on a new request (except for json controllers + # to avoid callbacks being unregistered before they could be called) + tstart = clock() + commited = False + try: + ### standard processing of the request + try: + # apply CORS sanity checks + cors.process_request(req, self.vreg.config) + ctrlid, rset = self.url_resolver.process(req, path) + try: + controller = self.vreg['controllers'].select(ctrlid, req, + appli=self) + except NoSelectableObject: + raise Unauthorized(req._('not authorized')) + req.update_search_state() + result = controller.publish(rset=rset) + except cors.CORSPreflight: + # Return directly an empty 200 + req.status_out = 200 + result = b'' + except StatusResponse as ex: + warn('[3.16] StatusResponse is deprecated use req.status_out', + DeprecationWarning, stacklevel=2) + result = ex.content + req.status_out = ex.status + except Redirect as ex: + # Redirect may be raised by edit controller when everything went + # fine, so attempt to commit + result = self.redirect_handler(req, ex) + if req.cnx: + txuuid = req.cnx.commit() + commited = True + if txuuid is not None: + req.data['last_undoable_transaction'] = txuuid + ### error case + except NotFound as ex: + result = self.notfound_content(req) + req.status_out = ex.status + except ValidationError as ex: + result = self.validation_error_handler(req, ex) + except RemoteCallFailed as ex: + result = self.ajax_error_handler(req, ex) + except Unauthorized as ex: + req.data['errmsg'] = req._('You\'re not authorized to access this page. 
' + 'If you think you should, please contact the site administrator.') + req.status_out = http_client.FORBIDDEN + result = self.error_handler(req, ex, tb=False) + except Forbidden as ex: + req.data['errmsg'] = req._('This action is forbidden. ' + 'If you think it should be allowed, please contact the site administrator.') + req.status_out = http_client.FORBIDDEN + result = self.error_handler(req, ex, tb=False) + except (BadRQLQuery, RequestError) as ex: + result = self.error_handler(req, ex, tb=False) + ### pass through exception + except DirectResponse: + if req.cnx: + req.cnx.commit() + raise + except (AuthenticationError, LogOut): + # the rollback is handled in the finally + raise + ### Last defense line + except BaseException as ex: + req.status_out = http_client.INTERNAL_SERVER_ERROR + result = self.error_handler(req, ex, tb=True) + finally: + if req.cnx and not commited: + try: + req.cnx.rollback() + except Exception: + pass # ignore rollback error at this point + self.add_undo_link_to_msg(req) + self.debug('query %s executed in %s sec', req.relative_path(), clock() - tstart) + return result + + # Error handlers + + def redirect_handler(self, req, ex): + """handle redirect + - comply to ex status + - set header field + - return empty content + """ + self.debug('redirecting to %s', str(ex.location)) + req.headers_out.setHeader('location', str(ex.location)) + assert 300 <= ex.status < 400 + req.status_out = ex.status + return b'' + + def validation_error_handler(self, req, ex): + ex.translate(req._) # translate messages using ui language + if '__errorurl' in req.form: + forminfo = {'error': ex, + 'values': req.form, + 'eidmap': req.data.get('eidmap', {}) + } + req.session.data[req.form['__errorurl']] = forminfo + # XXX form session key / __error_url should be differentiated: + # session key is 'url + #
[%s]' %( + req.build_url('undo', txuuid=txuuid), req._('undo')) + req.append_to_redirect_message(msg) + + def ajax_error_handler(self, req, ex): + req.set_header('content-type', 'application/json') + status = http_client.INTERNAL_SERVER_ERROR + if isinstance(ex, PublishException) and ex.status is not None: + status = ex.status + if req.status_out < 400: + # don't overwrite it if it's already set + req.status_out = status + json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': text_type(ex)})) + return json_dumper().encode('utf-8') + + # special case handling + + def need_login_content(self, req): + return self.vreg['views'].main_template(req, 'login') + + def loggedout_content(self, req): + return self.vreg['views'].main_template(req, 'loggedout') + + def notfound_content(self, req): + req.form['vid'] = '404' + view = self.vreg['views'].select('404', req) + template = self.main_template_id(req) + return self.vreg['views'].main_template(req, template, view=view) + + # template stuff + + def main_template_id(self, req): + template = req.form.get('__template', req.property_value('ui.main-template')) + if template not in self.vreg['views']: + template = 'main-template' + return template + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + +set_log_methods(CubicWebPublisher, LOGGER) +set_log_methods(CookieSessionHandler, LOGGER) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/box.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/box.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,197 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
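A minimal sketch of how the publisher defined above is meant to be driven by a front-end (twisted or wsgi); `repo`, `config` and the request object are assumed to be created the usual way and are not shown ::

    from cubicweb.web.application import CubicWebPublisher

    publisher = CubicWebPublisher(repo, config)

    def serve(req, path):
        # handle_request is main_handle_request, or log_handle_request when
        # a query-log-file is configured; it returns the response body,
        # while status and headers are set on the request object itself
        body = publisher.handle_request(req, path)
        return req.status_out, body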
+"""abstract box classes for CubicWeb web client""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six import add_metaclass + +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import class_deprecated, class_renamed + +from cubicweb import Unauthorized, role as get_role +from cubicweb.schema import display_name +from cubicweb.predicates import no_cnx, one_line_rset +from cubicweb.view import View +from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs +from cubicweb.web.htmlwidgets import (BoxLink, BoxWidget, SideBoxWidget, + RawBoxItem, BoxSeparator) +from cubicweb.web.action import UnregisteredAction + + +def sort_by_category(actions, categories_in_order=None): + """return a list of (category, actions_sorted_by_title)""" + result = [] + actions_by_cat = {} + for action in actions: + actions_by_cat.setdefault(action.category, []).append( + (action.title, action) ) + for key, values in actions_by_cat.items(): + actions_by_cat[key] = [act for title, act in sorted(values, key=lambda x: x[0])] + if categories_in_order: + for cat in categories_in_order: + if cat in actions_by_cat: + result.append( (cat, actions_by_cat[cat]) ) + for item in sorted(actions_by_cat.items()): + result.append(item) + return result + + +# old box system, deprecated ################################################### + +@add_metaclass(class_deprecated) +class BoxTemplate(View): + """base template for boxes, usually a (contextual) list of possible + actions. Various classes attributes may be used to control the box + rendering. + + You may override one of the formatting callbacks if this is not necessary + for your custom box. + + Classes inheriting from this class usually only have to override call + to fetch desired actions, and then to do something like :: + + box.render(self.w) + """ + __deprecation_warning__ = '[3.10] *BoxTemplate classes are deprecated, use *CtxComponent instead (%(cls)s)' + + __registry__ = 'ctxcomponents' + __select__ = ~no_cnx() + + categories_in_order = () + cw_property_defs = { + _('visible'): dict(type='Boolean', default=True, + help=_('display the box or not')), + _('order'): dict(type='Int', default=99, + help=_('display order of the box')), + # XXX 'incontext' boxes are handled by the default primary view + _('context'): dict(type='String', default='left', + vocabulary=(_('left'), _('incontext'), _('right')), + help=_('context where this box should be displayed')), + } + context = 'left' + + def sort_actions(self, actions): + """return a list of (category, actions_sorted_by_title)""" + return sort_by_category(actions, self.categories_in_order) + + def mk_action(self, title, url, escape=True, **kwargs): + """factory function to create dummy actions compatible with the + .format_actions method + """ + if escape: + title = xml_escape(title) + return self.box_action(self._action(title, url, **kwargs)) + + def _action(self, title, url, **kwargs): + return UnregisteredAction(self._cw, title, url, **kwargs) + + # formating callbacks + + def boxitem_link_tooltip(self, action): + if action.__regid__: + return u'keyword: %s' % action.__regid__ + return u'' + + def box_action(self, action): + klass = getattr(action, 'html_class', lambda: None)() + return BoxLink(action.url(), self._cw._(action.title), + klass, self.boxitem_link_tooltip(action)) + + +class RQLBoxTemplate(BoxTemplate): + """abstract box for boxes displaying the content of a rql query not + related to the current result set. 
+ """ + + # to be defined in concrete classes + rql = title = None + + def to_display_rql(self): + assert self.rql is not None, self.__regid__ + return (self.rql,) + + def call(self, **kwargs): + try: + rset = self._cw.execute(*self.to_display_rql()) + except Unauthorized: + # can't access to something in the query, forget this box + return + if len(rset) == 0: + return + box = BoxWidget(self._cw._(self.title), self.__regid__) + for i, (teid, tname) in enumerate(rset): + entity = rset.get_entity(i, 0) + box.append(self.mk_action(tname, entity.absolute_url())) + box.render(w=self.w) + + +class UserRQLBoxTemplate(RQLBoxTemplate): + """same as rql box template but the rql is build using the eid of the + request's user + """ + + def to_display_rql(self): + assert self.rql is not None, self.__regid__ + return (self.rql, {'x': self._cw.user.eid}) + + +class EntityBoxTemplate(BoxTemplate): + """base class for boxes related to a single entity""" + __select__ = BoxTemplate.__select__ & one_line_rset() + context = 'incontext' + + def call(self, row=0, col=0, **kwargs): + """classes inheriting from EntityBoxTemplate should define cell_call""" + self.cell_call(row, col, **kwargs) + +from cubicweb.web.component import AjaxEditRelationCtxComponent, EditRelationMixIn + + +class EditRelationBoxTemplate(EditRelationMixIn, EntityBoxTemplate): + """base class for boxes which let add or remove entities linked + by a given relation + + subclasses should define at least id, rtype and target + class attributes. + """ + rtype = None + def cell_call(self, row, col, view=None, **kwargs): + self._cw.add_js('cubicweb.ajax.js') + entity = self.cw_rset.get_entity(row, col) + title = display_name(self._cw, self.rtype, get_role(self), + context=entity.cw_etype) + box = SideBoxWidget(title, self.__regid__) + related = self.related_boxitems(entity) + unrelated = self.unrelated_boxitems(entity) + box.extend(related) + if related and unrelated: + box.append(BoxSeparator()) + box.extend(unrelated) + box.render(self.w) + + def box_item(self, entity, etarget, rql, label): + label = super(EditRelationBoxTemplate, self).box_item( + entity, etarget, rql, label) + return RawBoxItem(label, liclass=u'invisible') + + +AjaxEditRelationBoxTemplate = class_renamed( + 'AjaxEditRelationBoxTemplate', AjaxEditRelationCtxComponent, + '[3.10] AjaxEditRelationBoxTemplate has been renamed to AjaxEditRelationCtxComponent (%(cls)s)') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/captcha.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/captcha.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,98 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Simple captcha library, based on PIL. Monkey patch functions in this module +if you want something better... 
+""" + +__docformat__ = "restructuredtext en" + +from random import randint, choice +from io import BytesIO + +from six.moves import range + +from PIL import Image, ImageFont, ImageDraw, ImageFilter + + +from time import time + +from cubicweb import tags +from cubicweb.web import ProcessFormError, formwidgets as fw + + +def pil_captcha(text, fontfile, fontsize): + """Generate a captcha image. Return a PIL image object. + + adapted from http://code.activestate.com/recipes/440588/ + """ + # randomly select the foreground color + fgcolor = randint(0, 0xffff00) + # make the background color the opposite of fgcolor + bgcolor = fgcolor ^ 0xffffff + # create a font object + font = ImageFont.truetype(fontfile, fontsize) + # determine dimensions of the text + dim = font.getsize(text) + # create a new image slightly larger that the text + img = Image.new('RGB', (dim[0]+5, dim[1]+5), bgcolor) + draw = ImageDraw.Draw(img) + # draw 100 random colored boxes on the background + x, y = img.size + for num in range(100): + draw.rectangle((randint(0, x), randint(0, y), + randint(0, x), randint(0, y)), + fill=randint(0, 0xffffff)) + # add the text to the image + draw.text((3, 3), text, font=font, fill=fgcolor) + img = img.filter(ImageFilter.EDGE_ENHANCE_MORE) + return img + + +def captcha(fontfile, fontsize, size=5, format='JPEG'): + """Generate an arbitrary text, return it together with a buffer containing + the captcha image for the text + """ + text = u''.join(choice('QWERTYUOPASDFGHJKLZXCVBNM') for i in range(size)) + img = pil_captcha(text, fontfile, fontsize) + out = BytesIO() + img.save(out, format) + out.seek(0) + return text, out + + +class CaptchaWidget(fw.TextInput): + def render(self, form, field, renderer=None): + # t=int(time()*100) to make sure img is not cached + src = form._cw.build_url('view', vid='captcha', t=int(time()*100), + captchakey=field.input_name(form)) + img = tags.img(src=src, alt=u'captcha') + img = u'
<div class="captcha">%s</div>
' % img + return img + super(CaptchaWidget, self).render(form, field, renderer) + + def process_field_data(self, form, field): + captcha = form._cw.session.data.pop(field.input_name(form), None) + val = super(CaptchaWidget, self).process_field_data(form, field) + if val is None: + return val # required will be checked by field + if captcha is None: + msg = form._cw._('unable to check captcha, please try again') + raise ProcessFormError(msg) + elif val.lower() != captcha.lower(): + msg = form._cw._('incorrect captcha value') + raise ProcessFormError(msg) + return val diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/component.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/component.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,752 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""abstract component class and base components definition for CubicWeb web +client +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from six import PY3, add_metaclass, text_type + +from logilab.common.deprecation import class_deprecated, class_renamed, deprecated +from logilab.mtconverter import xml_escape + +from cubicweb import Unauthorized, role, target, tags +from cubicweb.schema import display_name +from cubicweb.uilib import js, domid +from cubicweb.utils import json_dumps, js_href +from cubicweb.view import ReloadableMixIn, Component +from cubicweb.predicates import (no_cnx, paginated_rset, one_line_rset, + non_final_entity, partial_relation_possible, + partial_has_related_entities) +from cubicweb.appobject import AppObject +from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs + + +# abstract base class for navigation components ################################ + +class NavigationComponent(Component): + """abstract base class for navigation components""" + __regid__ = 'navigation' + __select__ = paginated_rset() + + cw_property_defs = { + _('visible'): dict(type='Boolean', default=True, + help=_('display the component or not')), + } + + page_size_property = 'navigation.page-size' + start_param = '__start' + stop_param = '__stop' + page_link_templ = u'%s' + selected_page_link_templ = u'%s' + previous_page_link_templ = next_page_link_templ = page_link_templ + + def __init__(self, req, rset, **kwargs): + super(NavigationComponent, self).__init__(req, rset=rset, **kwargs) + self.starting_from = 0 + self.total = rset.rowcount + + def get_page_size(self): + try: + return self._page_size + except AttributeError: + page_size = self.cw_extra_kwargs.get('page_size') + if page_size is None: + if 'page_size' in self._cw.form: + page_size = int(self._cw.form['page_size']) + else: + page_size = self._cw.property_value(self.page_size_property) + self._page_size = page_size + return page_size + + def 
set_page_size(self, page_size): + self._page_size = page_size + + page_size = property(get_page_size, set_page_size) + + def page_boundaries(self): + try: + stop = int(self._cw.form[self.stop_param]) + 1 + start = int(self._cw.form[self.start_param]) + except KeyError: + start, stop = 0, self.page_size + if start >= len(self.cw_rset): + start, stop = 0, self.page_size + self.starting_from = start + return start, stop + + def clean_params(self, params): + if self.start_param in params: + del params[self.start_param] + if self.stop_param in params: + del params[self.stop_param] + + def page_url(self, path, params, start=None, stop=None): + params = dict(params) + params['__fromnavigation'] = 1 + if start is not None: + params[self.start_param] = start + if stop is not None: + params[self.stop_param] = stop + view = self.cw_extra_kwargs.get('view') + if view is not None and hasattr(view, 'page_navigation_url'): + url = view.page_navigation_url(self, path, params) + elif path in ('json', 'ajax'): + # 'ajax' is the new correct controller, but the old 'json' + # controller should still be supported + url = self.ajax_page_url(**params) + else: + url = self._cw.build_url(path, **params) + # XXX hack to avoid opening a new page containing the evaluation of the + # js expression on ajax call + if url.startswith('javascript:'): + url += '; $.noop();' + return url + + def ajax_page_url(self, **params): + divid = params.setdefault('divid', 'pageContent') + params['rql'] = self.cw_rset.printable_rql() + return js_href("$(%s).loadxhtml(AJAX_PREFIX_URL, %s, 'get', 'swap')" % ( + json_dumps('#'+divid), js.ajaxFuncArgs('view', params))) + + def page_link(self, path, params, start, stop, content): + url = xml_escape(self.page_url(path, params, start, stop)) + if start == self.starting_from: + return self.selected_page_link_templ % (url, content, content) + return self.page_link_templ % (url, content, content) + + @property + def prev_icon_url(self): + return xml_escape(self._cw.data_url('go_prev.png')) + + @property + def next_icon_url(self): + return xml_escape(self._cw.data_url('go_next.png')) + + @property + def no_previous_page_link(self): + return (u'%s' % + (self.prev_icon_url, self._cw._('there is no previous page'))) + + @property + def no_next_page_link(self): + return (u'%s' % + (self.next_icon_url, self._cw._('there is no next page'))) + + @property + def no_content_prev_link(self): + return (u'%s' % ( + (self.prev_icon_url, self._cw._('no content prev link')))) + + @property + def no_content_next_link(self): + return (u'%s' % + (self.next_icon_url, self._cw._('no content next link'))) + + def previous_link(self, path, params, content=None, title=_('previous_results')): + if not content: + content = self.no_content_prev_link + start = self.starting_from + if not start : + return self.no_previous_page_link + start = max(0, start - self.page_size) + stop = start + self.page_size - 1 + url = xml_escape(self.page_url(path, params, start, stop)) + return self.previous_page_link_templ % (url, self._cw._(title), content) + + def next_link(self, path, params, content=None, title=_('next_results')): + if not content: + content = self.no_content_next_link + start = self.starting_from + self.page_size + if start >= self.total: + return self.no_next_page_link + stop = start + self.page_size - 1 + url = xml_escape(self.page_url(path, params, start, stop)) + return self.next_page_link_templ % (url, self._cw._(title), content) + + +# new contextual components system ############################################# 
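Before moving on to the contextual components, a minimal sketch of how the pagination API above is typically consumed. The class below is hypothetical (it is not part of this changeset) and assumes the usual `call()`/`self.w` rendering protocol of CubicWeb components:

.. sourcecode:: python

    class PrevNextOnlyPager(NavigationComponent):
        """hypothetical pager rendering only previous/next links"""
        __regid__ = 'navigation'

        def call(self):
            # page_boundaries() reads __start/__stop from the request form,
            # falling back to [0, page_size) when absent or out of range
            start, stop = self.page_boundaries()
            params = dict(self._cw.form)
            self.clean_params(params)  # drop __start/__stop before rebuilding URLs
            basepath = self._cw.relative_path(includeparams=False)
            self.w(self.previous_link(basepath, params))
            self.w(u' %s - %s of %s ' % (start + 1, min(stop, self.total), self.total))
            self.w(self.next_link(basepath, params))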
+ +def override_ctx(cls, **kwargs): + cwpdefs = cls.cw_property_defs.copy() + cwpdefs['context'] = cwpdefs['context'].copy() + cwpdefs['context'].update(kwargs) + return cwpdefs + + +class EmptyComponent(Exception): + """some selectable component has actually no content and should not be + rendered + """ + + +class Link(object): + """a link to a view or action in the ui. + + Use this rather than `cw.web.htmlwidgets.BoxLink`. + + Note this class could probably be avoided with a proper DOM on the server + side. + """ + newstyle = True + + def __init__(self, href, label, **attrs): + self.href = href + self.label = label + self.attrs = attrs + + def __unicode__(self): + return tags.a(self.label, href=self.href, **self.attrs) + + if PY3: + __str__ = __unicode__ + + def render(self, w): + w(tags.a(self.label, href=self.href, **self.attrs)) + + def __repr__(self): + return '<%s: href=%r label=%r %r>' % (self.__class__.__name__, + self.href, self.label, self.attrs) + + +class Separator(object): + """a menu separator. + + Use this rather than `cw.web.htmlwidgets.BoxSeparator`. + """ + newstyle = True + + def render(self, w): + w(u'
') + + +def _bwcompatible_render_item(w, item): + if hasattr(item, 'render'): + if getattr(item, 'newstyle', False): + if isinstance(item, Separator): + w(u'') + item.render(w) + w(u'
    ') + else: + w(u'
  • ') + item.render(w) + w(u'
  • ') + else: + item.render(w) # XXX displays
  • by itself + else: + w(u'
  • %s
  • ' % item) + + +class Layout(Component): + __regid__ = 'component_layout' + __abstract__ = True + + def init_rendering(self): + """init view for rendering. Return true if we should go on, false + if we should stop now. + """ + view = self.cw_extra_kwargs['view'] + try: + view.init_rendering() + except Unauthorized as ex: + self.warning("can't render %s: %s", view, ex) + return False + except EmptyComponent: + return False + return True + + +class LayoutableMixIn(object): + layout_id = None # to be defined in concret class + layout_args = {} + + def layout_render(self, w, **kwargs): + getlayout = self._cw.vreg['components'].select + layout = getlayout(self.layout_id, self._cw, **self.layout_select_args()) + layout.render(w) + + def layout_select_args(self): + args = dict(rset=self.cw_rset, row=self.cw_row, col=self.cw_col, + view=self) + args.update(self.layout_args) + return args + + +class CtxComponent(LayoutableMixIn, AppObject): + """base class for contextual components. The following contexts are + predefined: + + * boxes: 'left', 'incontext', 'right' + * section: 'navcontenttop', 'navcontentbottom', 'navtop', 'navbottom' + * other: 'ctxtoolbar' + + The 'incontext', 'navcontenttop', 'navcontentbottom' and 'ctxtoolbar' + contexts are handled by the default primary view, others by the default main + template. + + All subclasses may not support all those contexts (for instance if it can't + be displayed as box, or as a toolbar icon). You may restrict allowed context + as follows: + + .. sourcecode:: python + + class MyComponent(CtxComponent): + cw_property_defs = override_ctx(CtxComponent, + vocabulary=[list of contexts]) + context = 'my default context' + + You can configure a component's default context by simply giving an + appropriate value to the `context` class attribute, as seen above. + """ + __registry__ = 'ctxcomponents' + __select__ = ~no_cnx() + + categories_in_order = () + cw_property_defs = { + _('visible'): dict(type='Boolean', default=True, + help=_('display the box or not')), + _('order'): dict(type='Int', default=99, + help=_('display order of the box')), + _('context'): dict(type='String', default='left', + vocabulary=(_('left'), _('incontext'), _('right'), + _('navtop'), _('navbottom'), + _('navcontenttop'), _('navcontentbottom'), + _('ctxtoolbar')), + help=_('context where this component should be displayed')), + } + visible = True + order = 0 + context = 'left' + contextual = False + title = None + layout_id = 'component_layout' + + def render(self, w, **kwargs): + self.layout_render(w, **kwargs) + + def layout_select_args(self): + args = super(CtxComponent, self).layout_select_args() + try: + # XXX ensure context is given when the component is reloaded through + # ajax + args['context'] = self.cw_extra_kwargs['context'] + except KeyError: + args['context'] = self.cw_propval('context') + return args + + def init_rendering(self): + """init rendering callback: that's the good time to check your component + has some content to display. If not, you can still raise + :exc:`EmptyComponent` to inform it should be skipped. + + Also, :exc:`Unauthorized` will be caught, logged, then the component + will be skipped. 
+ """ + self.items = [] + + @property + def domid(self): + """return the HTML DOM identifier for this component""" + return domid(self.__regid__) + + @property + def cssclass(self): + """return the CSS class name for this component""" + return domid(self.__regid__) + + def render_title(self, w): + """return the title for this component""" + if self.title: + w(self._cw._(self.title)) + + def render_body(self, w): + """return the body (content) for this component""" + raise NotImplementedError() + + def render_items(self, w, items=None, klass=u'boxListing'): + if items is None: + items = self.items + assert items + w(u'
      ' % klass) + for item in items: + _bwcompatible_render_item(w, item) + w(u'
    ') + + def append(self, item): + self.items.append(item) + + def action_link(self, action): + return self.link(self._cw._(action.title), action.url()) + + def link(self, title, url, **kwargs): + if self._cw.selected(url): + try: + kwargs['klass'] += ' selected' + except KeyError: + kwargs['klass'] = 'selected' + return Link(url, title, **kwargs) + + def separator(self): + return Separator() + + +class EntityCtxComponent(CtxComponent): + """base class for boxes related to a single entity""" + __select__ = CtxComponent.__select__ & non_final_entity() & one_line_rset() + context = 'incontext' + contextual = True + + def __init__(self, *args, **kwargs): + super(EntityCtxComponent, self).__init__(*args, **kwargs) + try: + entity = kwargs['entity'] + except KeyError: + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + self.entity = entity + + def layout_select_args(self): + args = super(EntityCtxComponent, self).layout_select_args() + args['entity'] = self.entity + return args + + @property + def domid(self): + return domid(self.__regid__) + text_type(self.entity.eid) + + def lazy_view_holder(self, w, entity, oid, registry='views'): + """add a holder and return a URL that may be used to replace this + holder by the html generate by the view specified by registry and + identifier. Registry defaults to 'views'. + """ + holderid = '%sHolder' % self.domid + w(u'
    ' % holderid) + params = self.cw_extra_kwargs.copy() + params.pop('view', None) + params.pop('entity', None) + form = params.pop('formparams', {}) + if entity.has_eid(): + eid = entity.eid + else: + eid = None + form['etype'] = entity.cw_etype + form['tempEid'] = entity.eid + args = [json_dumps(x) for x in (registry, oid, eid, params)] + return self._cw.ajax_replace_url( + holderid, fname='render', arg=args, **form) + + +# high level abstract classes ################################################## + +class RQLCtxComponent(CtxComponent): + """abstract box for boxes displaying the content of a rql query not related + to the current result set. + + Notice that this class's init_rendering implemention is overwriting context + result set (eg `cw_rset`) with the result set returned by execution of + `to_display_rql()`. + """ + rql = None + + def to_display_rql(self): + """return arguments to give to self._cw.execute, as a tuple, to build + the result set to be displayed by this box. + """ + assert self.rql is not None, self.__regid__ + return (self.rql,) + + def init_rendering(self): + super(RQLCtxComponent, self).init_rendering() + self.cw_rset = self._cw.execute(*self.to_display_rql()) + if not self.cw_rset: + raise EmptyComponent() + + def render_body(self, w): + rset = self.cw_rset + if len(rset[0]) == 2: + items = [] + for i, (eid, label) in enumerate(rset): + entity = rset.get_entity(i, 0) + items.append(self.link(label, entity.absolute_url())) + else: + items = [self.link(e.dc_title(), e.absolute_url()) + for e in rset.entities()] + self.render_items(w, items) + + +class EditRelationMixIn(ReloadableMixIn): + + def box_item(self, entity, etarget, fname, label): + """builds HTML link to edit relation between `entity` and `etarget`""" + args = {role(self) : entity.eid, target(self): etarget.eid} + # for each target, provide a link to edit the relation + jscall = js.cw.utils.callAjaxFuncThenReload(fname, + self.rtype, + args['subject'], + args['object']) + return u'[%s] %s' % ( + xml_escape(text_type(jscall)), label, etarget.view('incontext')) + + def related_boxitems(self, entity): + return [self.box_item(entity, etarget, 'delete_relation', u'-') + for etarget in self.related_entities(entity)] + + def related_entities(self, entity): + return entity.related(self.rtype, role(self), entities=True) + + def unrelated_boxitems(self, entity): + return [self.box_item(entity, etarget, 'add_relation', u'+') + for etarget in self.unrelated_entities(entity)] + + def unrelated_entities(self, entity): + """returns the list of unrelated entities, using the entity's + appropriate vocabulary function + """ + skip = set(text_type(e.eid) for e in entity.related(self.rtype, role(self), + entities=True)) + skip.add(None) + skip.add(INTERNAL_FIELD_VALUE) + filteretype = getattr(self, 'etype', None) + entities = [] + form = self._cw.vreg['forms'].select('edition', self._cw, + rset=self.cw_rset, + row=self.cw_row or 0) + field = form.field_by_name(self.rtype, role(self), entity.e_schema) + for _, eid in field.vocabulary(form): + if eid not in skip: + entity = self._cw.entity_from_eid(eid) + if filteretype is None or entity.cw_etype == filteretype: + entities.append(entity) + return entities + +# XXX should be a view usable using uicfg +class EditRelationCtxComponent(EditRelationMixIn, EntityCtxComponent): + """base class for boxes which let add or remove entities linked by a given + relation + + subclasses should define at least id, rtype and target class attributes. 
+ """ + # to be defined in concrete classes + rtype = None + + def render_title(self, w): + w(display_name(self._cw, self.rtype, role(self), + context=self.entity.cw_etype)) + + def render_body(self, w): + self._cw.add_js('cubicweb.ajax.js') + related = self.related_boxitems(self.entity) + unrelated = self.unrelated_boxitems(self.entity) + self.items.extend(related) + if related and unrelated: + self.items.append(u'
    ') + self.items.extend(unrelated) + self.render_items(w) + + +class AjaxEditRelationCtxComponent(EntityCtxComponent): + __select__ = EntityCtxComponent.__select__ & ( + partial_relation_possible(action='add') | partial_has_related_entities()) + + # view used to display related entties + item_vid = 'incontext' + # values separator when multiple values are allowed + separator = ',' + # msgid of the message to display when some new relation has been added/removed + added_msg = None + removed_msg = None + + # to be defined in concrete classes + rtype = role = target_etype = None + # class attributes below *must* be set in concrete classes (additionally to + # rtype / role [/ target_etype]. They should correspond to js_* methods on + # the json controller + + # function(eid) + # -> expected to return a list of values to display as input selector + # vocabulary + fname_vocabulary = None + + # function(eid, value) + # -> handle the selector's input (eg create necessary entities and/or + # relations). If the relation is multiple, you'll get a list of value, else + # a single string value. + fname_validate = None + + # function(eid, linked entity eid) + # -> remove the relation + fname_remove = None + + def __init__(self, *args, **kwargs): + super(AjaxEditRelationCtxComponent, self).__init__(*args, **kwargs) + self.rdef = self.entity.e_schema.rdef(self.rtype, self.role, self.target_etype) + + def render_title(self, w): + w(self.rdef.rtype.display_name(self._cw, self.role, + context=self.entity.cw_etype)) + + def add_js_css(self): + self._cw.add_js(('jquery.ui.js', 'cubicweb.widgets.js')) + self._cw.add_js(('cubicweb.ajax.js', 'cubicweb.ajax.box.js')) + self._cw.add_css('jquery.ui.css') + return True + + def render_body(self, w): + req = self._cw + entity = self.entity + related = entity.related(self.rtype, self.role) + if self.role == 'subject': + mayadd = self.rdef.has_perm(req, 'add', fromeid=entity.eid) + else: + mayadd = self.rdef.has_perm(req, 'add', toeid=entity.eid) + js_css_added = False + if mayadd: + js_css_added = self.add_js_css() + _ = req._ + if related: + maydel = None + w(u'') + for rentity in related.entities(): + if maydel is None: + # Only check permission for the first related. + if self.role == 'subject': + fromeid, toeid = entity.eid, rentity.eid + else: + fromeid, toeid = rentity.eid, entity.eid + maydel = self.rdef.has_perm( + req, 'delete', fromeid=fromeid, toeid=toeid) + # for each related entity, provide a link to remove the relation + subview = rentity.view(self.item_vid) + if maydel: + if not js_css_added: + js_css_added = self.add_js_css() + jscall = text_type(js.ajaxBoxRemoveLinkedEntity( + self.__regid__, entity.eid, rentity.eid, + self.fname_remove, + self.removed_msg and _(self.removed_msg))) + w(u'' + '' % (xml_escape(jscall), + subview)) + else: + w(u'' % (subview)) + w(u'
    %s
    %s
    ') + else: + w(_('no related entity')) + if mayadd: + multiple = self.rdef.role_cardinality(self.role) in '*+' + w(u'
    ') + jscall = text_type(js.ajaxBoxShowSelector( + self.__regid__, entity.eid, self.fname_vocabulary, + self.fname_validate, self.added_msg and _(self.added_msg), + _(stdmsgs.BUTTON_OK[0]), _(stdmsgs.BUTTON_CANCEL[0]), + multiple and self.separator)) + w('%s' % ( + xml_escape(jscall), + multiple and _('add_relation') or _('update_relation'))) + w(u'') + w(u'
    ' % self.domid) + w(u'
    ') + + +class RelatedObjectsCtxComponent(EntityCtxComponent): + """a contextual component to display entities related to another""" + __select__ = EntityCtxComponent.__select__ & partial_has_related_entities() + context = 'navcontentbottom' + rtype = None + role = 'subject' + + vid = 'list' + + def render_body(self, w): + rset = self.entity.related(self.rtype, role(self)) + self._cw.view(self.vid, rset, w=w) + + +# old contextual components, deprecated ######################################## + +@add_metaclass(class_deprecated) +class EntityVComponent(Component): + """abstract base class for additinal components displayed in content + headers and footer according to: + + * the displayed entity's type + * a context (currently 'header' or 'footer') + + it should be configured using .accepts, .etype, .rtype, .target and + .context class attributes + """ + __deprecation_warning__ = '[3.10] *VComponent classes are deprecated, use *CtxComponent instead (%(cls)s)' + + __registry__ = 'ctxcomponents' + __select__ = one_line_rset() + + cw_property_defs = { + _('visible'): dict(type='Boolean', default=True, + help=_('display the component or not')), + _('order'): dict(type='Int', default=99, + help=_('display order of the component')), + _('context'): dict(type='String', default='navtop', + vocabulary=(_('navtop'), _('navbottom'), + _('navcontenttop'), _('navcontentbottom'), + _('ctxtoolbar')), + help=_('context where this component should be displayed')), + } + + context = 'navcontentbottom' + + def call(self, view=None): + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + self.cell_call(0, 0, view=view) + + def cell_call(self, row, col, view=None): + self.entity_call(self.cw_rset.get_entity(row, col), view=view) + + def entity_call(self, entity, view=None): + raise NotImplementedError() + +class RelatedObjectsVComponent(EntityVComponent): + """a section to display some related entities""" + __select__ = EntityVComponent.__select__ & partial_has_related_entities() + + vid = 'list' + # to be defined in concrete classes + rtype = title = None + + def rql(self): + """override this method if you want to use a custom rql query""" + return None + + def cell_call(self, row, col, view=None): + rql = self.rql() + if rql is None: + entity = self.cw_rset.get_entity(row, col) + rset = entity.related(self.rtype, role(self)) + else: + eid = self.cw_rset[row][col] + rset = self._cw.execute(self.rql(), {'x': eid}) + if not rset.rowcount: + return + self.w(u'
    ' % self.cssclass) + self.w(u'

    %s

    \n' % self._cw._(self.title).capitalize()) + self.wview(self.vid, rset) + self.w(u'
    ') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/controller.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/controller.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,221 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""abstract controller classe for CubicWeb web client""" + +__docformat__ = "restructuredtext en" + +from six import PY2 + +from logilab.mtconverter import xml_escape +from logilab.common.registry import yes +from logilab.common.deprecation import deprecated + +from cubicweb.appobject import AppObject +from cubicweb.mail import format_mail +from cubicweb.web import LOGGER, Redirect, RequestError + + +NAVIGATION_PARAMETERS = (('vid', '__redirectvid'), + ('rql', '__redirectrql'), + ('__redirectpath', '__redirectpath'), + ('__redirectparams', '__redirectparams'), + ) +NAV_FORM_PARAMETERS = tuple(fp for ap, fp in NAVIGATION_PARAMETERS) + +def redirect_params(form): + """transform redirection parameters into navigation parameters + """ + params = {} + # extract navigation parameters from redirection parameters + for navparam, redirectparam in NAVIGATION_PARAMETERS: + if navparam == redirectparam: + continue + if redirectparam in form: + params[navparam] = form[redirectparam] + return params + +def append_url_params(url, params): + """append raw parameters to the url. Given parameters, if any, are expected + to be already url-quoted. + """ + if params: + if not '?' in url: + url += '?' + else: + url += '&' + url += params + return url + + +class Controller(AppObject): + """a controller is responsible to make necessary stuff to publish + a request. There is usually at least one standard "view" controller + and another linked by forms to edit objects ("edit"). 
+ """ + __registry__ = 'controllers' + __select__ = yes() + + def __init__(self, *args, **kwargs): + self.appli = kwargs.pop('appli', None) + super(Controller, self).__init__(*args, **kwargs) + # attributes use to control after edition redirection + self._after_deletion_path = None + self._edited_entity = None + + def publish(self, rset=None): + """publish the current request, with an optional input rset""" + raise NotImplementedError + + # generic methods useful for concrete implementations ###################### + + def process_rql(self): + """execute rql if specified""" + req = self._cw + rql = req.form.get('rql') + if rql: + req.ensure_ro_rql(rql) + if PY2 and not isinstance(rql, unicode): + rql = unicode(rql, req.encoding) + pp = req.vreg['components'].select_or_none('magicsearch', req) + if pp is not None: + return pp.process_query(rql) + if 'eid' in req.form and not isinstance(req.form['eid'], list): + return req.eid_rset(req.form['eid']) + return None + + def notify_edited(self, entity): + """called by edit_entity() to notify which entity is edited""" + # NOTE: we can't use entity.rest_path() at this point because + # rest_path() could rely on schema constraints (such as a required + # relation) that might not be satisfied yet (in case of creations) + if not self._edited_entity: + self._edited_entity = entity + + @deprecated('[3.18] call view.set_http_cache_headers then ' + '.is_client_cache_valid() method and return instead') + def validate_cache(self, view): + view.set_http_cache_headers() + self._cw.validate_cache() + + def sendmail(self, recipient, subject, body): + senderemail = self._cw.user.cw_adapt_to('IEmailable').get_email() + msg = format_mail({'email' : senderemail, + 'name' : self._cw.user.dc_title(),}, + [recipient], body, subject) + if not self._cw.vreg.config.sendmails([(msg, [recipient])]): + msg = self._cw._('could not connect to the SMTP server') + url = self._cw.build_url(__message=msg) + raise Redirect(url) + + def reset(self): + """reset form parameters and redirect to a view determinated by given + parameters + """ + newparams = {} + # sets message if needed + # XXX - don't call .message twice since it pops the id + msg = self._cw.message + if msg: + newparams['_cwmsgid'] = self._cw.set_redirect_message(msg) + if '__action_apply' in self._cw.form: + self._return_to_edition_view(newparams) + else: + self._return_to_original_view(newparams) + + def _return_to_original_view(self, newparams): + """validate-button case""" + # transforms __redirect[*] parameters into regular form parameters + newparams.update(redirect_params(self._cw.form)) + # find out if we have some explicit `rql` needs + rql = newparams.pop('rql', None) + # if rql is needed (explicit __redirectrql or multiple deletions for + # instance), we have to use the old `view?rql=...` form + if rql: + path = 'view' + newparams['rql'] = rql + elif '__redirectpath' in self._cw.form: + # if redirect path was explicitly specified in the form, use it + path = self._cw.form['__redirectpath'] + if (self._edited_entity and path != self._edited_entity.rest_path() + and '_cwmsgid' in newparams): + # are we here on creation or modification? 
+ if any(eid == self._edited_entity.eid + for eid in self._cw.data.get('eidmap', {}).values()): + msg = self._cw._('click here to see created entity') + else: + msg = self._cw._('click here to see edited entity') + msg = u'(%s)' % (xml_escape(self._edited_entity.absolute_url()), msg) + self._cw.append_to_redirect_message(msg) + elif self._after_deletion_path: + # else it should have been set during form processing + path, params = self._after_deletion_path + params = dict(params) # params given as tuple + params.update(newparams) + newparams = params + elif self._edited_entity: + # clear caches in case some attribute participating to the rest path + # has been modified + self._edited_entity.cw_clear_all_caches() + path = self._edited_entity.rest_path() + else: + path = 'view' + url = self._cw.build_url(path, **newparams) + url = append_url_params(url, self._cw.form.get('__redirectparams')) + raise Redirect(url) + + def _return_to_edition_view(self, newparams): + """apply-button case""" + form = self._cw.form + if self._edited_entity: + path = self._edited_entity.rest_path() + newparams.pop('rql', None) + # else, fallback on the old `view?rql=...` url form + elif 'rql' in self._cw.form: + path = 'view' + newparams['rql'] = form['rql'] + else: + self.warning('the edited data seems inconsistent') + path = 'view' + # pick up the correction edition view + if form.get('__form_id'): + newparams['vid'] = form['__form_id'] + # re-insert copy redirection parameters + for redirectparam in NAV_FORM_PARAMETERS: + if redirectparam in form: + newparams[redirectparam] = form[redirectparam] + raise Redirect(self._cw.build_url(path, **newparams)) + + + def _redirect(self, newparams): + """Raise a redirect. We use __redirectpath if it specified, else we + return to the home page. + """ + if '__redirectpath' in self._cw.form: + # if redirect path was explicitly specified in the form, use it + path = self._cw.form['__redirectpath'] + url = self._cw.build_url(path) + url = append_url_params(url, self._cw.form.get('__redirectparams')) + else: + url = self._cw.base_url() + # The newparams must update the params in all cases + url = self._cw.rebuild_url(url, **newparams) + raise Redirect(url) + + +from cubicweb import set_log_methods +set_log_methods(Controller, LOGGER) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/cors.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/cors.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# copyright 2014 Logilab, PARIS + +"""A set of utility functions to handle CORS requests + +Unless specified, all references in this file are related to: + http://www.w3.org/TR/cors + +The provided implementation roughly follows: + http://www.html5rocks.com/static/images/cors_server_flowchart.png + +See also: + https://developer.mozilla.org/en-US/docs/HTTP/Access_control_CORS + +""" + +from six.moves.urllib.parse import urlsplit + +from cubicweb.web import LOGGER +info = LOGGER.info + +class CORSFailed(Exception): + """Raised when cross origin resource sharing checks failed""" + + +class CORSPreflight(Exception): + """Raised when cross origin resource sharing checks detects the + request as a valid preflight request""" + + +def process_request(req, config): + """ + Process a request to apply CORS specification algorithms + + Check whether the CORS specification is respected and set corresponding + headers to ensure response complies with the specification. + + In case of non-compliance, no CORS-related header is set. 
+ """ + base_url = urlsplit(req.base_url()) + expected_host = '://'.join((base_url.scheme, base_url.netloc)) + if not req.get_header('Origin') or req.get_header('Origin') == expected_host: + # not a CORS request, nothing to do + return + try: + # handle cross origin resource sharing (CORS) + if req.http_method() == 'OPTIONS': + if req.get_header('Access-Control-Request-Method'): + # preflight CORS request + process_preflight(req, config) + else: # Simple CORS or actual request + process_simple(req, config) + except CORSFailed as exc: + info('Cross origin resource sharing failed: %s' % exc) + except CORSPreflight: + info('Cross origin resource sharing: valid Preflight request %s') + raise + +def process_preflight(req, config): + """cross origin resource sharing (preflight) + Cf http://www.w3.org/TR/cors/#resource-preflight-requests + """ + origin = check_origin(req, config) + allowed_methods = set(config['access-control-allow-methods']) + allowed_headers = set(config['access-control-allow-headers']) + try: + method = req.get_header('Access-Control-Request-Method') + except ValueError: + raise CORSFailed('Access-Control-Request-Method is incorrect') + if method not in allowed_methods: + raise CORSFailed('Method is not allowed') + try: + req.get_header('Access-Control-Request-Headers', ()) + except ValueError: + raise CORSFailed('Access-Control-Request-Headers is incorrect') + req.set_header('Access-Control-Allow-Methods', allowed_methods, raw=False) + req.set_header('Access-Control-Allow-Headers', allowed_headers, raw=False) + + process_common(req, config, origin) + raise CORSPreflight() + +def process_simple(req, config): + """Handle the Simple Cross-Origin Request case + """ + origin = check_origin(req, config) + exposed_headers = config['access-control-expose-headers'] + if exposed_headers: + req.set_header('Access-Control-Expose-Headers', exposed_headers, raw=False) + process_common(req, config, origin) + +def process_common(req, config, origin): + req.set_header('Access-Control-Allow-Origin', origin) + # in CW, we always support credential/authentication + req.set_header('Access-Control-Allow-Credentials', 'true') + +def check_origin(req, config): + origin = req.get_header('Origin').lower() + allowed_origins = config.get('access-control-allow-origin') + if not allowed_origins: + raise CORSFailed('access-control-allow-origin is not configured') + if '*' not in allowed_origins and origin not in allowed_origins: + raise CORSFailed('Origin is not allowed') + # bit of sanity check; see "6.3 Security" + myhost = urlsplit(req.base_url()).netloc + host = req.get_header('Host') + if host != myhost: + info('cross origin resource sharing detected possible ' + 'DNS rebinding attack Host header != host of base_url: ' + '%s != %s' % (host, myhost)) + raise CORSFailed('Host header and hostname do not match') + # include "Vary: Origin" header (see 6.4) + req.headers_out.addHeader('Vary', 'Origin') + return origin diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/accessories-text-editor.png Binary file cubicweb/web/data/accessories-text-editor.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/add_button.png Binary file cubicweb/web/data/add_button.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/asc.gif Binary file cubicweb/web/data/asc.gif has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/banner.png Binary file cubicweb/web/data/banner.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/bg.gif Binary file 
cubicweb/web/data/bg.gif has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/bg_trame_grise.png Binary file cubicweb/web/data/bg_trame_grise.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/black-check.png Binary file cubicweb/web/data/black-check.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/black-uncheck.png Binary file cubicweb/web/data/black-uncheck.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/bullet.png Binary file cubicweb/web/data/bullet.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/bullet_orange.png Binary file cubicweb/web/data/bullet_orange.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/button.png Binary file cubicweb/web/data/button.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/calendar.gif Binary file cubicweb/web/data/calendar.gif has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/cancel.png Binary file cubicweb/web/data/cancel.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/contextFreeBoxHeader.png Binary file cubicweb/web/data/contextFreeBoxHeader.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/contextualBoxHeader.png Binary file cubicweb/web/data/contextualBoxHeader.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/critical.png Binary file cubicweb/web/data/critical.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/cubicweb.acl.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/data/cubicweb.acl.css Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,51 @@ +/* styles for access control forms) + * + * :organization: Logilab + * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr + */ + +/******************************************************************************/ +/* security edition form (views/management.py) web/views/schema.py */ +/******************************************************************************/ + +h2.schema{ + color: %(aColor)s; +} + +table.schemaInfo td a.users{ + color : #00CC33; + font-weight: bold; +} + +table.schemaInfo td a.guests{ + color: #ff7700; + font-weight: bold; +} + +table.schemaInfo td a.owners{ + color: #8b0000; + font-weight: bold; +} + +table.schemaInfo td a.managers{ + color: #000000; + font-weight: bold; +} + +.discret, +table.schemaInfo td a.grey{ + color:#666; +} + +a.grey:hover{ + color:#000; +} + +.red{ + color: #ff7700; + } + +div#schema_security{ + width:100%; + } \ No newline at end of file diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/cubicweb.ajax.box.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/data/cubicweb.ajax.box.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,96 @@ +/** + * Functions for ajax boxes. + * + * :organization: Logilab + * :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+ * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + */ + +function ajaxBoxValidateSelectorInput(boxid, eid, separator, fname, msg) { + var holderid = cw.utils.domid(boxid) + eid + 'Holder'; + var value = $('#' + holderid + 'Input').val(); + if (separator) { + value = $.map(value.split(separator), jQuery.trim); + } + var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs(fname, null, eid, value)); + d.addCallback(function() { + $('#' + holderid).empty(); + var formparams = ajaxFuncArgs('render', null, 'ctxcomponents', boxid, eid); + $('#' + cw.utils.domid(boxid) + eid).loadxhtml(AJAX_BASE_URL, formparams, null, 'swap'); + if (msg) { + document.location.hash = '#header'; + updateMessage(msg); + } + }); +} + +function ajaxBoxRemoveLinkedEntity(boxid, eid, relatedeid, delfname, msg) { + var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs(delfname, null, eid, relatedeid)); + d.addCallback(function() { + var formparams = ajaxFuncArgs('render', null, 'ctxcomponents', boxid, eid); + $('#' + cw.utils.domid(boxid) + eid).loadxhtml(AJAX_BASE_URL, formparams, null, 'swap'); + if (msg) { + document.location.hash = '#header'; + updateMessage(msg); + } + }); +} + +/** + * .. function:: ajaxBoxShowSelector(boxid, eid, unrelfname, + * addfname, msg, + * oklabel, cancellabel, + * separator=None) + * + * Display an ajax selector within a box of regid `boxid`, for entity with eid + * `eid`. + * + * Other parameters are: + * + * * `addfname`, name of the json controller method to call to add a relation + * + * * `msg`, message to display to the user when a relation has been added + * + * * `oklabel`/`cancellabel`, OK/cancel buttons label + * + * * `separator`, items separator if the field is multi-valued (will be + * considered mono-valued when not specified) + */ +function ajaxBoxShowSelector(boxid, eid, + unrelfname, + addfname, msg, + oklabel, cancellabel, + separator) { + var holderid = cw.utils.domid(boxid) + eid + 'Holder'; + var holder = $('#' + holderid); + if (holder.children().length) { + holder.empty(); + } + else { + var inputid = holderid + 'Input'; + var deferred = loadRemote(AJAX_BASE_URL, ajaxFuncArgs(unrelfname, null, eid)); + deferred.addCallback(function (unrelated) { + var input = INPUT({'type': 'text', 'id': inputid, 'size': 20}); + holder.append(input).show(); + var $input = $(input); + $input.keypress(function (evt) { + if (evt.keyCode == $.ui.keyCode.ENTER) { + ajaxBoxValidateSelectorInput(boxid, eid, separator, addfname, msg); + } + }); + $input.cwautocomplete(unrelated, {multiple: Boolean(separator)}); + var buttons = DIV({'class' : "sgformbuttons"}, + A({href : "javascript: $.noop();", + onclick : cw.utils.strFuncCall('ajaxBoxValidateSelectorInput', + boxid, eid, separator, addfname, msg)}, + oklabel), + ' / ', + A({'href' : "javascript: $.noop();", + 'onclick' : '$("#' + holderid + '").empty()'}, + cancellabel)); + holder.append(buttons); + $input.focus(); + }); + } +} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/data/cubicweb.ajax.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/data/cubicweb.ajax.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,743 @@ +/* copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * contact http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + * This file is part of CubicWeb. 
+ * + * CubicWeb is free software: you can redistribute it and/or modify it under the + * terms of the GNU Lesser General Public License as published by the Free + * Software Foundation, either version 2.1 of the License, or (at your option) + * any later version. + * + * CubicWeb is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more + * details. + * + * You should have received a copy of the GNU Lesser General Public License along + * with CubicWeb. If not, see . + */ + +/** + * .. function:: Deferred + * + * dummy ultra minimalist implementation of deferred for jQuery + */ + +cw.ajax = new Namespace('cw.ajax'); + +function Deferred() { + this.__init__(this); +} + +jQuery.extend(Deferred.prototype, { + __init__: function() { + this._onSuccess = []; + this._onFailure = []; + this._req = null; + this._result = null; + this._error = null; + }, + + addCallback: function(callback) { + if (this._req && (this._req.readyState == 4) && this._result) { + var args = [this._result, this._req]; + jQuery.merge(args, cw.utils.sliceList(arguments, 1)); + callback.apply(null, args); + } + else { + this._onSuccess.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + addErrback: function(callback) { + if (this._req && this._req.readyState == 4 && this._error) { + callback.apply(null, [this._error, this._req]); + } + else { + this._onFailure.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + success: function(result) { + this._result = result; + for (var i = 0; i < this._onSuccess.length; i++) { + var callback = this._onSuccess[i][0]; + var args = [result, this._req]; + jQuery.merge(args, this._onSuccess[i][1]); + callback.apply(null, args); + } + }, + + error: function(xhr, status, error) { + this._error = error; + for (var i = 0; i < this._onFailure.length; i++) { + var callback = this._onFailure[i][0]; + var args = [error, this._req]; + jQuery.merge(args, this._onFailure[i][1]); + if (callback !== undefined) + callback.apply(null, args); + } + } + +}); + +var AJAX_PREFIX_URL = 'ajax'; +var JSON_BASE_URL = BASE_URL + 'json?'; +var AJAX_BASE_URL = BASE_URL + AJAX_PREFIX_URL + '?'; + + +jQuery.extend(cw.ajax, { + /* variant of jquery evalScript with cache: true in ajax call */ + _evalscript: function ( i, elem ) { + var src = elem.getAttribute('src'); + if (src) { + jQuery.ajax({ + url: src, + async: false, + cache: true, + dataType: "script" + }); + } else { + jQuery.globalEval( elem.text || elem.textContent || elem.innerHTML || "" ); + } + if ( elem.parentNode ) { + elem.parentNode.removeChild( elem ); + } + }, + + evalscripts: function ( scripts ) { + if ( scripts.length ) { + jQuery.each(scripts, cw.ajax._evalscript); + } + }, + + /** + * returns true if `url` is a mod_concat-like url + * (e.g. 
http://..../data??resource1.js,resource2.js) + */ + _modconcatLikeUrl: function(url) { + var modconcat_rgx = new RegExp('(' + BASE_URL + 'data/([a-z0-9]+/)?)\\?\\?(.+)'); + return modconcat_rgx.exec(url); + }, + + /** + * decomposes a mod_concat-like url into its corresponding list of + * resources' urls + * >>> _listResources('http://foo.com/data/??a.js,b.js,c.js') + * ['http://foo.com/data/a.js', 'http://foo.com/data/b.js', 'http://foo.com/data/c.js'] + */ + _listResources: function(src) { + var resources = []; + var groups = cw.ajax._modconcatLikeUrl(src); + if (groups == null) { + resources.push(src); + } else { + var dataurl = groups[1]; + $.each(cw.utils.lastOf(groups).split(','), + function() { + resources.push(dataurl + this); + } + ); + } + return resources; + }, + + _buildMissingResourcesUrl: function(url, loadedResources) { + var resources = cw.ajax._listResources(url); + var missingResources = $.grep(resources, function(resource) { + return $.inArray(resource, loadedResources) == -1; + }); + cw.utils.extend(loadedResources, missingResources); + var missingResourceUrl = null; + if (missingResources.length == 1) { + // only one resource missing: build a node with a single resource url + // (maybe the browser has it in cache already) + missingResourceUrl = missingResources[0]; + } else if (missingResources.length > 1) { + // several resources missing: build a node with a concatenated + // resources url + var dataurl = cw.ajax._modconcatLikeUrl(url)[1]; + var missing_path = $.map(missingResources, function(resource) { + return resource.substring(dataurl.length); + }); + missingResourceUrl = dataurl + '??' + missing_path.join(','); + } + return missingResourceUrl; + }, + + _loadAjaxStylesheets: function($responseHead, $head) { + $responseHead.find('link[href]').each(function(i) { + var $srcnode = $(this); + var url = $srcnode.attr('href'); + if (url) { + var missingStylesheetsUrl = cw.ajax._buildMissingResourcesUrl(url, cw.loaded_links); + // compute concat-like url for missing resources and append + // element to $head + if (missingStylesheetsUrl) { + // IE has problems with dynamic CSS insertions. One symptom (among others) + // is a "1 item remaining" message in the status bar. (cf. #2356261) + // document.createStyleSheet needs to be used for this, although it seems + // that IE can't create more than 31 additional stylesheets with + // document.createStyleSheet. + if ($.browser.msie) { + document.createStyleSheet(missingStylesheetsUrl); + } else { + $srcnode.attr('href', missingStylesheetsUrl); + $srcnode.appendTo($head); + } + } + } + }); + $responseHead.find('link[href]').remove(); + }, + + _loadAjaxScripts: function($responseHead, $head) { + $responseHead.find('cubicweb\\:script').each(function(i) { + var $srcnode = $(this); + var url = $srcnode.attr('src'); + if (url) { + var missingScriptsUrl = cw.ajax._buildMissingResourcesUrl(url, cw.loaded_scripts); + if (missingScriptsUrl) { + $srcnode.attr('src', missingScriptsUrl); + /* special handling of + + + + + + + + + + + + + + +
[QUnit harness page markup lost in extraction; recoverable title: "cubicweb.ajax.js functions tests"]
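The JavaScript test files that follow are driven from Python through `cubicweb.devtools.qunit.QUnitTestCase`, as `cubicweb/web/test/test_jscript.py` does later in this patch. A minimal sketch of such a declaration (the test file name is hypothetical):

.. sourcecode:: python

    from cubicweb.devtools import qunit

    class MyWidgetJSTests(qunit.QUnitTestCase):
        # each entry pairs a QUnit test file with the scripts it depends on;
        # in this patch, dependencies live under /data/ and tests under /static/
        all_js_tests = (
            ("/static/jstests/test_mywidget.js", (
                "/data/cubicweb.js",
                "/data/cubicweb.compat.js",
            ),
            ),
        )

    if __name__ == '__main__':
        from unittest import main
        main()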
      + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/data/static/jstests/test_ajax.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/data/static/jstests/test_ajax.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,274 @@ +$(document).ready(function() { + + QUnit.module("ajax", { + setup: function() { + this.scriptsLength = $('head script[src]').length-1; + this.cssLength = $('head link[rel=stylesheet]').length-1; + // re-initialize cw loaded cache so that each tests run in a + // clean environment, have a lookt at _loadAjaxHtmlHead implementation + // in cubicweb.ajax.js for more information. + cw.loaded_scripts = []; + cw.loaded_links = []; + }, + teardown: function() { + $('head script[src]:lt(' + ($('head script[src]').length - 1 - this.scriptsLength) + ')').remove(); + $('head link[rel=stylesheet]:gt(' + this.cssLength + ')').remove(); + } + }); + + function jsSources() { + return $.map($('head script[src]'), function(script) { + return script.getAttribute('src'); + }); + } + + QUnit.test('test simple h1 inclusion (ajax_url0.html)', function (assert) { + assert.expect(3); + assert.equal($('#qunit-fixture').children().length, 0); + var done = assert.async(); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') + .addCallback(function() { + try { + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + } finally { + done(); + }; + } + ); + }); + + QUnit.test('test simple html head inclusion (ajax_url1.html)', function (assert) { + assert.expect(6); + var scriptsIncluded = jsSources(); + assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), - 1); + var done = assert.async(); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url1.html', null, 'GET') + .addCallback(function() { + try { + var origLength = scriptsIncluded.length; + scriptsIncluded = jsSources(); + // check that foo.js has been prepended to + assert.equal(scriptsIncluded.length, origLength + 1); + assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); + // check that
      has been removed + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('div.ajaxHtmlHead').length, 0); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + } finally { + done(); + }; + } + ); + }); + + QUnit.test('test addCallback', function (assert) { + assert.expect(3); + assert.equal($('#qunit-fixture').children().length, 0); + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); + d.addCallback(function() { + try { + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + } finally { + done(); + }; + }); + }); + + QUnit.test('test callback after synchronous request', function (assert) { + assert.expect(1); + var deferred = new Deferred(); + var result = jQuery.ajax({ + url: 'static/jstests/ajax_url0.html', + async: false, + beforeSend: function(xhr) { + deferred._req = xhr; + }, + success: function(data, status) { + deferred.success(data); + } + }); + var done = assert.async(); + deferred.addCallback(function() { + try { + // add an assertion to ensure the callback is executed + assert.ok(true, "callback is executed"); + } finally { + done(); + }; + }); + }); + + QUnit.test('test addCallback with parameters', function (assert) { + assert.expect(3); + assert.equal($('#qunit-fixture').children().length, 0); + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); + d.addCallback(function(data, req, arg1, arg2) { + try { + assert.equal(arg1, 'Hello'); + assert.equal(arg2, 'world'); + } finally { + done(); + }; + }, + 'Hello', 'world'); + }); + + QUnit.test('test callback after synchronous request with parameters', function (assert) { + assert.expect(3); + var deferred = new Deferred(); + deferred.addCallback(function(data, req, arg1, arg2) { + // add an assertion to ensure the callback is executed + try { + assert.ok(true, "callback is executed"); + assert.equal(arg1, 'Hello'); + assert.equal(arg2, 'world'); + } finally { + done(); + }; + }, + 'Hello', 'world'); + deferred.addErrback(function() { + // throw an exception to start errback chain + try { + throw this._error; + } finally { + done(); + }; + }); + var done = assert.async(); + var result = jQuery.ajax({ + url: 'static/jstests/ajax_url0.html', + async: false, + beforeSend: function(xhr) { + deferred._req = xhr; + }, + success: function(data, status) { + deferred.success(data); + } + }); + }); + + QUnit.test('test addErrback', function (assert) { + assert.expect(1); + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/nonexistent.html', null, 'GET'); + d.addCallback(function() { + // should not be executed + assert.ok(false, "callback is executed"); + }); + d.addErrback(function() { + try { + assert.ok(true, "errback is executed"); + } finally { + done(); + }; + }); + }); + + QUnit.test('test callback execution order', function (assert) { + assert.expect(3); + var counter = 0; + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); + d.addCallback(function() { + assert.equal(++counter, 1); // should be executed first + }); + d.addCallback(function() { + assert.equal(++counter, 2); + }); + d.addCallback(function() { + try { + assert.equal(++counter, 3); + } finally { + done(); + } + }); + }); + + QUnit.test('test already included resources are ignored (ajax_url1.html)', function (assert) { + assert.expect(10); + var scriptsIncluded = 
jsSources(); + // NOTE: + assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), -1); + assert.equal($('head link').length, 1); + /* use endswith because in pytest context we have an absolute path */ + assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); + var done = assert.async(); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url1.html', null, 'GET') + .addCallback(function() { + var origLength = scriptsIncluded.length; + scriptsIncluded = jsSources(); + try { + // check that foo.js has been inserted in + assert.equal(scriptsIncluded.length, origLength + 1); + assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); + // check that
      has been removed + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('div.ajaxHtmlHead').length, 0); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + // qunit.css is not added twice + assert.equal($('head link').length, 1); + /* use endswith because in pytest context we have an absolute path */ + assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); + } finally { + done(); + } + } + ); + }); + + QUnit.test('test synchronous request loadRemote', function (assert) { + var res = loadRemote('static/jstests/ajaxresult.json', {}, + 'GET', true); + assert.deepEqual(res, ['foo', 'bar']); + }); + + QUnit.test('test event on CubicWeb', function (assert) { + assert.expect(1); + var done = assert.async(); + var events = null; + $(CubicWeb).bind('server-response', function() { + // check that server-response event on CubicWeb is triggered + events = 'CubicWeb'; + }); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') + .addCallback(function() { + try { + assert.equal(events, 'CubicWeb'); + } finally { + done(); + }; + } + ); + }); + + QUnit.test('test event on node', function (assert) { + assert.expect(3); + var done = assert.async(); + var nodes = []; + $('#qunit-fixture').bind('server-response', function() { + nodes.push('node'); + }); + $(CubicWeb).bind('server-response', function() { + nodes.push('CubicWeb'); + }); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') + .addCallback(function() { + try { + assert.equal(nodes.length, 2); + // check that server-response event on CubicWeb is triggered + // only once and event server-response on node is triggered + assert.equal(nodes[0], 'CubicWeb'); + assert.equal(nodes[1], 'node'); + } finally { + done(); + }; + } + ); + }); +}); + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/data/static/jstests/test_htmlhelpers.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/data/static/jstests/test_htmlhelpers.html Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + +
[cubicweb/web/test/data/static/jstests/test_htmlhelpers.html: QUnit harness page markup lost in extraction; recoverable title: "cubicweb.htmlhelpers.js functions tests"]
        + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/data/static/jstests/test_htmlhelpers.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/data/static/jstests/test_htmlhelpers.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,36 @@ +$(document).ready(function() { + + QUnit.module("module2", { + setup: function() { + $('#qunit-fixture').append(''); + } + }); + + QUnit.test("test first selected", function (assert) { + $('#theselect').append('' + + '' + + '' + + ''); + var selected = firstSelected(document.getElementById("theselect")); + assert.equal(selected.value, 'bar'); + }); + + QUnit.test("test first selected 2", function (assert) { + $('#theselect').append('' + + '' + + '' + + ''); + var selected = firstSelected(document.getElementById("theselect")); + assert.equal(selected, null); + }); + + QUnit.module("visibilty"); + QUnit.test('toggleVisibility', function (assert) { + $('#qunit-fixture').append('
        '); + toggleVisibility('foo'); + assert.ok($('#foo').hasClass('hidden'), 'check hidden class is set'); + }); + +}); + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/data/static/jstests/test_utils.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/data/static/jstests/test_utils.html Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + +
[cubicweb/web/test/data/static/jstests/test_utils.html: QUnit harness page markup lost in extraction; recoverable title: "cw.utils functions tests"]
          + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/data/static/jstests/test_utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/data/static/jstests/test_utils.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,92 @@ +$(document).ready(function() { + + QUnit.module("datetime"); + + QUnit.test("test full datetime", function (assert) { + assert.equal(cw.utils.toISOTimestamp(new Date(1986, 3, 18, 10, 30, 0, 0)), + '1986-04-18 10:30:00'); + }); + + QUnit.test("test only date", function (assert) { + assert.equal(cw.utils.toISOTimestamp(new Date(1986, 3, 18)), '1986-04-18 00:00:00'); + }); + + QUnit.test("test null", function (assert) { + assert.equal(cw.utils.toISOTimestamp(null), null); + }); + + QUnit.module("parsing"); + QUnit.test("test basic number parsing", function (assert) { + var d = strptime('2008/08/08', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('2008/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('8/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [8, 8, 8, 0, 0]); + d = strptime('0/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [0, 8, 8, 0, 0]); + d = strptime('-10/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [-10, 8, 8, 0, 0]); + d = strptime('-35000', '%Y'); + assert.deepEqual(datetuple(d), [-35000, 1, 1, 0, 0]); + }); + + QUnit.test("test custom format parsing", function (assert) { + var d = strptime('2008-08-08', '%Y-%m-%d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('2008 - ! 08: 08', '%Y - ! %m: %d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('2008-08-08 12:14', '%Y-%m-%d %H:%M'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 12, 14]); + d = strptime('2008-08-08 1:14', '%Y-%m-%d %H:%M'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 1, 14]); + d = strptime('2008-08-08 01:14', '%Y-%m-%d %H:%M'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 1, 14]); + }); + + QUnit.module("sliceList"); + QUnit.test("test slicelist", function (assert) { + var list = ['a', 'b', 'c', 'd', 'e', 'f']; + assert.deepEqual(cw.utils.sliceList(list, 2), ['c', 'd', 'e', 'f']); + assert.deepEqual(cw.utils.sliceList(list, 2, -2), ['c', 'd']); + assert.deepEqual(cw.utils.sliceList(list, -3), ['d', 'e', 'f']); + assert.deepEqual(cw.utils.sliceList(list, 0, -2), ['a', 'b', 'c', 'd']); + assert.deepEqual(cw.utils.sliceList(list), list); + }); + + QUnit.module("formContents", { + setup: function() { + $('#qunit-fixture').append(''); + } + }); + // XXX test fckeditor + QUnit.test("test formContents", function (assert) { + $('#test-form').append(''); + $('#test-form').append(' '); + $('#test-form').append(''); + $('#test-form').append(''); + $('#test-form').append(''); + $('#test-form').append(''); + $('#test-form').append(''); + $('#theselect').append('' + + ''); + //Append an unchecked radio input : should not be in formContents list + $('#test-form').append(''); + $('#test-form').append(''); + assert.deepEqual(cw.utils.formContents($('#test-form')[0]), [ + ['input-text', 'mytextarea', 'choice', 'check', 'theselect'], + ['toto', 'Hello World!', 'no', 'no', 'foo'] + ]); + }); +}); + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/data/static/jstests/utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/data/static/jstests/utils.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + 
+function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i. + +from cubicweb.predicates import has_related_entities +from cubicweb.web.views.ajaxcontroller import ajaxfunc +from cubicweb.web.views.ibreadcrumbs import IBreadCrumbsAdapter + +def _recursive_replace_stream_by_content(tree): + """ Search for streams (i.e. object that have a 'read' method) in a tree + (which branches are lists or tuples), and substitute them by their content, + leaving other leafs identical. A copy of the tree with only lists as + branches is returned. + """ + if not isinstance(tree, (list, tuple)): + if hasattr(tree, 'read'): + return tree.read() + return tree + else: + return [_recursive_replace_stream_by_content(value) + for value in tree] + + +@ajaxfunc(output_type='json') +def fileupload(self): + """ Return a json copy of the web request formin which uploaded files + are read and their content substitute the received streams. + """ + try: + result_dict = {} + for key, value in self._cw.form.items(): + result_dict[key] = _recursive_replace_stream_by_content(value) + return result_dict + except Exception as ex: + import traceback as tb + tb.print_exc(ex) + + +class FolderIBreadCrumbsAdapter(IBreadCrumbsAdapter): + __select__ = IBreadCrumbsAdapter.__select__ & has_related_entities('filed_under') + + def parent_entity(self): + return self.entity.filed_under[0] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,6 @@ +requests +webtest +Twisted +cubicweb-blog +cubicweb-file +cubicweb-tag diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/test_jscript.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/test_jscript.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,38 @@ +from cubicweb.devtools import qunit + +from os import path as osp + + +class JScript(qunit.QUnitTestCase): + + all_js_tests = ( + ("/static/jstests/test_utils.js", ( + "/data/cubicweb.js", + "/data/cubicweb.compat.js", + "/data/cubicweb.python.js", + "/static/jstests/utils.js", + ), + ), + + ("/static/jstests/test_htmlhelpers.js", ( + "/data/cubicweb.js", + "/data/cubicweb.compat.js", + "/data/cubicweb.python.js", + "/data/cubicweb.htmlhelpers.js", + ), + ), + + ("/static/jstests/test_ajax.js", ( + "/data/cubicweb.python.js", + "/data/cubicweb.js", + "/data/cubicweb.compat.js", + "/data/cubicweb.htmlhelpers.js", + "/data/cubicweb.ajax.js", + ), + ), + ) + + +if __name__ == '__main__': + from unittest import main + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/test_views.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/test_views.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,86 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""automatic tests""" +from cubicweb.devtools.testlib import AutoPopulateTest, AutomaticWebTest +from cubicweb.view import AnyRsetView + +class AutomaticWebTest(AutomaticWebTest): + application_rql = [ + 'Any L,F WHERE E is CWUser, E login L, E firstname F', + 'Any L,F,E WHERE E is CWUser, E login L, E firstname F', + 'Any COUNT(X) WHERE X is CWUser', + ] + + def to_test_etypes(self): + # We do not really want to test cube views here. So we can drop testing + # some EntityType. The two Blog types below require the sioc cube that + # we do not want to add as a dependency. + etypes = super(AutomaticWebTest, self).to_test_etypes() + etypes -= set(('Blog', 'BlogEntry')) + return etypes + + +class SomeView(AnyRsetView): + __regid__ = 'someview' + + def call(self): + self._cw.add_js('spam.js') + self._cw.add_js('spam.js') + + +class ManualCubicWebTCs(AutoPopulateTest): + + def test_regr_copy_view(self): + """regression test: make sure we can ask a copy of a + composite entity + """ + with self.admin_access.web_request() as req: + rset = req.execute(u'CWUser X WHERE X login "admin"') + self.view('copy', rset, req=req) + + def test_sortable_js_added(self): + with self.admin_access.web_request() as req: + # sortable.js should not be included by default + rset = req.execute('CWUser X') + self.assertNotIn(b'jquery.tablesorter.js', self.view('oneline', rset, req=req).source) + + with self.admin_access.web_request() as req: + # but should be included by the tableview + rset = req.execute('Any P,F,S LIMIT 1 WHERE P is CWUser, P firstname F, P surname S') + self.assertIn(b'jquery.tablesorter.js', self.view('table', rset, req=req).source) + + def test_js_added_only_once(self): + with self.admin_access.web_request() as req: + self.vreg._loadedmods[__name__] = {} + self.vreg.register(SomeView) + rset = req.execute('CWUser X') + source = self.view('someview', rset, req=req).source + self.assertEqual(source.count(b'spam.js'), 1) + + def test_unrelateddivs(self): + with self.admin_access.client_cnx() as cnx: + group = cnx.create_entity('CWGroup', name=u'R&D') + cnx.commit() + with self.admin_access.web_request(relation='in_group_subject') as req: + rset = req.execute(u'Any X WHERE X is CWUser, X login "admin"') + self.view('unrelateddivs', rset, req=req) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/testutils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/testutils.js Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + +function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i. 
+"""unit tests for cubicweb.web.application""" + +import base64 + +from six import text_type +from six.moves import http_client +from six.moves.http_cookies import SimpleCookie + +from logilab.common.testlib import TestCase, unittest_main +from logilab.common.decorators import clear_cache, classproperty + +from cubicweb import AuthenticationError +from cubicweb import view +from cubicweb.devtools.testlib import CubicWebTC, real_error_handling +from cubicweb.devtools.fake import FakeRequest +from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE +from cubicweb.web.views.basecontrollers import ViewController +from cubicweb.web.application import anonymized_request +from cubicweb import repoapi + +class FakeMapping: + """emulates a mapping module""" + def __init__(self): + self.ENTITIES_MAP = {} + self.ATTRIBUTES_MAP = {} + self.RELATIONS_MAP = {} + +class MockCursor: + def __init__(self): + self.executed = [] + def execute(self, rql, args=None, build_descr=False): + args = args or {} + self.executed.append(rql % args) + + +class FakeController(ViewController): + + def __init__(self, form=None): + self._cw = FakeRequest() + self._cw.form = form or {} + self._cursor = MockCursor() + self._cw.execute = self._cursor.execute + + def new_cursor(self): + self._cursor = MockCursor() + self._cw.execute = self._cursor.execute + + def set_form(self, form): + self._cw.form = form + + +class RequestBaseTC(TestCase): + def setUp(self): + self._cw = FakeRequest() + + + def test_list_arg(self): + """tests the list_arg() function""" + list_arg = self._cw.list_form_param + self.assertEqual(list_arg('arg3', {}), []) + d = {'arg1' : "value1", + 'arg2' : ('foo', INTERNAL_FIELD_VALUE,), + 'arg3' : ['bar']} + self.assertEqual(list_arg('arg1', d, True), ['value1']) + self.assertEqual(d, {'arg2' : ('foo', INTERNAL_FIELD_VALUE), 'arg3' : ['bar'],}) + self.assertEqual(list_arg('arg2', d, True), ['foo']) + self.assertEqual({'arg3' : ['bar'],}, d) + self.assertEqual(list_arg('arg3', d), ['bar',]) + self.assertEqual({'arg3' : ['bar'],}, d) + + + def test_from_controller(self): + self._cw.vreg['controllers'] = {'view': 1, 'login': 1} + self.assertEqual(self._cw.from_controller(), 'view') + req = FakeRequest(url='project?vid=list') + req.vreg['controllers'] = {'view': 1, 'login': 1} + # this assertion is just to make sure that relative_path can be + # correctly computed as it is used in from_controller() + self.assertEqual(req.relative_path(False), 'project') + self.assertEqual(req.from_controller(), 'view') + # test on a valid non-view controller + req = FakeRequest(url='login?x=1&y=2') + req.vreg['controllers'] = {'view': 1, 'login': 1} + self.assertEqual(req.relative_path(False), 'login') + self.assertEqual(req.from_controller(), 'login') + + +class UtilsTC(TestCase): + """test suite for misc application utilities""" + + def setUp(self): + self.ctrl = FakeController() + + #def test_which_mapping(self): + # """tests which mapping is used (application or core)""" + # init_mapping() + # from cubicweb.common import mapping + # self.assertEqual(mapping.MAPPING_USED, 'core') + # sys.modules['mapping'] = FakeMapping() + # init_mapping() + # self.assertEqual(mapping.MAPPING_USED, 'application') + # del sys.modules['mapping'] + + def test_execute_linkto(self): + """tests the execute_linkto() function""" + self.assertEqual(self.ctrl.execute_linkto(), None) + self.assertEqual(self.ctrl._cursor.executed, + []) + + self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:object', + 'eid': 8}) + self.ctrl.execute_linkto() + 
self.assertEqual(self.ctrl._cursor.executed, + ['SET Y works_for X WHERE X eid 8, Y eid %s' % i + for i in (12, 13, 14)]) + + self.ctrl.new_cursor() + self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:subject', + 'eid': 8}) + self.ctrl.execute_linkto() + self.assertEqual(self.ctrl._cursor.executed, + ['SET X works_for Y WHERE X eid 8, Y eid %s' % i + for i in (12, 13, 14)]) + + + self.ctrl.new_cursor() + self.ctrl._cw.form = {'__linkto' : 'works_for:12_13_14:object'} + self.ctrl.execute_linkto(eid=8) + self.assertEqual(self.ctrl._cursor.executed, + ['SET Y works_for X WHERE X eid 8, Y eid %s' % i + for i in (12, 13, 14)]) + + self.ctrl.new_cursor() + self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:subject'}) + self.ctrl.execute_linkto(eid=8) + self.assertEqual(self.ctrl._cursor.executed, + ['SET X works_for Y WHERE X eid 8, Y eid %s' % i + for i in (12, 13, 14)]) + + +class ApplicationTC(CubicWebTC): + + @classproperty + def config(cls): + try: + return cls.__dict__['_config'] + except KeyError: + config = super(ApplicationTC, cls).config + config.global_set_option('allow-email-login', True) + return config + + def test_cnx_user_groups_sync(self): + with self.admin_access.client_cnx() as cnx: + user = cnx.user + self.assertEqual(user.groups, set(('managers',))) + cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) + user = cnx.user + self.assertEqual(user.groups, set(('managers',))) + cnx.commit() + user = cnx.user + self.assertEqual(user.groups, set(('managers', 'guests'))) + # cleanup + cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) + cnx.commit() + + def test_publish_validation_error(self): + with self.admin_access.web_request() as req: + user = req.user + eid = text_type(user.eid) + req.form = { + 'eid': eid, + '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject', + 'login-subject:'+eid: '', # ERROR: no login specified + # just a sample, missing some necessary information for real life + '__errorurl': 'view?vid=edition...' 
+ } + path, params = self.expect_redirect_handle_request(req, 'edit') + forminfo = req.session.data['view?vid=edition...'] + eidmap = forminfo['eidmap'] + self.assertEqual(eidmap, {}) + values = forminfo['values'] + self.assertEqual(values['login-subject:'+eid], '') + self.assertEqual(values['eid'], eid) + error = forminfo['error'] + self.assertEqual(error.entity, user.eid) + self.assertEqual(error.errors['login-subject'], 'required field') + + + def test_validation_error_dont_loose_subentity_data_ctrl(self): + """test creation of two linked entities + + error occurs on the web controller + """ + with self.admin_access.web_request() as req: + # set Y before X to ensure both entities are edited, not only X + req.form = {'eid': ['Y', 'X'], '__maineid': 'X', + '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject', + # missing required field + 'login-subject:X': u'', + # but email address is set + '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject', + 'address-subject:Y': u'bougloup@logilab.fr', + 'use_email-object:Y': 'X', + # necessary to get validation error handling + '__errorurl': 'view?vid=edition...', + } + path, params = self.expect_redirect_handle_request(req, 'edit') + forminfo = req.session.data['view?vid=edition...'] + self.assertEqual(set(forminfo['eidmap']), set('XY')) + self.assertEqual(forminfo['eidmap']['X'], None) + self.assertIsInstance(forminfo['eidmap']['Y'], int) + self.assertEqual(forminfo['error'].entity, 'X') + self.assertEqual(forminfo['error'].errors, + {'login-subject': 'required field'}) + self.assertEqual(forminfo['values'], req.form) + + + def test_validation_error_dont_loose_subentity_data_repo(self): + """test creation of two linked entities + + error occurs on the repository + """ + with self.admin_access.web_request() as req: + # set Y before X to ensure both entities are edited, not only X + req.form = {'eid': ['Y', 'X'], '__maineid': 'X', + '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject', + # already existent user + 'login-subject:X': u'admin', + 'upassword-subject:X': u'admin', 'upassword-subject-confirm:X': u'admin', + '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject', + 'address-subject:Y': u'bougloup@logilab.fr', + 'use_email-object:Y': 'X', + # necessary to get validation error handling + '__errorurl': 'view?vid=edition...', + } + path, params = self.expect_redirect_handle_request(req, 'edit') + forminfo = req.session.data['view?vid=edition...'] + self.assertEqual(set(forminfo['eidmap']), set('XY')) + self.assertIsInstance(forminfo['eidmap']['X'], int) + self.assertIsInstance(forminfo['eidmap']['Y'], int) + self.assertEqual(forminfo['error'].entity, forminfo['eidmap']['X']) + self.assertEqual(forminfo['error'].errors, + {'login-subject': u'the value "admin" is already used, use another one'}) + self.assertEqual(forminfo['values'], req.form) + + def test_ajax_view_raise_arbitrary_error(self): + class ErrorAjaxView(view.View): + __regid__ = 'test.ajax.error' + def call(self): + raise Exception('whatever') + with self.temporary_appobjects(ErrorAjaxView): + with real_error_handling(self.app) as app: + with self.admin_access.web_request(vid='test.ajax.error') as req: + req.ajax_request = True + page = app.handle_request(req, '') + self.assertEqual(http_client.INTERNAL_SERVER_ERROR, + req.status_out) + + def _test_cleaned(self, kwargs, injected, cleaned): + with self.admin_access.web_request(**kwargs) as req: + page = self.app_handle_request(req, 'view') + 
self.assertNotIn(injected.encode('ascii'), page) + self.assertIn(cleaned.encode('ascii'), page) + + def test_nonregr_script_kiddies(self): + """test against current script injection""" + injected = 'toto' + cleaned = 'toto' + for kwargs in ({'vid': injected}, + {'vtitle': injected}, + ): + yield self._test_cleaned, kwargs, injected, cleaned + + def test_site_wide_eproperties_sync(self): + # XXX work in all-in-one configuration but not in twisted for instance + # in which case we need a kindof repo -> http server notification + # protocol + vreg = self.app.vreg + # default value + self.assertEqual(vreg.property_value('ui.language'), 'en') + with self.admin_access.client_cnx() as cnx: + cnx.execute('INSERT CWProperty X: X value "fr", X pkey "ui.language"') + self.assertEqual(vreg.property_value('ui.language'), 'en') + cnx.commit() + self.assertEqual(vreg.property_value('ui.language'), 'fr') + cnx.execute('SET X value "de" WHERE X pkey "ui.language"') + self.assertEqual(vreg.property_value('ui.language'), 'fr') + cnx.commit() + self.assertEqual(vreg.property_value('ui.language'), 'de') + cnx.execute('DELETE CWProperty X WHERE X pkey "ui.language"') + self.assertEqual(vreg.property_value('ui.language'), 'de') + cnx.commit() + self.assertEqual(vreg.property_value('ui.language'), 'en') + + # authentication tests #################################################### + + def test_http_auth_no_anon(self): + req, origsession = self.init_authentication('http') + self.assertAuthFailure(req) + self.app.handle_request(req, 'login') + self.assertEqual(401, req.status_out) + clear_cache(req, 'get_authorization') + authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii')) + req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) + self.assertAuthSuccess(req, origsession) + self.assertRaises(LogOut, self.app_handle_request, req, 'logout') + self.assertEqual(len(self.open_sessions), 0) + + def test_cookie_auth_no_anon(self): + req, origsession = self.init_authentication('cookie') + self.assertAuthFailure(req) + try: + form = self.app.handle_request(req, 'login') + except Redirect as redir: + self.fail('anonymous user should get login form') + clear_cache(req, 'get_authorization') + self.assertIn(b'__login', form) + self.assertIn(b'__password', form) + self.assertFalse(req.cnx) # Mock cnx are False + req.form['__login'] = self.admlogin + req.form['__password'] = self.admpassword + self.assertAuthSuccess(req, origsession) + self.assertRaises(LogOut, self.app_handle_request, req, 'logout') + self.assertEqual(len(self.open_sessions), 0) + + def test_login_by_email(self): + with self.admin_access.client_cnx() as cnx: + login = cnx.user.login + address = login + u'@localhost' + cnx.execute('INSERT EmailAddress X: X address %(address)s, U primary_email X ' + 'WHERE U login %(login)s', {'address': address, 'login': login}) + cnx.commit() + # # option allow-email-login not set + req, origsession = self.init_authentication('cookie') + # req.form['__login'] = address + # req.form['__password'] = self.admpassword + # self.assertAuthFailure(req) + # option allow-email-login set + #origsession.login = address + self.set_option('allow-email-login', True) + req.form['__login'] = address + req.form['__password'] = self.admpassword + self.assertAuthSuccess(req, origsession) + self.assertRaises(LogOut, self.app_handle_request, req, 'logout') + self.assertEqual(len(self.open_sessions), 0) + + def _reset_cookie(self, req): + # preparing the suite of the test + # set session 
id in cookie + cookie = SimpleCookie() + sessioncookie = self.app.session_handler.session_cookie(req) + cookie[sessioncookie] = req.session.sessionid + req.set_request_header('Cookie', cookie[sessioncookie].OutputString(), + raw=True) + clear_cache(req, 'get_authorization') + + def _test_auth_anon(self, req): + asession = self.app.get_session(req) + # important otherwise _reset_cookie will not use the right session + req.set_cnx(repoapi.Connection(asession)) + self.assertEqual(len(self.open_sessions), 1) + self.assertEqual(asession.login, 'anon') + self.assertTrue(asession.anonymous_session) + self._reset_cookie(req) + + def _test_anon_auth_fail(self, req): + self.assertEqual(1, len(self.open_sessions)) + session = self.app.get_session(req) + # important otherwise _reset_cookie will not use the right session + req.set_cnx(repoapi.Connection(session)) + self.assertEqual(req.message, 'authentication failure') + self.assertEqual(req.session.anonymous_session, True) + self.assertEqual(1, len(self.open_sessions)) + self._reset_cookie(req) + + def test_http_auth_anon_allowed(self): + req, origsession = self.init_authentication('http', 'anon') + self._test_auth_anon(req) + authstr = base64.encodestring(b'toto:pouet') + req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) + self._test_anon_auth_fail(req) + authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii')) + req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) + self.assertAuthSuccess(req, origsession) + self.assertRaises(LogOut, self.app_handle_request, req, 'logout') + self.assertEqual(len(self.open_sessions), 0) + + def test_cookie_auth_anon_allowed(self): + req, origsession = self.init_authentication('cookie', 'anon') + self._test_auth_anon(req) + req.form['__login'] = 'toto' + req.form['__password'] = 'pouet' + self._test_anon_auth_fail(req) + req.form['__login'] = self.admlogin + req.form['__password'] = self.admpassword + self.assertAuthSuccess(req, origsession) + self.assertRaises(LogOut, self.app_handle_request, req, 'logout') + self.assertEqual(0, len(self.open_sessions)) + + def test_anonymized_request(self): + with self.admin_access.web_request() as req: + self.assertEqual(self.admlogin, req.session.user.login) + # admin should see anon + admin + self.assertEqual(2, len(list(req.find('CWUser')))) + with anonymized_request(req): + self.assertEqual('anon', req.session.login, 'anon') + # anon should only see anon user + self.assertEqual(1, len(list(req.find('CWUser')))) + self.assertEqual(self.admlogin, req.session.login) + self.assertEqual(2, len(list(req.find('CWUser')))) + + def test_non_regr_optional_first_var(self): + with self.admin_access.web_request() as req: + # expect a rset with None in [0][0] + req.form['rql'] = 'rql:Any OV1, X WHERE X custom_workflow OV1?' + self.app_handle_request(req) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_breadcrumbs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_breadcrumbs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,48 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from cubicweb.devtools.testlib import CubicWebTC + + +class BreadCrumbsTC(CubicWebTC): + + def test_base(self): + with self.admin_access.web_request() as req: + f1 = req.create_entity('Folder', name=u'par&ent') + f2 = req.create_entity('Folder', name=u'chi&ld') + req.cnx.execute('SET F2 filed_under F1 WHERE F1 eid %(f1)s, F2 eid %(f2)s', + {'f1' : f1.eid, 'f2' : f2.eid}) + req.cnx.commit() + self.assertEqual(f2.view('breadcrumbs'), + '' + 'chi&ld' % f2.eid) + childrset = f2.as_rset() + ibc = self.vreg['ctxcomponents'].select('breadcrumbs', req, rset=childrset) + l = [] + ibc.render(l.append) + self.assertMultiLineEqual(' > ' + 'Folder_plural' + ' > par&ent > \n' + '' + 'chi&ld' % (f1.eid, f2.eid), + ''.join(l)) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_controller.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_controller.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,59 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""cubicweb.web.controller unit tests + +""" + +from datetime import datetime, date, time + +from logilab.common.testlib import unittest_main + +from cubicweb.devtools import testlib + +class BaseControllerTC(testlib.CubicWebTC): + + def test_parse_datetime_ok(self): + with self.admin_access.web_request() as req: + ctrl = self.vreg['controllers'].select('view', req) + pd = ctrl._cw.parse_datetime + self.assertIsInstance(pd('2006/06/24 12:18'), datetime) + self.assertIsInstance(pd('2006/06/24'), date) + self.assertIsInstance(pd('2006/06/24 12:18', 'Datetime'), datetime) + self.assertIsInstance(pd('2006/06/24', 'Datetime'), datetime) + self.assertIsInstance(pd('2006/06/24', 'Date'), date) + self.assertIsInstance(pd('12:18', 'Time'), time) + + def test_parse_datetime_ko(self): + with self.admin_access.web_request() as req: + ctrl = self.vreg['controllers'].select('view', req) + pd = ctrl._cw.parse_datetime + self.assertRaises(ValueError, + pd, '2006/06/24 12:188', 'Datetime') + self.assertRaises(ValueError, + pd, '2006/06/240', 'Datetime') + self.assertRaises(ValueError, + pd, '2006/06/24 12:18', 'Date') + self.assertRaises(ValueError, + pd, '2006/24/06', 'Date') + self.assertRaises(ValueError, + pd, '2006/06/240', 'Date') + self.assertRaises(ValueError, + pd, '12:188', 'Time') + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_facet.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_facet.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,421 @@ +from logilab.common.date import datetime2ticks +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web import facet + +class BaseFacetTC(CubicWebTC): + + def prepare_rqlst(self, req, rql='CWUser X', mainvar='X', + expected_baserql='Any X WHERE X is CWUser', + expected_preparedrql='DISTINCT Any WHERE X is CWUser'): + rset = req.cnx.execute(rql) + rqlst = rset.syntax_tree().copy() + filtered_variable, baserql = facet.init_facets(rset, rqlst.children[0], + mainvar=mainvar) + self.assertEqual(filtered_variable.name, mainvar) + self.assertEqual(baserql, expected_baserql) + self.assertEqual(rqlst.as_string(), expected_preparedrql) + return rset, rqlst, filtered_variable + + def _in_group_facet(self, req, cls=facet.RelationFacet, no_relation=False): + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + cls.no_relation = no_relation + f = cls(req, rset=rset, select=rqlst.children[0], + filtered_variable=filtered_variable) + f.__regid__ = 'in_group' + f.rtype = 'in_group' + f.role = 'subject' + f.target_attr = 'name' + guests, managers = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN ' + 'WHERE G name GN, G name IN ("guests", "managers")')] + groups = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN ' + 'WHERE G name GN, G name IN ("guests", "managers")')] + return f, groups + + def test_relation_simple(self): + with self.admin_access.web_request() as req: + f, (guests, managers) = self._in_group_facet(req) + self.assertEqual(f.vocabulary(), + [(u'guests', guests), (u'managers', managers)]) + # ensure rqlst is left unmodified + self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') + #rqlst = rset.syntax_tree() + self.assertEqual(f.possible_values(), + [str(guests), str(managers)]) + # ensure rqlst is left unmodified + self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') + f._cw.form[f.__regid__] = str(guests) + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for 
facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X in_group D, D eid %s' % guests) + + def test_relation_multiple_and(self): + with self.admin_access.web_request() as req: + f, (guests, managers) = self._in_group_facet(req) + f._cw.form[f.__regid__] = [str(guests), str(managers)] + f._cw.form[f.__regid__ + '_andor'] = 'AND' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X in_group A, B eid %s, X in_group B, A eid %s' % (guests, managers)) + + def test_relation_multiple_or(self): + with self.admin_access.web_request() as req: + f, (guests, managers) = self._in_group_facet(req) + f._cw.form[f.__regid__] = [str(guests), str(managers)] + f._cw.form[f.__regid__ + '_andor'] = 'OR' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X in_group A, A eid IN(%s, %s)' % (guests, managers)) + + def test_relation_optional_rel(self): + with self.admin_access.web_request() as req: + rset = req.cnx.execute(u'Any X,GROUP_CONCAT(GN) GROUPBY X ' + 'WHERE X in_group G?, G name GN, NOT G name "users"') + rqlst = rset.syntax_tree().copy() + select = rqlst.children[0] + filtered_variable, baserql = facet.init_facets(rset, select) + + f = facet.RelationFacet(req, rset=rset, + select=select, + filtered_variable=filtered_variable) + f.rtype = 'in_group' + f.role = 'subject' + f.target_attr = 'name' + guests, managers = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN ' + 'WHERE G name GN, G name IN ("guests", "managers")')] + self.assertEqual(f.vocabulary(), + [(u'guests', guests), (u'managers', managers)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"') + #rqlst = rset.syntax_tree() + self.assertEqual(sorted(f.possible_values()), + [str(guests), str(managers)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"') + req.form[f.__regid__] = str(guests) + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users", X in_group D, D eid %s' % guests) + + def test_relation_no_relation_1(self): + with self.admin_access.web_request() as req: + f, (guests, managers) = self._in_group_facet(req, no_relation=True) + self.assertEqual(f.vocabulary(), + [(u'guests', guests), (u'managers', managers)]) + self.assertEqual(f.possible_values(), + [str(guests), str(managers)]) + f._cw.create_entity('CWUser', login=u'hop', upassword='toto') + self.assertEqual(f.vocabulary(), + [(u'', ''), (u'guests', guests), (u'managers', managers)]) + self.assertEqual(f.possible_values(), + [str(guests), str(managers), '']) + f._cw.form[f.__regid__] = '' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, NOT X in_group G') + + def test_relation_no_relation_2(self): + with self.admin_access.web_request() as req: + f, (guests, managers) = self._in_group_facet(req, no_relation=True) + f._cw.form[f.__regid__] = ['', guests] + f.select.save_state() + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, (NOT X in_group B) OR (X in_group A, A eid %s)' % guests) + f.select.recover() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any 
WHERE X is CWUser') + + def test_relationattribute(self): + with self.admin_access.web_request() as req: + f, (guests, managers) = self._in_group_facet(req, cls=facet.RelationAttributeFacet) + self.assertEqual(f.vocabulary(), + [(u'guests', u'guests'), (u'managers', u'managers')]) + # ensure rqlst is left unmodified + self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') + #rqlst = rset.syntax_tree() + self.assertEqual(f.possible_values(), + ['guests', 'managers']) + # ensure rqlst is left unmodified + self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') + f._cw.form[f.__regid__] = u'guests' + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X in_group E, E name "guests"') + + def test_hasrelation(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + f = facet.HasRelationFacet(req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + f.__regid__ = 'has_group' + f.rtype = 'in_group' + f.role = 'subject' + f._cw.form[f.__regid__] = 'feed me' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, EXISTS(X in_group A)') + + def test_daterange(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + f = facet.DateRangeFacet(req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + f.rtype = 'creation_date' + mind, maxd = req.cnx.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X creation_date CD')[0] + self.assertEqual(f.vocabulary(), + [(str(mind), mind), + (str(maxd), maxd)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + #rqlst = rset.syntax_tree() + self.assertEqual(f.possible_values(), + [str(mind), str(maxd)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind)) + req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind)) + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X creation_date >= "%s", ' + 'X creation_date <= "%s"' + % (mind.strftime('%Y/%m/%d'), + mind.strftime('%Y/%m/%d'))) + + def test_attribute(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + f = facet.AttributeFacet(req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + f.rtype = 'login' + self.assertEqual(f.vocabulary(), + [(u'admin', u'admin'), (u'anon', u'anon')]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + #rqlst = rset.syntax_tree() + self.assertEqual(f.possible_values(), + ['admin', 'anon']) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + req.form[f.__regid__] = u'admin' + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X login "admin"') + + def test_bitfield(self): + with self.admin_access.web_request() as req: + rset, 
rqlst, filtered_variable = self.prepare_rqlst(req, + 'CWAttribute X WHERE X ordernum XO', + expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute', + expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute') + f = facet.BitFieldFacet(req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + f.choices = [('un', 1,), ('deux', 2,)] + f.rtype = 'ordernum' + self.assertEqual(f.vocabulary(), + [(u'deux', 2), (u'un', 1)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute') + #rqlst = rset.syntax_tree() + self.assertEqual(f.possible_values(), + ['2', '1']) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute') + req.form[f.__regid__] = '3' + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 3 = (C & 3)") + + def test_bitfield_0_value(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req, + 'CWAttribute X WHERE X ordernum XO', + expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute', + expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute') + f = facet.BitFieldFacet(req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + f.choices = [('zero', 0,), ('un', 1,), ('deux', 2,)] + f.rtype = 'ordernum' + self.assertEqual(f.vocabulary(), + [(u'deux', 2), (u'un', 1), (u'zero', 0)]) + self.assertEqual(f.possible_values(), + ['2', '1', '0']) + req.form[f.__regid__] = '0' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 0 = C") + + def test_rql_path_eid(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + class RPF(facet.RQLPathFacet): + path = [('X created_by U'), ('U owned_by O'), ('O login OL')] + filter_variable = 'O' + label_variable = 'OL' + f = RPF(req, rset=rset, select=rqlst.children[0], + filtered_variable=filtered_variable) + self.assertEqual(f.vocabulary(), [(u'admin', req.user.eid),]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + #rqlst = rset.syntax_tree() + self.assertEqual(f.possible_values(), + [str(req.user.eid),]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + req.form[f.__regid__] = '1' + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + "DISTINCT Any WHERE X is CWUser, X created_by F, F owned_by G, G eid 1") + + def test_rql_path_eid_no_label(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + class RPF(facet.RQLPathFacet): + path = [('X created_by U'), ('U owned_by O'), ('O login OL')] + filter_variable = 'O' + f = RPF(req, rset=rset, select=rqlst.children[0], + filtered_variable=filtered_variable) + self.assertEqual(f.vocabulary(), [(str(req.user.eid), req.user.eid),]) + + def test_rql_path_attr(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + class RPF(facet.RQLPathFacet): + 
path = [('X created_by U'), ('U owned_by O'), ('O login OL')] + filter_variable = 'OL' + f = RPF(req, rset=rset, select=rqlst.children[0], + filtered_variable=filtered_variable) + + self.assertEqual(f.vocabulary(), [(u'admin', 'admin'),]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + self.assertEqual(f.possible_values(), ['admin',]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + req.form[f.__regid__] = u'admin' + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login "admin"') + + def test_rql_path_check_filter_label_variable(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) + class RPF(facet.RQLPathFacet): + path = [('X created_by U'), ('U owned_by O'), ('O login OL')] + filter_variable = 'OL' + label_variable = 'OL' + self.assertRaises(AssertionError, RPF, req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + + + def test_rqlpath_range(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepare_rqlst(req) + class RRF(facet.DateRangeRQLPathFacet): + path = [('X created_by U'), ('U owned_by O'), ('O creation_date OL')] + filter_variable = 'OL' + f = RRF(req, rset=rset, select=rqlst.children[0], + filtered_variable=filtered_variable) + mind, maxd = req.cnx.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X created_by U, U owned_by O, O creation_date CD')[0] + self.assertEqual(f.vocabulary(), [(str(mind), mind), + (str(maxd), maxd)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + self.assertEqual(f.possible_values(), + [str(mind), str(maxd)]) + # ensure rqlst is left unmodified + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') + req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind)) + req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind)) + f.add_rql_restrictions() + # selection is cluttered because rqlst has been prepared for facet (it + # is not in real life) + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H creation_date >= "%s", ' + 'H creation_date <= "%s"' + % (mind.strftime('%Y/%m/%d'), + mind.strftime('%Y/%m/%d'))) + + def prepareg_aggregat_rqlst(self, req): + return self.prepare_rqlst(req, + u'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup ' + 'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X', + expected_baserql=u'Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)', + expected_preparedrql=u'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') + + + def test_aggregat_query_cleanup_select(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) + select = rqlst.children[0] + facet.cleanup_select(select, filtered_variable=filtered_variable) + self.assertEqual(select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X 
creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') + + def test_aggregat_query_rql_path(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) + class RPF(facet.RQLPathFacet): + path = [('X created_by U'), ('U owned_by O'), ('O login OL')] + filter_variable = 'OL' + f = RPF(req, rset=rset, select=rqlst.children[0], + filtered_variable=filtered_variable) + self.assertEqual(f.vocabulary(), [(u'admin', u'admin')]) + self.assertEqual(f.possible_values(), ['admin']) + req.form[f.__regid__] = u'admin' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup, ' + 'X created_by G, G owned_by H, H login "admin" ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') + + def test_aggregat_query_attribute(self): + with self.admin_access.web_request() as req: + rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) + f = facet.AttributeFacet(req, rset=rset, + select=rqlst.children[0], + filtered_variable=filtered_variable) + f.rtype = 'login' + self.assertEqual(f.vocabulary(), + [(u'admin', u'admin'), (u'anon', u'anon')]) + self.assertEqual(f.possible_values(), + ['admin', 'anon']) + req.form[f.__regid__] = u'admin' + f.add_rql_restrictions() + self.assertEqual(f.select.as_string(), + 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup, X login "admin" ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_form.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_form.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,286 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+ +import time + +from xml.etree.ElementTree import fromstring +from lxml import html + +from six import text_type + +from logilab.common.testlib import unittest_main + +from cubicweb import Binary, ValidationError +from cubicweb.mttransforms import HAS_TAL +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.formfields import (IntField, StringField, RichTextField, + PasswordField, DateTimeField, + FileField, EditableFileField) +from cubicweb.web.formwidgets import PasswordInput, Input, DateTimePicker +from cubicweb.web.views.forms import EntityFieldsForm, FieldsForm +from cubicweb.web.views.workflow import ChangeStateForm +from cubicweb.web.views.formrenderers import FormRenderer + + +class FieldsFormTC(CubicWebTC): + + def test_form_field_format(self): + with self.admin_access.web_request() as req: + form = FieldsForm(req, None) + self.assertEqual(StringField().format(form), 'text/plain') + req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"') + req.cnx.commit() + self.assertEqual(StringField().format(form), 'text/rest') + + + def test_process_posted(self): + class AForm(FieldsForm): + anint = IntField() + astring = StringField() + with self.admin_access.web_request(anint='1', astring='2', _cw_fields='anint,astring') as req: + form = AForm(req) + self.assertEqual(form.process_posted(), {'anint': 1, 'astring': '2'}) + with self.admin_access.web_request(anint='1a', astring='2b', _cw_fields='anint,astring') as req: + form = AForm(req) + self.assertRaises(ValidationError, form.process_posted) + + +class EntityFieldsFormTC(CubicWebTC): + + def test_form_field_choices(self): + with self.admin_access.web_request() as req: + b = req.create_entity('BlogEntry', title=u'di mascii code', content=u'a best-seller') + t = req.create_entity('Tag', name=u'x') + form1 = self.vreg['forms'].select('edition', req, entity=t) + choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)] + self.assertIn(text_type(b.eid), choices) + form2 = self.vreg['forms'].select('edition', req, entity=b) + choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)] + self.assertIn(text_type(t.eid), choices) + + b.cw_clear_all_caches() + t.cw_clear_all_caches() + req.cnx.execute('SET X tags Y WHERE X is Tag, Y is BlogEntry') + + choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)] + self.assertIn(text_type(b.eid), choices) + choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)] + self.assertIn(text_type(t.eid), choices) + + def test_form_field_choices_new_entity(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + form = self.vreg['forms'].select('edition', req, entity=e) + unrelated = [rview for rview, reid in form.field_by_name('in_group', 'subject').choices(form)] + # should be default groups but owners, i.e. 
managers, users, guests + self.assertEqual(unrelated, [u'guests', u'managers', u'users']) + + def test_consider_req_form_params(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + e.eid = 'A' + with self.admin_access.web_request(login=u'toto') as toto_req: + form = EntityFieldsForm(toto_req, None, entity=e) + field = StringField(name='login', role='subject', eidparam=True) + form.append_field(field) + form.build_context({}) + self.assertEqual(field.widget.values(form, field), (u'toto',)) + + def test_linkto_field_duplication_inout(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + e.eid = 'A' + e._cw = req + geid = req.cnx.execute('CWGroup X WHERE X name "users"')[0][0] + req.form['__linkto'] = 'in_group:%s:subject' % geid + form = self.vreg['forms'].select('edition', req, entity=e) + form.content_type = 'text/html' + data = [] + form.render(w=data.append) + pageinfo = self._check_html(u'\n'.join(data), form, template=None) + inputs = pageinfo.find_tag('select', False) + ok = False + for selectnode in pageinfo.matching_nodes('select', name='from_in_group-subject:A'): + for optionnode in selectnode: + self.assertEqual(optionnode.get('value'), str(geid)) + self.assertEqual(ok, False) + ok = True + inputs = pageinfo.find_tag('input', False) + self.assertFalse(list(pageinfo.matching_nodes('input', name='__linkto'))) + + def test_reledit_composite_field(self): + with self.admin_access.web_request() as req: + rset = req.execute('INSERT BlogEntry X: X title "cubicweb.org", X content "hop"') + form = self.vreg['views'].select('reledit', req, + rset=rset, row=0, rtype='content') + data = form.render(row=0, rtype='content', formid='base', action='edit_rtype') + self.assertIn('content_format', data) + + + def test_form_generation_time(self): + with self.admin_access.web_request() as req: + e = req.create_entity('BlogEntry', title=u'cubicweb.org', content=u"hop") + expected_field_name = '__form_generation_time:%d' % e.eid + + ts_before = time.time() + form = self.vreg['forms'].select('edition', req, entity=e) + ts_after = time.time() + + data = [] + form.render(action='edit', w=data.append) + html_form = html.fromstring(''.join(data)).forms[0] + fields = dict(html_form.form_values()) + self.assertIn(expected_field_name, fields) + ts = float(fields[expected_field_name]) + self.assertTrue(ts_before < ts < ts_after) + + + # form tests ############################################################## + + def test_form_inheritance(self): + with self.admin_access.web_request() as req: + class CustomChangeStateForm(ChangeStateForm): + hello = IntField(name='youlou') + creation_date = DateTimeField(widget=DateTimePicker) + form = CustomChangeStateForm(req, redirect_path='perdu.com', + entity=req.user) + data = [] + form.render(w=data.append, + formvalues=dict(state=123, trcomment=u'', + trcomment_format=u'text/plain')) + + def test_change_state_form(self): + with self.admin_access.web_request() as req: + form = ChangeStateForm(req, redirect_path='perdu.com', + entity=req.user) + data = [] + form.render(w=data.append, + formvalues=dict(state=123, trcomment=u'', + trcomment_format=u'text/plain')) + + # fields tests ############################################################ + + def _render_entity_field(self, req, name, form): + form.build_context({}) + renderer = FormRenderer(req) + return form.field_by_name(name, 'subject').render(form, renderer) + + def _test_richtextfield(self, req, expected): + 
class RTFForm(EntityFieldsForm): + description = RichTextField(eidparam=True, role='subject') + state = self.vreg['etypes'].etype_class('State')(req) + state.eid = 'S' + form = RTFForm(req, redirect_path='perdu.com', entity=state) + # make it think it can use fck editor anyway + form.field_by_name('description', 'subject').format = lambda form, field=None: 'text/html' + self.assertMultiLineEqual(self._render_entity_field(req, 'description', form), + expected % {'eid': state.eid}) + + + def test_richtextfield_1(self): + with self.admin_access.web_request() as req: + req.use_fckeditor = lambda: False + self._test_richtextfield(req, '''''') + + + def test_richtextfield_2(self): + with self.admin_access.web_request() as req: + req.use_fckeditor = lambda: True + self._test_richtextfield(req, '') + + + def test_filefield(self): + class FFForm(EntityFieldsForm): + data = FileField( + format_field=StringField(name='data_format', max_length=50, + eidparam=True, role='subject'), + encoding_field=StringField(name='data_encoding', max_length=20, + eidparam=True, role='subject'), + eidparam=True, role='subject') + with self.admin_access.web_request() as req: + file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', + data=Binary(b'new widgets system')) + form = FFForm(req, redirect_path='perdu.com', entity=file) + self.assertMultiLineEqual(self._render_entity_field(req, 'data', form), + ''' +show advanced fields + +
          + +detach attached file''' % {'eid': file.eid}) + + + def test_editablefilefield(self): + class EFFForm(EntityFieldsForm): + data = EditableFileField( + format_field=StringField('data_format', max_length=50, + eidparam=True, role='subject'), + encoding_field=StringField('data_encoding', max_length=20, + eidparam=True, role='subject'), + eidparam=True, role='subject') + with self.admin_access.web_request() as req: + file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', + data=Binary(b'new widgets system')) + form = EFFForm(req, redirect_path='perdu.com', entity=file) + self.assertMultiLineEqual(self._render_entity_field(req, 'data', form), + ''' +show advanced fields + +
          + +detach attached file +

          You can either submit a new file using the browse button above, or choose to remove already uploaded file by checking the "detach attached file" check-box, or edit file content online with the widget below.

          +''' % {'eid': file.eid}) + + + def test_passwordfield(self): + class PFForm(EntityFieldsForm): + upassword = PasswordField(eidparam=True, role='subject') + with self.admin_access.web_request() as req: + form = PFForm(req, redirect_path='perdu.com', entity=req.user) + self.assertMultiLineEqual(self._render_entity_field(req, 'upassword', form), + ''' +
          + +  +confirm password''' % {'eid': req.user.eid}) + + + # def test_datefield(self): + # class DFForm(EntityFieldsForm): + # creation_date = DateTimeField(widget=Input) + # form = DFForm(self.req, entity=self.entity) + # init, cur = (fromstring(self._render_entity_field(attr, form)).get('value') + # for attr in ('edits-creation_date', 'creation_date')) + # self.assertEqual(init, cur) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_formfields.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_formfields.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,199 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittests for cw.web.formfields""" + +from logilab.common.testlib import TestCase, unittest_main, mock_object as mock + +from yams.constraints import StaticVocabularyConstraint, SizeConstraint + +import cubicweb +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.formwidgets import PasswordInput, TextArea, Select, Radio +from cubicweb.web.formfields import * +from cubicweb.web.views.forms import EntityFieldsForm, FieldsForm + +from cubes.file.entities import File + +def setUpModule(*args): + global schema + config = TestServerConfiguration('data', apphome=GuessFieldTC.datadir) + config.bootstrap_cubes() + schema = config.load_schema() + +class GuessFieldTC(CubicWebTC): + + def test_state_fields(self): + with self.admin_access.web_request() as req: + title_field = guess_field(schema['State'], schema['name'], req=req) + self.assertIsInstance(title_field, StringField) + self.assertEqual(title_field.required, True) + +# synopsis_field = guess_field(schema['State'], schema['synopsis']) +# self.assertIsInstance(synopsis_field, StringField) +# self.assertIsInstance(synopsis_field.widget, TextArea) +# self.assertEqual(synopsis_field.required, False) +# self.assertEqual(synopsis_field.help, 'an abstract for this state') + + with self.admin_access.web_request() as req: + description_field = guess_field(schema['State'], schema['description'], req=req) + self.assertIsInstance(description_field, RichTextField) + self.assertEqual(description_field.required, False) + self.assertEqual(description_field.format_field, None) + + # description_format_field = guess_field(schema['State'], schema['description_format']) + # self.assertEqual(description_format_field, None) + + with self.admin_access.web_request() as req: + description_format_field = guess_field(schema['State'], schema['description_format'], + req=req) + self.assertEqual(description_format_field.internationalizable, True) + self.assertEqual(description_format_field.sort, True) + +# wikiid_field = guess_field(schema['State'], 
schema['wikiid']) +# self.assertIsInstance(wikiid_field, StringField) +# self.assertEqual(wikiid_field.required, False) + + + def test_cwuser_fields(self): + with self.admin_access.web_request() as req: + upassword_field = guess_field(schema['CWUser'], schema['upassword'], req=req) + self.assertIsInstance(upassword_field, StringField) + self.assertIsInstance(upassword_field.widget, PasswordInput) + self.assertEqual(upassword_field.required, True) + + with self.admin_access.web_request() as req: + last_login_time_field = guess_field(schema['CWUser'], schema['last_login_time'], req=req) + self.assertIsInstance(last_login_time_field, DateTimeField) + self.assertEqual(last_login_time_field.required, False) + + with self.admin_access.web_request() as req: + in_group_field = guess_field(schema['CWUser'], schema['in_group'], req=req) + self.assertIsInstance(in_group_field, RelationField) + self.assertEqual(in_group_field.required, True) + self.assertEqual(in_group_field.role, 'subject') + self.assertEqual(in_group_field.help, 'groups grant permissions to the user') + + with self.admin_access.web_request() as req: + owned_by_field = guess_field(schema['CWUser'], schema['owned_by'], 'object', req=req) + self.assertIsInstance(owned_by_field, RelationField) + self.assertEqual(owned_by_field.required, False) + self.assertEqual(owned_by_field.role, 'object') + + + def test_file_fields(self): + # data_format_field = guess_field(schema['File'], schema['data_format']) + # self.assertEqual(data_format_field, None) + # data_encoding_field = guess_field(schema['File'], schema['data_encoding']) + # self.assertEqual(data_encoding_field, None) + # data_name_field = guess_field(schema['File'], schema['data_name']) + # self.assertEqual(data_name_field, None) + + with self.admin_access.web_request() as req: + data_field = guess_field(schema['File'], schema['data'], req=req) + self.assertIsInstance(data_field, FileField) + self.assertEqual(data_field.required, True) + self.assertIsInstance(data_field.format_field, StringField) + self.assertIsInstance(data_field.encoding_field, StringField) + self.assertIsInstance(data_field.name_field, StringField) + + def test_constraints_priority(self): + with self.admin_access.web_request() as req: + salesterm_field = guess_field(schema['Salesterm'], schema['reason'], req=req) + constraints = schema['reason'].rdef('Salesterm', 'String').constraints + self.assertEqual([c.__class__ for c in constraints], + [SizeConstraint, StaticVocabularyConstraint]) + self.assertIsInstance(salesterm_field, StringField) + self.assertIsInstance(salesterm_field.widget, Select) + + + def test_bool_field_base(self): + with self.admin_access.web_request() as req: + field = guess_field(schema['CWAttribute'], schema['indexed'], req=req) + self.assertIsInstance(field, BooleanField) + self.assertEqual(field.required, False) + self.assertIsInstance(field.widget, Radio) + self.assertEqual(field.vocabulary(mock(_cw=mock(_=cubicweb._))), + [(u'yes', '1'), (u'no', '')]) + + def test_bool_field_explicit_choices(self): + with self.admin_access.web_request() as req: + field = guess_field(schema['CWAttribute'], schema['indexed'], + choices=[(u'maybe', '1'), (u'no', '')], req=req) + self.assertIsInstance(field.widget, Radio) + self.assertEqual(field.vocabulary(mock(req=mock(_=cubicweb._))), + [(u'maybe', '1'), (u'no', '')]) + + +class MoreFieldsTC(CubicWebTC): + def test_rtf_format_field(self): + with self.admin_access.web_request() as req: + req.use_fckeditor = lambda: False + e = 
self.vreg['etypes'].etype_class('State')(req) + form = EntityFieldsForm(req, entity=e) + description_field = guess_field(schema['State'], schema['description']) + description_format_field = description_field.get_format_field(form) + self.assertEqual(description_format_field.internationalizable, True) + self.assertEqual(description_format_field.sort, True) + # unlike below, initial is bound to form.form_field_format + self.assertEqual(description_format_field.value(form), 'text/plain') + req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"') + req.cnx.commit() + self.assertEqual(description_format_field.value(form), 'text/rest') + + + def test_property_key_field(self): + from cubicweb.web.views.cwproperties import PropertyKeyField + with self.admin_access.web_request() as req: + field = PropertyKeyField(name='test') + e = self.vreg['etypes'].etype_class('CWProperty')(req) + renderer = self.vreg['formrenderers'].select('base', req) + form = EntityFieldsForm(req, entity=e) + form.formvalues = {} + field.render(form, renderer) + + +class CompoundFieldTC(CubicWebTC): + + def test_multipart(self): + """Ensures that compound forms have needs_multipart set if their + children require it""" + class AForm(FieldsForm): + comp = CompoundField([IntField(), StringField()]) + with self.admin_access.web_request() as req: + aform = AForm(req, None) + self.assertFalse(aform.needs_multipart) + class MForm(FieldsForm): + comp = CompoundField([IntField(), FileField()]) + with self.admin_access.web_request() as req: + mform = MForm(req, None) + self.assertTrue(mform.needs_multipart) + + +class UtilsTC(TestCase): + def test_vocab_sort(self): + self.assertEqual(vocab_sort([('Z', 1), ('A', 2), + ('Group 1', None), ('Y', 3), ('B', 4), + ('Group 2', None), ('X', 5), ('C', 6)]), + [('A', 2), ('Z', 1), + ('Group 1', None), ('B', 4), ('Y', 3), + ('Group 2', None), ('C', 6), ('X', 5)] + ) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_formwidgets.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_formwidgets.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,71 @@ +# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unittests for cw.web.formwidgets""" + +from logilab.common.testlib import unittest_main, mock_object as mock + +from cubicweb.devtools import fake +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web import formwidgets, formfields +from cubicweb.web.views.forms import FieldsForm + + +class WidgetsTC(CubicWebTC): + + def test_editableurl_widget(self): + field = formfields.guess_field(self.schema['Bookmark'], self.schema['path']) + widget = formwidgets.EditableURLWidget() + req = fake.FakeRequest(form={'path-subjectfqs:A': 'param=value&vid=view'}) + form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A')) + self.assertEqual(widget.process_field_data(form, field), + '?param=value%26vid%3Dview') + + def test_bitselect_widget(self): + field = formfields.guess_field(self.schema['CWAttribute'], self.schema['ordernum']) + field.choices = [('un', '1',), ('deux', '2',)] + widget = formwidgets.BitSelect(settabindex=False) + req = fake.FakeRequest(form={'ordernum-subject:A': ['1', '2']}) + form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A'), + form_previous_values=()) + self.assertMultiLineEqual(widget._render(form, field, None), + '''\ +''') + self.assertEqual(widget.process_field_data(form, field), + 3) + + def test_xml_escape_checkbox(self): + class TestForm(FieldsForm): + bool = formfields.BooleanField(ignore_req_params=True, + choices=[('python >> others', '1')], + widget=formwidgets.CheckBox()) + with self.admin_access.web_request() as req: + form = TestForm(req, None) + form.build_context() + field = form.field_by_name('bool') + widget = field.widget + self.assertMultiLineEqual(widget._render(form, field, None), + '') + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_http.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_http.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,418 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +import contextlib + +from logilab.common.testlib import TestCase, unittest_main, tag, Tags + +from cubicweb.devtools.fake import FakeRequest +from cubicweb.devtools.testlib import CubicWebTC + + +def _test_cache(hin, hout, method='GET'): + """forge and process an HTTP request using given headers in/out and method, + then return it once its .is_client_cache_valid() method has been called. + + req.status_out is None if the page should have been calculated. 
+ """ + # forge request + req = FakeRequest(method=method) + for key, value in hin: + req._headers_in.addRawHeader(key, str(value)) + for key, value in hout: + req.headers_out.addRawHeader(key, str(value)) + # process + req.status_out = None + req.is_client_cache_valid() + return req + +class HTTPCache(TestCase): + """Check that the http cache logiac work as expected + (as far as we understood the RFC) + + """ + tags = TestCase.tags | Tags('http', 'cache') + + + def assertCache(self, expected, status, situation=''): + """simple assert for nicer message""" + if expected != status: + if expected is None: + expected = "MODIFIED" + if status is None: + status = "MODIFIED" + msg = 'expected %r got %r' % (expected, status) + if situation: + msg = "%s - when: %s" % (msg, situation) + self.fail(msg) + + def test_IN_none_OUT_none(self): + #: test that no caching is requested when not data is available + #: on any side + req =_test_cache((), ()) + self.assertIsNone(req.status_out) + + def test_IN_Some_OUT_none(self): + #: test that no caching is requested when no data is available + #: server (origin) side + hin = [('if-modified-since','Sat, 14 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, ()) + self.assertIsNone(req.status_out) + hin = [('if-none-match','babar/huitre'), + ] + req = _test_cache(hin, ()) + self.assertIsNone(req.status_out) + hin = [('if-modified-since','Sat, 14 Apr 2012 14:39:32 GM'), + ('if-none-match','babar/huitre'), + ] + req = _test_cache(hin, ()) + self.assertIsNone(req.status_out) + + def test_IN_none_OUT_Some(self): + #: test that no caching is requested when no data is provided + #: by the client + hout = [('last-modified','Sat, 14 Apr 2012 14:39:32 GM'), + ] + req = _test_cache((), hout) + self.assertIsNone(req.status_out) + hout = [('etag','babar/huitre'), + ] + req = _test_cache((), hout) + self.assertIsNone(req.status_out) + hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), + ('etag','babar/huitre'), + ] + req = _test_cache((), hout) + self.assertIsNone(req.status_out) + + @tag('last_modified') + def test_last_modified_newer(self): + #: test the proper behavior of modification date only + # newer + hin = [('if-modified-since', 'Sat, 13 Apr 2012 14:39:32 GM'), + ] + hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(None, req.status_out, 'origin is newer than client') + + @tag('last_modified') + def test_last_modified_older(self): + # older + hin = [('if-modified-since', 'Sat, 15 Apr 2012 14:39:32 GM'), + ] + hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(304, req.status_out, 'origin is older than client') + + @tag('last_modified') + def test_last_modified_same(self): + # same + hin = [('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(304, req.status_out, 'origin is equal to client') + + @tag('etag') + def test_etag_mismatch(self): + #: test the proper behavior of etag only + # etag mismatch + hin = [('if-none-match', 'babar'), + ] + hout = [('etag', 'celestine'), + ] + req = _test_cache(hin, hout) + self.assertCache(None, req.status_out, 'etag mismatch') + + @tag('etag') + def test_etag_match(self): + # etag match + hin = [('if-none-match', 'babar'), + ] + hout = [('etag', 'babar'), + ] + req = _test_cache(hin, hout) + self.assertCache(304, req.status_out, 'etag match') + # etag match in multiple + hin = 
[('if-none-match', 'loutre'), + ('if-none-match', 'babar'), + ] + hout = [('etag', 'babar'), + ] + req = _test_cache(hin, hout) + self.assertCache(304, req.status_out, 'etag match in multiple') + # client use "*" as etag + hin = [('if-none-match', '*'), + ] + hout = [('etag', 'babar'), + ] + req = _test_cache(hin, hout) + self.assertCache(304, req.status_out, 'client use "*" as etag') + + @tag('etag', 'last_modified') + def test_both(self): + #: test the proper behavior of etag only + # both wrong + hin = [('if-none-match', 'babar'), + ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + hout = [('etag', 'loutre'), + ('last-modified', 'Sat, 15 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(None, req.status_out, 'both wrong') + + @tag('etag', 'last_modified') + def test_both_etag_mismatch(self): + # both etag mismatch + hin = [('if-none-match', 'babar'), + ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + hout = [('etag', 'loutre'), + ('last-modified', 'Sat, 13 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(None, req.status_out, 'both but etag mismatch') + + @tag('etag', 'last_modified') + def test_both_but_modified(self): + # both but modified + hin = [('if-none-match', 'babar'), + ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + hout = [('etag', 'babar'), + ('last-modified', 'Sat, 15 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(None, req.status_out, 'both but modified') + + @tag('etag', 'last_modified') + def test_both_ok(self): + # both ok + hin = [('if-none-match', 'babar'), + ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), + ] + hout = [('etag', 'babar'), + ('last-modified', 'Sat, 13 Apr 2012 14:39:32 GM'), + ] + req = _test_cache(hin, hout) + self.assertCache(304, req.status_out, 'both ok') + + @tag('etag', 'HEAD') + def test_head_verb(self): + #: check than FOUND 200 is properly raise without content on HEAD request + #: This logic does not really belong here :-/ + # modified + hin = [('if-none-match', 'babar'), + ] + hout = [('etag', 'rhino/really-not-babar'), + ] + req = _test_cache(hin, hout, method='HEAD') + self.assertCache(None, req.status_out, 'modifier HEAD verb') + # not modified + hin = [('if-none-match', 'babar'), + ] + hout = [('etag', 'babar'), + ] + req = _test_cache(hin, hout, method='HEAD') + self.assertCache(304, req.status_out, 'not modifier HEAD verb') + + @tag('etag', 'POST') + def test_post_verb(self): + # modified + hin = [('if-none-match', 'babar'), + ] + hout = [('etag', 'rhino/really-not-babar'), + ] + req = _test_cache(hin, hout, method='POST') + self.assertCache(None, req.status_out, 'modifier HEAD verb') + # not modified + hin = [('if-none-match', 'babar'), + ] + hout = [('etag', 'babar'), + ] + req = _test_cache(hin, hout, method='POST') + self.assertCache(412, req.status_out, 'not modifier HEAD verb') + + +alloworig = 'access-control-allow-origin' +allowmethods = 'access-control-allow-methods' +allowheaders = 'access-control-allow-headers' +allowcreds = 'access-control-allow-credentials' +exposeheaders = 'access-control-expose-headers' +maxage = 'access-control-max-age' + +requestmethod = 'access-control-request-method' +requestheaders = 'access-control-request-headers' + +class _BaseAccessHeadersTC(CubicWebTC): + + @contextlib.contextmanager + def options(self, **options): + for k, values in options.items(): + self.config.set_option(k, values) + try: + yield + finally: + for k in options: + self.config.set_option(k, '') + 
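(Aside, for readers of the HTTPCache tests above: they reduce to a small decision table — a 304, or 412 for POST, is only returned when the client sent a validator (If-None-Match / If-Modified-Since) that still matches the origin's ETag / Last-Modified pair. Below is a minimal stand-alone sketch of that table, for illustration only: this is not the CubicWeb request code, the name and signature are made up, and dates are passed as comparable datetime objects instead of being parsed from raw headers.)

def cache_status(method, if_none_match=(), if_modified_since=None,
                 etag=None, last_modified=None):
    """Return 304 (or 412 for POST) when the client copy is still valid, else None."""
    validated = False
    if if_none_match and etag:
        if etag not in if_none_match and '*' not in if_none_match:
            return None              # etag mismatch: the page must be recomputed
        validated = True
    if if_modified_since and last_modified:
        if last_modified > if_modified_since:
            return None              # origin copy is newer than the client's
        validated = True
    if not validated:
        return None                  # no comparable validator on both sides
    # client copy is up to date; a POST cannot be served from cache, hence 412
    return 412 if method == 'POST' else 304

# e.g. cache_status('GET', if_none_match=['babar'], etag='babar') == 304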
def check_no_cors(self, req): + self.assertEqual(None, req.get_response_header(alloworig)) + self.assertEqual(None, req.get_response_header(allowmethods)) + self.assertEqual(None, req.get_response_header(allowheaders)) + self.assertEqual(None, req.get_response_header(allowcreds)) + self.assertEqual(None, req.get_response_header(exposeheaders)) + self.assertEqual(None, req.get_response_header(maxage)) + + +class SimpleAccessHeadersTC(_BaseAccessHeadersTC): + + def test_noaccess(self): + with self.admin_access.web_request() as req: + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_noorigin(self): + with self.options(**{alloworig: '*'}): + with self.admin_access.web_request() as req: + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_origin_noaccess(self): + with self.admin_access.web_request() as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_origin_noaccess_bad_host(self): + with self.options(**{alloworig: '*'}): + with self.admin_access.web_request() as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'badhost.net') + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_explicit_origin_noaccess(self): + with self.options(**{alloworig: ['http://www.toto.org', 'http://othersite.fr']}): + with self.admin_access.web_request() as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'testing.fr') + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_origin_access(self): + with self.options(**{alloworig: '*'}): + with self.admin_access.web_request() as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'testing.fr') + data = self.app_handle_request(req) + self.assertEqual('http://www.cubicweb.org', + req.get_response_header(alloworig)) + + def test_explicit_origin_access(self): + with self.options(**{alloworig: ['http://www.cubicweb.org', 'http://othersite.fr']}): + with self.admin_access.web_request() as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'testing.fr') + data = self.app_handle_request(req) + self.assertEqual('http://www.cubicweb.org', + req.get_response_header(alloworig)) + + def test_origin_access_headers(self): + with self.options(**{alloworig: '*', + exposeheaders: ['ExposeHead1', 'ExposeHead2'], + allowheaders: ['AllowHead1', 'AllowHead2'], + allowmethods: ['GET', 'POST', 'OPTIONS']}): + with self.admin_access.web_request() as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'testing.fr') + data = self.app_handle_request(req) + self.assertEqual('http://www.cubicweb.org', + req.get_response_header(alloworig)) + self.assertEqual("true", + req.get_response_header(allowcreds)) + self.assertEqual(['ExposeHead1', 'ExposeHead2'], + req.get_response_header(exposeheaders)) + self.assertEqual(None, req.get_response_header(allowmethods)) + self.assertEqual(None, req.get_response_header(allowheaders)) + + +class PreflightAccessHeadersTC(_BaseAccessHeadersTC): 
+ + def test_noaccess(self): + with self.admin_access.web_request(method='OPTIONS') as req: + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_noorigin(self): + with self.options(**{alloworig: '*'}): + with self.admin_access.web_request(method='OPTIONS') as req: + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_origin_noaccess(self): + with self.admin_access.web_request(method='OPTIONS') as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_origin_noaccess_bad_host(self): + with self.options(**{alloworig: '*'}): + with self.admin_access.web_request(method='OPTIONS') as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'badhost.net') + data = self.app_handle_request(req) + self.check_no_cors(req) + + def test_origin_access(self): + with self.options(**{alloworig: '*', + exposeheaders: ['ExposeHead1', 'ExposeHead2'], + allowheaders: ['AllowHead1', 'AllowHead2'], + allowmethods: ['GET', 'POST', 'OPTIONS']}): + with self.admin_access.web_request(method='OPTIONS') as req: + req.set_request_header('Origin', 'http://www.cubicweb.org') + # in these tests, base_url is http://testing.fr/cubicweb/ + req.set_request_header('Host', 'testing.fr') + req.set_request_header(requestmethod, 'GET') + + data = self.app_handle_request(req) + self.assertEqual(200, req.status_out) + self.assertEqual('http://www.cubicweb.org', + req.get_response_header(alloworig)) + self.assertEqual("true", + req.get_response_header(allowcreds)) + self.assertEqual(set(['GET', 'POST', 'OPTIONS']), + req.get_response_header(allowmethods)) + self.assertEqual(set(['AllowHead1', 'AllowHead2']), + req.get_response_header(allowheaders)) + self.assertEqual(None, + req.get_response_header(exposeheaders)) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_http_headers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_http_headers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,18 @@ +import unittest + +from cubicweb.web import http_headers + + +class TestGenerators(unittest.TestCase): + def test_generate_true_false(self): + for v in (True, 1, 'true', 'True', 'TRUE'): + self.assertEqual('true', http_headers.generateTrueFalse(v)) + for v in (False, 0, 'false', 'False', 'FALSE'): + self.assertEqual('false', http_headers.generateTrueFalse(v)) + + with self.assertRaises(ValueError): + http_headers.generateTrueFalse('any value') + +if __name__ == '__main__': + from unittest import main + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_idownloadable.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_idownloadable.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from functools import partial + +from logilab.common.testlib import unittest_main + +from cubicweb.devtools.testlib import CubicWebTC, real_error_handling +from cubicweb import view +from cubicweb.predicates import is_instance + +class IDownloadableUser(view.EntityAdapter): + __regid__ = 'IDownloadable' + __select__ = is_instance('CWUser') + + def download_content_type(self): + """return MIME type of the downloadable content""" + return 'text/plain' + + def download_encoding(self): + """return encoding of the downloadable content""" + return 'ascii' + + def download_file_name(self): + """return file name of the downloadable content""" + return self.entity.name() + '.txt' + + def download_data(self): + return b'Babar is not dead!' + + +class BrokenIDownloadableGroup(IDownloadableUser): + __regid__ = 'IDownloadable' + __select__ = is_instance('CWGroup') + + def download_file_name(self): + return self.entity.name + '.txt' + + def download_data(self): + raise IOError() + +class IDownloadableTC(CubicWebTC): + + def setUp(self): + super(IDownloadableTC, self).setUp() + self.vreg.register(IDownloadableUser) + self.addCleanup(partial(self.vreg.unregister, IDownloadableUser)) + + def test_header_simple_case(self): + with self.admin_access.web_request() as req: + req.form['vid'] = 'download' + req.form['eid'] = str(req.user.eid) + data = self.ctrl_publish(req, 'view') + get = req.headers_out.getRawHeaders + self.assertEqual(['attachment;filename="admin.txt"'], + get('content-disposition')) + self.assertEqual(['text/plain;charset=ascii'], + get('content-type')) + self.assertEqual(b'Babar is not dead!', data) + + def test_header_with_space(self): + with self.admin_access.web_request() as req: + self.create_user(req, login=u'c c l a', password='babar') + req.cnx.commit() + with self.new_access(u'c c l a').web_request() as req: + req.form['vid'] = 'download' + req.form['eid'] = str(req.user.eid) + data = self.ctrl_publish(req,'view') + get = req.headers_out.getRawHeaders + self.assertEqual(['attachment;filename="c c l a.txt"'], + get('content-disposition')) + self.assertEqual(['text/plain;charset=ascii'], + get('content-type')) + self.assertEqual(b'Babar is not dead!', data) + + def test_header_with_space_and_comma(self): + with self.admin_access.web_request() as req: + self.create_user(req, login=u'c " l\\ a', password='babar') + req.cnx.commit() + with self.new_access(u'c " l\\ a').web_request() as req: + req.form['vid'] = 'download' + req.form['eid'] = str(req.user.eid) + data = self.ctrl_publish(req,'view') + get = req.headers_out.getRawHeaders + self.assertEqual([r'attachment;filename="c \" l\\ a.txt"'], + get('content-disposition')) + self.assertEqual(['text/plain;charset=ascii'], + get('content-type')) + self.assertEqual(b'Babar is not dead!', data) + + def test_header_unicode_filename(self): + with self.admin_access.web_request() as req: + self.create_user(req, login=u'cécilia', password='babar') + req.cnx.commit() + with self.new_access(u'cécilia').web_request() as req: + req.form['vid'] = 'download' + req.form['eid'] = str(req.user.eid) + self.ctrl_publish(req,'view') + get = req.headers_out.getRawHeaders + 
self.assertEqual(['''attachment;filename="ccilia.txt";filename*=utf-8''c%C3%A9cilia.txt'''], + get('content-disposition')) + + def test_header_unicode_long_filename(self): + name = u'Bèrte_hô_grand_nôm_ça_va_totallement_déborder_de_la_limite_là' + with self.admin_access.web_request() as req: + self.create_user(req, login=name, password='babar') + req.cnx.commit() + with self.new_access(name).web_request() as req: + req.form['vid'] = 'download' + req.form['eid'] = str(req.user.eid) + self.ctrl_publish(req,'view') + get = req.headers_out.getRawHeaders + self.assertEqual(["""attachment;filename="Brte_h_grand_nm_a_va_totallement_dborder_de_la_limite_l.txt";filename*=utf-8''B%C3%A8rte_h%C3%B4_grand_n%C3%B4m_%C3%A7a_va_totallement_d%C3%A9border_de_la_limite_l%C3%A0.txt"""], + get('content-disposition')) + + + def test_download_data_error(self): + self.vreg.register(BrokenIDownloadableGroup) + self.addCleanup(partial(self.vreg.unregister, BrokenIDownloadableGroup)) + with self.admin_access.web_request() as req: + req.form['vid'] = 'download' + req.form['eid'] = str(req.execute('CWGroup X WHERE X name "managers"')[0][0]) + with real_error_handling(self.app): + data = self.app_handle_request(req) + get = req.headers_out.getRawHeaders + self.assertEqual(['text/html;charset=UTF-8'], + get('content-type')) + self.assertEqual(None, + get('content-disposition')) + self.assertEqual(req.status_out, 500) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_magicsearch.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_magicsearch.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
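(Aside on the unicode filename assertions in test_header_unicode_filename above: the expected header combines an ASCII-only filename fallback with an RFC 5987 filename* parameter carrying the UTF-8 percent-encoded name. A rough stand-alone sketch of that encoding, for illustration only — not the actual CubicWeb helper, it ignores the quote/backslash escaping checked in test_header_with_space_and_comma, and it assumes Python 3.)

from urllib.parse import quote

def content_disposition(filename):
    # ASCII fallback: simply drop the characters that cannot be encoded
    ascii_name = filename.encode('ascii', 'ignore').decode('ascii')
    header = 'attachment;filename="%s"' % ascii_name
    if ascii_name != filename:
        # add the RFC 5987 form for user agents that understand it
        header += ";filename*=utf-8''%s" % quote(filename.encode('utf-8'))
    return header

# content_disposition(u'cécilia.txt')
#   -> attachment;filename="ccilia.txt";filename*=utf-8''c%C3%A9cilia.txt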
+"""Unit tests for cw.web.views.magicsearch""" + +import sys +from contextlib import contextmanager + +from six.moves import range + +from logilab.common.testlib import TestCase, unittest_main + +from rql import BadRQLQuery, RQLSyntaxError + +from cubicweb.devtools.testlib import CubicWebTC + + +translations = { + u'CWUser' : u"Utilisateur", + u'EmailAddress' : u"Adresse", + u'name' : u"nom", + u'alias' : u"nom", + u'surname' : u"nom", + u'firstname' : u"prénom", + u'state' : u"état", + u'address' : u"adresse", + u'use_email' : u"adel", + } + +def _translate(msgid): + return translations.get(msgid, msgid) + +def _ctxtranslate(ctx, msgid): + return _translate(msgid) + +from cubicweb.web.views.magicsearch import translate_rql_tree, QSPreProcessor, QueryTranslator + +class QueryTranslatorTC(CubicWebTC): + """test suite for QueryTranslatorTC""" + + @contextmanager + def proc(self): + with self.admin_access.web_request() as req: + self.vreg.config.translations = {'en': (_translate, _ctxtranslate)} + proc = self.vreg['components'].select('magicsearch', req) + proc = [p for p in proc.processors if isinstance(p, QueryTranslator)][0] + yield proc + + def test_basic_translations(self): + """tests basic translations (no ambiguities)""" + with self.proc() as proc: + rql = u"Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'" + rql, = proc.preprocess_query(rql) + self.assertEqual(rql, 'Any C WHERE C is EmailAddress, P use_email C, C address "Logilab"') + + def test_ambiguous_translations(self): + """tests possibly ambiguous translations""" + with self.proc() as proc: + rql = u"Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'" + rql, = proc.preprocess_query(rql) + self.assertEqual(rql, 'Any P WHERE P use_email C, C is EmailAddress, C alias "Logilab"') + rql = u"Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'" + rql, = proc.preprocess_query(rql) + self.assertEqual(rql, 'Any P WHERE P is CWUser, P use_email C, P surname "Smith"') + + +class QSPreProcessorTC(CubicWebTC): + """test suite for QSPreProcessor""" + + @contextmanager + def proc(self): + self.vreg.config.translations = {'en': (_translate, _ctxtranslate)} + with self.admin_access.web_request() as req: + proc = self.vreg['components'].select('magicsearch', req) + proc = [p for p in proc.processors if isinstance(p, QSPreProcessor)][0] + proc._cw = req + yield proc + + def test_entity_translation(self): + """tests QSPreProcessor._get_entity_name()""" + with self.proc() as proc: + translate = proc._get_entity_type + self.assertEqual(translate(u'EmailAddress'), "EmailAddress") + self.assertEqual(translate(u'emailaddress'), "EmailAddress") + self.assertEqual(translate(u'Adresse'), "EmailAddress") + self.assertEqual(translate(u'adresse'), "EmailAddress") + self.assertRaises(BadRQLQuery, translate, 'whatever') + + def test_attribute_translation(self): + """tests QSPreProcessor._get_attribute_name""" + with self.proc() as proc: + translate = proc._get_attribute_name + eschema = self.schema.eschema('CWUser') + self.assertEqual(translate(u'prénom', eschema), "firstname") + self.assertEqual(translate(u'nom', eschema), 'surname') + eschema = self.schema.eschema('EmailAddress') + self.assertEqual(translate(u'adresse', eschema), "address") + self.assertEqual(translate(u'nom', eschema), 'alias') + # should fail if the name is not an attribute for the given entity schema + self.assertRaises(BadRQLQuery, translate, 'whatever', eschema) + self.assertRaises(BadRQLQuery, translate, 'prénom', eschema) + + def test_one_word_query(self): + """tests the 
'one word shortcut queries'""" + with self.proc() as proc: + transform = proc._one_word_query + self.assertEqual(transform('123'), + ('Any X WHERE X eid %(x)s', {'x': 123}, 'x')) + self.assertEqual(transform('CWUser'), + ('CWUser C',)) + self.assertEqual(transform('Utilisateur'), + ('CWUser C',)) + self.assertEqual(transform('Adresse'), + ('EmailAddress E',)) + self.assertEqual(transform('adresse'), + ('EmailAddress E',)) + self.assertRaises(BadRQLQuery, transform, 'Workcases') + + def test_two_words_query(self): + """tests the 'two words shortcut queries'""" + with self.proc() as proc: + transform = proc._two_words_query + self.assertEqual(transform('CWUser', 'E'), + ("CWUser E",)) + self.assertEqual(transform('CWUser', 'Smith'), + ('CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': 'Smith'})) + self.assertEqual(transform('utilisateur', 'Smith'), + ('CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': 'Smith'})) + self.assertEqual(transform(u'adresse', 'Logilab'), + ('EmailAddress E ORDERBY FTIRANK(E) DESC WHERE E has_text %(text)s', {'text': 'Logilab'})) + self.assertEqual(transform(u'adresse', 'Logi%'), + ('EmailAddress E WHERE E alias LIKE %(text)s', {'text': 'Logi%'})) + self.assertRaises(BadRQLQuery, transform, "pers", "taratata") + + def test_three_words_query(self): + """tests the 'three words shortcut queries'""" + with self.proc() as proc: + transform = proc._three_words_query + self.assertEqual(transform('utilisateur', u'prénom', 'cubicweb'), + ('CWUser C WHERE C firstname %(text)s', {'text': 'cubicweb'})) + self.assertEqual(transform('utilisateur', 'nom', 'cubicweb'), + ('CWUser C WHERE C surname %(text)s', {'text': 'cubicweb'})) + self.assertEqual(transform(u'adresse', 'nom', 'cubicweb'), + ('EmailAddress E WHERE E alias %(text)s', {'text': 'cubicweb'})) + self.assertEqual(transform('EmailAddress', 'nom', 'cubicweb'), + ('EmailAddress E WHERE E alias %(text)s', {'text': 'cubicweb'})) + self.assertEqual(transform('utilisateur', u'prénom', 'cubicweb%'), + ('CWUser C WHERE C firstname LIKE %(text)s', {'text': 'cubicweb%'})) + # expanded shortcuts + self.assertEqual(transform('CWUser', 'use_email', 'Logilab'), + ('CWUser C ORDERBY FTIRANK(C1) DESC WHERE C use_email C1, C1 has_text %(text)s', {'text': 'Logilab'})) + self.assertEqual(transform('CWUser', 'use_email', '%Logilab'), + ('CWUser C WHERE C use_email C1, C1 alias LIKE %(text)s', {'text': '%Logilab'})) + self.assertRaises(BadRQLQuery, transform, 'word1', 'word2', 'word3') + + def test_quoted_queries(self): + """tests how quoted queries are handled""" + queries = [ + (u'Adresse "My own EmailAddress"', ('EmailAddress E ORDERBY FTIRANK(E) DESC WHERE E has_text %(text)s', {'text': u'My own EmailAddress'})), + (u'Utilisateur prénom "Jean Paul"', ('CWUser C WHERE C firstname %(text)s', {'text': 'Jean Paul'})), + (u'Utilisateur firstname "Jean Paul"', ('CWUser C WHERE C firstname %(text)s', {'text': 'Jean Paul'})), + (u'CWUser firstname "Jean Paul"', ('CWUser C WHERE C firstname %(text)s', {'text': 'Jean Paul'})), + ] + with self.proc() as proc: + transform = proc._quoted_words_query + for query, expected in queries: + self.assertEqual(transform(query), expected) + self.assertRaises(BadRQLQuery, transform, "unquoted rql") + self.assertRaises(BadRQLQuery, transform, 'pers "Jean Paul"') + self.assertRaises(BadRQLQuery, transform, 'CWUser firstname other "Jean Paul"') + + def test_process_query(self): + """tests how queries are processed""" + queries = [ + (u'Utilisateur', (u"CWUser C",)), + 
(u'Utilisateur P', (u"CWUser P",)), + (u'Utilisateur cubicweb', (u'CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': u'cubicweb'})), + (u'CWUser prénom cubicweb', (u'CWUser C WHERE C firstname %(text)s', {'text': 'cubicweb'},)), + ] + with self.proc() as proc: + for query, expected in queries: + self.assertEqual(proc.preprocess_query(query), expected) + self.assertRaises(BadRQLQuery, + proc.preprocess_query, 'Any X WHERE X is Something') + + + +## Processor Chains tests ############################################ + +class ProcessorChainTC(CubicWebTC): + """test suite for magic_search's processor chains""" + + @contextmanager + def proc(self): + self.vreg.config.translations = {'en': (_translate, _ctxtranslate)} + with self.admin_access.web_request() as req: + proc = self.vreg['components'].select('magicsearch', req) + yield proc + + def test_main_preprocessor_chain(self): + """tests QUERY_PROCESSOR""" + queries = [ + (u'foo', + ("Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s", {'text': u'foo'})), + # XXX this sounds like a language translator test... + # and it fails + (u'Utilisateur Smith', + ('CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': u'Smith'})), + (u'utilisateur nom Smith', + ('CWUser C WHERE C surname %(text)s', {'text': u'Smith'})), + (u'Any P WHERE P is Utilisateur, P nom "Smith"', + ('Any P WHERE P is CWUser, P surname "Smith"', None)), + ] + with self.proc() as proc: + for query, expected in queries: + rset = proc.process_query(query) + self.assertEqual((rset.rql, rset.args), expected) + + def test_accentuated_fulltext(self): + """we must be able to type accentuated characters in the search field""" + with self.proc() as proc: + rset = proc.process_query(u'écrire') + self.assertEqual(rset.rql, "Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s") + self.assertEqual(rset.args, {'text': u'écrire'}) + + def test_explicit_component(self): + with self.proc() as proc: + self.assertRaises(RQLSyntaxError, + proc.process_query, u'rql: CWUser E WHERE E noattr "Smith",') + self.assertRaises(BadRQLQuery, + proc.process_query, u'rql: CWUser E WHERE E noattr "Smith"') + rset = proc.process_query(u'text: utilisateur Smith') + self.assertEqual(rset.rql, 'Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s') + self.assertEqual(rset.args, {'text': u'utilisateur Smith'}) + + +class RQLSuggestionsBuilderTC(CubicWebTC): + def suggestions(self, rql): + with self.admin_access.web_request() as req: + rbs = self.vreg['components'].select('rql.suggestions', req) + return rbs.build_suggestions(rql) + + def test_no_restrictions_rql(self): + self.assertListEqual([], self.suggestions('')) + self.assertListEqual([], self.suggestions('An')) + self.assertListEqual([], self.suggestions('Any X')) + self.assertListEqual([], self.suggestions('Any X, Y')) + + def test_invalid_rql(self): + self.assertListEqual([], self.suggestions('blabla')) + self.assertListEqual([], self.suggestions('Any X WHERE foo, bar')) + + def test_is_rql(self): + self.assertListEqual(['Any X WHERE X is %s' % eschema + for eschema in sorted(self.vreg.schema.entities()) + if not eschema.final], + self.suggestions('Any X WHERE X is')) + + self.assertListEqual(['Any X WHERE X is Personne', 'Any X WHERE X is Project'], + self.suggestions('Any X WHERE X is P')) + + self.assertListEqual(['Any X WHERE X is Personne, Y is Personne', + 'Any X WHERE X is Personne, Y is Project'], + self.suggestions('Any X WHERE X is Personne, Y is P')) + + + def test_relations_rql(self): + 
self.assertListEqual(['Any X WHERE X is Personne, X ass A', + 'Any X WHERE X is Personne, X datenaiss A', + 'Any X WHERE X is Personne, X description A', + 'Any X WHERE X is Personne, X fax A', + 'Any X WHERE X is Personne, X nom A', + 'Any X WHERE X is Personne, X prenom A', + 'Any X WHERE X is Personne, X promo A', + 'Any X WHERE X is Personne, X salary A', + 'Any X WHERE X is Personne, X sexe A', + 'Any X WHERE X is Personne, X tel A', + 'Any X WHERE X is Personne, X test A', + 'Any X WHERE X is Personne, X titre A', + 'Any X WHERE X is Personne, X travaille A', + 'Any X WHERE X is Personne, X web A', + ], + self.suggestions('Any X WHERE X is Personne, X ')) + self.assertListEqual(['Any X WHERE X is Personne, X tel A', + 'Any X WHERE X is Personne, X test A', + 'Any X WHERE X is Personne, X titre A', + 'Any X WHERE X is Personne, X travaille A', + ], + self.suggestions('Any X WHERE X is Personne, X t')) + # try completion on selected + self.assertListEqual(['Any X WHERE X is Personne, Y is Societe, X tel A', + 'Any X WHERE X is Personne, Y is Societe, X test A', + 'Any X WHERE X is Personne, Y is Societe, X titre A', + 'Any X WHERE X is Personne, Y is Societe, X travaille Y', + ], + self.suggestions('Any X WHERE X is Personne, Y is Societe, X t')) + # invalid relation should not break + self.assertListEqual([], + self.suggestions('Any X WHERE X is Personne, X asdasd')) + + def test_attribute_vocabulary_rql(self): + self.assertListEqual(['Any X WHERE X is Personne, X promo "bon"', + 'Any X WHERE X is Personne, X promo "pasbon"', + ], + self.suggestions('Any X WHERE X is Personne, X promo "')) + self.assertListEqual(['Any X WHERE X is Personne, X promo "pasbon"', + ], + self.suggestions('Any X WHERE X is Personne, X promo "p')) + # "bon" should be considered complete, hence no suggestion + self.assertListEqual([], + self.suggestions('Any X WHERE X is Personne, X promo "bon"')) + # no valid vocabulary starts with "po" + self.assertListEqual([], + self.suggestions('Any X WHERE X is Personne, X promo "po')) + + def test_attribute_value_rql(self): + # suggestions should contain any possible value for + # a given attribute (limited to 10) + with self.admin_access.web_request() as req: + for i in range(15): + req.create_entity('Personne', nom=u'n%s' % i, prenom=u'p%s' % i) + req.cnx.commit() + self.assertListEqual(['Any X WHERE X is Personne, X nom "n0"', + 'Any X WHERE X is Personne, X nom "n1"', + 'Any X WHERE X is Personne, X nom "n10"', + 'Any X WHERE X is Personne, X nom "n11"', + 'Any X WHERE X is Personne, X nom "n12"', + 'Any X WHERE X is Personne, X nom "n13"', + 'Any X WHERE X is Personne, X nom "n14"', + 'Any X WHERE X is Personne, X nom "n2"', + 'Any X WHERE X is Personne, X nom "n3"', + 'Any X WHERE X is Personne, X nom "n4"', + 'Any X WHERE X is Personne, X nom "n5"', + 'Any X WHERE X is Personne, X nom "n6"', + 'Any X WHERE X is Personne, X nom "n7"', + 'Any X WHERE X is Personne, X nom "n8"', + 'Any X WHERE X is Personne, X nom "n9"', + ], + self.suggestions('Any X WHERE X is Personne, X nom "')) + self.assertListEqual(['Any X WHERE X is Personne, X nom "n1"', + 'Any X WHERE X is Personne, X nom "n10"', + 'Any X WHERE X is Personne, X nom "n11"', + 'Any X WHERE X is Personne, X nom "n12"', + 'Any X WHERE X is Personne, X nom "n13"', + 'Any X WHERE X is Personne, X nom "n14"', + ], + self.suggestions('Any X WHERE X is Personne, X nom "n1')) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_propertysheet.py --- 
/dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_propertysheet.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,63 @@ +import os +from os.path import join, dirname +from shutil import rmtree +import errno +import tempfile +from unittest import TestCase, main + +from cubicweb.web.propertysheet import PropertySheet, lazystr + + +DATADIR = join(dirname(__file__), 'data') + + +class PropertySheetTC(TestCase): + + def setUp(self): + uicache = join(DATADIR, 'uicache') + try: + os.makedirs(uicache) + except OSError as err: + if err.errno != errno.EEXIST: + raise + self.cachedir = tempfile.mkdtemp(dir=uicache) + + def tearDown(self): + rmtree(self.cachedir) + + def data(self, filename): + return join(DATADIR, filename) + + def test(self): + ps = PropertySheet(self.cachedir, datadir_url='http://cwtest.com') + ps.load(self.data('sheet1.py')) + ps.load(self.data('sheet2.py')) + # defined by sheet1 + self.assertEqual(ps['logo'], 'http://cwtest.com/logo.png') + # defined by sheet1, overriden by sheet2 + self.assertEqual(ps['bgcolor'], '#FFFFFF') + # defined by sheet2 + self.assertEqual(ps['fontcolor'], 'black') + # defined by sheet1, extended by sheet2 + self.assertEqual(ps['stylesheets'], ['http://cwtest.com/cubicweb.css', + 'http://cwtest.com/mycube.css']) + # lazy string defined by sheet1 + self.assertIsInstance(ps['lazy'], lazystr) + self.assertEqual(str(ps['lazy']), '#FFFFFF') + # test compilation + self.assertEqual(ps.compile('a {bgcolor: %(bgcolor)s; size: 1%;}'), + 'a {bgcolor: #FFFFFF; size: 1%;}') + self.assertEqual(ps.process_resource(DATADIR, 'pouet.css'), + self.cachedir) + self.assertFalse(ps.need_reload()) + os.utime(self.data('sheet1.py'), None) + self.assertTrue(ps.need_reload()) + ps.reload() + self.assertFalse(ps.need_reload()) + ps.process_resource(DATADIR, 'pouet.css') # put in cache + os.utime(self.data('pouet.css'), None) + self.assertFalse(ps.need_reload()) + + +if __name__ == '__main__': + main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_reledit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_reledit.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,225 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+""" +mainly regression-preventing tests for reledit views +""" + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.views.uicfg import reledit_ctrl + +class ReleditMixinTC(object): + + def setup_database(self): + with self.admin_access.client_cnx() as cnx: + self.proj = cnx.create_entity('Project', title=u'cubicweb-world-domination').eid + self.tick = cnx.create_entity('Ticket', title=u'write the code').eid + self.toto = cnx.create_entity('Personne', nom=u'Toto').eid + cnx.commit() + +class ClickAndEditFormTC(ReleditMixinTC, CubicWebTC): + + def test_default_config(self): + reledit = {'title': '''
          cubicweb-world-domination
          ''', + 'long_desc': '''
          <not specified>
          ''', + 'manager': '''
          <not specified>
          ''', + 'composite_card11_2ttypes': """<not specified>""", + 'concerns': """<not specified>"""} + + with self.admin_access.web_request() as req: + proj = req.entity_from_eid(self.proj) + + for rschema, ttypes, role in proj.e_schema.relation_definitions(includefinal=True): + if rschema not in reledit: + continue + rtype = rschema.type + self.assertMultiLineEqual(reledit[rtype] % {'eid': self.proj}, + proj.view('reledit', rtype=rtype, role=role), + rtype) + + def test_default_forms(self): + self.skipTest('Need to check if this test should still run post reledit/doreledit merge') + doreledit = {'title': """
          cubicweb-world-domination
          """, + + 'long_desc': """
          <not specified>
          """, + + 'manager': """
          <not specified>
          """, + 'composite_card11_2ttypes': """<not specified>""", + 'concerns': """<not specified>""" + } + for rschema, ttypes, role in self.proj.e_schema.relation_definitions(includefinal=True): + if rschema not in doreledit: + continue + rtype = rschema.type + self.assertMultiLineEqual(doreledit[rtype] % {'eid': self.proj.eid, 'toto': self.toto.eid}, + self.proj.view('doreledit', rtype=rtype, role=role, + formid='edition' if rtype == 'long_desc' else 'base'), + rtype) + +class ClickAndEditFormUICFGTC(ReleditMixinTC, CubicWebTC): + + def setup_database(self): + super(ClickAndEditFormUICFGTC, self).setup_database() + with self.admin_access.client_cnx() as cnx: + cnx.execute('SET T concerns P WHERE T eid %(t)s, P eid %(p)s', {'t': self.tick, 'p': self.proj}) + cnx.execute('SET P manager T WHERE P eid %(p)s, T eid %(t)s', {'p': self.proj, 't': self.toto}) + cnx.commit() + + def test_with_uicfg(self): + old_rctl = reledit_ctrl._tagdefs.copy() + reledit_ctrl.tag_attribute(('Project', 'title'), + {'novalue_label': '', 'reload': True}) + reledit_ctrl.tag_subject_of(('Project', 'long_desc', '*'), + {'reload': True, 'edit_target': 'rtype', + 'novalue_label': u'<long_desc is required>'}) + reledit_ctrl.tag_subject_of(('Project', 'manager', '*'), + {'edit_target': 'related'}) + reledit_ctrl.tag_subject_of(('Project', 'composite_card11_2ttypes', '*'), + {'edit_target': 'related'}) + reledit_ctrl.tag_object_of(('Ticket', 'concerns', 'Project'), + {'edit_target': 'rtype'}) + reledit = { + 'title': """<div id="title-subject-%(eid)s-reledit" onmouseout="jQuery('#title-subject-%(eid)s').addClass('invisible')" onmouseover="jQuery('#title-subject-%(eid)s').removeClass('invisible')" class="releditField"><div id="title-subject-%(eid)s-value" class="editableFieldValue">cubicweb-world-domination</div><div id="title-subject-%(eid)s" class="editableField invisible"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm('base', %(eid)s, 'title', 'subject', 'title-subject-%(eid)s', true, '', 'edit_rtype');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""", + 'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('invisible')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('invisible')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue"><long_desc is required></div><div id="long_desc-subject-%(eid)s" class="editableField invisible"><div id="long_desc-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm('base', %(eid)s, 'long_desc', 'subject', 'long_desc-subject-%(eid)s', true, 'autolimited', 'edit_rtype');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""", + 'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('invisible')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('invisible')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/personne/%(toto)s" title="">Toto</a></div><div id="manager-subject-%(eid)s" class="editableField invisible"><div id="manager-subject-%(eid)s-update" class="editableField" 
onclick="cw.reledit.loadInlineEditionForm('edition', %(eid)s, 'manager', 'subject', 'manager-subject-%(eid)s', false, 'autolimited', 'edit_related');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div><div id="manager-subject-%(eid)s-delete" class="editableField" onclick="cw.reledit.loadInlineEditionForm('deleteconf', %(eid)s, 'manager', 'subject', 'manager-subject-%(eid)s', false, 'autolimited', 'delete');" title="click to delete this value"><img title="click to delete this value" src="http://testing.fr/cubicweb/data/cancel.png" alt="click to delete this value"/></div></div></div>""", + 'composite_card11_2ttypes': """<not specified>""", + 'concerns': """<div id="concerns-object-%(eid)s-reledit" onmouseout="jQuery('#concerns-object-%(eid)s').addClass('invisible')" onmouseover="jQuery('#concerns-object-%(eid)s').removeClass('invisible')" class="releditField"><div id="concerns-object-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/ticket/%(tick)s" title="">write the code</a></div><div id="concerns-object-%(eid)s" class="editableField invisible"><div id="concerns-object-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm('base', %(eid)s, 'concerns', 'object', 'concerns-object-%(eid)s', false, 'autolimited', 'edit_rtype');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""" + } + with self.admin_access.web_request() as req: + proj = req.entity_from_eid(self.proj) + for rschema, ttypes, role in proj.e_schema.relation_definitions(includefinal=True): + if rschema not in reledit: + continue + rtype = rschema.type + self.assertMultiLineEqual(reledit[rtype] % {'eid': self.proj, 'toto': self.toto, 'tick': self.tick}, + proj.view('reledit', rtype=rtype, role=role), + rtype) + reledit_ctrl.clear() + reledit_ctrl._tagdefs.update(old_rctl) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_request.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_request.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,89 @@ +"""misc. 
unittests for utility functions +""" + +from logilab.common.testlib import TestCase, unittest_main + +from functools import partial + +from cubicweb.devtools.fake import FakeConfig + +from cubicweb.web.request import (CubicWebRequestBase, _parse_accept_header, + _mimetype_sort_key, _mimetype_parser, _charset_sort_key) + + + +class AcceptParserTC(TestCase): + + def test_parse_accept(self): + parse_accept_header = partial(_parse_accept_header, + value_parser=_mimetype_parser, + value_sort_key=_mimetype_sort_key) + # compare scores + self.assertEqual(parse_accept_header("audio/*;q=0.2, audio/basic"), + [( ('audio/basic', ('audio', 'basic', {}), 1.0 ) ), + ( ('audio/*', ('audio', '*', {}), 0.2 ) )]) + self.assertEqual(parse_accept_header("text/plain;q=0.5, text/html, text/x-dvi;q=0.8, text/x-c"), + [( ('text/html', ('text', 'html', {}), 1.0 ) ), + ( ('text/x-c', ('text', 'x-c', {}), 1.0 ) ), + ( ('text/x-dvi', ('text', 'x-dvi', {}), 0.8 ) ), + ( ('text/plain', ('text', 'plain', {}), 0.5 ) )]) + # compare mimetype precedence for a same given score + self.assertEqual(parse_accept_header("audio/*, audio/basic"), + [( ('audio/basic', ('audio', 'basic', {}), 1.0 ) ), + ( ('audio/*', ('audio', '*', {}), 1.0 ) )]) + self.assertEqual(parse_accept_header("text/*, text/html, text/html;level=1, */*"), + [( ('text/html', ('text', 'html', {'level': '1'}), 1.0 ) ), + ( ('text/html', ('text', 'html', {}), 1.0 ) ), + ( ('text/*', ('text', '*', {}), 1.0 ) ), + ( ('*/*', ('*', '*', {}), 1.0 ) )]) + # free party + self.assertEqual(parse_accept_header("text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"), + [( ('text/html', ('text', 'html', {'level': '1'}), 1.0 ) ), + ( ('text/html', ('text', 'html', {}), 0.7 ) ), + ( ('*/*', ('*', '*', {}), 0.5 ) ), + ( ('text/html', ('text', 'html', {'level': '2'}), 0.4 ) ), + ( ('text/*', ('text', '*', {}), 0.3 ) ) + ]) + # chrome sample header + self.assertEqual(parse_accept_header("application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"), + [( ('application/xhtml+xml', ('application', 'xhtml+xml', {}), 1.0 ) ), + ( ('application/xml', ('application', 'xml', {}), 1.0 ) ), + ( ('image/png', ('image', 'png', {}), 1.0 ) ), + ( ('text/html', ('text', 'html', {}), 0.9 ) ), + ( ('text/plain', ('text', 'plain', {}), 0.8 ) ), + ( ('*/*', ('*', '*', {}), 0.5 ) ), + ]) + + def test_parse_accept_language(self): + self.assertEqual(_parse_accept_header('fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3'), + [('fr', 'fr', 1.0), ('fr-fr', 'fr-fr', 0.8), + ('en-us', 'en-us', 0.5), ('en', 'en', 0.3)]) + + def test_parse_accept_charset(self): + parse_accept_header = partial(_parse_accept_header, + value_sort_key=_charset_sort_key) + self.assertEqual(parse_accept_header('ISO-8859-1,utf-8;q=0.7,*;q=0.7'), + [('ISO-8859-1', 'ISO-8859-1', 1.0), + ('utf-8', 'utf-8', 0.7), + ('*', '*', 0.7)]) + + def test_base_url(self): + dummy_vreg = type('DummyVreg', (object,), {})() + dummy_vreg.config = FakeConfig() + dummy_vreg.config['base-url'] = 'http://babar.com/' + dummy_vreg.config['https-url'] = 'https://toto.com/' + + req = CubicWebRequestBase(dummy_vreg, https=False) + self.assertEqual('http://babar.com/', req.base_url()) + self.assertEqual('http://babar.com/', req.base_url(False)) + self.assertEqual('https://toto.com/', req.base_url(True)) + + req = CubicWebRequestBase(dummy_vreg, https=True) + self.assertEqual('https://toto.com/', req.base_url()) + self.assertEqual('http://babar.com/', req.base_url(False)) + 
self.assertEqual('https://toto.com/', req.base_url(True)) + + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_uicfg.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_uicfg.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,139 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +import copy +from logilab.common.testlib import tag +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web import uihelper, formwidgets as fwdgs +from cubicweb.web.views import uicfg + +abaa = uicfg.actionbox_appearsin_addmenu + +class UICFGTC(CubicWebTC): + + def test_default_actionbox_appearsin_addmenu_config(self): + self.assertFalse(abaa.etype_get('TrInfo', 'wf_info_for', 'object', 'CWUser')) + + + +class DefinitionOrderTC(CubicWebTC): + """This test check that when multiple definition could match a key, only + the more accurate apply""" + + def setUp(self): + super(DefinitionOrderTC, self).setUp() + for rtag in (uicfg.autoform_section, uicfg.autoform_field_kwargs): + rtag._old_tagdefs = copy.deepcopy(rtag._tagdefs) + new_def = ( + (('*', 'login', '*'), + {'formtype':'main', 'section':'hidden'}), + (('*', 'login', '*'), + {'formtype':'muledit', 'section':'hidden'}), + (('CWUser', 'login', '*'), + {'formtype':'main', 'section':'attributes'}), + (('CWUser', 'login', '*'), + {'formtype':'muledit', 'section':'attributes'}), + (('CWUser', 'login', 'String'), + {'formtype':'main', 'section':'inlined'}), + (('CWUser', 'login', 'String'), + {'formtype':'inlined', 'section':'attributes'}), + ) + for key, kwargs in new_def: + uicfg.autoform_section.tag_subject_of(key, **kwargs) + + def tearDown(self): + super(DefinitionOrderTC, self).tearDown() + for rtag in (uicfg.autoform_section, uicfg.autoform_field_kwargs): + rtag._tagdefs = rtag._old_tagdefs + + @tag('uicfg') + def test_definition_order_hidden(self): + result = uicfg.autoform_section.get('CWUser', 'login', 'String', 'subject') + expected = set(['main_inlined', 'muledit_attributes', 'inlined_attributes']) + self.assertSetEqual(result, expected) + + @tag('uihelper', 'order', 'func') + def test_uihelper_set_fields_order(self): + afk_get = uicfg.autoform_field_kwargs.get + self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {}) + uihelper.set_fields_order('CWUser', ('login', 'firstname', 'surname')) + self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {'order': 1}) + + @tag('uicfg', 'order', 'func') + def test_uicfg_primaryview_set_fields_order(self): + pvdc = uicfg.primaryview_display_ctrl + pvdc.set_fields_order('CWUser', ('login', 'firstname', 'surname')) + self.assertEqual(pvdc.get('CWUser', 'login', 'String', 'subject'), {'order': 0}) + 
self.assertEqual(pvdc.get('CWUser', 'firstname', 'String', 'subject'), {'order': 1}) + self.assertEqual(pvdc.get('CWUser', 'surname', 'String', 'subject'), {'order': 2}) + + @tag('uihelper', 'kwargs', 'func') + def test_uihelper_set_field_kwargs(self): + afk_get = uicfg.autoform_field_kwargs.get + self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {}) + wdg = fwdgs.TextInput({'size': 30}) + uihelper.set_field_kwargs('CWUser', 'firstname', widget=wdg) + self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {'widget': wdg}) + + @tag('uihelper', 'hidden', 'func') + def test_uihelper_hide_fields(self): + # original conf : in_group is edited in 'attributes' section everywhere + section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') + self.assertCountEqual(section_conf, ['main_attributes', 'muledit_attributes']) + # hide field in main form + uihelper.hide_fields('CWUser', ('login', 'in_group')) + section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') + self.assertCountEqual(section_conf, ['main_hidden', 'muledit_attributes']) + # hide field in muledit form + uihelper.hide_fields('CWUser', ('login', 'in_group'), formtype='muledit') + section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') + self.assertCountEqual(section_conf, ['main_hidden', 'muledit_hidden']) + + @tag('uihelper', 'hidden', 'formconfig') + def test_uihelper_formconfig(self): + afk_get = uicfg.autoform_field_kwargs.get + class CWUserFormConfig(uihelper.FormConfig): + etype = 'CWUser' + hidden = ('in_group',) + fields_order = ('login', 'firstname') + section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') + self.assertCountEqual(section_conf, ['main_hidden', 'muledit_attributes']) + self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {'order': 1}) + + +class UicfgRegistryTC(CubicWebTC): + + def test_default_uicfg_object(self): + 'CW default ui config objects must be registered in uicfg registry' + onames = ('autoform_field', 'autoform_section', 'autoform_field_kwargs') + for oname in onames: + obj = self.vreg['uicfg'].select_or_none(oname) + self.assertTrue(obj is not None, '%s not found in uicfg registry' + % oname) + + def test_custom_uicfg(self): + ASRT = uicfg.AutoformSectionRelationTags + custom_afs = ASRT() + custom_afs.__select__ = ASRT.__select__ & ASRT.__select__ + self.vreg['uicfg'].register(custom_afs) + obj = self.vreg['uicfg'].select_or_none('autoform_section') + self.assertTrue(obj is custom_afs) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_urlpublisher.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_urlpublisher.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""Unit tests for url publishing service""" + +import re + +from logilab.common.testlib import unittest_main + +from cubicweb.rset import ResultSet +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.fake import FakeRequest +from cubicweb.web import NotFound, Redirect, views +from cubicweb.web.views.urlrewrite import SimpleReqRewriter + + +class URLPublisherTC(CubicWebTC): + """test suite for QSPreProcessor""" + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, u'ÿsaÿe') + b = cnx.create_entity('BlogEntry', title=u'hell\'o', content=u'blabla') + # take care: Tag's name normalized to lower case + c = cnx.create_entity('Tag', name=u'yo') + cnx.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', + {'c':c.eid, 'b':b.eid}) + cnx.commit() + + def process(self, req, url): + return self.app.url_resolver.process(req, url) + + def test_raw_path(self): + """tests raw path resolution'""" + with self.admin_access.web_request() as req: + self.assertEqual(self.process(req, 'view'), ('view', None)) + self.assertEqual(self.process(req, 'edit'), ('edit', None)) + self.assertRaises(NotFound, self.process, req, 'whatever') + + def test_eid_path(self): + """tests eid path resolution""" + with self.admin_access.web_request() as req: + self.assertIsInstance(self.process(req, '123')[1], ResultSet) + self.assertEqual(len(self.process(req, '123')[1]), 1) + self.assertRaises(NotFound, self.process, req, '123/345') + self.assertRaises(NotFound, self.process, req, 'not_eid') + + def test_rest_path_etype(self): + """tests the rest path resolution""" + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'CWEType') + self.assertEqual(ctrl, 'view') + self.assertEqual(rset.description[0][0], 'CWEType') + self.assertEqual("Any X,AA,AB ORDERBY AB WHERE X is_instance_of CWEType, " + "X modification_date AA, X name AB", + rset.printable_rql()) + self.assertEqual(req.form['vid'], 'sameetypelist') + + def test_rest_path_by_attr(self): + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'CWUser/login/admin') + self.assertEqual(ctrl, 'view') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.description[0][0], 'CWUser') + self.assertEqual('Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' + 'X firstname AA, X login AB, X modification_date AC, ' + 'X surname AD, X login "admin"', + rset.printable_rql()) + + def test_rest_path_unique_attr(self): + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'cwuser/admin') + self.assertEqual(ctrl, 'view') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.description[0][0], 'CWUser') + self.assertEqual('Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' + 'X firstname AA, X login AB, X modification_date AC, ' + 'X surname AD, X login "admin"', + rset.printable_rql()) + self.assertEqual(req.form['vid'], 'primary') + + def test_rest_path_eid(self): + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'cwuser/eid/%s' % req.user.eid) + self.assertEqual(ctrl, 'view') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.description[0][0], 'CWUser') + self.assertEqual('Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' + 'X firstname AA, X login AB, X modification_date AC, ' + 'X surname AD, X eid 
%s' % rset[0][0], + rset.printable_rql()) + + def test_rest_path_non_ascii_paths(self): + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'CWUser/login/%C3%BFsa%C3%BFe') + self.assertEqual(ctrl, 'view') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.description[0][0], 'CWUser') + self.assertEqual(u'Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' + u'X firstname AA, X login AB, X modification_date AC, ' + u'X surname AD, X login "\xffsa\xffe"', + rset.printable_rql()) + + def test_rest_path_quoted_paths(self): + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'BlogEntry/title/hell%27o') + self.assertEqual(ctrl, 'view') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.description[0][0], 'BlogEntry') + self.assertEqual(u'Any X,AA,AB,AC WHERE X is_instance_of BlogEntry, ' + 'X creation_date AA, X modification_date AB, X title AC, ' + 'X title "hell\'o"', + rset.printable_rql()) + + def test_rest_path_use_vid_from_rset(self): + with self.admin_access.web_request(headers={'Accept': 'application/rdf+xml'}) as req: + views.VID_BY_MIMETYPE['application/rdf+xml'] = 'rdf' + try: + ctrl, rset = self.process(req, 'CWEType') + finally: + views.VID_BY_MIMETYPE.pop('application/rdf+xml') + self.assertEqual(req.form['vid'], 'rdf') + + def test_rest_path_errors(self): + with self.admin_access.web_request() as req: + self.assertRaises(NotFound, self.process, req, 'CWUser/eid/30000') + self.assertRaises(NotFound, self.process, req, 'Workcases') + self.assertRaises(NotFound, self.process, req, 'CWUser/inexistant_attribute/joe') + + def test_action_path(self): + """tests the action path resolution""" + with self.admin_access.web_request() as req: + self.assertRaises(Redirect, self.process, req, '1/edit') + self.assertRaises(Redirect, self.process, req, 'Tag/name/yo/edit') + self.assertRaises(Redirect, self.process, req, 'Tag/yo/edit') + self.assertRaises(NotFound, self.process, req, 'view/edit') + self.assertRaises(NotFound, self.process, req, '1/non_action') + self.assertRaises(NotFound, self.process, req, 'CWUser/login/admin/non_action') + + def test_regexp_path(self): + """tests the regexp path resolution""" + with self.admin_access.web_request() as req: + ctrl, rset = self.process(req, 'add/Task') + self.assertEqual(ctrl, 'view') + self.assertEqual(rset, None) + self.assertEqual(req.form, {'etype': "Task", 'vid': "creation"}) + self.assertRaises(NotFound, self.process, req, 'add/foo/bar') + + def test_nonascii_path(self): + oldrules = SimpleReqRewriter.rules + SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo'))] + with self.admin_access.web_request() as req: + try: + path = str(FakeRequest().url_quote(u'été')) + ctrl, rset = self.process(req, path) + self.assertEqual(rset, None) + self.assertEqual(req.form, {'vid': "foo"}) + finally: + SimpleReqRewriter.rules = oldrules + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_urlrewrite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_urlrewrite.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,223 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. + +from six import text_type + +from logilab.common import tempattr + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.fake import FakeRequest + +from cubicweb.web.views.urlrewrite import (SimpleReqRewriter, SchemaBasedRewriter, + rgx, rgx_action) + + +class UrlRewriteTC(CubicWebTC): + + def test_auto_extend_rules(self): + class Rewriter(SimpleReqRewriter): + rules = [ + ('foo', dict(rql='Foo F')), + ('/index', dict(vid='index2')), + ] + rules = [] + for pattern, values in Rewriter.rules: + if hasattr(pattern, 'pattern'): + pattern = pattern.pattern + rules.append((pattern, values)) + self.assertListEqual(rules, [ + ('foo' , dict(rql='Foo F')), + ('/index' , dict(vid='index2')), + ('/_', dict(vid='manage')), + ('/_registry', dict(vid='registry')), + ('/schema', dict(vid='schema')), + ('/myprefs', dict(vid='propertiesform')), + ('/siteconfig', dict(vid='systempropertiesform')), + ('/siteinfo', dict(vid='siteinfo')), + ('/manage', dict(vid='manage')), + ('/notfound', dict(vid='404')), + ('/error', dict(vid='error')), + ('/sparql', dict(vid='sparql')), + ('/processinfo', dict(vid='processinfo')), + ('/cwuser$', {'vid': 'cw.users-and-groups-management', + 'tab': 'cw_users_management'}), + ('/cwgroup$', {'vid': 'cw.users-and-groups-management', + 'tab': 'cw_groups_management'}), + ('/cwsource$', {'vid': 'cw.sources-management'}), + ('/schema/([^/]+?)/?$', {'rql': r'Any X WHERE X is CWEType, X name "\1"', 'vid': 'primary'}), + ('/add/([^/]+?)/?$' , dict(vid='creation', etype=r'\1')), + ('/doc/images/(.+?)/?$', dict(fid='\\1', vid='wdocimages')), + ('/doc/?$', dict(fid='main', vid='wdoc')), + ('/doc/(.+?)/?$', dict(fid='\\1', vid='wdoc')), + # now in SchemaBasedRewriter + #('/search/(.+)$', dict(rql=r'Any X WHERE X has_text "\1"')), + ]) + + + def test_no_extend_rules(self): + class Rewriter(SimpleReqRewriter): + ignore_baseclass_rules = True + rules = [ + ('foo', dict(rql='Foo F')), + ('/index', dict(vid='index2')), + ] + self.assertListEqual(Rewriter.rules, [ + ('foo' , dict(rql='Foo F')), + ('/index' , dict(vid='index2')), + ]) + + def test_basic_transformation(self): + """test simple string-based rewrite""" + req = FakeRequest() + rewriter = SimpleReqRewriter(req) + self.assertRaises(KeyError, rewriter.rewrite, req, '/view?vid=whatever') + self.assertEqual(req.form, {}) + rewriter.rewrite(req, '/index') + self.assertEqual(req.form, {'vid' : "index"}) + + def test_regexp_transformation(self): + """test regexp-based rewrite""" + req = FakeRequest() + rewriter = SimpleReqRewriter(req) + rewriter.rewrite(req, '/add/Task') + self.assertEqual(req.form, {'vid' : "creation", 'etype' : "Task"}) + req = FakeRequest() + rewriter.rewrite(req, '/add/Task/') + self.assertEqual(req.form, {'vid' : "creation", 'etype' : "Task"}) + + def test_inheritance(self): + BaseTransition = self.vreg['etypes'].etype_class('BaseTransition') + with 
self.admin_access.web_request() as req: + x = req.create_entity('WorkflowTransition', name=u'test') + ctrlid, rset = self.app.url_resolver.process(req, 'basetransition/%s' % x.eid) + self.assertEqual(ctrlid, 'view') + self.assertEqual(x.eid, rset[0][0]) + # cw_rest_attr_info is cached but clear_cache doesn't like cached class + # method + del BaseTransition._cw_rest_attr_info_cache_ + try: + with tempattr(BaseTransition, 'rest_attr', 'name'): + + ctrlid, rset = self.app.url_resolver.process(req, 'basetransition/%s' % x.name) + self.assertEqual(ctrlid, 'view') + self.assertEqual(x.eid, rset[0][0]) + finally: + del BaseTransition._cw_rest_attr_info_cache_ + + + +class RgxActionRewriteTC(CubicWebTC): + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + p1 = self.create_user(cnx, u'user1') + p1.cw_set(firstname=u'joe', surname=u'Dalton') + p2 = self.create_user(cnx, u'user2') + p2.cw_set(firstname=u'jack', surname=u'Dalton') + self.p1eid = p1.eid + cnx.commit() + + def test_rgx_action_with_transforms(self): + class TestSchemaBasedRewriter(SchemaBasedRewriter): + rules = [ + (rgx('/(?P<sn>\w+)/(?P<fn>\w+)'), + rgx_action(r'Any X WHERE X surname %(sn)s, ' + 'X firstname %(fn)s', + argsgroups=('sn', 'fn'), + transforms={'sn' : text_type.capitalize, + 'fn' : text_type.lower,})), + ] + with self.admin_access.web_request() as req: + rewriter = TestSchemaBasedRewriter(req) + _pmid, rset = rewriter.rewrite(req, u'/DaLToN/JoE') + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], self.p1eid) + self.assertEqual(rset.description[0][0], 'CWUser') + + def test_inheritance_precedence(self): + RQL1 = 'Any C WHERE C is CWEType' + RQL2 = 'Any C WHERE C is CWUser' + + class BaseRewriter(SchemaBasedRewriter): + rules = [ + (rgx('/collector(.*)'), + rgx_action(rql=RQL1, + form=dict(vid='baseindex')), + ), + ] + class Rewriter(BaseRewriter): + rules = [ + (rgx('/collector/something(/?)'), + rgx_action(rql=RQL2, + form=dict(vid='index')), + ), + ] + + with self.admin_access.web_request() as req: + rewriter = Rewriter(req) + _pmid, rset = rewriter.rewrite(req, '/collector') + self.assertEqual(rset.rql, RQL1) + self.assertEqual(req.form, {'vid' : "baseindex"}) + _pmid, rset = rewriter.rewrite(req, '/collector/something') + self.assertEqual(rset.rql, RQL2) + self.assertEqual(req.form, {'vid' : "index"}) + _pmid, rset = rewriter.rewrite(req, '/collector/something/') + self.assertEqual(req.form, {'vid' : "index"}) + self.assertEqual(rset.rql, RQL2) + _pmid, rset = rewriter.rewrite(req, '/collector/somethingelse/') + self.assertEqual(rset.rql, RQL1) + self.assertEqual(req.form, {'vid' : "baseindex"}) + + def test_inheritance_precedence_same_rgx(self): + RQL1 = 'Any C WHERE C is CWEType' + RQL2 = 'Any C WHERE C is CWUser' + + class BaseRewriter(SchemaBasedRewriter): + rules = [ + (rgx('/collector(.*)'), + rgx_action(rql=RQL1, + form=dict(vid='baseindex')), + ), + ] + class Rewriter(BaseRewriter): + rules = [ + (rgx('/collector(.*)'), + rgx_action(rql=RQL2, + form=dict(vid='index')), + ), + ] + + with self.admin_access.web_request() as req: + rewriter = Rewriter(req) + _pmid, rset = rewriter.rewrite(req, '/collector') + self.assertEqual(rset.rql, RQL2) + self.assertEqual(req.form, {'vid' : "index"}) + _pmid, rset = rewriter.rewrite(req, '/collector/something') + self.assertEqual(rset.rql, RQL2) + self.assertEqual(req.form, {'vid' : "index"}) + _pmid, rset = rewriter.rewrite(req, '/collector/something/') + self.assertEqual(req.form, {'vid' : "index"}) + self.assertEqual(rset.rql, RQL2) + 
_pmid, rset = rewriter.rewrite(req, '/collector/somethingelse/') + self.assertEqual(rset.rql, RQL2) + self.assertEqual(req.form, {'vid' : "index"}) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_actions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_actions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,64 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. + +from logilab.common.testlib import unittest_main + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.views import actions, uicfg + + +class ActionsTC(CubicWebTC): + def test_view_action(self): + with self.admin_access.web_request(vid='rss', rql='CWUser X') as req: + rset = req.execute('CWUser X') + actions = self.vreg['actions'].poss_visible_objects(req, rset=rset) + vaction = [action for action in actions if action.__regid__ == 'view'][0] + self.assertEqual(vaction.url(), 'http://testing.fr/cubicweb/view?rql=CWUser%20X') + + def test_has_editable_relations(self): + """ensure has_editable_relation predicate used by ModifyAction + return positive score if there is only some inlined forms + """ + # The schema only allows the anonymous user to modify his/her own + # EmailAddress if it is set, not to create one. Since the 'anon' CWUser + # entity is created without any associated EmailAddress entities, there + # are no attributes nor relations that can be edited: the "modify" + # action should not appear. 
+ with self.new_access('anon').web_request() as req: + predicate = actions.has_editable_relation() + self.assertEqual(predicate(None, req, rset=req.user.as_rset()), + 0) + # being allowed to 'add' the relation is not enough + use_email = self.schema['use_email'].rdefs['CWUser', 'EmailAddress'] + with self.temporary_permissions((use_email, {'add': ('guests',)})): + with self.new_access('anon').web_request() as req: + predicate = actions.has_editable_relation() + self.assertEqual(predicate(None, req, rset=req.user.as_rset()), + 0) + # if we also allow creating the target etype, then the "modify" action + # should appear + with self.temporary_permissions((use_email, {'add': ('guests',)}), + EmailAddress={'add': ('guests',)}): + with self.new_access('anon').web_request() as req: + predicate = actions.has_editable_relation() + self.assertEqual(predicate(None, req, rset=req.user.as_rset()), + 1) + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_apacherewrite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_apacherewrite.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,60 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+""" + +""" +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.web.views.apacherewrite import * + +class ApacheURLRewriteTC(TestCase): + + def test(self): + class MyAppRules(ApacheURLRewrite): + rules = [ + RewriteCond('logilab\.fr', match='host', + rules=[('/(.*)', r'http://www.logilab.fr/\1')], + action='redirect'), + RewriteCond('(www)\.logilab\.fr', match='host', action='stop'), + RewriteCond('/(data|json)/', match='path', action='stop'), + RewriteCond('(?P<cat>.*)\.logilab\.fr', match='host', + rules=[('/(.*)', r'/m_%(cat)s/\1')]), + ] + urlrewriter = MyAppRules() + req = None # not used in the above rules, so keep a simple TestCase here + try: + urlrewriter.rewrite('logilab.fr', '/whatever', req) + self.fail('redirect exception expected') + except Redirect as ex: + self.assertEqual(ex.location, 'http://www.logilab.fr/whatever') + self.assertEqual(urlrewriter.rewrite('www.logilab.fr', '/whatever', req), + '/whatever') + self.assertEqual(urlrewriter.rewrite('www.logilab.fr', '/json/bla', req), + '/json/bla') + self.assertEqual(urlrewriter.rewrite('abcd.logilab.fr', '/json/bla', req), + '/json/bla') + self.assertEqual(urlrewriter.rewrite('abcd.logilab.fr', '/data/bla', req), + '/data/bla') + self.assertEqual(urlrewriter.rewrite('abcd.logilab.fr', '/whatever', req), + '/m_abcd/whatever') + self.assertEqual(urlrewriter.rewrite('abcd.fr', '/whatever', req), + '/whatever') + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_basecontrollers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_basecontrollers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1087 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+"""cubicweb.web.views.basecontrollers unit tests""" + +from six import text_type +from six.moves.urllib.parse import urlsplit, urlunsplit, urljoin, parse_qs + +import lxml + +from logilab.common.testlib import unittest_main +from logilab.common.decorators import monkeypatch + +from cubicweb import Binary, NoSelectableObject, ValidationError +from cubicweb.schema import RRQLExpression +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.webtest import CubicWebTestTC +from cubicweb.utils import json_dumps +from cubicweb.uilib import rql_for_eid +from cubicweb.web import Redirect, RemoteCallFailed +import cubicweb.server.session +from cubicweb.server.session import Connection as OldConnection +from cubicweb.web.views.autoform import get_pending_inserts, get_pending_deletes +from cubicweb.web.views.basecontrollers import JSonController, xhtmlize, jsonize +from cubicweb.web.views.ajaxcontroller import ajaxfunc, AjaxFunction +import cubicweb.transaction as tx +from cubicweb.server.hook import Hook, Operation +from cubicweb.predicates import is_instance + + +class ViewControllerTC(CubicWebTestTC): + def test_view_ctrl_with_valid_cache_headers(self): + resp = self.webapp.get('/manage') + self.assertEqual(resp.etag, 'manage/guests') + self.assertEqual(resp.status_code, 200) + cache_headers = {'if-modified-since': resp.headers['Last-Modified'], + 'if-none-match': resp.etag} + resp = self.webapp.get('/manage', headers=cache_headers) + self.assertEqual(resp.status_code, 304) + self.assertEqual(len(resp.body), 0) + + +def req_form(user): + return {'eid': [str(user.eid)], + '_cw_entity_fields:%s' % user.eid: '_cw_generic_field', + '__type:%s' % user.eid: user.__regid__ + } + + +class EditControllerTC(CubicWebTC): + + def setUp(self): + CubicWebTC.setUp(self) + self.assertIn('users', self.schema.eschema('CWGroup').get_groups('read')) + + def tearDown(self): + CubicWebTC.tearDown(self) + self.assertIn('users', self.schema.eschema('CWGroup').get_groups('read')) + + def test_noparam_edit(self): + """check behaviour of this controller without any form parameter + """ + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + self.assertEqual(cm.exception.errors, {None: u'no selected entities'}) + + def test_validation_unique(self): + """test creation of two linked entities + """ + with self.admin_access.web_request() as req: + req.form = {'eid': 'X', '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,upassword-subject', + 'login-subject:X': u'admin', + 'upassword-subject:X': u'toto', + 'upassword-subject-confirm:X': u'toto', + } + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + cm.exception.translate(text_type) + self.assertEqual({'login-subject': 'the value "admin" is already used, use another one'}, + cm.exception.errors) + + def test_simultaneous_edition_only_one_commit(self): + """ Allow two simultaneous edit view of the same entity as long as only one commits + """ + with self.admin_access.web_request() as req: + e = req.create_entity('BlogEntry', title=u'cubicweb.org', content=u"hop") + expected_path = e.rest_path() + req.cnx.commit() + form = self.vreg['views'].select('edition', req, rset=e.as_rset(), row=0) + html_form = lxml.html.fromstring(form.render(w=None, action='edit')).forms[0] + + with self.admin_access.web_request() as req2: + form2 = self.vreg['views'].select('edition', req, rset=e.as_rset(), row=0) + + with 
self.admin_access.web_request(**dict(html_form.form_values())) as req: + path, args = self.expect_redirect_handle_request(req, path='edit') + self.assertEqual(path, expected_path) + + def test_simultaneous_edition_refuse_second_commit(self): + """ Disallow committing changes to an entity edited in between """ + with self.admin_access.web_request() as req: + e = req.create_entity('BlogEntry', title=u'cubicweb.org', content=u"hop") + eid = e.eid + req.cnx.commit() + form = self.vreg['views'].select('edition', req, rset=e.as_rset(), row=0) + html_form = lxml.html.fromstring(form.render(w=None, action='edit')).forms[0] + + with self.admin_access.web_request() as req2: + e = req2.entity_from_eid(eid) + e.cw_set(content = u"hip") + req2.cnx.commit() + + form_field_name = "content-subject:%d" % eid + form_values = dict(html_form.form_values()) + assert form_field_name in form_values + form_values[form_field_name] = u'yep' + with self.admin_access.web_request(**form_values) as req: + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + reported_eid, dict_info = cm.exception.args + self.assertEqual(reported_eid, eid) + self.assertIn(None, dict_info) + self.assertIn("has changed since you started to edit it.", dict_info[None]) + + def test_user_editing_itself(self): + """checking that a manager user can edit itself + """ + with self.admin_access.web_request() as req: + user = req.user + groupeids = [eid for eid, in req.execute('CWGroup G WHERE G name ' + 'in ("managers", "users")')] + groups = [text_type(eid) for eid in groupeids] + eid = text_type(user.eid) + req.form = { + 'eid': eid, '__type:'+eid: 'CWUser', + '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject,in_group-subject', + 'login-subject:'+eid: text_type(user.login), + 'surname-subject:'+eid: u'Th\xe9nault', + 'firstname-subject:'+eid: u'Sylvain', + 'in_group-subject:'+eid: groups, + } + self.expect_redirect_handle_request(req, 'edit') + e = req.execute('Any X WHERE X eid %(x)s', + {'x': user.eid}).get_entity(0, 0) + self.assertEqual(e.firstname, u'Sylvain') + self.assertEqual(e.surname, u'Th\xe9nault') + self.assertEqual(e.login, user.login) + self.assertEqual([g.eid for g in e.in_group], groupeids) + + def test_user_can_change_its_password(self): + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, u'user') + cnx.commit() + with self.new_access(u'user').web_request() as req: + eid = text_type(req.user.eid) + req.form = { + 'eid': eid, '__maineid' : eid, + '__type:'+eid: 'CWUser', + '_cw_entity_fields:'+eid: 'upassword-subject', + 'upassword-subject:'+eid: 'tournicoton', + 'upassword-subject-confirm:'+eid: 'tournicoton', + } + path, params = self.expect_redirect_handle_request(req, 'edit') + req.cnx.commit() # commit to check we don't get late validation error for instance + self.assertEqual(path, 'cwuser/user') + self.assertNotIn('vid', params) + + def test_user_editing_itself_no_relation(self): + """checking we can edit an entity without specifying some required + relations (meaning no changes) + """ + with self.admin_access.web_request() as req: + user = req.user + groupeids = [g.eid for g in user.in_group] + eid = text_type(user.eid) + req.form = { + 'eid': eid, + '__type:'+eid: 'CWUser', + '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject', + 'login-subject:'+eid: text_type(user.login), + 'firstname-subject:'+eid: u'Th\xe9nault', + 'surname-subject:'+eid: u'Sylvain', + } + self.expect_redirect_handle_request(req, 'edit') + e = req.execute('Any X 
WHERE X eid %(x)s', + {'x': user.eid}).get_entity(0, 0) + self.assertEqual(e.login, user.login) + self.assertEqual(e.firstname, u'Th\xe9nault') + self.assertEqual(e.surname, u'Sylvain') + self.assertEqual([g.eid for g in e.in_group], groupeids) + self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') + + + def test_create_multiple_linked(self): + with self.admin_access.web_request() as req: + gueid = req.execute('CWGroup G WHERE G name "users"')[0][0] + req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', + '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,upassword-subject,surname-subject,in_group-subject', + 'login-subject:X': u'adim', + 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', + 'surname-subject:X': u'Di Mascio', + 'in_group-subject:X': text_type(gueid), + + '__type:Y': 'EmailAddress', + '_cw_entity_fields:Y': 'address-subject,use_email-object', + 'address-subject:Y': u'dima@logilab.fr', + 'use_email-object:Y': 'X', + } + path, _params = self.expect_redirect_handle_request(req, 'edit') + # should be redirected on the created person + self.assertEqual(path, 'cwuser/adim') + e = req.execute('Any P WHERE P surname "Di Mascio"').get_entity(0, 0) + self.assertEqual(e.surname, 'Di Mascio') + email = e.use_email[0] + self.assertEqual(email.address, 'dima@logilab.fr') + + def test_create_mandatory_inlined(self): + with self.admin_access.web_request() as req: + req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', + + '__type:X': 'Salesterm', + '_cw_entity_fields:X': '', + + '__type:Y': 'File', + '_cw_entity_fields:Y': 'data-subject,described_by_test-object', + 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), + 'described_by_test-object:Y': 'X', + } + path, _params = self.expect_redirect_handle_request(req, 'edit') + self.assertTrue(path.startswith('salesterm/'), path) + eid = path.split('/')[1] + salesterm = req.entity_from_eid(eid) + # The NOT NULL constraint of mandatory relation implies that the File + # must be created before the Salesterm, otherwise Salesterm insertion + # will fail. + # NOTE: sqlite does have NOT NULL constraint, unlike Postgres so the + # insertion does not fail and we have to check dumbly that File is + # created before. + self.assertGreater(salesterm.eid, salesterm.described_by_test[0].eid) + + def test_create_mandatory_inlined2(self): + with self.admin_access.web_request() as req: + req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', + + '__type:X': 'Salesterm', + '_cw_entity_fields:X': 'described_by_test-subject', + 'described_by_test-subject:X': 'Y', + + '__type:Y': 'File', + '_cw_entity_fields:Y': 'data-subject', + 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), + } + path, _params = self.expect_redirect_handle_request(req, 'edit') + self.assertTrue(path.startswith('salesterm/'), path) + eid = path.split('/')[1] + salesterm = req.entity_from_eid(eid) + # The NOT NULL constraint of mandatory relation implies that the File + # must be created before the Salesterm, otherwise Salesterm insertion + # will fail. + # NOTE: sqlite does have NOT NULL constraint, unlike Postgres so the + # insertion does not fail and we have to check dumbly that File is + # created before. + self.assertGreater(salesterm.eid, salesterm.described_by_test[0].eid) + + def test_edit_mandatory_inlined3_object(self): + # non regression test for #3120495. 
Without the fix, leads to + # "unhashable type: 'list'" error + with self.admin_access.web_request() as req: + cwrelation = text_type(req.execute('CWEType X WHERE X name "CWSource"')[0][0]) + req.form = {'eid': [cwrelation], '__maineid' : cwrelation, + + '__type:'+cwrelation: 'CWEType', + '_cw_entity_fields:'+cwrelation: 'to_entity-object', + 'to_entity-object:'+cwrelation: [9999, 9998], + } + with req.cnx.deny_all_hooks_but(): + path, _params = self.expect_redirect_handle_request(req, 'edit') + self.assertTrue(path.startswith('cwetype/CWSource'), path) + + def test_edit_multiple_linked(self): + with self.admin_access.web_request() as req: + peid = text_type(self.create_user(req, u'adim').eid) + req.form = {'eid': [peid, 'Y'], '__maineid': peid, + + '__type:'+peid: u'CWUser', + '_cw_entity_fields:'+peid: u'surname-subject', + 'surname-subject:'+peid: u'Di Masci', + + '__type:Y': u'EmailAddress', + '_cw_entity_fields:Y': u'address-subject,use_email-object', + 'address-subject:Y': u'dima@logilab.fr', + 'use_email-object:Y': peid, + } + path, _params = self.expect_redirect_handle_request(req, 'edit') + # should be redirected on the created person + self.assertEqual(path, 'cwuser/adim') + e = req.execute('Any P WHERE P surname "Di Masci"').get_entity(0, 0) + email = e.use_email[0] + self.assertEqual(email.address, 'dima@logilab.fr') + + # with self.admin_access.web_request() as req: + emaileid = text_type(email.eid) + req.form = {'eid': [peid, emaileid], + + '__type:'+peid: u'CWUser', + '_cw_entity_fields:'+peid: u'surname-subject', + 'surname-subject:'+peid: u'Di Masci', + + '__type:'+emaileid: u'EmailAddress', + '_cw_entity_fields:'+emaileid: u'address-subject,use_email-object', + 'address-subject:'+emaileid: u'adim@logilab.fr', + 'use_email-object:'+emaileid: peid, + } + self.expect_redirect_handle_request(req, 'edit') + email.cw_clear_all_caches() + self.assertEqual(email.address, 'adim@logilab.fr') + + def test_password_confirm(self): + """test creation of two linked entities + """ + with self.admin_access.web_request() as req: + user = req.user + req.form = {'eid': 'X', + '__cloned_eid:X': text_type(user.eid), '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,upassword-subject', + 'login-subject:X': u'toto', + 'upassword-subject:X': u'toto', + } + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + self.assertEqual({'upassword-subject': u'password and confirmation don\'t match'}, + cm.exception.errors) + req.form = {'__cloned_eid:X': text_type(user.eid), + 'eid': 'X', '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,upassword-subject', + 'login-subject:X': u'toto', + 'upassword-subject:X': u'toto', + 'upassword-subject-confirm:X': u'tutu', + } + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + self.assertEqual({'upassword-subject': u'password and confirmation don\'t match'}, + cm.exception.errors) + + + def test_interval_bound_constraint_success(self): + with self.admin_access.repo_cnx() as cnx: + feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s', + {'data': Binary(b'yo')})[0][0] + cnx.commit() + + with self.admin_access.web_request(rollbackfirst=True) as req: + req.form = {'eid': ['X'], + '__type:X': 'Salesterm', + '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', + 'amount-subject:X': u'-10', + 'described_by_test-subject:X': text_type(feid), + } + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + cm.exception.translate(text_type) + 
self.assertEqual({'amount-subject': 'value -10 must be >= 0'}, + cm.exception.errors) + + with self.admin_access.web_request(rollbackfirst=True) as req: + req.form = {'eid': ['X'], + '__type:X': 'Salesterm', + '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', + 'amount-subject:X': u'110', + 'described_by_test-subject:X': text_type(feid), + } + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + cm.exception.translate(text_type) + self.assertEqual(cm.exception.errors, {'amount-subject': 'value 110 must be <= 100'}) + + with self.admin_access.web_request(rollbackfirst=True) as req: + req.form = {'eid': ['X'], + '__type:X': 'Salesterm', + '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', + 'amount-subject:X': u'10', + 'described_by_test-subject:X': text_type(feid), + } + self.expect_redirect_handle_request(req, 'edit') + # should be redirected on the created + #eid = params['rql'].split()[-1] + e = req.execute('Salesterm X').get_entity(0, 0) + self.assertEqual(e.amount, 10) + + def test_interval_bound_constraint_validateform(self): + """Test the FormValidatorController controller on entity with + constrained attributes""" + with self.admin_access.repo_cnx() as cnx: + feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s', + {'data': Binary(b'yo')})[0][0] + seid = cnx.create_entity('Salesterm', amount=0, described_by_test=feid).eid + cnx.commit() + + # ensure a value that violate a constraint is properly detected + with self.admin_access.web_request(rollbackfirst=True) as req: + req.form = {'eid': [text_type(seid)], + '__type:%s'%seid: 'Salesterm', + '_cw_entity_fields:%s'%seid: 'amount-subject', + 'amount-subject:%s'%seid: u'-10', + } + self.assertMultiLineEqual('''<script type="text/javascript"> + window.parent.handleFormValidationResponse('entityForm', null, null, [false, [%s, {"amount-subject": "value -10 must be >= 0"}], null], null); +</script>'''%seid, self.ctrl_publish(req, 'validateform').decode('ascii')) + + # ensure a value that comply a constraint is properly processed + with self.admin_access.web_request(rollbackfirst=True) as req: + req.form = {'eid': [text_type(seid)], + '__type:%s'%seid: 'Salesterm', + '_cw_entity_fields:%s'%seid: 'amount-subject', + 'amount-subject:%s'%seid: u'20', + } + self.assertMultiLineEqual('''<script type="text/javascript"> + window.parent.handleFormValidationResponse('entityForm', null, null, [true, "http://testing.fr/cubicweb/view", null], null); +</script>''', self.ctrl_publish(req, 'validateform').decode('ascii')) + self.assertEqual(20, req.execute('Any V WHERE X amount V, X eid %(eid)s', + {'eid': seid})[0][0]) + + with self.admin_access.web_request(rollbackfirst=True) as req: + req.form = {'eid': ['X'], + '__type:X': 'Salesterm', + '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', + 'amount-subject:X': u'0', + 'described_by_test-subject:X': text_type(feid), + } + + # ensure a value that is modified in an operation on a modify + # hook works as it should (see + # https://www.cubicweb.org/ticket/2509729 ) + class MyOperation(Operation): + def precommit_event(self): + self.entity.cw_set(amount=-10) + class ValidationErrorInOpAfterHook(Hook): + __regid__ = 'valerror-op-after-hook' + __select__ = Hook.__select__ & is_instance('Salesterm') + events = ('after_add_entity',) + def __call__(self): + MyOperation(self._cw, entity=self.entity) + + with self.temporary_appobjects(ValidationErrorInOpAfterHook): + self.assertMultiLineEqual('''<script 
type="text/javascript"> + window.parent.handleFormValidationResponse('entityForm', null, null, [false, ["X", {"amount-subject": "value -10 must be >= 0"}], null], null); +</script>''', self.ctrl_publish(req, 'validateform').decode('ascii')) + + self.assertMultiLineEqual('''<script type="text/javascript"> + window.parent.handleFormValidationResponse('entityForm', null, null, [true, "http://testing.fr/cubicweb/view", null], null); +</script>''', self.ctrl_publish(req, 'validateform').decode('ascii')) + + def test_req_pending_insert(self): + """make sure req's pending insertions are taken into account""" + with self.admin_access.web_request() as req: + tmpgroup = req.create_entity('CWGroup', name=u"test") + user = req.user + req.cnx.commit() + with self.admin_access.web_request(**req_form(user)) as req: + req.session.data['pending_insert'] = set([(user.eid, 'in_group', tmpgroup.eid)]) + self.expect_redirect_handle_request(req, 'edit') + usergroups = [gname for gname, in + req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', + {'u': user.eid})] + self.assertCountEqual(usergroups, ['managers', 'test']) + self.assertEqual(get_pending_inserts(req), []) + + def test_req_pending_delete(self): + """make sure req's pending deletions are taken into account""" + with self.admin_access.web_request() as req: + user = req.user + groupeid = req.execute('INSERT CWGroup G: G name "test", U in_group G WHERE U eid %(x)s', + {'x': user.eid})[0][0] + usergroups = [gname for gname, in + req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', + {'u': user.eid})] + # just make sure everything was set correctly + self.assertCountEqual(usergroups, ['managers', 'test']) + req.cnx.commit() + # now try to delete the relation + with self.admin_access.web_request(**req_form(user)) as req: + req.session.data['pending_delete'] = set([(user.eid, 'in_group', groupeid)]) + self.expect_redirect_handle_request(req, 'edit') + usergroups = [gname for gname, in + req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', + {'u': user.eid})] + self.assertCountEqual(usergroups, ['managers']) + self.assertEqual(get_pending_deletes(req), []) + + def test_redirect_apply_button(self): + with self.admin_access.web_request() as req: + redirectrql = rql_for_eid(4012) # whatever + req.form = { + 'eid': 'A', '__maineid' : 'A', + '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'content-subject,title-subject', + 'content-subject:A': u'"13:03:43"', + 'title-subject:A': u'huuu', + '__redirectrql': redirectrql, + '__redirectvid': 'primary', + '__redirectparams': 'toto=tutu&tata=titi', + '__form_id': 'edition', + '__action_apply': '', + } + path, params = self.expect_redirect_handle_request(req, 'edit') + self.assertTrue(path.startswith('blogentry/')) + eid = path.split('/')[1] + self.assertEqual(params['vid'], 'edition') + self.assertNotEqual(int(eid), 4012) + self.assertEqual(params['__redirectrql'], redirectrql) + self.assertEqual(params['__redirectvid'], 'primary') + self.assertEqual(params['__redirectparams'], 'toto=tutu&tata=titi') + + def test_redirect_ok_button(self): + with self.admin_access.web_request() as req: + redirectrql = rql_for_eid(4012) # whatever + req.form = { + 'eid': 'A', '__maineid' : 'A', + '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'content-subject,title-subject', + 'content-subject:A': u'"13:03:43"', + 'title-subject:A': u'huuu', + '__redirectrql': redirectrql, + '__redirectvid': 'primary', + '__redirectparams': 'toto=tutu&tata=titi', + '__form_id': 'edition', + } + path, params = 
self.expect_redirect_handle_request(req, 'edit') + self.assertEqual(path, 'view') + self.assertEqual(params['rql'], redirectrql) + self.assertEqual(params['vid'], 'primary') + self.assertEqual(params['tata'], 'titi') + self.assertEqual(params['toto'], 'tutu') + + def test_redirect_delete_button(self): + with self.admin_access.web_request() as req: + eid = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid + req.form = {'eid': text_type(eid), '__type:%s'%eid: 'BlogEntry', + '__action_delete': ''} + path, params = self.expect_redirect_handle_request(req, 'edit') + self.assertEqual(path, 'blogentry') + self.assertIn('_cwmsgid', params) + eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid + req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s', + {'x': req.user.eid, 'e': eid}) + req.cnx.commit() + req.form = {'eid': text_type(eid), '__type:%s'%eid: 'EmailAddress', + '__action_delete': ''} + path, params = self.expect_redirect_handle_request(req, 'edit') + self.assertEqual(path, 'cwuser/admin') + self.assertIn('_cwmsgid', params) + eid1 = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid + eid2 = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid + req.form = {'eid': [text_type(eid1), text_type(eid2)], + '__type:%s'%eid1: 'BlogEntry', + '__type:%s'%eid2: 'EmailAddress', + '__action_delete': ''} + path, params = self.expect_redirect_handle_request(req, 'edit') + self.assertEqual(path, 'view') + self.assertIn('_cwmsgid', params) + + def test_simple_copy(self): + with self.admin_access.web_request() as req: + blog = req.create_entity('Blog', title=u'my-blog') + blogentry = req.create_entity('BlogEntry', title=u'entry1', + content=u'content1', entry_of=blog) + req.form = {'__maineid' : 'X', 'eid': 'X', + '__cloned_eid:X': blogentry.eid, '__type:X': 'BlogEntry', + '_cw_entity_fields:X': 'title-subject,content-subject', + 'title-subject:X': u'entry1-copy', + 'content-subject:X': u'content1', + } + self.expect_redirect_handle_request(req, 'edit') + blogentry2 = req.find('BlogEntry', title=u'entry1-copy').one() + self.assertEqual(blogentry2.entry_of[0].eid, blog.eid) + + def test_skip_copy_for(self): + with self.admin_access.web_request() as req: + blog = req.create_entity('Blog', title=u'my-blog') + blogentry = req.create_entity('BlogEntry', title=u'entry1', + content=u'content1', entry_of=blog) + blogentry.__class__.cw_skip_copy_for = [('entry_of', 'subject')] + try: + req.form = {'__maineid' : 'X', 'eid': 'X', + '__cloned_eid:X': blogentry.eid, '__type:X': 'BlogEntry', + '_cw_entity_fields:X': 'title-subject,content-subject', + 'title-subject:X': u'entry1-copy', + 'content-subject:X': u'content1', + } + self.expect_redirect_handle_request(req, 'edit') + blogentry2 = req.find('BlogEntry', title=u'entry1-copy').one() + # entry_of should not be copied + self.assertEqual(len(blogentry2.entry_of), 0) + finally: + blogentry.__class__.cw_skip_copy_for = [] + + def test_nonregr_eetype_etype_editing(self): + """non-regression test checking that a manager user can edit a CWEType entity + """ + with self.admin_access.web_request() as req: + groupeids = sorted(eid + for eid, in req.execute('CWGroup G ' + 'WHERE G name in ("managers", "users")')) + groups = [text_type(eid) for eid in groupeids] + cwetypeeid = req.execute('CWEType X WHERE X name "CWEType"')[0][0] + basegroups = [text_type(eid) + for eid, in req.execute('CWGroup G ' + 'WHERE X read_permission G, X eid %(x)s', + {'x': cwetypeeid})] + cwetypeeid = text_type(cwetypeeid) + 
req.form = { + 'eid': cwetypeeid, + '__type:'+cwetypeeid: 'CWEType', + '_cw_entity_fields:'+cwetypeeid: 'name-subject,final-subject,description-subject,read_permission-subject', + 'name-subject:'+cwetypeeid: u'CWEType', + 'final-subject:'+cwetypeeid: '', + 'description-subject:'+cwetypeeid: u'users group', + 'read_permission-subject:'+cwetypeeid: groups, + } + try: + self.expect_redirect_handle_request(req, 'edit') + e = req.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}).get_entity(0, 0) + self.assertEqual(e.name, 'CWEType') + self.assertEqual(sorted(g.eid for g in e.read_permission), groupeids) + finally: + # restore + req.execute('SET X read_permission Y WHERE X name "CWEType", ' + 'Y eid IN (%s), NOT X read_permission Y' % (','.join(basegroups))) + req.cnx.commit() + + def test_nonregr_strange_text_input(self): + """non-regression test checking text input containing "13:03:43" + + this seems to be postgres (tsearch?) specific + """ + with self.admin_access.web_request() as req: + req.form = { + 'eid': 'A', '__maineid' : 'A', + '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'title-subject,content-subject', + 'title-subject:A': u'"13:03:40"', + 'content-subject:A': u'"13:03:43"',} + path, _params = self.expect_redirect_handle_request(req, 'edit') + self.assertTrue(path.startswith('blogentry/')) + eid = path.split('/')[1] + e = req.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}).get_entity(0, 0) + self.assertEqual(e.title, '"13:03:40"') + self.assertEqual(e.content, '"13:03:43"') + + + def test_nonregr_multiple_empty_email_addr(self): + with self.admin_access.web_request() as req: + gueid = req.execute('CWGroup G WHERE G name "users"')[0][0] + req.form = {'eid': ['X', 'Y'], + + '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,upassword-subject,in_group-subject', + 'login-subject:X': u'adim', + 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', + 'in_group-subject:X': repr(gueid), + + '__type:Y': 'EmailAddress', + '_cw_entity_fields:Y': 'address-subject,alias-subject,use_email-object', + 'address-subject:Y': u'', + 'alias-subject:Y': u'', + 'use_email-object:Y': 'X', + } + with self.assertRaises(ValidationError) as cm: + self.ctrl_publish(req) + self.assertEqual(cm.exception.errors, {'address-subject': u'required field'}) + + def test_nonregr_copy(self): + with self.admin_access.web_request() as req: + user = req.user + req.form = {'__maineid' : 'X', 'eid': 'X', + '__cloned_eid:X': user.eid, '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,upassword-subject', + 'login-subject:X': u'toto', + 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', + } + path, _params = self.expect_redirect_handle_request(req, 'edit') + self.assertEqual(path, 'cwuser/toto') + e = req.execute('Any X WHERE X is CWUser, X login "toto"').get_entity(0, 0) + self.assertEqual(e.login, 'toto') + self.assertEqual(e.in_group[0].name, 'managers') + + + def test_nonregr_rollback_on_validation_error(self): + with self.admin_access.web_request() as req: + p = self.create_user(req, u"doe") + # do not try to skip 'primary_email' for this test + old_skips = p.__class__.skip_copy_for + p.__class__.skip_copy_for = () + try: + e = req.create_entity('EmailAddress', address=u'doe@doe.com') + req.execute('SET P use_email E, P primary_email E WHERE P eid %(p)s, E eid %(e)s', + {'p' : p.eid, 'e' : e.eid}) + req.form = {'eid': 'X', + '__cloned_eid:X': p.eid, '__type:X': 'CWUser', + '_cw_entity_fields:X': 'login-subject,surname-subject', + 
'login-subject': u'dodo', + 'surname-subject:X': u'Boom', + '__errorurl' : "whatever but required", + } + # try to emulate what really happens in the web application + # 1/ validate form => EditController.publish raises a ValidationError + # which fires a Redirect + # 2/ When re-publishing the copy form, the publisher implicitly commits + try: + self.app_handle_request(req, 'edit') + except Redirect: + req.form['rql'] = 'Any X WHERE X eid %s' % p.eid + req.form['vid'] = 'copy' + self.app_handle_request(req, 'view') + rset = req.execute('CWUser P WHERE P surname "Boom"') + self.assertEqual(len(rset), 0) + finally: + p.__class__.skip_copy_for = old_skips + + def test_regr_inlined_forms(self): + with self.admin_access.web_request() as req: + self.schema['described_by_test'].inlined = False + try: + req.data['eidmap'] = {} + req.data['pending_others'] = set() + req.data['pending_inlined'] = {} + req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', + + '__type:X': 'Salesterm', + '_cw_entity_fields:X': 'described_by_test-subject', + 'described_by_test-subject:X': 'Y', + + '__type:Y': 'File', + '_cw_entity_fields:Y': 'data-subject', + 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), + } + values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2)) + for eid in req.edited_eids()) + editctrl = self.vreg['controllers'].select('edit', req) + # don't call publish to enforce select order + editctrl.errors = [] + editctrl._to_create = {} + editctrl.edit_entity(values_by_eid['X']) # #3064653 raise ValidationError + editctrl.edit_entity(values_by_eid['Y']) + finally: + self.schema['described_by_test'].inlined = False + + +class ReportBugControllerTC(CubicWebTC): + + def test_usable_by_guest(self): + with self.new_access(u'anon').web_request() as req: + self.assertRaises(NoSelectableObject, + self.vreg['controllers'].select, 'reportbug', req) + with self.new_access(u'anon').web_request(description='hop') as req: + self.vreg['controllers'].select('reportbug', req) + + +class AjaxControllerTC(CubicWebTC): + tested_controller = 'ajax' + + def ctrl(self, req=None): + req = req or self.request(url='http://whatever.fr/') + return self.vreg['controllers'].select(self.tested_controller, req) + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.pytag = cnx.create_entity('Tag', name=u'python') + self.cubicwebtag = cnx.create_entity('Tag', name=u'cubicweb') + self.john = self.create_user(cnx, u'John') + cnx.commit() + + ## tests ################################################################## + def test_simple_exec(self): + with self.admin_access.web_request(rql='CWUser P WHERE P login "John"', + pageid='123', fname='view') as req: + ctrl = self.ctrl(req) + rset = self.john.as_rset() + rset.req = req + source = ctrl.publish() + self.assertTrue(source.startswith(b'<div>')) + +# def test_json_exec(self): +# rql = 'Any T,N WHERE T is Tag, T name N' +# ctrl = self.ctrl(self.request(mode='json', rql=rql, pageid='123')) +# self.assertEqual(ctrl.publish(), +# json_dumps(self.execute(rql).rows)) + + def test_remote_add_existing_tag(self): + with self.remote_calling('tag_entity', self.john.eid, ['python']) as (_, req): + self.assertCountEqual( + [tname for tname, in req.execute('Any N WHERE T is Tag, T name N')], + ['python', 'cubicweb']) + self.assertEqual( + req.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, + [['python']]) + + def test_remote_add_new_tag(self): + with self.remote_calling('tag_entity', self.john.eid, ['javascript']) as (_, req): + 
self.assertCountEqual( + [tname for tname, in req.execute('Any N WHERE T is Tag, T name N')], + ['python', 'cubicweb', 'javascript']) + self.assertEqual( + req.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, + [['javascript']]) + + def test_maydel_perms(self): + """Check that AjaxEditRelationCtxComponent calls rdef.check with a + sufficient context""" + with self.remote_calling('tag_entity', self.john.eid, ['python']) as (_, req): + req.cnx.commit() + with self.temporary_permissions( + (self.schema['tags'].rdefs['Tag', 'CWUser'], + {'delete': (RRQLExpression('S owned_by U'), )}, )): + with self.admin_access.web_request(rql='CWUser P WHERE P login "John"', + pageid='123', fname='view') as req: + ctrl = self.ctrl(req) + rset = self.john.as_rset() + rset.req = req + source = ctrl.publish() + # maydel jscall + self.assertIn(b'ajaxBoxRemoveLinkedEntity', source) + + def test_pending_insertion(self): + with self.remote_calling('add_pending_inserts', [['12', 'tags', '13']]) as (_, req): + deletes = get_pending_deletes(req) + self.assertEqual(deletes, []) + inserts = get_pending_inserts(req) + self.assertEqual(inserts, ['12:tags:13']) + with self.remote_calling('add_pending_inserts', [['12', 'tags', '14']]) as (_, req): + deletes = get_pending_deletes(req) + self.assertEqual(deletes, []) + inserts = get_pending_inserts(req) + self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14']) + inserts = get_pending_inserts(req, 12) + self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14']) + inserts = get_pending_inserts(req, 13) + self.assertEqual(inserts, ['12:tags:13']) + inserts = get_pending_inserts(req, 14) + self.assertEqual(inserts, ['12:tags:14']) + req.remove_pending_operations() + + def test_pending_deletion(self): + with self.remote_calling('add_pending_delete', ['12', 'tags', '13']) as (_, req): + inserts = get_pending_inserts(req) + self.assertEqual(inserts, []) + deletes = get_pending_deletes(req) + self.assertEqual(deletes, ['12:tags:13']) + with self.remote_calling('add_pending_delete', ['12', 'tags', '14']) as (_, req): + inserts = get_pending_inserts(req) + self.assertEqual(inserts, []) + deletes = get_pending_deletes(req) + self.assertCountEqual(deletes, ['12:tags:13', '12:tags:14']) + deletes = get_pending_deletes(req, 12) + self.assertCountEqual(deletes, ['12:tags:13', '12:tags:14']) + deletes = get_pending_deletes(req, 13) + self.assertEqual(deletes, ['12:tags:13']) + deletes = get_pending_deletes(req, 14) + self.assertEqual(deletes, ['12:tags:14']) + req.remove_pending_operations() + + def test_remove_pending_operations(self): + with self.remote_calling('add_pending_delete', ['12', 'tags', '13']): + pass + with self.remote_calling('add_pending_inserts', [['12', 'tags', '14']]) as (_, req): + inserts = get_pending_inserts(req) + self.assertEqual(inserts, ['12:tags:14']) + deletes = get_pending_deletes(req) + self.assertEqual(deletes, ['12:tags:13']) + req.remove_pending_operations() + self.assertEqual(get_pending_deletes(req), []) + self.assertEqual(get_pending_inserts(req), []) + + def test_add_inserts(self): + with self.remote_calling('add_pending_inserts', + [('12', 'tags', '13'), ('12', 'tags', '14')]) as (_, req): + inserts = get_pending_inserts(req) + self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14']) + req.remove_pending_operations() + + + # silly tests + def test_external_resource(self): + with self.remote_calling('external_resource', 'RSS_LOGO') as (res, _): + self.assertEqual(json_dumps(self.config.uiprops['RSS_LOGO']).encode('ascii'), + res) + 
+ def test_i18n(self): + with self.remote_calling('i18n', ['bimboom']) as (res, _): + self.assertEqual(json_dumps(['bimboom']).encode('ascii'), res) + + def test_format_date(self): + with self.remote_calling('format_date', '2007-01-01 12:00:00') as (res, _): + self.assertEqual(json_dumps('2007/01/01').encode('ascii'), res) + + def test_ajaxfunc_noparameter(self): + @ajaxfunc + def foo(self, x, y): + return 'hello' + self.assertEqual(foo(object, 1, 2), 'hello') + appobject = foo.__appobject__ + self.assertTrue(issubclass(appobject, AjaxFunction)) + self.assertEqual(appobject.__regid__, 'foo') + self.assertEqual(appobject.check_pageid, False) + self.assertEqual(appobject.output_type, None) + with self.admin_access.web_request() as req: + f = appobject(req) + self.assertEqual(f(12, 13), 'hello') + + def test_ajaxfunc_checkpageid(self): + @ajaxfunc(check_pageid=True) + def foo(self, x, y): + return 'hello' + self.assertEqual(foo(object, 1, 2), 'hello') + appobject = foo.__appobject__ + self.assertTrue(issubclass(appobject, AjaxFunction)) + self.assertEqual(appobject.__regid__, 'foo') + self.assertEqual(appobject.check_pageid, True) + self.assertEqual(appobject.output_type, None) + # no pageid + with self.admin_access.web_request() as req: + f = appobject(req) + self.assertRaises(RemoteCallFailed, f, 12, 13) + + def test_ajaxfunc_json(self): + @ajaxfunc(output_type='json') + def foo(self, x, y): + return x + y + self.assertEqual(foo(object, 1, 2), 3) + appobject = foo.__appobject__ + self.assertTrue(issubclass(appobject, AjaxFunction)) + self.assertEqual(appobject.__regid__, 'foo') + self.assertEqual(appobject.check_pageid, False) + self.assertEqual(appobject.output_type, 'json') + # no pageid + with self.admin_access.web_request() as req: + f = appobject(req) + self.assertEqual(f(12, 13), '25') + + +class JSonControllerTC(AjaxControllerTC): + # NOTE: this class performs the same tests as AjaxController but with + # deprecated 'json' controller (i.e. 
check backward compatibility) + tested_controller = 'json' + + def setUp(self): + super(JSonControllerTC, self).setUp() + self.exposed_remote_funcs = [fname for fname in dir(JSonController) + if fname.startswith('js_')] + + def tearDown(self): + super(JSonControllerTC, self).tearDown() + for funcname in dir(JSonController): + # remove functions added dynamically during tests + if funcname.startswith('js_') and funcname not in self.exposed_remote_funcs: + delattr(JSonController, funcname) + + def test_monkeypatch_jsoncontroller(self): + with self.assertRaises(RemoteCallFailed): + with self.remote_calling('foo'): + pass + @monkeypatch(JSonController) + def js_foo(self): + return u'hello' + with self.remote_calling('foo') as (res, _): + self.assertEqual(res, b'hello') + + def test_monkeypatch_jsoncontroller_xhtmlize(self): + with self.assertRaises(RemoteCallFailed): + with self.remote_calling('foo'): + pass + @monkeypatch(JSonController) + @xhtmlize + def js_foo(self): + return u'hello' + with self.remote_calling('foo') as (res, _): + self.assertEqual(b'<div>hello</div>', res) + + def test_monkeypatch_jsoncontroller_jsonize(self): + with self.assertRaises(RemoteCallFailed): + with self.remote_calling('foo'): + pass + @monkeypatch(JSonController) + @jsonize + def js_foo(self): + return 12 + with self.remote_calling('foo') as (res, _): + self.assertEqual(res, b'12') + + def test_monkeypatch_jsoncontroller_stdfunc(self): + @monkeypatch(JSonController) + @jsonize + def js_reledit_form(self): + return 12 + with self.remote_calling('reledit_form') as (res, _): + self.assertEqual(res, b'12') + + +class UndoControllerTC(CubicWebTC): + + def setUp(self): + class Connection(OldConnection): + """Force undo feature to be turned on in all cases""" + undo_actions = property(lambda tx: True, lambda x, y:None) + cubicweb.server.session.Connection = Connection + super(UndoControllerTC, self).setUp() + + def tearDown(self): + super(UndoControllerTC, self).tearDown() + cubicweb.server.session.Connection = OldConnection + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + self.toto = self.create_user(cnx, u'toto', + password=u'toto', + groups=('users',), + commit=False) + self.txuuid_toto = cnx.commit() + self.toto_email = cnx.create_entity('EmailAddress', + address=u'toto@logilab.org', + reverse_use_email=self.toto) + self.txuuid_toto_email = cnx.commit() + + def test_no_such_transaction(self): + with self.admin_access.web_request() as req: + txuuid = u"12345acbd" + req.form['txuuid'] = txuuid + controller = self.vreg['controllers'].select('undo', req) + with self.assertRaises(tx.NoSuchTransaction) as cm: + result = controller.publish(rset=None) + self.assertEqual(cm.exception.txuuid, txuuid) + + def assertURLPath(self, url, expected_path, expected_params=None): + """ This asserts that the path part of `url` matches the expected path + + TODO : implement assertion on the expected_params too + """ + with self.admin_access.web_request() as req: + scheme, netloc, path, query, fragment = urlsplit(url) + query_dict = parse_qs(query) + expected_url = urljoin(req.base_url(), expected_path) + self.assertEqual( urlunsplit((scheme, netloc, path, None, None)), expected_url) + + def test_redirect_redirectpath(self): + "Check that the potential __redirectpath is honored" + with self.admin_access.web_request() as req: + txuuid = self.txuuid_toto_email + req.form['txuuid'] = txuuid + rpath = "toto" + req.form['__redirectpath'] = rpath + controller = self.vreg['controllers'].select('undo', req) + with
self.assertRaises(Redirect) as cm: + result = controller.publish(rset=None) + self.assertURLPath(cm.exception.location, rpath) + + +class LoginControllerTC(CubicWebTC): + + def test_login_with_dest(self): + with self.admin_access.web_request() as req: + req.form = {'postlogin_path': 'elephants/babar'} + with self.assertRaises(Redirect) as cm: + self.ctrl_publish(req, ctrl='login') + self.assertEqual(req.build_url('elephants/babar'), cm.exception.location) + + def test_login_no_dest(self): + with self.admin_access.web_request() as req: + with self.assertRaises(Redirect) as cm: + self.ctrl_publish(req, ctrl='login') + self.assertEqual(req.base_url(), cm.exception.location) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_basetemplates.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_basetemplates.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,47 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.htmlparser import XMLValidator + + +class LogFormTemplateTC(CubicWebTC): + + def _login_labels(self): + valid = self.content_type_validators.get('text/html', XMLValidator)() + req = self.requestcls(self.vreg, url='login') + page = valid.parse_string(self.vreg['views'].main_template(req, 'login')) + return page.find_tag('label') + + def test_label(self): + self.set_option('allow-email-login', 'yes') + self.assertEqual(self._login_labels(), ['login or email', 'password']) + self.set_option('allow-email-login', 'no') + self.assertEqual(self._login_labels(), ['login', 'password']) + + +class MainNoTopTemplateTC(CubicWebTC): + + def test_valid_xhtml(self): + with self.admin_access.web_request() as req: + self.view('index', template='main-no-top', req=req) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_baseviews.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_baseviews.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,160 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. + +from logilab.common.testlib import unittest_main +from logilab.mtconverter import html_unescape + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.utils import json +from cubicweb.view import StartupView, TRANSITIONAL_DOCTYPE +from cubicweb.web.views import vid_from_rset + +def loadjson(value): + return json.loads(html_unescape(value)) + +class VidFromRsetTC(CubicWebTC): + + def test_no_rset(self): + with self.admin_access.web_request() as req: + self.assertEqual(vid_from_rset(req, None, self.schema), 'index') + + def test_no_entity(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X login "blabla"') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'noresult') + + def test_one_entity(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X login "admin"') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'primary') + rset = req.execute('Any X, L WHERE X login "admin", X login L') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'primary') + req.search_state = ('pasnormal',) + rset = req.execute('Any X WHERE X login "admin"') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'outofcontext-search') + + def test_one_entity_eid(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X eid 1') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'primary') + + def test_more_than_one_entity_same_type(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X is CWUser') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'sameetypelist') + rset = req.execute('Any X, L WHERE X login L') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'sameetypelist') + + def test_more_than_one_entity_diff_type(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X is IN (CWUser, CWGroup)') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'list') + + def test_more_than_one_entity_by_row(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X, G WHERE X in_group G') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') + + def test_more_than_one_entity_by_row_2(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X, GN WHERE X in_group G, G name GN') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') + + def test_aggregat(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X, COUNT(T) GROUPBY X WHERE X is T') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') + rset = req.execute('Any MAX(X) WHERE X is CWUser') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') + + def test_subquery(self): + with self.admin_access.web_request() as req: + rset = req.execute( +'DISTINCT Any X,N ORDERBY N ' +'WITH X,N BEING (' +' (DISTINCT Any P,N WHERE P is CWUser, P login N)' +' UNION' +' (DISTINCT Any W,N WHERE W is CWGroup, W name N))') + self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') + + +class TableViewTC(CubicWebTC): + + def 
_prepare_entity(self, req): + e = req.create_entity("State", name=u'<toto>', description=u'loo"ong blabla') + rset = req.execute('Any X, D, CD, NOW - CD WHERE X is State, ' + 'X description D, X creation_date CD, X eid %(x)s', + {'x': e.eid}) + view = self.vreg['views'].select('table', req, rset=rset) + return e, rset, view + + def test_sortvalue(self): + with self.admin_access.web_request() as req: + e, _, view = self._prepare_entity(req) + colrenderers = view.build_column_renderers()[:3] + self.assertListEqual([renderer.sortvalue(0) for renderer in colrenderers], + [u'<toto>', u'loo"ong blabla', e.creation_date]) + + +class HTMLStreamTests(CubicWebTC): + + def test_set_doctype_reset_xmldecl(self): + """ + tests `cubicweb.web.request.CubicWebRequestBase.set_doctype` + with xmldecl reset + """ + class MyView(StartupView): + __regid__ = 'my-view' + def call(self): + self._cw.set_doctype('<!DOCTYPE html>') + + with self.admin_access.web_request() as req: + with self.temporary_appobjects(MyView): + html_source = self.view('my-view', req=req).source + source_lines = [line.strip() + for line in html_source.splitlines(False) + if line.strip()] + self.assertListEqual([b'<!DOCTYPE html>', + b'<html xmlns:cubicweb="http://www.cubicweb.org" lang="en">'], + source_lines[:2]) + + def test_set_doctype_no_reset_xmldecl(self): + """ + tests `cubicweb.web.request.CubicWebRequestBase.set_doctype` + with no xmldecl reset + """ + html_doctype = TRANSITIONAL_DOCTYPE.strip() + class MyView(StartupView): + __regid__ = 'my-view' + def call(self): + self._cw.set_doctype(html_doctype) + self._cw.main_stream.set_htmlattrs([('lang', 'cz')]) + + with self.admin_access.web_request() as req: + with self.temporary_appobjects(MyView): + html_source = self.view('my-view', req=req).source + source_lines = [line.strip() + for line in html_source.splitlines(False) + if line.strip()] + self.assertListEqual([html_doctype.encode('ascii'), + b'<html xmlns:cubicweb="http://www.cubicweb.org" lang="cz">', + b'<head>'], + source_lines[:3]) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_csv.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_csv.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+ +from cubicweb.devtools.testlib import CubicWebTC + + +class CSVExportViewsTC(CubicWebTC): + + def test_csvexport(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' + 'WHERE X in_group G, G name GN') + data = self.view('csvexport', rset, req=req) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), + ['text/comma-separated-values;charset=UTF-8']) + expected_data = "String;COUNT(CWUser)\nguests;1\nmanagers;1" + self.assertMultiLineEqual(expected_data, data.decode('utf-8')) + + def test_csvexport_on_empty_rset(self): + """Should return the CSV header. + """ + with self.admin_access.web_request() as req: + rset = req.execute(u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' + 'WHERE X in_group G, G name GN, X login "Miles"') + data = self.view('csvexport', rset, req=req) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), + ['text/comma-separated-values;charset=UTF-8']) + expected_data = "String;COUNT(CWUser)" + self.assertMultiLineEqual(expected_data, data.decode('utf-8')) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_editforms.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_editforms.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,257 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+from logilab.common.testlib import unittest_main, mock_object + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.views import uicfg +from cubicweb.web.formwidgets import AutoCompletionWidget +from cubicweb.schema import RRQLExpression + + +AFFK = uicfg.autoform_field_kwargs +AFS = uicfg.autoform_section + +def rbc(entity, formtype, section): + if section in ('attributes', 'metadata', 'hidden'): + permission = 'update' + else: + permission = 'add' + return [(rschema.type, x) + for rschema, tschemas, x in AFS.relations_by_section(entity, + formtype, + section, + permission)] + +class AutomaticEntityFormTC(CubicWebTC): + + def test_custom_widget(self): + with self.admin_access.web_request() as req: + AFFK.tag_subject_of(('CWUser', 'login', '*'), + {'widget': AutoCompletionWidget(autocomplete_initfunc='get_logins')}) + form = self.vreg['forms'].select('edition', req, entity=req.user) + field = form.field_by_name('login', 'subject') + self.assertIsInstance(field.widget, AutoCompletionWidget) + AFFK.del_rtag('CWUser', 'login', '*', 'subject') + + + def test_cwuser_relations_by_category(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + # see custom configuration in views.cwuser + self.assertEqual(rbc(e, 'main', 'attributes'), + [('login', 'subject'), + ('upassword', 'subject'), + ('firstname', 'subject'), + ('surname', 'subject'), + ('in_group', 'subject'), + ]) + self.assertEqual(rbc(e, 'muledit', 'attributes'), + [('login', 'subject'), + ('upassword', 'subject'), + ('in_group', 'subject'), + ]) + self.assertCountEqual(rbc(e, 'main', 'metadata'), + [('last_login_time', 'subject'), + ('cw_source', 'subject'), + ('creation_date', 'subject'), + ('modification_date', 'subject'), + ('created_by', 'subject'), + ('owned_by', 'subject'), + ('bookmarked_by', 'object'), + ]) + # XXX skip 'tags' relation here and in the hidden category because + # of some test interdependency when pytest is launched on whole cw + # (appears here while expected in hidden) + self.assertCountEqual([x for x in rbc(e, 'main', 'relations') + if x != ('tags', 'object')], + [('connait', 'subject'), + ('custom_workflow', 'subject'), + ('primary_email', 'subject'), + ('checked_by', 'object'), + ]) + self.assertListEqual(rbc(e, 'main', 'inlined'), + [('use_email', 'subject'), + ]) + # owned_by is defined both as subject and object relations on CWUser + self.assertListEqual(sorted(x for x in rbc(e, 'main', 'hidden') + if x != ('tags', 'object')), + sorted([('for_user', 'object'), + ('created_by', 'object'), + ('wf_info_for', 'object'), + ('owned_by', 'object'), + ])) + + def test_inlined_view(self): + self.assertIn('main_inlined', + AFS.etype_get('CWUser', 'use_email', 'subject', 'EmailAddress')) + self.assertNotIn('main_inlined', + AFS.etype_get('CWUser', 'primary_email', 'subject', 'EmailAddress')) + self.assertIn('main_relations', + AFS.etype_get('CWUser', 'primary_email', 'subject', 'EmailAddress')) + + def test_personne_relations_by_category(self): + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('Personne')(req) + self.assertListEqual(rbc(e, 'main', 'attributes'), + [('nom', 'subject'), + ('prenom', 'subject'), + ('sexe', 'subject'), + ('promo', 'subject'), + ('titre', 'subject'), + ('ass', 'subject'), + ('web', 'subject'), + ('tel', 'subject'), + ('fax', 'subject'), + ('datenaiss', 'subject'), + ('test', 'subject'), + ('description', 'subject'), + ('salary', 'subject'), + ]) + self.assertListEqual(rbc(e, 'muledit',
'attributes'), + [('nom', 'subject'), + ]) + self.assertCountEqual(rbc(e, 'main', 'metadata'), + [('cw_source', 'subject'), + ('creation_date', 'subject'), + ('modification_date', 'subject'), + ('created_by', 'subject'), + ('owned_by', 'subject'), + ]) + self.assertCountEqual(rbc(e, 'main', 'relations'), + [('travaille', 'subject'), + ('manager', 'object'), + ('connait', 'object'), + ]) + self.assertListEqual(rbc(e, 'main', 'hidden'), + []) + + def test_edition_form(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X LIMIT 1') + form = self.vreg['forms'].select('edition', req, rset=rset, row=0, col=0) + # should be also selectable by specifying entity + self.vreg['forms'].select('edition', req, entity=rset.get_entity(0, 0)) + self.assertFalse(any(f for f in form.fields if f is None)) + + def test_edition_form_with_action(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X LIMIT 1') + form = self.vreg['forms'].select('edition', req, rset=rset, row=0, + col=0, action='my_custom_action') + self.assertEqual(form.form_action(), 'my_custom_action') + + def test_attribute_add_permissions(self): + # https://www.cubicweb.org/ticket/4342844 + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, u'toto') + cnx.commit() + with self.new_access(u'toto').web_request() as req: + e = self.vreg['etypes'].etype_class('Personne')(req) + cform = self.vreg['forms'].select('edition', req, entity=e) + self.assertIn('sexe', + [rschema.type + for rschema, _ in cform.editable_attributes()]) + with self.new_access(u'toto').repo_cnx() as cnx: + person_eid = cnx.create_entity('Personne', nom=u'Robert').eid + cnx.commit() + person = req.entity_from_eid(person_eid) + mform = self.vreg['forms'].select('edition', req, entity=person) + self.assertNotIn('sexe', + [rschema.type + for rschema, _ in mform.editable_attributes()]) + + def test_inlined_relations(self): + with self.admin_access.web_request() as req: + with self.temporary_permissions(EmailAddress={'add': ()}): + autoform = self.vreg['forms'].select('edition', req, entity=req.user) + self.assertEqual(list(autoform.inlined_form_views()), []) + + def test_check_inlined_rdef_permissions(self): + # try to check permissions when creating an entity ('user' below is a + # fresh entity without an eid) + with self.admin_access.web_request() as req: + ttype = 'EmailAddress' + rschema = self.schema['use_email'] + rdef = rschema.rdefs[('CWUser', ttype)] + tschema = self.schema[ttype] + role = 'subject' + with self.temporary_permissions((rdef, {'add': ()})): + user = self.vreg['etypes'].etype_class('CWUser')(req) + autoform = self.vreg['forms'].select('edition', req, entity=user) + self.assertFalse(autoform.check_inlined_rdef_permissions(rschema, role, + tschema, ttype)) + # we actually don't care about the actual expression, + # may_have_permission only checks the presence of such expressions + expr = RRQLExpression('S use_email O') + with self.temporary_permissions((rdef, {'add': (expr,)})): + user = self.vreg['etypes'].etype_class('CWUser')(req) + autoform = self.vreg['forms'].select('edition', req, entity=user) + self.assertTrue(autoform.check_inlined_rdef_permissions(rschema, role, + tschema, ttype)) + + +class FormViewsTC(CubicWebTC): + + def test_delete_conf_formview(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWGroup X') + self.view('deleteconf', rset, template=None, req=req).source + + def test_automatic_edition_formview(self): + with 
self.admin_access.web_request() as req: + rset = req.execute('CWUser X') + self.view('edition', rset, row=0, template=None, req=req).source + + def test_automatic_edition_copyformview(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X') + self.view('copy', rset, row=0, template=None, req=req).source + + def test_automatic_creation_formview(self): + with self.admin_access.web_request() as req: + self.view('creation', None, etype='CWUser', template=None, req=req).source + + def test_automatic_muledit_formview(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X') + self.view('muledit', rset, template=None, req=req).source + + def test_automatic_reledit_formview(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X') + self.view('reledit', rset, row=0, rtype='login', template=None, req=req).source + + def test_automatic_inline_edit_formview(self): + with self.admin_access.web_request() as req: + geid = req.execute('CWGroup X LIMIT 1')[0][0] + rset = req.execute('CWUser X LIMIT 1') + self.view('inline-edition', rset, row=0, col=0, rtype='in_group', + peid=geid, role='object', i18nctx='', pform=MOCKPFORM, + template=None, req=req).source + + def test_automatic_inline_creation_formview(self): + with self.admin_access.web_request() as req: + geid = req.execute('CWGroup X LIMIT 1')[0][0] + self.view('inline-creation', None, etype='CWUser', rtype='in_group', + peid=geid, petype='CWGroup', i18nctx='', role='object', pform=MOCKPFORM, + template=None, req=req) + +MOCKPFORM = mock_object(form_previous_values={}, form_valerror=None) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_errorform.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_errorform.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,92 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+import re +import sys + +from logilab.common.testlib import unittest_main + +from cubicweb import Forbidden +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.view import StartupView +from cubicweb.web import Redirect + + +class ErrorViewTC(CubicWebTC): + def setUp(self): + super(ErrorViewTC, self).setUp() + self.vreg.config['submit-mail'] = "test@logilab.fr" + self.vreg.config['print-traceback'] = "yes" + + def test_error_generation(self): + """ + tests + """ + + class MyWrongView(StartupView): + __regid__ = 'my-view' + def call(self): + raise ValueError('This is wrong') + + with self.temporary_appobjects(MyWrongView): + with self.admin_access.web_request() as req: + try: + self.view('my-view', req=req) + except Exception as e: + req.data['excinfo'] = sys.exc_info() + req.data['ex'] = e + html = self.view('error', req=req) + self.assertTrue(re.search(b'^<input name="__signature" type="hidden" ' + b'value="[0-9a-f]{32}" />$', + html.source, re.M)) + + + def test_error_submit_nosig(self): + """ + tests that the reportbug controller refuses submission if + there is no content signature + """ + with self.admin_access.web_request() as req: + req.form = {'description': u'toto'} + with self.assertRaises(Forbidden) as cm: + self.ctrl_publish(req, 'reportbug') + + def test_error_submit_wrongsig(self): + """ + tests that the reportbug controller refuses submission if the + content signature is invalid + """ + with self.admin_access.web_request() as req: + req.form = {'__signature': 'X', + 'description': u'toto'} + with self.assertRaises(Forbidden) as cm: + self.ctrl_publish(req, 'reportbug') + + def test_error_submit_ok(self): + """ + tests that the reportbug controller accepts the email submission if the + content signature is valid + """ + with self.admin_access.web_request() as req: + sign = self.vreg.config.sign_text('toto') + req.form = {'__signature': sign, + 'description': u'toto'} + with self.assertRaises(Redirect) as cm: + self.ctrl_publish(req, 'reportbug') + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_forms.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_forms.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,74 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+ +from logilab.common import tempattr, attrdict + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.views.autoform import InlinedFormField + +class InlinedFormTC(CubicWebTC): + + def test_linked_to(self): + with self.admin_access.web_request() as req: + formview = req.vreg['views'].select( + 'inline-creation', req, + etype='File', rtype='described_by_test', role='subject', + peid=123, + petype='Salesterm') + self.assertEqual({('described_by_test', 'object'): [123]}, + formview.form.linked_to) + + def test_linked_to_parent_being_created(self): + with self.admin_access.web_request() as req: + formview = req.vreg['views'].select( + 'inline-creation', req, + etype='File', rtype='described_by_test', role='subject', + peid='A', + petype='Salesterm') + self.assertEqual(formview.form.linked_to, {}) + + def test_remove_js_depending_on_cardinality(self): + with self.admin_access.web_request() as req: + formview = req.vreg['views'].select( + 'inline-creation', req, + etype='File', rtype='described_by_test', role='subject', + peid='A', + petype='Salesterm') + # cardinality is 1, can't remove + self.assertIsNone(formview._get_removejs()) + rdef = self.schema['Salesterm'].rdef('described_by_test') + with tempattr(rdef, 'cardinality', '?*'): + self.assertTrue(formview._get_removejs()) + with tempattr(rdef, 'cardinality', '+*'): + # formview has no parent info (pform). This is what happens + # when an inline form is requested through AJAX. + self.assertTrue(formview._get_removejs()) + fakeview = attrdict(dict(rtype='described_by_test', role='subject')) + # formview is first, can't be removed + formview.pform = attrdict(fields=[InlinedFormField(view=formview), + InlinedFormField(view=fakeview)]) + self.assertIsNone(formview._get_removejs()) + # formview isn't first, can be removed + formview.pform = attrdict(fields=[InlinedFormField(view=fakeview), + InlinedFormField(view=formview)]) + self.assertTrue(formview._get_removejs()) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_json.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_json.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+from six import binary_type + +from cubicweb.devtools.testlib import CubicWebTC + + +class JsonViewsTC(CubicWebTC): + anonymize = True + res_jsonp_data = b'[["guests", 1]]' + + def setUp(self): + super(JsonViewsTC, self).setUp() + self.config.global_set_option('anonymize-jsonp-queries', self.anonymize) + + def test_json_rsetexport(self): + with self.admin_access.web_request() as req: + rset = req.execute( + 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN') + data = self.view('jsonexport', rset, req=req) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) + self.assertListEqual(data, [["guests", 1], ["managers", 1]]) + + def test_json_rsetexport_empty_rset(self): + with self.admin_access.web_request() as req: + rset = req.execute(u'Any X WHERE X is CWUser, X login "foobarbaz"') + data = self.view('jsonexport', rset, req=req) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) + self.assertListEqual(data, []) + + def test_json_rsetexport_with_jsonp(self): + with self.admin_access.web_request() as req: + req.form.update({'callback': u'foo', + 'rql': u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' + 'WHERE X in_group G, G name GN'}) + data = self.ctrl_publish(req, ctrl='jsonp') + self.assertIsInstance(data, binary_type) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), + ['application/javascript']) + # because jsonp anonymizes data, only 'guests' group should be found + self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')') + + def test_json_rsetexport_with_jsonp_and_bad_vid(self): + with self.admin_access.web_request() as req: + req.form.update({'callback': 'foo', + # "vid" parameter should be ignored by jsonp controller + 'vid': 'table', + 'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' + 'WHERE X in_group G, G name GN'}) + data = self.ctrl_publish(req, ctrl='jsonp') + self.assertEqual(req.headers_out.getRawHeaders('content-type'), + ['application/javascript']) + # result should be plain json, not the table view + self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')') + + def test_json_ersetexport(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any G ORDERBY GN WHERE G is CWGroup, G name GN') + data = self.view('ejsonexport', rset, req=req) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) + self.assertEqual(data[0]['name'], 'guests') + self.assertEqual(data[1]['name'], 'managers') + + rset = req.execute(u'Any G WHERE G is CWGroup, G name "foo"') + data = self.view('ejsonexport', rset, req=req) + self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) + self.assertEqual(data, []) + + +class NotAnonymousJsonViewsTC(JsonViewsTC): + anonymize = False + res_jsonp_data = b'[["guests", 1], ["managers", 1]]' + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_navigation.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_navigation.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,123 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""cubicweb.web.views.navigation unit tests""" + +from logilab.common.testlib import unittest_main + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.views.navigation import (PageNavigation, SortedNavigation, + PageNavigationSelect) +from cubicweb.web.views.ibreadcrumbs import BreadCrumbEntityVComponent + +BreadCrumbEntityVComponent.visible = True + +class NavigationTC(CubicWebTC): + + def test_navigation_selection_whatever(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X,N WHERE X name N') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + self.assertIsInstance(navcomp, PageNavigation) + req.set_search_state('W:X:Y:Z') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + self.assertIsInstance(navcomp, PageNavigation) + req.set_search_state('normal') + + def test_navigation_selection_ordered(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X,N ORDERBY N LIMIT 40 WHERE X name N') + navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) + self.assertIsInstance(navcomp, SortedNavigation) + req.set_search_state('W:X:Y:Z') + navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) + self.assertIsInstance(navcomp, SortedNavigation) + req.set_search_state('normal') + navcomp.render() + + def test_navigation_selection_large_rset(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X,N LIMIT 120 WHERE X name N') + navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) + self.assertIsInstance(navcomp, PageNavigationSelect) + rset = req.execute('Any X,N ORDERBY N LIMIT 120 WHERE X name N') + navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) + self.assertIsInstance(navcomp, PageNavigationSelect) + + def test_navigation_selection_not_enough_1(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X,N LIMIT 10 WHERE X name N') + navcomp = self.vreg['components'].select_or_none('navigation', req, rset=rset) + self.assertEqual(navcomp, None) + req.set_search_state('W:X:Y:Z') + navcomp = self.vreg['components'].select_or_none('navigation', req, rset=rset) + self.assertEqual(navcomp, None) + req.set_search_state('normal') + + def test_navigation_selection_not_enough_2(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any N, COUNT(RDEF) GROUPBY N ORDERBY N ' + 'WHERE RDEF relation_type RT, RT name N') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + self.assertIsInstance(navcomp, SortedNavigation) + req.set_search_state('W:X:Y:Z') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + self.assertIsInstance(navcomp, SortedNavigation) + + def test_navigation_selection_wrong_boundary(self): + 
with self.admin_access.web_request() as req: + rset = req.execute('Any X,N WHERE X name N') + req.form['__start'] = 1000000 + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + html = navcomp.render() + + def test_sorted_navigation_1(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any RDEF ORDERBY RT WHERE RDEF relation_type RT') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + html = navcomp.render() + + def test_sorted_navigation_2(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any RDEF ORDERBY RDEF WHERE RDEF relation_type RT') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + html = navcomp.render() + + def test_sorted_navigation_3(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWAttribute RDEF ORDERBY RDEF') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + html = navcomp.render() + + def test_sorted_navigation_4(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any RDEF ORDERBY N ' + 'WHERE RDEF relation_type RT, RT name N') + navcomp = self.vreg['components'].select('navigation', req, rset=rset) + html = navcomp.render() + + def test_sorted_navigation_5(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any N, COUNT(RDEF) GROUPBY N ORDERBY N ' + 'WHERE RDEF relation_type RT, RT name N') + navcomp = self.vreg['components'].select('navigation', rset.req, rset=rset) + html = navcomp.render() + + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_pyviews.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_pyviews.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,45 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+from logilab.common.testlib import unittest_main +from cubicweb.devtools.testlib import CubicWebTC + +class PyViewsTC(CubicWebTC): + + def test_pyvaltable(self): + with self.admin_access.web_request() as req: + view = self.vreg['views'].select('pyvaltable', req, + pyvalue=[[1, 'a'], [2, 'b']]) + content = view.render(pyvalue=[[1, 'a'], [2, 'b']], + headers=['num', 'char']) + self.assertEqual(content.strip(), '''<table class="listing"><tbody>\ +<tr class="even" onmouseout="$(this).removeClass("highlighted")" onmouseover="$(this).addClass("highlighted");"><td >1</td><td >a</td></tr> +<tr class="odd" onmouseout="$(this).removeClass("highlighted")" onmouseover="$(this).addClass("highlighted");"><td >2</td><td >b</td></tr> +</tbody></table>''') + + def test_pyvallist(self): + with self.admin_access.web_request() as req: + view = self.vreg['views'].select('pyvallist', req, + pyvalue=[1, 'a']) + content = view.render(pyvalue=[1, 'a']) + self.assertEqual(content.strip(), '''<ul> +<li>1</li> +<li>a</li> +</ul>''') + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_searchrestriction.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_searchrestriction.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,102 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web import facet + + +class InsertAttrRelationTC(CubicWebTC): + + def parse(self, query): + rqlst = self.vreg.parse(self.session, query) + select = rqlst.children[0] + return rqlst + + def _generate(self, rqlst, rel, role, attr): + select = rqlst.children[0] + filtered_variable = facet.get_filtered_variable(select) + facet.prepare_select(select, filtered_variable) + facet.insert_attr_select_relation(select, filtered_variable, + rel, role, attr) + return rqlst.as_string() + + @property + def select(self): + return self.parse(u'Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD ' + 'GROUPBY B,CD,S,V,U,VN,P,BMD ' + 'WHERE B in_state S, B creation_date CD, ' + 'B modification_date BMD, T? tags B, T name TN, ' + 'V? 
bookmarked_by B, V title VN, B created_by U?, ' + 'B in_group P, P name "managers"') + + def test_1(self): + self.assertEqual(self._generate(self.select, 'in_state', 'subject', 'name'), + 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' + 'B in_state A, B is CWUser, A name C') + + def test_2(self): + self.assertEqual(self._generate(self.select, 'tags', 'object', 'name'), + 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' + 'A tags B, B is CWUser, A name C') + + def test_3(self): + self.assertEqual(self._generate(self.select, 'created_by', 'subject', 'login'), + 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' + 'B created_by A, B is CWUser, A login C') + + def test_4(self): + self.assertEqual(self._generate(self.parse(u'Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'), + "DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B") + + def test_5(self): + self.assertEqual(self._generate(self.parse(u'Any X,L WHERE X is CWUser, X login L'), 'created_by', 'subject', 'login'), + "DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B") + + def test_nonregr1(self): + select = self.parse(u'Any T,V WHERE T bookmarked_by V?, ' + 'V in_state VS, VS name "published", T created_by U') + self.assertEqual(self._generate(select, 'created_by', 'subject', 'login'), + "DISTINCT Any A,B ORDERBY B WHERE T created_by U, " + "T created_by A, T is Bookmark, A login B") + + def test_nonregr2(self): + #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N' + select = self.parse(u'DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is CWUser,' + 'NOT V in_state VS, VS name "published", V login L') + rschema = self.schema['connait'] + for rdefs in rschema.rdefs.values(): + rdefs.cardinality = '++' + try: + self.assertEqual(self._generate(select, 'in_state', 'subject', 'name'), + 'DISTINCT Any A,B ORDERBY B WHERE V is CWUser, ' + 'NOT EXISTS(V in_state VS), VS name "published", ' + 'V in_state A, A name B') + finally: + for rdefs in rschema.rdefs.values(): + rdefs.cardinality = '**' + + def test_nonregr3(self): + #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N' + select = self.parse(u'DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is CWUser, Y is Bookmark, X in_group A') + self.assertEqual(self._generate(select, 'in_group', 'subject', 'name'), + "DISTINCT Any B,C ORDERBY C WHERE X is CWUser, X in_group B, B name C") + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_staticcontrollers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_staticcontrollers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +from contextlib import contextmanager + +from logilab.common import tempattr +from logilab.common.testlib import Tags +from cubicweb.devtools.testlib import CubicWebTC + +import os +import os.path as osp +import glob + +from cubicweb.utils import HTMLHead +from cubicweb.web.views.staticcontrollers import ConcatFilesHandler + +class staticfilespublishermixin(object): + + @contextmanager + def _publish_static_files(self, url, header={}): + with self.admin_access.web_request(headers=header) as req: + req._url = url + self.app_handle_request(req, url) + yield req + +class StaticControllerCacheTC(staticfilespublishermixin, CubicWebTC): + tags = CubicWebTC.tags | Tags('static_controller', 'cache', 'http') + + def test_static_file_are_cached(self): + with self._publish_static_files('data/cubicweb.css') as req: + self.assertEqual(200, req.status_out) + self.assertIn('last-modified', req.headers_out) + next_headers = { + 'if-modified-since': req.get_response_header('last-modified', raw=True), + } + with self._publish_static_files('data/cubicweb.css', next_headers) as req: + self.assertEqual(304, req.status_out) + +class StaticDirectoryControllerTC(staticfilespublishermixin, CubicWebTC): + + def test_check_static_dir_access(self): + """write a file in the static directory and test the access""" + staticdir = osp.join(self.session.vreg.config.static_directory) + if not os.path.exists(staticdir): + os.makedirs(staticdir) + filename = osp.join(staticdir, 'test') + with open(filename, 'a') as f: + with self._publish_static_files('static/test') as req: + self.assertEqual(200, req.status_out) + +class DataControllerTC(staticfilespublishermixin, CubicWebTC): + tags = CubicWebTC.tags | Tags('static_controller', 'data', 'http') + + def _check_datafile_ok(self, fname): + with self._publish_static_files(fname) as req: + self.assertEqual(200, req.status_out) + self.assertIn('last-modified', req.headers_out) + self.assertIn('expires', req.headers_out) + self.assertEqual(req.get_response_header('cache-control'), + {'max-age': 604800}) + next_headers = { + 'if-modified-since': req.get_response_header('last-modified', raw=True), + } + with self._publish_static_files(fname, next_headers) as req: + self.assertEqual(304, req.status_out) + + def _check_datafile_redirect(self, fname, expected): + with self._publish_static_files(fname) as req: + self.assertEqual(302, req.status_out) + self.assertEqual(req.get_response_header('location'), + req.base_url() + expected) + + def _check_no_datafile(self, fname): + with self._publish_static_files(fname) as req: + self.assertEqual(404, req.status_out) + + def test_static_data_mode(self): + hash = self.vreg.config.instance_md5_version() + self.assertEqual(32, len(hash)) + + with tempattr(self.vreg.config, 'mode', 'test'): + self._check_datafile_ok('data/cubicweb.css') + self._check_no_datafile('data/does/not/exist') + self._check_no_datafile('data/%s/cubicweb.css' % ('0'*len(hash))) + + with tempattr(self.vreg.config, 'mode', 'notest'): + self.config._init_base_url() # reset config.datadir_url + self._check_datafile_redirect('data/cubicweb.css', 'data/%s/cubicweb.css' % hash) + 
self._check_datafile_ok('data/%s/cubicweb.css' % hash) + self._check_no_datafile('data/%s/does/not/exist' % hash) + self._check_datafile_redirect('data/%s/does/not/exist' % ('0'*len(hash)), + 'data/%s/%s/does/not/exist' % (hash, '0'*len(hash))) + + +class ConcatFilesTC(CubicWebTC): + + tags = CubicWebTC.tags | Tags('static_controller', 'concat') + + def tearDown(self): + super(ConcatFilesTC, self).tearDown() + self._cleanup_concat_cache() + + def _cleanup_concat_cache(self): + uicachedir = osp.join(self.config.apphome, 'uicache') + for fname in glob.glob(osp.join(uicachedir, 'cache_concat_*')): + os.unlink(osp.join(uicachedir, fname)) + + @contextmanager + def _publish_js_files(self, js_files): + with self.admin_access.web_request() as req: + head = HTMLHead(req) + url = head.concat_urls([req.data_url(js_file) + for js_file in js_files])[len(req.base_url()):] + req._url = url + res = self.app_handle_request(req, url) + yield res, req + + def expected_content(self, js_files): + content = b'' + for js_file in js_files: + dirpath, rid = self.config.locate_resource(js_file) + if dirpath is not None: # ignore resources not found + with open(osp.join(dirpath, rid), 'rb') as f: + content += f.read() + b'\n' + return content + + def test_cache(self): + js_files = ('cubicweb.ajax.js', 'jquery.js') + with self._publish_js_files(js_files) as (result, req): + self.assertNotEqual(404, req.status_out) + # check result content + self.assertEqual(result, self.expected_content(js_files)) + # make sure we kept a cached version on filesystem + concat_hander = ConcatFilesHandler(self.config) + filepath = concat_hander.build_filepath(js_files) + self.assertTrue(osp.isfile(filepath)) + + + def test_invalid_file_in_debug_mode(self): + js_files = ('cubicweb.ajax.js', 'dummy.js') + # in debug mode, an error is raised + self.config.debugmode = True + try: + with self._publish_js_files(js_files) as (result, req): + #print result + self.assertEqual(404, req.status_out) + finally: + self.config.debugmode = False + + def test_invalid_file_in_production_mode(self): + js_files = ('cubicweb.ajax.js', 'dummy.js') + with self._publish_js_files(js_files) as (result, req): + self.assertNotEqual(404, req.status_out) + # check result content + self.assertEqual(result, self.expected_content(js_files)) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_treeview.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_treeview.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,51 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+ +from logilab.common.testlib import unittest_main +from logilab.mtconverter import html_unescape + +from cubicweb.devtools.htmlparser import XMLValidator +from cubicweb.devtools.testlib import CubicWebTC + + +class TreeViewTC(CubicWebTC): + + def test_treeview(self): + with self.admin_access.repo_cnx() as cnx: + ce = cnx.create_entity + root = ce('TreeNode', name=u'root') + node = ce('TreeNode', name=u'node1', parent=root) + ce('TreeNode', name=u'leaf1a', parent=node) + ce('TreeNode', name=u'leaf1b', parent=node) + node = ce('TreeNode', name=u'node2', parent=root) + ce('TreeNode', name=u'leaf2a', parent=node) + ce('TreeNode', name=u'leaf2b', parent=node) + root_eid = root.eid + cnx.commit() + + with self.admin_access.web_request() as req: + root = req.entity_from_eid(root_eid) + valid = self.content_type_validators.get('text/html', XMLValidator)() + page = valid.parse_string(root.view('tree', klass='oh-my-class')) + uls = page.find_tag('ul', gettext=False) + for _, attrib in uls: + self.assertEqual(attrib['class'], 'oh-my-class') + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_views_xmlrss.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_views_xmlrss.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,40 @@ +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web.views.xmlrss import SERIALIZERS + +class EntityXMLViewTC(CubicWebTC): + """see also cw.sobjects.test.unittest_parsers""" + def test(self): + rels = ['tags-object', 'in_group-subject', + 'in_state-subject', 'use_email-subject'] + with self.admin_access.web_request(relation=rels) as req: + self.assertMultiLineEqual( + req.user.view('xml'), + '''\ +<CWUser eid="6" cwuri="http://testing.fr/cubicweb/6" cwsource="system"> + <creation_date>%(cdate)s</creation_date> + <firstname/> + <last_login_time/> + <login>admin</login> + <modification_date>%(mdate)s</modification_date> + <surname/> + <upassword/> + <tags role="object"> + </tags> + <in_group role="subject"> + <CWGroup eid="%(group_eid)s" cwuri="http://testing.fr/cubicweb/%(group_eid)s"/> + </in_group> + <in_state role="subject"> + <State eid="%(state_eid)s" cwuri="http://testing.fr/cubicweb/%(state_eid)s" name="activated"/> + </in_state> + <use_email role="subject"> + </use_email> +</CWUser> +''' % {'cdate': SERIALIZERS['Datetime'](req.user.creation_date), + 'mdate': SERIALIZERS['Datetime'](req.user.modification_date), + 'state_eid': req.user.in_state[0].eid, + 'group_eid': req.user.in_group[0].eid}) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_viewselector.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_viewselector.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""XXX rename, split, reorganize this""" +from __future__ import print_function + +from logilab.common.testlib import unittest_main + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb import Binary, UnknownProperty +from cubicweb.predicates import (is_instance, + specified_etype_implements, rql_condition) +from cubicweb.web import NoSelectableObject +from cubicweb.web.action import Action + +from cubicweb.web.views import (primary, baseviews, tableview, + editforms, management, actions, startup, cwuser, schema, xbel, + vcard, owl, treeview, idownloadable, wdoc, debug, cwuser, + cwproperties, cwsources, xmlrss, rdf, csvexport, json, + undohistory) + + +USERACTIONS = [actions.UserPreferencesAction, + actions.UserInfoAction, + actions.LogoutAction] +SITEACTIONS = [actions.ManageAction] +FOOTERACTIONS = [wdoc.HelpAction, + wdoc.AboutAction, + actions.PoweredByAction] +MANAGEACTIONS = [actions.SiteConfigurationAction, + schema.ViewSchemaAction, + cwuser.ManageUsersAction, + cwsources.ManageSourcesAction, + debug.SiteInfoAction] + +if hasattr(rdf, 'RDFView'): # not available if rdflib not installed + RDFVIEWS = [('rdf', rdf.RDFView), ('n3rdf', rdf.RDFN3View)] +else: + RDFVIEWS = [] + +class ViewSelectorTC(CubicWebTC): + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + cnx.create_entity('BlogEntry', title=u"une news !", content=u"cubicweb c'est beau") + cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") + cnx.create_entity('EmailAddress', address=u"devel@logilab.fr", alias=u'devel') + cnx.create_entity('Tag', name=u'x') + cnx.commit() + +class VRegistryTC(ViewSelectorTC): + """test the view selector""" + + def _test_registered(self, registry, content): + try: + expected = getattr(self, 'all_%s' % registry) + except AttributeError: + return + if registry == 'hooks': + self.assertEqual(len(content), expected, content) + return + try: + self.assertSetEqual(list(content), expected) + except Exception: + print(registry, sorted(expected), sorted(content)) + print('no more', [v for v in expected if not v in content]) + print('missing', [v for v in content if not v in expected]) + raise + + def setUp(self): + super(VRegistryTC, self).setUp() + assert self.vreg['views']['propertiesform'] + + def test_possible_views_none_rset(self): + with self.admin_access.web_request() as req: + self.assertListEqual(self.pviews(req, None), + [('cw.sources-management', cwsources.CWSourcesManagementView), + ('cw.users-and-groups-management', cwuser.UsersAndGroupsManagementView), + ('gc', debug.GCView), + ('index', startup.IndexView), + ('info', debug.ProcessInformationView), + ('manage', startup.ManageView), + ('owl', owl.OWLView), + ('propertiesform', cwproperties.CWPropertiesForm), + ('registry', debug.RegistryView), + ('schema', schema.SchemaView), + ('siteinfo', debug.SiteInfoView), + ('systempropertiesform', cwproperties.SystemCWPropertiesForm), + ('undohistory', undohistory.UndoHistoryView)]) + + def test_possible_views_noresult(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X eid 999999') + self.assertListEqual(self.pviews(req, rset), + [('csvexport', csvexport.CSVRsetView), + ('ecsvexport', csvexport.CSVEntityView), + ('ejsonexport', json.JsonEntityView), + ('jsonexport', json.JsonRsetView), + ]) + + def 
test_possible_views_one_egroup(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWGroup X WHERE X name "managers"') + self.assertCountEqual(self.pviews(req, rset), + RDFVIEWS + + [('csvexport', csvexport.CSVRsetView), + ('ecsvexport', csvexport.CSVEntityView), + ('ejsonexport', json.JsonEntityView), + ('filetree', treeview.FileTreeView), + ('jsonexport', json.JsonRsetView), + ('list', baseviews.ListView), + ('oneline', baseviews.OneLineView), + ('owlabox', owl.OWLABOXView), + ('primary', cwuser.CWGroupPrimaryView), + ('rsetxml', xmlrss.XMLRsetView), + ('rss', xmlrss.RSSView), + ('sameetypelist', baseviews.SameETypeListView), + ('security', management.SecurityManagementView), + ('table', tableview.RsetTableView), + ('text', baseviews.TextView), + ('treeview', treeview.TreeView), + ('xbel', xbel.XbelView), + ('xml', xmlrss.XMLView)]) + + def test_possible_views_multiple_egroups(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWGroup X') + self.assertCountEqual(self.pviews(req, rset), + RDFVIEWS + + [('csvexport', csvexport.CSVRsetView), + ('ecsvexport', csvexport.CSVEntityView), + ('ejsonexport', json.JsonEntityView), + ('filetree', treeview.FileTreeView), + ('jsonexport', json.JsonRsetView), + ('list', baseviews.ListView), + ('oneline', baseviews.OneLineView), + ('owlabox', owl.OWLABOXView), + ('primary', cwuser.CWGroupPrimaryView), + ('rsetxml', xmlrss.XMLRsetView), + ('rss', xmlrss.RSSView), + ('sameetypelist', baseviews.SameETypeListView), + ('security', management.SecurityManagementView), + ('table', tableview.RsetTableView), + ('text', baseviews.TextView), + ('treeview', treeview.TreeView), + ('xbel', xbel.XbelView), + ('xml', xmlrss.XMLView), + ]) + + def test_propertiesform_admin(self): + assert self.vreg['views']['propertiesform'] + with self.admin_access.web_request() as req: + rset1 = req.execute('CWUser X WHERE X login "admin"') + self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None)) + self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset1)) + rset2 = req.execute('CWUser X WHERE X login "anon"') + self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2)) + + def test_propertiesform_anon(self): + with self.new_access(u'anon').web_request() as req: + rset1 = req.execute('CWUser X WHERE X login "admin"') + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=None) + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1) + rset2 = req.execute('CWUser X WHERE X login "anon"') + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset2) + + def test_propertiesform_jdoe(self): + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, u'jdoe') + cnx.commit() + with self.new_access(u'jdoe').web_request() as req: + rset1 = req.execute('CWUser X WHERE X login "admin"') + rset2 = req.execute('CWUser X WHERE X login "jdoe"') + self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None)) + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1) + self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2)) + + def test_possible_views_multiple_different_types(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X') + self.assertCountEqual(self.pviews(req, rset), + RDFVIEWS + + [('csvexport', csvexport.CSVRsetView), + ('ecsvexport', 
csvexport.CSVEntityView), + ('ejsonexport', json.JsonEntityView), + ('filetree', treeview.FileTreeView), + ('jsonexport', json.JsonRsetView), + ('list', baseviews.ListView), + ('oneline', baseviews.OneLineView), + ('owlabox', owl.OWLABOXView), + ('primary', primary.PrimaryView), + ('rsetxml', xmlrss.XMLRsetView), + ('rss', xmlrss.RSSView), + ('security', management.SecurityManagementView), + ('table', tableview.RsetTableView), + ('text', baseviews.TextView), + ('treeview', treeview.TreeView), + ('xbel', xbel.XbelView), + ('xml', xmlrss.XMLView), + ]) + + def test_possible_views_any_rset(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any N, X WHERE X in_group Y, Y name N') + self.assertListEqual(self.pviews(req, rset), + [('csvexport', csvexport.CSVRsetView), + ('jsonexport', json.JsonRsetView), + ('rsetxml', xmlrss.XMLRsetView), + ('table', tableview.RsetTableView), + ]) + + def test_possible_views_multiple_eusers(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X') + self.assertCountEqual(self.pviews(req, rset), + RDFVIEWS + + [('csvexport', csvexport.CSVRsetView), + ('ecsvexport', csvexport.CSVEntityView), + ('ejsonexport', json.JsonEntityView), + ('filetree', treeview.FileTreeView), + ('foaf', cwuser.FoafView), + ('jsonexport', json.JsonRsetView), + ('list', baseviews.ListView), + ('oneline', baseviews.OneLineView), + ('owlabox', owl.OWLABOXView), + ('primary', primary.PrimaryView), + ('rsetxml', xmlrss.XMLRsetView), + ('rss', xmlrss.RSSView), + ('sameetypelist', baseviews.SameETypeListView), + ('security', management.SecurityManagementView), + ('table', tableview.RsetTableView), + ('text', baseviews.TextView), + ('treeview', treeview.TreeView), + ('vcard', vcard.VCardCWUserView), + ('xbel', xbel.XbelView), + ('xml', xmlrss.XMLView), + ]) + + def test_possible_actions_none_rset(self): + with self.admin_access.web_request() as req: + self.assertDictEqual(self.pactionsdict(req, None, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'manage': MANAGEACTIONS, + 'footer': FOOTERACTIONS}) + + def test_possible_actions_no_entity(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X WHERE X eid 999999') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'manage': MANAGEACTIONS, + 'footer': FOOTERACTIONS, + }) + + def test_possible_actions_same_type_entities(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWGroup X') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'manage': MANAGEACTIONS, + 'footer': FOOTERACTIONS, + 'mainactions': [actions.MultipleEditAction], + 'moreactions': [actions.DeleteAction, + actions.AddNewAction]}) + + def test_possible_actions_different_types_entities(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any X') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'manage': MANAGEACTIONS, + 'footer': FOOTERACTIONS, + 'moreactions': [actions.DeleteAction], + }) + + def test_possible_actions_final_entities(self): + with self.admin_access.web_request() as req: + rset = req.execute('Any N, X WHERE X in_group Y, Y name N') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, 
+ 'manage': MANAGEACTIONS, + 'footer': FOOTERACTIONS, + }) + + def test_possible_actions_eetype_cwuser_entity(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWEType X WHERE X name "CWUser"') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'manage': MANAGEACTIONS, + 'footer': FOOTERACTIONS, + 'mainactions': [actions.ModifyAction, + actions.ViewSameCWEType], + 'moreactions': [actions.ManagePermissionsAction, + actions.AddRelatedActions, + actions.DeleteAction, + actions.CopyAction, + ], + }) + + + def test_select_creation_form(self): + rset = None + with self.admin_access.web_request() as req: + # creation form + req.form['etype'] = 'CWGroup' + self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset), + editforms.CreationFormView) + + with self.admin_access.web_request() as req: + # custom creation form + class CWUserCreationForm(editforms.CreationFormView): + __select__ = specified_etype_implements('CWUser') + + self.vreg._loadedmods[__name__] = {} + self.vreg.register(CWUserCreationForm) + req.form['etype'] = 'CWUser' + + self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset), + CWUserCreationForm) + + def test_select_view(self): + # no entity + rset = None + with self.admin_access.web_request() as req: + self.assertIsInstance(self.vreg['views'].select('index', req, rset=rset), + startup.IndexView) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'primary', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'table', req, rset=rset) + + # no entity + rset = req.execute('Any X WHERE X eid 999999') + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'index', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'creation', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'primary', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'table', req, rset=rset) + # one entity + rset = req.execute('CWGroup X WHERE X name "managers"') + self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), + primary.PrimaryView) + self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset), + baseviews.ListView) + self.assertIsInstance(self.vreg['views'].select('edition', req, rset=rset), + editforms.EditionFormView) + self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), + tableview.RsetTableView) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'creation', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'index', req, rset=rset) + # list of entities of the same type + rset = req.execute('CWGroup X') + self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), + primary.PrimaryView) + self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset), + baseviews.ListView) + self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), + tableview.RsetTableView) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'creation', req, rset=rset) + # list of entities of different types + rset = req.execute('Any X') + self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), + primary.PrimaryView) + self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset), + baseviews.ListView) + 
self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), + tableview.RsetTableView) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'creation', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'index', req, rset=rset) + # whatever + rset = req.execute('Any N, X WHERE X in_group Y, Y name N') + self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), + tableview.RsetTableView) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'index', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'creation', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'primary', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'list', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'edition', req, rset=rset) + # mixed query + rset = req.execute('Any U,G WHERE U is CWUser, G is CWGroup') + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'edition', req, rset=rset) + self.assertRaises(NoSelectableObject, + self.vreg['views'].select, 'creation', req, rset=rset) + self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), + tableview.RsetTableView) + + def test_interface_selector(self): + with self.admin_access.web_request() as req: + req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim')) + # image primary view priority + rset = req.execute('File X WHERE X data_name "bim.png"') + self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), + idownloadable.IDownloadablePrimaryView) + + + def test_score_entity_selector(self): + with self.admin_access.web_request() as req: + req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim')) + # image/ehtml primary view priority + rset = req.execute('File X WHERE X data_name "bim.png"') + self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset), + idownloadable.ImageView) + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset) + + fileobj = req.create_entity('File', data_name=u'bim.html', data=Binary(b'<html>bam</html')) + # image/ehtml primary view priority + rset = req.execute('File X WHERE X data_name "bim.html"') + self.assertIsInstance(self.vreg['views'].select('ehtml', req, rset=rset), + idownloadable.EHTMLView) + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset) + + fileobj = req.create_entity('File', data_name=u'bim.txt', data=Binary(b'boum')) + # image/ehtml primary view priority + rset = req.execute('File X WHERE X data_name "bim.txt"') + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset) + self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset) + + + def _test_view(self, vid, rql, args): + with self.admin_access.web_request() as req: + if rql is None: + rset = None + else: + rset = req.execute(rql) + try: + obj = self.vreg['views'].select(vid, req, rset=rset, **args) + return obj.render(**args) + except Exception: + print(vid, rset, args) + raise + + def test_form(self): + for vid, rql, args in ( + #('creation', 'Any X WHERE X eid 999999', {}), + ('edition', 'CWGroup X WHERE X name "managers"', {}), + ('copy', 'CWGroup X WHERE X name "managers"', {}), + ('muledit', 'CWGroup X', {}), + #('muledit', 'Any X', {}), + ): + self._test_view(vid, rql, args) + + + def test_properties(self): + 
self.assertEqual(sorted(k for k in self.vreg['propertydefs'] + if k.startswith('ctxcomponents.edit_box')), + ['ctxcomponents.edit_box.context', + 'ctxcomponents.edit_box.order', + 'ctxcomponents.edit_box.visible']) + self.assertEqual([k for k in self.vreg['propertyvalues'] + if not k.startswith('system.version')], + []) + self.assertEqual(self.vreg.property_value('ctxcomponents.edit_box.visible'), True) + self.assertEqual(self.vreg.property_value('ctxcomponents.edit_box.order'), 2) + self.assertEqual(self.vreg.property_value('ctxcomponents.possible_views_box.visible'), False) + self.assertEqual(self.vreg.property_value('ctxcomponents.possible_views_box.order'), 10) + self.assertRaises(UnknownProperty, self.vreg.property_value, 'ctxcomponents.actions_box') + + + +class CWETypeRQLAction(Action): + __regid__ = 'testaction' + __select__ = is_instance('CWEType') & rql_condition('X name "CWEType"') + title = 'bla' + + +class RQLActionTC(ViewSelectorTC): + + def setUp(self): + super(RQLActionTC, self).setUp() + self.vreg._loadedmods[__name__] = {} + self.vreg.register(CWETypeRQLAction) + actionsreg = self.vreg['actions'] + actionsreg['testaction'][0].__registered__(actionsreg) + + def tearDown(self): + super(RQLActionTC, self).tearDown() + del self.vreg['actions']['testaction'] + + def test(self): + with self.admin_access.web_request() as req: + rset = req.execute('CWEType X WHERE X name "CWEType"') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'footer': FOOTERACTIONS, + 'manage': MANAGEACTIONS, + 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType], + 'moreactions': [actions.ManagePermissionsAction, + actions.AddRelatedActions, + actions.DeleteAction, + actions.CopyAction, + CWETypeRQLAction, + ], + }) + rset = req.execute('CWEType X WHERE X name "CWRType"') + self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), + {'useractions': USERACTIONS, + 'siteactions': SITEACTIONS, + 'footer': FOOTERACTIONS, + 'manage': MANAGEACTIONS, + 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType], + 'moreactions': [actions.ManagePermissionsAction, + actions.AddRelatedActions, + actions.DeleteAction, + actions.CopyAction,] + }) + + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_web.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_web.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,157 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+ +from json import loads +from os.path import join +import tempfile + +try: + import requests + assert [int(n) for n in requests.__version__.split('.', 2)][:2] >= [1, 2] +except (ImportError, AssertionError): + requests = None + +from logilab.common.testlib import TestCase, unittest_main +from cubicweb.devtools.httptest import CubicWebServerTC +from cubicweb.devtools.fake import FakeRequest + +class AjaxReplaceUrlTC(TestCase): + + def test_ajax_replace_url_1(self): + self._test_arurl("fname=view&rql=Person%20P&vid=list", + rql='Person P', vid='list') + + def test_ajax_replace_url_2(self): + self._test_arurl("age=12&fname=view&name=bar&rql=Person%20P&vid=oneline", + rql='Person P', vid='oneline', name='bar', age=12) + + def _test_arurl(self, qs, **kwargs): + req = FakeRequest() + arurl = req.ajax_replace_url + # NOTE: for the simplest use cases, we could use doctest + url = arurl('foo', **kwargs) + self.assertTrue(url.startswith('javascript:')) + self.assertTrue(url.endswith('()')) + cbname = url.split()[1][:-2] + self.assertMultiLineEqual( + 'function %s() { $("#foo").loadxhtml("http://testing.fr/cubicweb/ajax?%s",' + '{pageid: "%s"},"get","replace"); }' % + (cbname, qs, req.pageid), + req.html_headers.post_inlined_scripts[0]) + + +class FileUploadTC(CubicWebServerTC): + + def setUp(self): + "Skip whole test class if a suitable requests module is not available" + if requests is None: + self.skipTest('Python ``requests`` module is not available') + super(FileUploadTC, self).setUp() + + @property + def _post_url(self): + with self.admin_access.web_request() as req: + return req.build_url('ajax', fname='fileupload') + + def _fobject(self, fname): + return open(join(self.datadir, fname), 'rb') + + def _fcontent(self, fname): + return self._fobject(fname).read() + + def test_single_file_upload(self): + files = {'file': ('schema.py', self._fobject('schema.py'))} + webreq = requests.post(self._post_url, files=files) + # check backward compat : a single uploaded file leads to a single + # 2-uple in the request form + expect = {'fname': u'fileupload', + 'file': ['schema.py', self._fcontent('schema.py')]} + self.assertEqual(webreq.status_code, 200) + self.assertDictEqual(expect, loads(webreq.content)) + + def test_multiple_file_upload(self): + files = [('files', ('schema.py', self._fobject('schema.py'))), + ('files', ('views.py', self._fobject('views.py')))] + webreq = requests.post(self._post_url, files=files,) + expect = {'fname': u'fileupload', + 'files': [['schema.py', self._fcontent('schema.py')], + ['views.py', self._fcontent('views.py')]],} + self.assertEqual(webreq.status_code, 200) + self.assertDictEqual(expect, loads(webreq.content)) + + +class LanguageTC(CubicWebServerTC): + + def test_language_neg(self): + headers = {'Accept-Language': 'fr'} + webreq = self.web_request(headers=headers) + self.assertIn('lang="fr"', webreq.read()) + vary = [h.lower().strip() for h in webreq.getheader('Vary').split(',')] + self.assertIn('accept-language', vary) + headers = {'Accept-Language': 'en'} + webreq = self.web_request(headers=headers) + self.assertIn('lang="en"', webreq.read()) + vary = [h.lower().strip() for h in webreq.getheader('Vary').split(',')] + self.assertIn('accept-language', vary) + + def test_response_codes(self): + with self.admin_access.client_cnx() as cnx: + admin_eid = cnx.user.eid + # guest can't see admin + webreq = self.web_request('/%d' % admin_eid) + self.assertEqual(webreq.status, 403) + + # but admin can + self.web_login() + webreq = self.web_request('/%d' % admin_eid) + 
self.assertEqual(webreq.status, 200) + + def test_session_cookie_httponly(self): + webreq = self.web_request() + self.assertIn('HttpOnly', webreq.getheader('set-cookie')) + + +class MiscOptionsTC(CubicWebServerTC): + @classmethod + def setUpClass(cls): + super(MiscOptionsTC, cls).setUpClass() + cls.logfile = tempfile.NamedTemporaryFile() + + def setUp(self): + super(MiscOptionsTC, self).setUp() + self.config.global_set_option('query-log-file', self.logfile.name) + self.config.global_set_option('datadir-url', '//static.testing.fr/') + # call load_configuration again to let the config reset its datadir_url + self.config.load_configuration() + + def test_log_queries(self): + self.web_request() + self.assertTrue(self.logfile.read()) + + def test_datadir_url(self): + webreq = self.web_request() + self.assertNotIn('/data/', webreq.read()) + + @classmethod + def tearDownClass(cls): + super(MiscOptionsTC, cls).tearDownClass() + cls.logfile.close() + + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/test/unittest_webconfig.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/test/unittest_webconfig.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+"""cubicweb.web.webconfig unit tests""" + +import os + +from logilab.common.testlib import TestCase, unittest_main +from cubicweb.devtools import ApptestConfiguration, fake + +class WebconfigTC(TestCase): + def setUp(self): + # need explicit None if dirname(__file__) is empty, see + # ApptestConfiguration.__init__ + self.config = ApptestConfiguration('data', apphome=os.path.dirname(__file__) or None) + self.config._cubes = ['file'] + self.config.load_configuration() + + def test_nonregr_print_css_as_list(self): + """make sure PRINT_CSS *must* is a list""" + config = self.config + print_css = config.uiprops['STYLESHEETS_PRINT'] + self.assertTrue(isinstance(print_css, list)) + ie_css = config.uiprops['STYLESHEETS_IE'] + self.assertTrue(isinstance(ie_css, list)) + + def test_locate_resource(self): + self.assertIn('FILE_ICON', self.config.uiprops) + rname = self.config.uiprops['FILE_ICON'].replace(self.config.datadir_url, '') + self.assertIn('file', self.config.locate_resource(rname)[0].split(os.sep)) + cubicwebcsspath = self.config.locate_resource('cubicweb.css')[0].split(os.sep) + + # 'shared' if tests under apycot + self.assertTrue('web' in cubicwebcsspath or 'shared' in cubicwebcsspath, + 'neither "web" nor "shared" found in cubicwebcsspath (%s)' + % cubicwebcsspath) + + def test_sign_text(self): + signature = self.config.sign_text(u'hôp') + self.assertTrue(self.config.check_text_sign(u'hôp', signature)) + +if __name__ == '__main__': + unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/uicfg.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/uicfg.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,28 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +""" +This module has been moved to web.views.uicfg. +""" +__docformat__ = "restructuredtext en" + +from warnings import warn +from cubicweb.web.views.uicfg import * + + +warn('[3.16] moved to cubicweb.web.views.uicfg', + DeprecationWarning, stacklevel=2) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/uihelper.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/uihelper.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,177 @@ +# copyright 2011-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""This module provide highlevel helpers to avoid uicfg boilerplate +for most common tasks such as fields ordering, widget customization, etc. + + +Here are a few helpers to customize *action box* rendering: + +.. autofunction:: cubicweb.web.uihelper.append_to_addmenu +.. autofunction:: cubicweb.web.uihelper.remove_from_addmenu + + +and a few other ones for *form configuration*: + +.. autofunction:: cubicweb.web.uihelper.set_fields_order +.. autofunction:: cubicweb.web.uihelper.hide_field +.. autofunction:: cubicweb.web.uihelper.hide_fields +.. autofunction:: cubicweb.web.uihelper.set_field_kwargs +.. autofunction:: cubicweb.web.uihelper.set_field +.. autofunction:: cubicweb.web.uihelper.edit_inline +.. autofunction:: cubicweb.web.uihelper.edit_as_attr +.. autofunction:: cubicweb.web.uihelper.set_muledit_editable + +The module also provides a :class:`FormConfig` base class that lets you gather +uicfg declaration in the scope of a single class, which can sometimes +be clearer to read than a bunch of sequential function calls. + +.. autoclass:: cubicweb.web.uihelper.FormConfig + +""" +__docformat__ = "restructuredtext en" + +from six import add_metaclass + +from logilab.common.deprecation import deprecated +from cubicweb.web.views import uicfg + + +## generic uicfg helpers ###################################################### + +backward_compat_funcs = (('append_to_addmenu', uicfg.actionbox_appearsin_addmenu), + ('remove_from_addmenu', uicfg.actionbox_appearsin_addmenu), + ('set_fields_order', uicfg.autoform_field_kwargs), + ('hide_field', uicfg.autoform_section), + ('hide_fields', uicfg.autoform_section), + ('set_field_kwargs', uicfg.autoform_field_kwargs), + ('set_field', uicfg.autoform_field), + ('edit_inline', uicfg.autoform_section), + ('edit_as_attr', uicfg.autoform_section), + ('set_muledit_editable', uicfg.autoform_section), + ) + +for funcname, tag in backward_compat_funcs: + msg = ('[3.16] uihelper.%(name)s is deprecated, please use ' + 'web.views.uicfg.%(rtagid)s.%(name)s' % dict( + name=funcname, rtagid=tag.__regid__)) + globals()[funcname] = deprecated(msg)(getattr(tag, funcname)) + + +class meta_formconfig(type): + """metaclass of FormConfig classes, only for easier declaration purpose""" + def __init__(cls, name, bases, classdict): + if cls.etype is None: + return + uicfg_afs = cls.uicfg_afs or uicfg.autoform_section + uicfg_aff = cls.uicfg_aff or uicfg.autoform_field + uicfg_affk = cls.uicfg_affk or uicfg.autoform_field_kwargs + for attr_role in cls.hidden: + uicfg_afs.hide_field(cls.etype, attr_role, formtype=cls.formtype) + for attr_role in cls.rels_as_attrs: + uicfg_afs.edit_as_attr(cls.etype, attr_role, formtype=cls.formtype) + for attr_role in cls.inlined: + uicfg_afs.edit_inline(cls.etype, attr_role, formtype=cls.formtype) + for rtype, widget in cls.widgets.items(): + uicfg_affk.set_field_kwargs(cls.etype, rtype, widget=widget) + for rtype, field in cls.fields.items(): + uicfg_aff.set_field(cls.etype, rtype, field) + uicfg_affk.set_fields_order(cls.etype, cls.fields_order) + super(meta_formconfig, cls).__init__(name, bases, classdict) + + +@add_metaclass(meta_formconfig) +class FormConfig: + """helper base class to define uicfg rules on a given entity type. 
+ + In all descriptions below, attributes list can either be a list of + attribute names or a list of 2-tuples (relation name, role of + the edited entity in the relation). + + **Attributes** + + :attr:`etype` + which entity type the form config is for. This attribute is **mandatory** + + :attr:`formtype` + the formtype the class tries to customize (i.e. *main*, *inlined*, or *muledit*), + default is *main*. + + :attr:`hidden` + the list of attributes or relations to hide. + + :attr:`rels_as_attrs` + the list of attributes to edit in the *attributes* section. + + :attr:`inlined` + the list of attributes to edit in the *inlined* section. + + :attr:`fields_order` + the list of attributes to edit, in the desired order. Unspecified + fields will be displayed after specified ones, their order + being consistent with the schema definition. + + :attr:`widgets` + a dictionary mapping attribute names to widget instances. + + :attr:`fields` + a dictionary mapping attribute names to field instances. + + :attr:`uicfg_afs` + an instance of ``cubicweb.web.uicfg.AutoformSectionRelationTags`` + Default is None, meaning ``cubicweb.web.uicfg.autoform_section`` is used. + + :attr:`uicfg_aff` + an instance of ``cubicweb.web.uicfg.AutoformFieldTags`` + Default is None, meaning ``cubicweb.web.uicfg.autoform_field`` is used. + + :attr:`uicfg_affk` + an instance of ``cubicweb.web.uicfg.AutoformFieldKwargsTags`` + Default is None, meaning ``cubicweb.web.uicfg.autoform_field_kwargs`` is used. + + Examples: + +.. sourcecode:: python + + from cubicweb.web import uihelper, formwidgets as fwdgs + + class LinkFormConfig(uihelper.FormConfig): + etype = 'Link' + hidden = ('title', 'description', 'embed') + widgets = dict( + url=fwdgs.TextInput(attrs={'size':40}), + ) + + class UserFormConfig(uihelper.FormConfig): + etype = 'CWUser' + hidden = ('login',) + rels_as_attrs = ('in_group',) + fields_order = ('firstname', 'surname', 'in_group', 'use_email') + inlined = ('use_email',) + + """ + formtype = 'main' + etype = None # must be defined in concrete subclasses + hidden = () + rels_as_attrs = () + inlined = () + fields_order = () + widgets = {} + fields = {} + uicfg_afs = None + uicfg_aff = None + uicfg_affk = None diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,148 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""Views, forms, actions...
for the CubicWeb web client""" + +__docformat__ = "restructuredtext en" + +import os +import sys +import tempfile + +from six import add_metaclass + +from rql import nodes +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import class_deprecated + + +def need_table_view(rset, schema): + """return True if we think that a table view is more appropriate than a + list or primary view to display the given result set + """ + rqlst = rset.syntax_tree() + if len(rqlst.children) > 1: + # UNION query, use a table + return True + selected = rqlst.children[0].selection + try: + mainvar = selected[0] + except AttributeError: + # not a variable ref, using table view is probably a good option + return True + if not (isinstance(mainvar, nodes.VariableRef) or + (isinstance(mainvar, nodes.Constant) and mainvar.uid)): + return True + for i, etype in enumerate(rset.description[0][1:]): + # etype may be None on outer join + if etype is None: + return True + # check the selected index node is a VariableRef (else we + # won't detect aggregate function + if not isinstance(selected[i+1], nodes.VariableRef): + return True + # if this is not a final entity + if not schema.eschema(etype).final: + return True + # if this is a final entity not linked to the main variable + var = selected[i+1].variable + for vref in var.references(): + rel = vref.relation() + if rel is None: + continue + if mainvar.is_equivalent(rel.children[0]): + break + else: + return True + return False + +# FIXME: VID_BY_MIMETYPE is unfortunately a bit too naive since +# some browsers (e.g. FF2) send a bunch of mimetypes in +# the Accept header, for instance: +# text/xml,application/xml,application/xhtml+xml,text/html;q=0.9, +# text/plain;q=0.8,image/png,*/*;q=0.5 +VID_BY_MIMETYPE = { + #'text/xml': 'xml', + # XXX rss, owl... 
+} +def vid_from_rset(req, rset, schema, check_table=True): + """given a result set, return a view id""" + if rset is None: + return 'index' + for mimetype in req.parse_accept_header('Accept'): + if mimetype in VID_BY_MIMETYPE: + return VID_BY_MIMETYPE[mimetype] + nb_rows = len(rset) + # empty resultset + if nb_rows == 0: + return 'noresult' + # entity result set + if not schema.eschema(rset.description[0][0]).final: + if check_table and need_table_view(rset, schema): + return 'table' + if nb_rows == 1: + if req.search_state[0] == 'normal': + return 'primary' + return 'outofcontext-search' + if len(rset.column_types(0)) == 1: + return 'sameetypelist' + return 'list' + return 'table' + + +def linksearch_select_url(req, rset): + """when searching an entity to create a relation, return a URL to select + entities in the given rset + """ + req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') ) + target, eid, r_type, searchedtype = req.search_state[1] + if target == 'subject': + id_fmt = '%s:%s:%%s' % (eid, r_type) + else: + id_fmt = '%%s:%s:%s' % (r_type, eid) + triplets = '-'.join(id_fmt % row[0] for row in rset.rows) + return "javascript: selectForAssociation('%s', '%s');" % (triplets, eid) + + +def add_etype_button(req, etype, csscls='addButton right', **urlkwargs): + vreg = req.vreg + eschema = vreg.schema.eschema(etype) + if eschema.has_perm(req, 'add'): + url = vreg['etypes'].etype_class(etype).cw_create_url(req, **urlkwargs) + return u'<a href="%s" class="%s">%s</a>' % ( + xml_escape(url), csscls, req.__('New %s' % etype)) + return u'' + + + +@add_metaclass(class_deprecated) +class TmpFileViewMixin(object): + __deprecation_warning__ = '[3.18] %(cls)s is deprecated' + binary = True + content_type = 'application/octet-stream' + cache_max_age = 60*60*2 # stay in http cache for 2 hours by default + + def call(self): + self.cell_call() + + def cell_call(self, row=0, col=0): + self.cw_row, self.cw_col = row, col # in case one needs it + fd, tmpfile = tempfile.mkstemp('.png') + os.close(fd) + self._generate(tmpfile) + self.w(open(tmpfile, 'rb').read()) + os.unlink(tmpfile) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/actions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/actions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,429 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
+"""Set of HTML base actions""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from logilab.mtconverter import xml_escape +from logilab.common.registry import objectify_predicate, yes + +from cubicweb.schema import display_name +from cubicweb.predicates import (EntityPredicate, + one_line_rset, multi_lines_rset, one_etype_rset, relation_possible, + nonempty_rset, non_final_entity, score_entity, + authenticated_user, match_user_groups, match_search_state, + has_permission, has_add_permission, is_instance, debug_mode, + ) +from cubicweb.web import controller, action +from cubicweb.web.views import uicfg, linksearch_select_url, vid_from_rset + + +class has_editable_relation(EntityPredicate): + """accept if some relations for an entity found in the result set is + editable by the logged user. + + See `EntityPredicate` documentation for behaviour when row is not specified. + """ + + def score_entity(self, entity): + # if user has no update right but it can modify some relation, + # display action anyway + form = entity._cw.vreg['forms'].select('edition', entity._cw, + entity=entity, mainform=False) + for dummy in form.editable_relations(): + return 1 + for dummy in form.inlined_form_views(): + return 1 + for dummy in form.editable_attributes(strict=True): + return 1 + return 0 + +@objectify_predicate +def match_searched_etype(cls, req, rset=None, **kwargs): + return req.match_search_state(rset) + +@objectify_predicate +def view_is_not_default_view(cls, req, rset=None, **kwargs): + # interesting if it propose another view than the current one + vid = req.form.get('vid') + if vid and vid != vid_from_rset(req, rset, req.vreg.schema): + return 1 + return 0 + +@objectify_predicate +def addable_etype_empty_rset(cls, req, rset=None, **kwargs): + if rset is not None and not rset.rowcount: + rqlst = rset.syntax_tree() + if len(rqlst.children) > 1: + return 0 + select = rqlst.children[0] + if len(select.defined_vars) == 1 and len(select.solutions) == 1: + rset._searched_etype = next(iter(select.solutions[0].values())) + eschema = req.vreg.schema.eschema(rset._searched_etype) + if not (eschema.final or eschema.is_subobject(strict=True)) \ + and eschema.has_perm(req, 'add'): + return 1 + return 0 + +class has_undoable_transactions(EntityPredicate): + "Select entities having public (i.e. end-user) undoable transactions." + + def score_entity(self, entity): + if not entity._cw.vreg.config['undo-enabled']: + return 0 + if entity._cw.cnx.undoable_transactions(eid=entity.eid): + return 1 + else: + return 0 + + +# generic 'main' actions ####################################################### + +class SelectAction(action.Action): + """base class for link search actions. By default apply on + any size entity result search it the current state is 'linksearch' + if accept match. 
+ """ + __regid__ = 'select' + __select__ = (match_search_state('linksearch') & nonempty_rset() + & match_searched_etype()) + + title = _('select') + category = 'mainactions' + order = 0 + + def url(self): + return linksearch_select_url(self._cw, self.cw_rset) + + +class CancelSelectAction(action.Action): + __regid__ = 'cancel' + __select__ = match_search_state('linksearch') + + title = _('cancel select') + category = 'mainactions' + order = 10 + + def url(self): + target, eid, r_type, searched_type = self._cw.search_state[1] + return self._cw.build_url(str(eid), + vid='edition', __mode='normal') + + +class ViewAction(action.Action): + __regid__ = 'view' + __select__ = (action.Action.__select__ & + match_user_groups('users', 'managers') & + view_is_not_default_view() & + non_final_entity()) + + title = _('view') + category = 'mainactions' + order = 0 + + def url(self): + params = self._cw.form.copy() + for param in ('vid', '__message') + controller.NAV_FORM_PARAMETERS: + params.pop(param, None) + if self._cw.ajax_request: + path = 'view' + if self.cw_rset is not None: + params = {'rql': self.cw_rset.printable_rql()} + else: + path = self._cw.relative_path(includeparams=False) + return self._cw.build_url(path, **params) + + +class ModifyAction(action.Action): + __regid__ = 'edit' + __select__ = (action.Action.__select__ + & one_line_rset() & has_editable_relation()) + + title = _('modify') + category = 'mainactions' + order = 10 + + def url(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return entity.absolute_url(vid='edition') + + +class MultipleEditAction(action.Action): + __regid__ = 'muledit' # XXX get strange conflicts if id='edit' + __select__ = (action.Action.__select__ & multi_lines_rset() & + one_etype_rset() & has_permission('update')) + + title = _('modify') + category = 'mainactions' + order = 10 + + def url(self): + return self._cw.build_url('view', rql=self.cw_rset.printable_rql(), vid='muledit') + + +# generic "more" actions ####################################################### + +class ManagePermissionsAction(action.Action): + __regid__ = 'managepermission' + __select__ = (action.Action.__select__ & one_line_rset() & + non_final_entity() & match_user_groups('managers')) + + title = _('manage permissions') + category = 'moreactions' + order = 15 + + def url(self): + return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).absolute_url(vid='security') + + +class DeleteAction(action.Action): + __regid__ = 'delete' + __select__ = action.Action.__select__ & has_permission('delete') + + title = _('delete') + category = 'moreactions' + order = 20 + + def url(self): + if len(self.cw_rset) == 1: + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return self._cw.build_url(entity.rest_path(), vid='deleteconf') + return self._cw.build_url(rql=self.cw_rset.printable_rql(), vid='deleteconf') + + +class CopyAction(action.Action): + __regid__ = 'copy' + __select__ = (action.Action.__select__ & one_line_rset() + & has_permission('add')) + + title = _('copy') + category = 'moreactions' + order = 30 + + def url(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return entity.absolute_url(vid='copy') + + +class AddNewAction(MultipleEditAction): + """when we're seeing more than one entity with the same type, propose to + add a new one + """ + __regid__ = 'addentity' + __select__ = (action.Action.__select__ & + (addable_etype_empty_rset() + | (multi_lines_rset() & one_etype_rset() & 
has_add_permission())) + ) + + category = 'moreactions' + order = 40 + + @property + def rsettype(self): + if self.cw_rset: + return self.cw_rset.description[0][0] + return self.cw_rset._searched_etype + + @property + def title(self): + return self._cw.__('add a %s' % self.rsettype) # generated msgid + + def url(self): + return self._cw.vreg["etypes"].etype_class(self.rsettype).cw_create_url(self._cw) + + +class AddRelatedActions(action.Action): + """fill 'addrelated' sub-menu of the actions box""" + __regid__ = 'addrelated' + __select__ = action.Action.__select__ & one_line_rset() & non_final_entity() + + submenu = _('addrelated') + order = 17 + + def fill_menu(self, box, menu): + # when there is only one item in the sub-menu, replace the sub-menu by + # item's title prefixed by 'add' + menu.label_prefix = self._cw._('add') + super(AddRelatedActions, self).fill_menu(box, menu) + + def redirect_params(self, entity): + return {'__redirectpath': entity.rest_path(), # should not be url quoted! + '__redirectvid': self._cw.form.get('vid', '')} + + def actual_actions(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + eschema = entity.e_schema + params = self.redirect_params(entity) + for rschema, teschema, role in self.add_related_schemas(entity): + if rschema.role_rdef(eschema, teschema, role).role_cardinality(role) in '1?': + if entity.related(rschema, role): + continue + if role == 'subject': + label = 'add %s %s %s %s' % (eschema, rschema, teschema, role) + url = self.linkto_url(entity, rschema, teschema, 'object', **params) + else: + label = 'add %s %s %s %s' % (teschema, rschema, eschema, role) + url = self.linkto_url(entity, rschema, teschema, 'subject', **params) + yield self.build_action(self._cw._(label), url) + + def add_related_schemas(self, entity): + """this is actually the method used by the UI to generate 'addrelated' actions + from the schema. + + If you don't want any auto-generated actions, you should override this + method to return an empty list. If you only want some, you can configure + them by using uicfg.actionbox_appearsin_addmenu + """ + appearsin_addmenu = self._cw.vreg['uicfg'].select( + 'actionbox_appearsin_addmenu', self._cw, entity=entity) + req = self._cw + eschema = entity.e_schema + for role, rschemas in (('subject', eschema.subject_relations()), + ('object', eschema.object_relations())): + for rschema in rschemas: + if rschema.final: + continue + for teschema in rschema.targets(eschema, role): + if not appearsin_addmenu.etype_get(eschema, rschema, + role, teschema): + continue + rdef = rschema.role_rdef(eschema, teschema, role) + # check the relation can be added + # XXX consider autoform_permissions_overrides?
+ if role == 'subject'and not rdef.has_perm( + req, 'add', fromeid=entity.eid): + continue + if role == 'object'and not rdef.has_perm( + req, 'add', toeid=entity.eid): + continue + # check the target types can be added as well + if teschema.may_have_permission('add', req): + yield rschema, teschema, role + + def linkto_url(self, entity, rtype, etype, target, **kwargs): + return self._cw.vreg["etypes"].etype_class(etype).cw_create_url( + self._cw, __linkto='%s:%s:%s' % (rtype, entity.eid, target), + **kwargs) + + +class ViewSameCWEType(action.Action): + """when displaying the schema of a CWEType, offer to list entities of that type + """ + __regid__ = 'entitiesoftype' + __select__ = one_line_rset() & is_instance('CWEType') & score_entity(lambda x: not x.final) + category = 'mainactions' + order = 40 + + @property + def etype(self): + return self.cw_rset.get_entity(0,0).name + + @property + def title(self): + return self._cw.__('view all %s') % display_name(self._cw, self.etype, 'plural').lower() + + def url(self): + return self._cw.build_url(self.etype) + +# logged user actions ######################################################### + +class UserPreferencesAction(action.Action): + __regid__ = 'myprefs' + __select__ = authenticated_user() + + title = _('user preferences') + category = 'useractions' + order = 10 + + def url(self): + return self._cw.build_url(self.__regid__) + + +class UserInfoAction(action.Action): + __regid__ = 'myinfos' + __select__ = authenticated_user() + + title = _('profile') + category = 'useractions' + order = 20 + + def url(self): + return self._cw.build_url('cwuser/%s'%self._cw.user.login, vid='edition') + + +class LogoutAction(action.Action): + __regid__ = 'logout' + __select__ = authenticated_user() + + title = _('logout') + category = 'useractions' + order = 30 + + def url(self): + return self._cw.build_url(self.__regid__) + + +# site actions ################################################################ + +class ManagersAction(action.Action): + __abstract__ = True + __select__ = match_user_groups('managers') + + category = 'siteactions' + + def url(self): + return self._cw.build_url(self.__regid__) + + +class SiteConfigurationAction(ManagersAction): + __regid__ = 'siteconfig' + title = _('site configuration') + order = 10 + category = 'manage' + + +class ManageAction(ManagersAction): + __regid__ = 'manage' + title = _('manage') + order = 20 + + +# footer actions ############################################################### + +class PoweredByAction(action.Action): + __regid__ = 'poweredby' + __select__ = yes() + + category = 'footer' + order = 3 + title = _('Powered by CubicWeb') + + def url(self): + return 'http://www.cubicweb.org' + +## default actions ui configuration ########################################### + +addmenu = uicfg.actionbox_appearsin_addmenu +addmenu.tag_object_of(('*', 'relation_type', 'CWRType'), True) +addmenu.tag_object_of(('*', 'from_entity', 'CWEType'), False) +addmenu.tag_object_of(('*', 'to_entity', 'CWEType'), False) +addmenu.tag_object_of(('*', 'in_group', 'CWGroup'), True) +addmenu.tag_object_of(('*', 'bookmarked_by', 'CWUser'), True) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/ajaxcontroller.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/ajaxcontroller.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,458 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
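The actions defined above all follow the same recipe: a selector, a ``title`` msgid, a ``category`` / ``order`` pair and a ``url()`` method. A hedged sketch of a cube-specific action built on that pattern; the regid, view id and ``Ticket`` type are hypothetical:

.. sourcecode:: python

    # Hypothetical cube code: a "more actions" entry for Ticket entities.
    from cubicweb import _
    from cubicweb.predicates import is_instance, one_line_rset
    from cubicweb.web import action

    class ExportTicketAction(action.Action):
        __regid__ = 'mycube_export_ticket'          # hypothetical regid
        __select__ = (action.Action.__select__
                      & one_line_rset() & is_instance('Ticket'))

        title = _('export')           # msgid, translated when displayed
        category = 'moreactions'
        order = 50

        def url(self):
            entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
            # render the selected entity with a (hypothetical) export view
            return entity.absolute_url(vid='mycube.ticket.export')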
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +# +# (disable pylint msg for client obj access to protected member as in obj._cw) +# pylint: disable=W0212 +"""The ``ajaxcontroller`` module defines the :class:`AjaxController` +controller and the ``ajax-func`` cubicweb registry. + +.. autoclass:: cubicweb.web.views.ajaxcontroller.AjaxController + :members: + +``ajax-funcs`` registry hosts exposed remote functions, that is +functions that can be called from the javascript world. + +To register a new remote function, either decorate your function +with the :func:`~cubicweb.web.views.ajaxcontroller.ajaxfunc` decorator: + +.. sourcecode:: python + + from cubicweb.predicates import mactch_user_groups + from cubicweb.web.views.ajaxcontroller import ajaxfunc + + @ajaxfunc(output_type='json', selector=match_user_groups('managers')) + def list_users(self): + return [u for (u,) in self._cw.execute('Any L WHERE U login L')] + +or inherit from :class:`~cubicweb.web.views.ajaxcontroller.AjaxFunction` and +implement the ``__call__`` method: + +.. sourcecode:: python + + from cubicweb.web.views.ajaxcontroller import AjaxFunction + class ListUser(AjaxFunction): + __regid__ = 'list_users' # __regid__ is the name of the exposed function + __select__ = match_user_groups('managers') + output_type = 'json' + + def __call__(self): + return [u for (u, ) in self._cw.execute('Any L WHERE U login L')] + + +.. autoclass:: cubicweb.web.views.ajaxcontroller.AjaxFunction + :members: + +.. autofunction:: cubicweb.web.views.ajaxcontroller.ajaxfunc + +""" + +__docformat__ = "restructuredtext en" + +from warnings import warn +from functools import partial + +from six import PY2, text_type + +from logilab.common.date import strptime +from logilab.common.registry import yes +from logilab.common.deprecation import deprecated + +from cubicweb import ObjectNotFound, NoSelectableObject +from cubicweb.appobject import AppObject +from cubicweb.utils import json, json_dumps, UStringIO +from cubicweb.uilib import exc_message +from cubicweb.web import RemoteCallFailed, DirectResponse +from cubicweb.web.controller import Controller +from cubicweb.web.views import vid_from_rset +from cubicweb.web.views import basecontrollers + + +def optional_kwargs(extraargs): + if extraargs is None: + return {} + # we receive unicode keys which is not supported by the **syntax + return dict((str(key), value) for key, value in extraargs.items()) + + +class AjaxController(Controller): + """AjaxController handles ajax remote calls from javascript + + The following javascript function call: + + .. 
sourcecode:: javascript + + var d = asyncRemoteExec('foo', 12, "hello"); + d.addCallback(function(result) { + alert('server response is: ' + result); + }); + + will generate an ajax HTTP GET on the following url:: + + BASE_URL/ajax?fname=foo&arg=12&arg="hello" + + The AjaxController controller will therefore be selected to handle those URLs + and will itself select the :class:`cubicweb.web.views.ajaxcontroller.AjaxFunction` + matching the *fname* parameter. + """ + __regid__ = 'ajax' + + def publish(self, rset=None): + self._cw.ajax_request = True + try: + fname = self._cw.form['fname'] + except KeyError: + raise RemoteCallFailed('no method specified') + # 1/ check first for old-style (JSonController) ajax func for bw compat + try: + func = getattr(basecontrollers.JSonController, 'js_%s' % fname) + if PY2: + func = func.__func__ + func = partial(func, self) + except AttributeError: + # 2/ check for new-style (AjaxController) ajax func + try: + func = self._cw.vreg['ajax-func'].select(fname, self._cw) + except ObjectNotFound: + raise RemoteCallFailed('no %s method' % fname) + else: + warn('[3.15] remote function %s found on JSonController, ' + 'use AjaxFunction / @ajaxfunc instead' % fname, + DeprecationWarning, stacklevel=2) + # no <arg> attribute means the callback takes no argument + args = self._cw.form.get('arg', ()) + if not isinstance(args, (list, tuple)): + args = (args,) + try: + args = [json.loads(arg) for arg in args] + except ValueError as exc: + self.exception('error while decoding json arguments for ' + 'js_%s: %s (err: %s)', fname, args, exc) + raise RemoteCallFailed(exc_message(exc, self._cw.encoding)) + try: + result = func(*args) + except (RemoteCallFailed, DirectResponse): + raise + except Exception as exc: + self.exception('an exception occurred while calling js_%s(%s): %s', + fname, args, exc) + raise RemoteCallFailed(exc_message(exc, self._cw.encoding)) + if result is None: + return '' + # get unicode on @htmlize methods, encoded string on @jsonize methods + elif isinstance(result, text_type): + return result.encode(self._cw.encoding) + return result + +class AjaxFunction(AppObject): + """ + Attributes on this base class are: + + :attr: `check_pageid`: make sure the pageid received is valid before proceeding + :attr: `output_type`: + + - *None*: no processing, no change on content-type + + - *json*: serialize with `json_dumps` and set *application/json* + content-type + + - *xhtml*: wrap result in an XML node and forces HTML / XHTML + content-type (use ``_cw.html_content_type()``) + + """ + __registry__ = 'ajax-func' + __select__ = yes() + __abstract__ = True + + check_pageid = False + output_type = None + + @staticmethod + def _rebuild_posted_form(names, values, action=None): + form = {} + for name, value in zip(names, values): + # remove possible __action_xxx inputs + if name.startswith('__action'): + if action is None: + # strip '__action_' to get the actual action name + action = name[9:] + continue + # form.setdefault(name, []).append(value) + if name in form: + curvalue = form[name] + if isinstance(curvalue, list): + curvalue.append(value) + else: + form[name] = [curvalue, value] + else: + form[name] = value + # simulate click on __action_%s button to help the controller + if action: + form['__action_%s' % action] = u'whatever' + return form + + def validate_form(self, action, names, values): + self._cw.form = self._rebuild_posted_form(names, values, action) + return basecontrollers._validate_form(self._cw, self._cw.vreg) + + def _exec(self, rql, args=None, 
rocheck=True): + """json mode: execute RQL and return resultset as json""" + rql = rql.strip() + if rql.startswith('rql:'): + rql = rql[4:] + if rocheck: + self._cw.ensure_ro_rql(rql) + try: + return self._cw.execute(rql, args) + except Exception as ex: + self.exception("error in _exec(rql=%s): %s", rql, ex) + return None + return None + + def _call_view(self, view, paginate=False, **kwargs): + divid = self._cw.form.get('divid') + # we need to call pagination before with the stream set + try: + stream = view.set_stream() + except AttributeError: + stream = UStringIO() + kwargs['w'] = stream.write + assert not paginate + if divid == 'pageContent': + # ensure divid isn't reused by the view (e.g. table view) + del self._cw.form['divid'] + # mimick main template behaviour + stream.write(u'<div id="pageContent">') + vtitle = self._cw.form.get('vtitle') + if vtitle: + stream.write(u'<h1 class="vtitle">%s</h1>\n' % vtitle) + paginate = True + nav_html = UStringIO() + if paginate and not view.handle_pagination: + view.paginate(w=nav_html.write) + stream.write(nav_html.getvalue()) + if divid == 'pageContent': + stream.write(u'<div id="contentmain">') + view.render(**kwargs) + extresources = self._cw.html_headers.getvalue(skiphead=True) + if extresources: + stream.write(u'<div class="ajaxHtmlHead">\n') # XXX use a widget? + stream.write(extresources) + stream.write(u'</div>\n') + if divid == 'pageContent': + stream.write(u'</div>%s</div>' % nav_html.getvalue()) + return stream.getvalue() + + +def _ajaxfunc_factory(implementation, selector=yes(), _output_type=None, + _check_pageid=False, regid=None): + """converts a standard python function into an AjaxFunction appobject""" + class AnAjaxFunc(AjaxFunction): + __regid__ = regid or implementation.__name__ + __select__ = selector + output_type = _output_type + check_pageid = _check_pageid + + def serialize(self, content): + if self.output_type is None: + return content + elif self.output_type == 'xhtml': + self._cw.set_content_type(self._cw.html_content_type()) + return ''.join((u'<div>', + content.strip(), u'</div>')) + elif self.output_type == 'json': + self._cw.set_content_type('application/json') + return json_dumps(content) + raise RemoteCallFailed('no serializer found for output type %s' + % self.output_type) + + def __call__(self, *args, **kwargs): + if self.check_pageid: + data = self._cw.session.data.get(self._cw.pageid) + if data is None: + raise RemoteCallFailed(self._cw._('pageid-not-found')) + return self.serialize(implementation(self, *args, **kwargs)) + + AnAjaxFunc.__name__ = implementation.__name__ + # make sure __module__ refers to the original module otherwise + # vreg.register(obj) will ignore ``obj``. + AnAjaxFunc.__module__ = implementation.__module__ + # relate the ``implementation`` object to its wrapper appobject + # will be used by e.g.: + # import base_module + # @ajaxfunc + # def foo(self): + # return 42 + # assert foo(object) == 42 + # vreg.register_and_replace(foo, base_module.older_foo) + implementation.__appobject__ = AnAjaxFunc + return implementation + + +def ajaxfunc(implementation=None, selector=yes(), output_type=None, + check_pageid=False, regid=None): + """promote a standard function to an ``AjaxFunction`` appobject. + + All parameters are optional: + + :param selector: a custom selector object if needed, default is ``yes()`` + + :param output_type: either None, 'json' or 'xhtml' to customize output + content-type. 
Default is None + + :param check_pageid: whether the function requires a valid `pageid` or not + to proceed. Default is False. + + :param regid: a custom __regid__ for the created ``AjaxFunction`` object. Default + is to keep the wrapped function name. + + ``ajaxfunc`` can be used both as a standalone decorator: + + .. sourcecode:: python + + @ajaxfunc + def my_function(self): + return 42 + + or as a parametrizable decorator: + + .. sourcecode:: python + + @ajaxfunc(output_type='json') + def my_function(self): + return 42 + + """ + # if used as a parametrized decorator (e.g. @ajaxfunc(output_type='json')) + if implementation is None: + def _decorator(func): + return _ajaxfunc_factory(func, selector=selector, + _output_type=output_type, + _check_pageid=check_pageid, + regid=regid) + return _decorator + # else, used as a standalone decorator (i.e. @ajaxfunc) + return _ajaxfunc_factory(implementation, selector=selector, + _output_type=output_type, + _check_pageid=check_pageid, regid=regid) + + + +############################################################################### +# Cubicweb remote functions for : # +# - appobject rendering # +# - user / page session data management # +############################################################################### +@ajaxfunc(output_type='xhtml') +def view(self): + # XXX try to use the page-content template + req = self._cw + rql = req.form.get('rql') + if rql: + rset = self._exec(rql) + elif 'eid' in req.form: + rset = self._cw.eid_rset(req.form['eid']) + else: + rset = None + vid = req.form.get('vid') or vid_from_rset(req, rset, self._cw.vreg.schema) + try: + viewobj = self._cw.vreg['views'].select(vid, req, rset=rset) + except NoSelectableObject: + vid = req.form.get('fallbackvid', 'noresult') + viewobj = self._cw.vreg['views'].select(vid, req, rset=rset) + viewobj.set_http_cache_headers() + if req.is_client_cache_valid(): + return '' + return self._call_view(viewobj, paginate=req.form.pop('paginate', False)) + + +@ajaxfunc(output_type='xhtml') +def component(self, compid, rql, registry='components', extraargs=None): + if rql: + rset = self._exec(rql) + else: + rset = None + # XXX while it sounds good, addition of the try/except below cause pb: + # when filtering using facets return an empty rset, the edition box + # isn't anymore selectable, as expected. The pb is that with the + # try/except below, we see a "an error occurred" message in the ui, while + # we don't see it without it. Proper fix would probably be to deal with + # this by allowing facet handling code to tell to js_component that such + # error is expected and should'nt be reported. 
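A hedged sketch of a custom remote function taking arguments, in the spirit of the built-ins defined in this module; the function name and helper are hypothetical. From the javascript side it would be reached through the ``ajax`` controller, e.g. ``asyncRemoteExec('entity_title', someEid)``: each argument travels as a json-encoded ``arg`` parameter and is decoded before the call, as described above.

.. sourcecode:: python

    from cubicweb.web.views.ajaxcontroller import ajaxfunc

    @ajaxfunc(output_type='json')
    def entity_title(self, eid):
        """return the title of the entity with the given eid (hypothetical helper)"""
        entity = self._cw.entity_from_eid(int(eid))
        return entity.dc_title()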
+ #try: + comp = self._cw.vreg[registry].select(compid, self._cw, rset=rset, + **optional_kwargs(extraargs)) + #except NoSelectableObject: + # raise RemoteCallFailed('unselectable') + return self._call_view(comp, **optional_kwargs(extraargs)) + +@ajaxfunc(output_type='xhtml') +def render(self, registry, oid, eid=None, + selectargs=None, renderargs=None): + if eid is not None: + rset = self._cw.eid_rset(eid) + # XXX set row=0 + elif self._cw.form.get('rql'): + rset = self._cw.execute(self._cw.form['rql']) + else: + rset = None + viewobj = self._cw.vreg[registry].select(oid, self._cw, rset=rset, + **optional_kwargs(selectargs)) + return self._call_view(viewobj, **optional_kwargs(renderargs)) + + +@ajaxfunc(output_type='json') +def i18n(self, msgids): + """returns the translation of `msgid`""" + return [self._cw._(msgid) for msgid in msgids] + +@ajaxfunc(output_type='json') +def format_date(self, strdate): + """returns the formatted date for `msgid`""" + date = strptime(strdate, '%Y-%m-%d %H:%M:%S') + return self._cw.format_date(date) + +@ajaxfunc(output_type='json') +def external_resource(self, resource): + """returns the URL of the external resource named `resource`""" + return self._cw.uiprops[resource] + +@ajaxfunc +def unload_page_data(self): + """remove user's session data associated to current pageid""" + self._cw.session.data.pop(self._cw.pageid, None) + +@ajaxfunc(output_type='json') +@deprecated("[3.13] use jQuery.cookie(cookiename, cookievalue, {path: '/'}) in js land instead") +def set_cookie(self, cookiename, cookievalue): + """generates the Set-Cookie HTTP reponse header corresponding + to `cookiename` / `cookievalue`. + """ + cookiename, cookievalue = str(cookiename), str(cookievalue) + self._cw.set_cookie(cookiename, cookievalue) + + + +@ajaxfunc +def delete_relation(self, rtype, subjeid, objeid): + rql = 'DELETE S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype + self._cw.execute(rql, {'s': subjeid, 'o': objeid}) + +@ajaxfunc +def add_relation(self, rtype, subjeid, objeid): + rql = 'SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype + self._cw.execute(rql, {'s': subjeid, 'o': objeid}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/ajaxedit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/ajaxedit.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,111 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
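The built-in ``view``, ``component`` and ``render`` functions above all funnel their output through ``_call_view``. A hedged sketch of a cube-level function doing the same to return a rendered HTML fragment; the ``mycube.activity`` view id is hypothetical:

.. sourcecode:: python

    from cubicweb.web.views.ajaxcontroller import ajaxfunc

    @ajaxfunc(output_type='xhtml')
    def activity_panel(self, eid):
        # build a result set from the given eid and render a view on it
        rset = self._cw.eid_rset(eid)
        viewobj = self._cw.vreg['views'].select('mycube.activity', self._cw, rset=rset)
        return self._call_view(viewobj)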
+"""Set of views allowing edition of entities/relations using ajax""" + +__docformat__ = "restructuredtext en" + +from cubicweb import role +from cubicweb.view import View +from cubicweb.predicates import match_form_params, match_kwargs +from cubicweb.web import component, stdmsgs, formwidgets as fw + +class AddRelationView(component.EditRelationMixIn, View): + """base class for view which let add entities linked by a given relation + + subclasses should define at least id, rtype and target class attributes. + """ + __registry__ = 'views' + __regid__ = 'xaddrelation' + __select__ = (match_form_params('rtype', 'target') + | match_kwargs('rtype', 'target')) + cw_property_defs = {} # don't want to inherit this from Box + expected_kwargs = form_params = ('rtype', 'target') + + def cell_call(self, row, col, rtype=None, target=None, etype=None): + self.rtype = rtype or self._cw.form['rtype'] + self.target = target or self._cw.form['target'] + self.etype = etype or self._cw.form.get('etype') + entity = self.cw_rset.get_entity(row, col) + rschema = self._cw.vreg.schema.rschema(self.rtype) + if not self.etype: + if self.target == 'object': + etypes = rschema.objects(entity.e_schema) + else: + etypes = rschema.subjects(entity.e_schema) + if len(etypes) == 1: + self.etype = etypes[0] + self.w(u'<div id="%s">' % self.domid) + self.w(u'<h1>%s</h1>' % self._cw._('relation %(relname)s of %(ent)s') + % {'relname': rschema.display_name(self._cw, role(self)), + 'ent': entity.view('incontext')}) + self.w(u'<ul class="list-unstyled">') + for boxitem in self.unrelated_boxitems(entity): + self.w('<li>%s</li>' % boxitem) + self.w(u'</ul></div>') + + def unrelated_entities(self, entity): + """returns the list of unrelated entities + + if etype is not defined on the Box's class, the default + behaviour is to use the entity's appropraite vocabulary function + """ + # use entity.unrelated if we've been asked for a particular etype + if getattr(self, 'etype', None): + rset = entity.unrelated(self.rtype, self.etype, role(self), + ordermethod='fetch_order') + self.paginate(self._cw, rset=rset, w=self.w) + return rset.entities() + super(AddRelationView, self).unrelated_entities(self) + + +def ajax_composite_form(container, entity, rtype, okjs, canceljs, + entityfkwargs=None): + """ + * if entity is None, edit container (assert container.has_eid()) + * if entity has not eid, will be created + * if container has not eid, will be created (see vcreview InsertionPoint) + """ + req = container._cw + parentexists = entity is None or container.has_eid() + buttons = [fw.Button(onclick=okjs), + fw.Button(stdmsgs.BUTTON_CANCEL, onclick=canceljs)] + freg = req.vreg['forms'] + # main form kwargs + mkwargs = dict(action='#', domid='%sForm%s' % (rtype, container.eid), + form_buttons=buttons, + onsubmit='javascript: %s; return false' % okjs) + # entity form kwargs + # use formtype=inlined to skip the generic relations edition section + fkwargs = dict(entity=entity or container, formtype='inlined') + if entityfkwargs is not None: + fkwargs.update(entityfkwargs) + # form values + formvalues = {} + if entity is not None: # creation + formvalues[rtype] = container.eid + if parentexists: # creation / edition + mkwargs.update(fkwargs) + # use formtype=inlined to avoid viewing the relation edition section + form = freg.select('edition', req, **mkwargs) + else: # creation of both container and comment entities + form = freg.select('composite', req, form_renderer_id='default', + **mkwargs) + form.add_subform(freg.select('edition', req, 
entity=container, + mainform=False, mainentity=True)) + form.add_subform(freg.select('edition', req, mainform=False, **fkwargs)) + return form, formvalues diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/apacherewrite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/apacherewrite.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,111 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""provide class to do Apache rewrite rules'job inside cubicweb (though functionnalities +are much more limited for the moment) + +""" + +__docformat__ = "restructuredtext en" + +from re import compile + +from cubicweb.web import Redirect +from cubicweb.web.component import Component + +class RewriteCond(object): + def __init__(self, condition, match='host', rules=(), action='rewrite'): + self.condition = compile(condition) + assert match in ('host', 'path'), match + self.match_part = match + self.rules = [] + for rule, replace in rules: + rulergx = compile(rule) + self.rules.append( (rulergx, replace) ) + assert action in ('rewrite', 'redirect', 'stop'), action + self.process = getattr(self, 'action_%s' % action) + + def match(self, **kwargs): + self._match = self.condition.match(kwargs[self.match_part]) + return not self._match is None + + def action_rewrite(self, path): + for rgx, replace in self.rules: + if not rgx.match(path) is None: + matchdict = self._match.groupdict() or None + if not matchdict is None: + replace = replace % matchdict + return rgx.sub(replace, path) + return path + + def action_redirect(self, path): + url = self.action_rewrite(path) + raise Redirect(url) + + def action_stop(self, path): + return path + + +class ApacheURLRewrite(Component): + """inherit from this class with actual rules to activate apache style rewriting + + rules should have the form : + + [('condition pattern 1', [('rule1 pattern', 'replace expression'), + ('rule2 pattern', 'replace expression')], + ('condition pattern 2', [('rule1 pattern', 'replace expression'), + ('rule2 pattern', 'replace expression')] + ] + + for instance the equivalent of the following apache rules: + + RewriteCond %{HTTP_HOST} ^logilab\.fr + RewriteRule ^/(.*) http://www.logilab.fr/$1 [L,R=301] + + RewriteCond %{HTTP_HOST} ^www\.logilab\.fr + RewriteRule ^/(.*) http://localhost:8080/$1 [L,P] + + RewriteCond %{HTTP_HOST} ^(.+)\.logilab\.fr + RewriteRule ^/(data/.*) http://localhost:8080/$1 [L,P] + RewriteRule ^/(json.*) http://localhost:8080/$1 [L,P] + RewriteRule ^/(.*) http://localhost:8080/m_%1/$1 [L,P] + + could be written (considering that no "host rewritting" is necessary): + + class MyAppRules(ApacheURLRewrite): + rules = [ + RewriteCond('logilab\.fr', match='host', + rules=[('/(.*)', r'http://www.logilab.fr/\1')], + action='redirect'), + 
RewriteCond('(www)\.logilab\.fr', match='host', action='stop'), + RewriteCond('/(data|json)/', match='path', action='stop'), + RewriteCond('(?P<cat>.*)\.logilab\.fr', match='host', + rules=[('/(.*)', r'/m_%(cat)s/\1')]), + ] + """ + __abstract__ = True + __regid__ = 'urlrewriter' + rules = [] + + def get_rules(self, req): + return self.rules + + def rewrite(self, host, path, req): + for cond in self.get_rules(req): + if cond.match(host=host, path=path): + return cond.process(path) + return path diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/authentication.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/authentication.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,174 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""user authentication component""" + +__docformat__ = "restructuredtext en" + +from logilab.common.deprecation import class_renamed + +from cubicweb import AuthenticationError +from cubicweb.view import Component +from cubicweb.web import InvalidSession + + +class NoAuthInfo(Exception): pass + + +class WebAuthInfoRetriever(Component): + __registry__ = 'webauth' + order = None + __abstract__ = True + + def authentication_information(self, req): + """retrieve authentication information from the given request, raise + NoAuthInfo if expected information is not found. + """ + raise NotImplementedError() + + def authenticated(self, retriever, req, session, login, authinfo): + """callback when return authentication information have opened a + repository connection successfully. Take care req has no session + attached yet, hence req.execute isn't available. + """ + pass + + def request_has_auth_info(self, req): + """tells from the request if it has enough information + to proceed to authentication, would the current session + be invalidated + """ + raise NotImplementedError() + + def revalidate_login(self, req): + """returns a login string or None, for repository session validation + purposes + """ + raise NotImplementedError() + + def cleanup_authentication_information(self, req): + """called when the retriever has returned some authentication + information but we get an authentication error when using them, so it + get a chance to clean things up (e.g. remove cookie) + """ + pass + +WebAuthInfoRetreiver = class_renamed( + 'WebAuthInfoRetreiver', WebAuthInfoRetriever, + '[3.17] WebAuthInfoRetreiver had been renamed into WebAuthInfoRetriever ' + '("ie" instead of "ei")') + + +class LoginPasswordRetriever(WebAuthInfoRetriever): + __regid__ = 'loginpwdauth' + order = 10 + + def authentication_information(self, req): + """retreive authentication information from the given request, raise + NoAuthInfo if expected information is not found. 
+ """ + login, password = req.get_authorization() + if not login: + raise NoAuthInfo() + return login, {'password': password} + + def request_has_auth_info(self, req): + return req.get_authorization()[0] is not None + + def revalidate_login(self, req): + return req.get_authorization()[0] + +LoginPasswordRetreiver = class_renamed( + 'LoginPasswordRetreiver', LoginPasswordRetriever, + '[3.17] LoginPasswordRetreiver had been renamed into LoginPasswordRetriever ' + '("ie" instead of "ei")') + + + +class RepositoryAuthenticationManager(object): + """authenticate user associated to a request and check session validity""" + + def __init__(self, repo): + self.repo = repo + vreg = repo.vreg + self.log_queries = vreg.config['query-log-file'] + self.authinforetrievers = sorted(vreg['webauth'].possible_objects(vreg), + key=lambda x: x.order) + # 2-uple login / password, login is None when no anonymous access + # configured + self.anoninfo = vreg.config.anonymous_user() + if self.anoninfo[0]: + self.anoninfo = (self.anoninfo[0], {'password': self.anoninfo[1]}) + + def validate_session(self, req, session): + """check session validity and return the connected user on success. + + raise :exc:`InvalidSession` if session is corrupted for a reason or + another and should be closed + + also invoked while going from anonymous to logged in + """ + for retriever in self.authinforetrievers: + if retriever.request_has_auth_info(req): + login = retriever.revalidate_login(req) + return self._validate_session(req, session, login) + # let's try with the current session + return self._validate_session(req, session, None) + + def _validate_session(self, req, session, login): + # check session.login and not user.login, since in case of login by + # email, login and cnx.login are the email while user.login is the + # actual user login + if login and session.login != login: + raise InvalidSession('login mismatch') + + def authenticate(self, req): + """authenticate user using connection information found in the request, + and return corresponding a :class:`~cubicweb.dbapi.Connection` instance, + as well as login used to open the connection. + + raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) + """ + has_auth = False + for retriever in self.authinforetrievers: + try: + login, authinfo = retriever.authentication_information(req) + except NoAuthInfo: + continue + has_auth = True + try: + session = self._authenticate(login, authinfo) + except AuthenticationError: + retriever.cleanup_authentication_information(req) + continue # the next one may succeed + for retriever_ in self.authinforetrievers: + retriever_.authenticated(retriever, req, session, login, authinfo) + return session, login + # false if no authentication info found, i.e. this is not an + # authentication failure + if has_auth: + req.set_message(req._('authentication failure')) + login, authinfo = self.anoninfo + if login: + session = self._authenticate(login, authinfo) + return session, login + raise AuthenticationError() + + def _authenticate(self, login, authinfo): + sessionid = self.repo.connect(login, **authinfo) + return self.repo._sessions[sessionid] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/autoform.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/autoform.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1057 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +""" +.. autodocstring:: cubicweb.web.views.autoform::AutomaticEntityForm + +Configuration through uicfg +``````````````````````````` + +It is possible to manage which and how an entity's attributes and relations +will be edited in the various contexts where the automatic entity form is used +by using proper uicfg tags. + +The details of the uicfg syntax can be found in the :ref:`uicfg` chapter. + +Possible relation tags that apply to entity forms are detailled below. +They are all in the :mod:`cubicweb.web.uicfg` module. + +Attributes/relations display location +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``autoform_section`` specifies where to display a relation in form for a given +form type. :meth:`tag_attribute`, :meth:`tag_subject_of` and +:meth:`tag_object_of` methods for this relation tag expect two arguments +additionally to the relation key: a `formtype` and a `section`. + +`formtype` may be one of: + +* 'main', the main entity form (e.g. the one you get when creating or editing an + entity) + +* 'inlined', the form for an entity inlined into another form + +* 'muledit', the table form when editing multiple entities of the same type + + +section may be one of: + +* 'hidden', don't display (not even in a hidden input) + +* 'attributes', display in the attributes section + +* 'relations', display in the relations section, using the generic relation + selector combobox (available in main form only, and not usable for attributes) + +* 'inlined', display target entity of the relation into an inlined form + (available in main form only, and not for attributes) + +By default, mandatory relations are displayed in the 'attributes' section, +others in 'relations' section. + + +Change default fields +^^^^^^^^^^^^^^^^^^^^^ + +Use ``autoform_field`` to replace the default field class to use for a relation +or attribute. You can put either a field class or instance as value (put a class +whenether it's possible). + +.. Warning:: + + `autoform_field_kwargs` should usually be used instead of + `autoform_field`. If you put a field instance into `autoform_field`, + `autoform_field_kwargs` values for this relation will be ignored. + + +Customize field options +^^^^^^^^^^^^^^^^^^^^^^^ + +In order to customize field options (see :class:`~cubicweb.web.formfields.Field` +for a detailed list of options), use `autoform_field_kwargs`. This rtag takes +a dictionary as arguments, that will be given to the field's contructor. + +You can then put in that dictionary any arguments supported by the field +class. For instance: + +.. sourcecode:: python + + # Change the content of the combobox. 
Here `ticket_done_in_choices` is a + # function which returns a list of elements to populate the combobox + autoform_field_kwargs.tag_subject_of(('Ticket', 'done_in', '*'), + {'sort': False, + 'choices': ticket_done_in_choices}) + + # Force usage of a TextInput widget for the expression attribute of + # RQLExpression entities + autoform_field_kwargs.tag_attribute(('RQLExpression', 'expression'), + {'widget': fw.TextInput}) + +.. note:: + + the widget argument can be either a class or an instance (the later + case being convenient to pass the Widget specific initialisation + options) + +Overriding permissions +^^^^^^^^^^^^^^^^^^^^^^ + +The `autoform_permissions_overrides` rtag provides a way to by-pass security +checking for dark-corner case where it can't be verified properly. + + +.. More about inlined forms +.. Controlling the generic relation fields +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from six.moves import range + +from logilab.mtconverter import xml_escape +from logilab.common.decorators import iclassmethod, cached +from logilab.common.deprecation import deprecated +from logilab.common.registry import NoSelectableObject + +from cubicweb import neg_role, uilib +from cubicweb.schema import display_name +from cubicweb.view import EntityView +from cubicweb.predicates import ( + match_kwargs, match_form_params, non_final_entity, + specified_etype_implements) +from cubicweb.utils import json_dumps +from cubicweb.web import (stdmsgs, eid_param, + form as f, formwidgets as fw, formfields as ff) +from cubicweb.web.views import uicfg, forms +from cubicweb.web.views.ajaxcontroller import ajaxfunc + + +# inlined form handling ######################################################## + +class InlinedFormField(ff.Field): + def __init__(self, view=None, **kwargs): + kwargs.setdefault('label', None) + # don't add eidparam=True since this field doesn't actually hold the + # relation value (the subform does) hence should not be listed in + # _cw_entity_fields + super(InlinedFormField, self).__init__(name=view.rtype, role=view.role, + **kwargs) + self.view = view + + def render(self, form, renderer): + """render this field, which is part of form, using the given form + renderer + """ + view = self.view + i18nctx = 'inlined:%s.%s.%s' % (form.edited_entity.e_schema, + view.rtype, view.role) + return u'<div class="inline-%s-%s-slot">%s</div>' % ( + view.rtype, view.role, + view.render(i18nctx=i18nctx, row=view.cw_row, col=view.cw_col)) + + def form_init(self, form): + """method called before by build_context to trigger potential field + initialization requiring the form instance + """ + if self.view.form: + self.view.form.build_context(form.formvalues) + + @property + def needs_multipart(self): + if self.view.form: + # take a look at inlined forms to check (recursively) if they need + # multipart handling. 
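A sketch of the ``autoform_section`` and ``autoform_field_kwargs`` tags documented above, using the formtype / section values listed there; the Ticket / concerns / Project schema and the widget options are hypothetical:

.. sourcecode:: python

    from cubicweb.web.views import uicfg
    from cubicweb.web import formwidgets as fw

    afs = uicfg.autoform_section
    # edit the target Project through an inlined form in the main form...
    afs.tag_subject_of(('Ticket', 'concerns', 'Project'), 'main', 'inlined')
    # ...and hide the relation in the multiple-entities ('muledit') table
    afs.tag_subject_of(('Ticket', 'concerns', 'Project'), 'muledit', 'hidden')

    affk = uicfg.autoform_field_kwargs
    # give the 'title' attribute a wider text input
    affk.tag_attribute(('Ticket', 'title'), {'widget': fw.TextInput({'size': 80})})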
+ return self.view.form.needs_multipart + return False + + def has_been_modified(self, form): + return False + + def process_posted(self, form): + pass # handled by the subform + + +class InlineEntityEditionFormView(f.FormViewMixIn, EntityView): + """ + :attr peid: the parent entity's eid hosting the inline form + :attr rtype: the relation bridging `etype` and `peid` + :attr role: the role played by the `peid` in the relation + :attr pform: the parent form where this inlined form is being displayed + """ + __regid__ = 'inline-edition' + __select__ = non_final_entity() & match_kwargs('peid', 'rtype') + + _select_attrs = ('peid', 'rtype', 'role', 'pform', 'etype') + removejs = "removeInlinedEntity('%s', '%s', '%s')" + + # make pylint happy + peid = rtype = role = pform = etype = None + + def __init__(self, *args, **kwargs): + for attr in self._select_attrs: + # don't pop attributes from kwargs, so the end-up in + # self.cw_extra_kwargs which is then passed to the edition form (see + # the .form method) + setattr(self, attr, kwargs.get(attr)) + super(InlineEntityEditionFormView, self).__init__(*args, **kwargs) + + def _entity(self): + assert self.cw_row is not None, self + return self.cw_rset.get_entity(self.cw_row, self.cw_col) + + @property + def petype(self): + assert isinstance(self.peid, int) + pentity = self._cw.entity_from_eid(self.peid) + return pentity.e_schema.type + + @property + @cached + def form(self): + entity = self._entity() + form = self._cw.vreg['forms'].select('edition', self._cw, + entity=entity, + formtype='inlined', + form_renderer_id='inline', + copy_nav_params=False, + mainform=False, + parent_form=self.pform, + **self.cw_extra_kwargs) + if self.pform is None: + form.restore_previous_post(form.session_key()) + #assert form.parent_form + self.add_hiddens(form, entity) + return form + + def cell_call(self, row, col, i18nctx, **kwargs): + """ + :param peid: the parent entity's eid hosting the inline form + :param rtype: the relation bridging `etype` and `peid` + :param role: the role played by the `peid` in the relation + """ + entity = self._entity() + divonclick = "restoreInlinedEntity('%s', '%s', '%s')" % ( + self.peid, self.rtype, entity.eid) + self.render_form(i18nctx, divonclick=divonclick, **kwargs) + + def _get_removejs(self): + """ + Don't display the remove link in edition form if the + cardinality is 1. Handled in InlineEntityCreationFormView for + creation form. + """ + entity = self._entity() + rdef = entity.e_schema.rdef(self.rtype, neg_role(self.role), self.petype) + card = rdef.role_cardinality(self.role) + if card == '1': # don't display remove link + return None + # if cardinality is 1..n (+), dont display link to remove an inlined form for the first form + # allowing to edit the relation. 
To detect so: + # + # * if parent form (pform) is None, we're generated through an ajax call and so we know this + # is not the first form + # + # * if parent form is not None, look for previous InlinedFormField in the parent's form + # fields + if card == '+' and self.pform is not None: + # retrieve all field'views handling this relation and return None if we're the first of + # them + first_view = next(iter((f.view for f in self.pform.fields + if isinstance(f, InlinedFormField) + and f.view.rtype == self.rtype and f.view.role == self.role))) + if self == first_view: + return None + return self.removejs and self.removejs % ( + self.peid, self.rtype, entity.eid) + + def render_form(self, i18nctx, **kwargs): + """fetch and render the form""" + entity = self._entity() + divid = '%s-%s-%s' % (self.peid, self.rtype, entity.eid) + title = self.form_title(entity, i18nctx) + removejs = self._get_removejs() + countkey = '%s_count' % self.rtype + try: + self._cw.data[countkey] += 1 + except KeyError: + self._cw.data[countkey] = 1 + self.form.render(w=self.w, divid=divid, title=title, removejs=removejs, + i18nctx=i18nctx, counter=self._cw.data[countkey] , + **kwargs) + + def form_title(self, entity, i18nctx): + return self._cw.pgettext(i18nctx, entity.cw_etype) + + def add_hiddens(self, form, entity): + """to ease overriding (see cubes.vcsfile.views.forms for instance)""" + iid = 'rel-%s-%s-%s' % (self.peid, self.rtype, entity.eid) + # * str(self.rtype) in case it's a schema object + # * neged_role() since role is the for parent entity, we want the role + # of the inlined entity + form.add_hidden(name=str(self.rtype), value=self.peid, + role=neg_role(self.role), eidparam=True, id=iid) + + def keep_entity(self, form, entity): + if not entity.has_eid(): + return True + # are we regenerating form because of a validation error? + if form.form_previous_values: + cdvalues = self._cw.list_form_param(eid_param(self.rtype, self.peid), + form.form_previous_values) + if unicode(entity.eid) not in cdvalues: + return False + return True + + +class InlineEntityCreationFormView(InlineEntityEditionFormView): + """ + :attr etype: the entity type being created in the inline form + """ + __regid__ = 'inline-creation' + __select__ = (match_kwargs('peid', 'petype', 'rtype') + & specified_etype_implements('Any')) + _select_attrs = InlineEntityEditionFormView._select_attrs + ('petype',) + + # make pylint happy + petype = None + + @property + def removejs(self): + entity = self._entity() + rdef = entity.e_schema.rdef(self.rtype, neg_role(self.role), self.petype) + card = rdef.role_cardinality(self.role) + # when one is adding an inline entity for a relation of a single card, + # the 'add a new xxx' link disappears. If the user then cancel the addition, + # we have to make this link appears back. This is done by giving add new link + # id to removeInlineForm. 
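The ``form_title`` method above controls the heading displayed for each inlined form. A hedged sketch of a cube providing a more specific view to customise it for a hypothetical Ticket entity type:

.. sourcecode:: python

    from cubicweb.predicates import is_instance
    from cubicweb.web.views.autoform import InlineEntityEditionFormView

    class TicketInlineEditionFormView(InlineEntityEditionFormView):
        # refine the base selector so this view wins for Ticket entities
        __select__ = InlineEntityEditionFormView.__select__ & is_instance('Ticket')

        def form_title(self, entity, i18nctx):
            return self._cw._('ticket #%s') % entity.eid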
+ if card == '?': + divid = "addNew%s%s%s:%s" % (self.etype, self.rtype, self.role, self.peid) + return "removeInlineForm('%%s', '%%s', '%s', '%%s', '%s')" % ( + self.role, divid) + elif card in '+*': + return "removeInlineForm('%%s', '%%s', '%s', '%%s')" % self.role + # don't do anything for card == '1' + + @cached + def _entity(self): + try: + cls = self._cw.vreg['etypes'].etype_class(self.etype) + except Exception: + self.w(self._cw._('no such entity type %s') % self.etype) + return + entity = cls(self._cw) + entity.eid = next(self._cw.varmaker) + return entity + + def call(self, i18nctx, **kwargs): + self.render_form(i18nctx, **kwargs) + + +class InlineAddNewLinkView(InlineEntityCreationFormView): + """ + :attr card: the cardinality of the relation according to role of `peid` + """ + __regid__ = 'inline-addnew-link' + __select__ = (match_kwargs('peid', 'petype', 'rtype') + & specified_etype_implements('Any')) + + _select_attrs = InlineEntityCreationFormView._select_attrs + ('card',) + card = None # make pylint happy + form = None # no actual form wrapped + + def call(self, i18nctx, **kwargs): + self._cw.set_varmaker() + divid = "addNew%s%s%s:%s" % (self.etype, self.rtype, self.role, self.peid) + self.w(u'<div class="inlinedform" id="%s" cubicweb:limit="true">' + % divid) + js = "addInlineCreationForm('%s', '%s', '%s', '%s', '%s', '%s')" % ( + self.peid, self.petype, self.etype, self.rtype, self.role, i18nctx) + if self.pform.should_hide_add_new_relation_link(self.rtype, self.card): + js = "toggleVisibility('%s'); %s" % (divid, js) + __ = self._cw.pgettext + self.w(u'<a class="addEntity" id="add%s:%slink" href="javascript: %s" >+ %s.</a>' + % (self.rtype, self.peid, js, __(i18nctx, 'add a %s' % self.etype))) + self.w(u'</div>') + + +# generic relations handling ################################################## + +def relation_id(eid, rtype, role, reid): + """return an identifier for a relation between two entities""" + if role == 'subject': + return u'%s:%s:%s' % (eid, rtype, reid) + return u'%s:%s:%s' % (reid, rtype, eid) + +def toggleable_relation_link(eid, nodeid, label='x'): + """return javascript snippet to delete/undelete a relation between two + entities + """ + js = u"javascript: togglePendingDelete('%s', %s);" % ( + nodeid, xml_escape(json_dumps(eid))) + return u'[<a class="handle" href="%s" id="handle%s">%s</a>]' % ( + js, nodeid, label) + + +def get_pending_inserts(req, eid=None): + """shortcut to access req's pending_insert entry + + This is where are stored relations being added while editing + an entity. This used to be stored in a temporary cookie. + """ + pending = req.session.data.get('pending_insert', ()) + return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending + if eid is None or eid in (subj, obj)] + +def get_pending_deletes(req, eid=None): + """shortcut to access req's pending_delete entry + + This is where are stored relations being removed while editing + an entity. This used to be stored in a temporary cookie. 
+ """ + pending = req.session.data.get('pending_delete', ()) + return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending + if eid is None or eid in (subj, obj)] + +def parse_relations_descr(rdescr): + """parse a string describing some relations, in the form + subjeids:rtype:objeids + where subjeids and objeids are eids separeted by a underscore + + return an iterator on (subject eid, relation type, object eid) found + """ + for rstr in rdescr: + subjs, rtype, objs = rstr.split(':') + for subj in subjs.split('_'): + for obj in objs.split('_'): + yield int(subj), rtype, int(obj) + +def delete_relations(req, rdefs): + """delete relations from the repository""" + # FIXME convert to using the syntax subject:relation:eids + execute = req.execute + for subj, rtype, obj in parse_relations_descr(rdefs): + rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype + execute(rql, {'x': subj, 'y': obj}) + req.set_message(req._('relations deleted')) + +def insert_relations(req, rdefs): + """insert relations into the repository""" + execute = req.execute + for subj, rtype, obj in parse_relations_descr(rdefs): + rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype + execute(rql, {'x': subj, 'y': obj}) + + +# ajax edition helpers ######################################################## +@ajaxfunc(output_type='xhtml', check_pageid=True) +def inline_creation_form(self, peid, petype, ttype, rtype, role, i18nctx): + view = self._cw.vreg['views'].select('inline-creation', self._cw, + etype=ttype, rtype=rtype, role=role, + peid=peid, petype=petype) + return self._call_view(view, i18nctx=i18nctx) + +@ajaxfunc(output_type='json') +def validate_form(self, action, names, values): + return self.validate_form(action, names, values) + +@ajaxfunc +def cancel_edition(self, errorurl): + """cancelling edition from javascript + + We need to clear associated req's data : + - errorurl + - pending insertions / deletions + """ + self._cw.cancel_edition(errorurl) + + +def _add_pending(req, eidfrom, rel, eidto, kind): + key = 'pending_%s' % kind + pendings = req.session.data.setdefault(key, set()) + pendings.add( (int(eidfrom), rel, int(eidto)) ) + +def _remove_pending(req, eidfrom, rel, eidto, kind): + key = 'pending_%s' % kind + pendings = req.session.data[key] + pendings.remove( (int(eidfrom), rel, int(eidto)) ) + +@ajaxfunc(output_type='json') +def remove_pending_insert(self, args): + eidfrom, rel, eidto = args + _remove_pending(self._cw, eidfrom, rel, eidto, 'insert') + +@ajaxfunc(output_type='json') +def add_pending_inserts(self, tripletlist): + for eidfrom, rel, eidto in tripletlist: + _add_pending(self._cw, eidfrom, rel, eidto, 'insert') + +@ajaxfunc(output_type='json') +def remove_pending_delete(self, args): + eidfrom, rel, eidto = args + _remove_pending(self._cw, eidfrom, rel, eidto, 'delete') + +@ajaxfunc(output_type='json') +def add_pending_delete(self, args): + eidfrom, rel, eidto = args + _add_pending(self._cw, eidfrom, rel, eidto, 'delete') + + +class GenericRelationsWidget(fw.FieldWidget): + + def render(self, form, field, renderer): + stream = [] + w = stream.append + req = form._cw + _ = req._ + __ = _ + eid = form.edited_entity.eid + w(u'<table id="relatedEntities">') + for rschema, role, related in field.relations_table(form): + # already linked entities + if related: + label = rschema.display_name(req, role, context=form.edited_entity.cw_etype) + w(u'<tr><th class="labelCol">%s</th>' % label) + w(u'<td>') + w(u'<ul class="list-unstyled">') + for viewparams in related: + w(u'<li>%s<span 
id="span%s" class="%s">%s</span></li>' + % (viewparams[1], viewparams[0], viewparams[2], viewparams[3])) + if not form.force_display and form.maxrelitems < len(related): + link = (u'<span>' + '[<a href="javascript: window.location.href+=\'&__force_display=1\'">%s</a>]' + '</span>' % _('view all')) + w(u'<li>%s</li>' % link) + w(u'</ul>') + w(u'</td>') + w(u'</tr>') + pendings = list(field.restore_pending_inserts(form)) + if not pendings: + w(u'<tr><th> </th><td> </td></tr>') + else: + for row in pendings: + # soon to be linked to entities + w(u'<tr id="tr%s">' % row[1]) + w(u'<th>%s</th>' % row[3]) + w(u'<td>') + w(u'<a class="handle" title="%s" href="%s">[x]</a>' % + (_('cancel this insert'), row[2])) + w(u'<a id="a%s" class="editionPending" href="%s">%s</a>' + % (row[1], row[4], xml_escape(row[5]))) + w(u'</td>') + w(u'</tr>') + w(u'<tr id="relationSelectorRow_%s" class="separator">' % eid) + w(u'<th class="labelCol">') + w(u'<select id="relationSelector_%s" tabindex="%s" ' + 'onchange="javascript:showMatchingSelect(this.options[this.selectedIndex].value,%s);">' + % (eid, req.next_tabindex(), xml_escape(json_dumps(eid)))) + w(u'<option value="">%s</option>' % _('select a relation')) + for i18nrtype, rschema, role in field.relations: + # more entities to link to + w(u'<option value="%s_%s">%s</option>' % (rschema, role, i18nrtype)) + w(u'</select>') + w(u'</th>') + w(u'<td id="unrelatedDivs_%s"></td>' % eid) + w(u'</tr>') + w(u'</table>') + return '\n'.join(stream) + + +class GenericRelationsField(ff.Field): + widget = GenericRelationsWidget + + def __init__(self, relations, name='_cw_generic_field', **kwargs): + assert relations + kwargs['eidparam'] = True + super(GenericRelationsField, self).__init__(name, **kwargs) + self.relations = relations + + def process_posted(self, form): + todelete = get_pending_deletes(form._cw) + if todelete: + delete_relations(form._cw, todelete) + toinsert = get_pending_inserts(form._cw) + if toinsert: + insert_relations(form._cw, toinsert) + return () + + def relations_table(self, form): + """yiels 3-tuples (rtype, role, related_list) + where <related_list> itself a list of : + - node_id (will be the entity element's DOM id) + - appropriate javascript's togglePendingDelete() function call + - status 'pendingdelete' or '' + - oneline view of related entity + """ + entity = form.edited_entity + pending_deletes = get_pending_deletes(form._cw, entity.eid) + for label, rschema, role in self.relations: + related = [] + if entity.has_eid(): + rset = entity.related(rschema, role, limit=form.related_limit) + if role == 'subject': + haspermkwargs = {'fromeid': entity.eid} + else: + haspermkwargs = {'toeid': entity.eid} + if rschema.has_perm(form._cw, 'delete', **haspermkwargs): + toggleable_rel_link_func = toggleable_relation_link + else: + toggleable_rel_link_func = lambda x, y, z: u'' + for row in range(rset.rowcount): + nodeid = relation_id(entity.eid, rschema, role, + rset[row][0]) + if nodeid in pending_deletes: + status, label = u'pendingDelete', '+' + else: + status, label = u'', 'x' + dellink = toggleable_rel_link_func(entity.eid, nodeid, label) + eview = form._cw.view('oneline', rset, row=row) + related.append((nodeid, dellink, status, eview)) + yield (rschema, role, related) + + def restore_pending_inserts(self, form): + """used to restore edition page as it was before clicking on + 'search for <some entity type>' + """ + entity = form.edited_entity + pending_inserts = set(get_pending_inserts(form._cw, form.edited_entity.eid)) + for pendingid in 
pending_inserts: + eidfrom, rtype, eidto = pendingid.split(':') + pendingid = 'id' + pendingid + if int(eidfrom) == entity.eid: # subject + label = display_name(form._cw, rtype, 'subject', + entity.cw_etype) + reid = eidto + else: + label = display_name(form._cw, rtype, 'object', + entity.cw_etype) + reid = eidfrom + jscall = "javascript: cancelPendingInsert('%s', 'tr', null, %s);" \ + % (pendingid, entity.eid) + rset = form._cw.eid_rset(reid) + eview = form._cw.view('text', rset, row=0) + yield rtype, pendingid, jscall, label, reid, eview + + +class UnrelatedDivs(EntityView): + __regid__ = 'unrelateddivs' + __select__ = match_form_params('relation') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + relname, role = self._cw.form.get('relation').rsplit('_', 1) + rschema = self._cw.vreg.schema.rschema(relname) + hidden = 'hidden' in self._cw.form + is_cell = 'is_cell' in self._cw.form + self.w(self.build_unrelated_select_div(entity, rschema, role, + is_cell=is_cell, hidden=hidden)) + + def build_unrelated_select_div(self, entity, rschema, role, + is_cell=False, hidden=True): + options = [] + divid = 'div%s_%s_%s' % (rschema.type, role, entity.eid) + selectid = 'select%s_%s_%s' % (rschema.type, role, entity.eid) + if rschema.symmetric or role == 'subject': + targettypes = rschema.objects(entity.e_schema) + etypes = '/'.join(sorted(etype.display_name(self._cw) for etype in targettypes)) + else: + targettypes = rschema.subjects(entity.e_schema) + etypes = '/'.join(sorted(etype.display_name(self._cw) for etype in targettypes)) + etypes = uilib.cut(etypes, self._cw.property_value('navigation.short-line-size')) + options.append('<option>%s %s</option>' % (self._cw._('select a'), etypes)) + options += self._get_select_options(entity, rschema, role) + options += self._get_search_options(entity, rschema, role, targettypes) + relname, role = self._cw.form.get('relation').rsplit('_', 1) + return u"""\ +<div class="%s" id="%s"> + <select id="%s" onchange="javascript: addPendingInsert(this.options[this.selectedIndex], %s, %s, '%s');"> + %s + </select> +</div> +""" % (hidden and 'hidden' or '', divid, selectid, + xml_escape(json_dumps(entity.eid)), is_cell and 'true' or 'null', relname, + '\n'.join(options)) + + def _get_select_options(self, entity, rschema, role): + """add options to search among all entities of each possible type""" + options = [] + pending_inserts = get_pending_inserts(self._cw, entity.eid) + rtype = rschema.type + form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity) + field = form.field_by_name(rschema, role, entity.e_schema) + limit = self._cw.property_value('navigation.combobox-limit') + # NOTE: expect 'limit' arg on choices method of relation field + for eview, reid in field.vocabulary(form, limit=limit): + if reid is None: + if eview: # skip blank value + options.append('<option class="separator">-- %s --</option>' + % xml_escape(eview)) + elif reid != ff.INTERNAL_FIELD_VALUE: + optionid = relation_id(entity.eid, rtype, role, reid) + if optionid not in pending_inserts: + # prefix option's id with letters to make valid XHTML wise + options.append('<option id="id%s" value="%s">%s</option>' % + (optionid, reid, xml_escape(eview))) + return options + + def _get_search_options(self, entity, rschema, role, targettypes): + """add options to search among all entities of each possible type""" + options = [] + _ = self._cw._ + for eschema in targettypes: + mode = '%s:%s:%s:%s' % (role, entity.eid, rschema.type, eschema) + url = 
self._cw.build_url(entity.rest_path(), vid='search-associate', + __mode=mode) + options.append((eschema.display_name(self._cw), + '<option value="%s">%s %s</option>' % ( + xml_escape(url), _('Search for'), eschema.display_name(self._cw)))) + return [o for l, o in sorted(options)] + + +# The automatic entity form #################################################### + +class AutomaticEntityForm(forms.EntityFieldsForm): + """AutomaticEntityForm is an automagic form to edit any entity. It + is designed to be fully generated from schema but highly + configurable through uicfg. + + Of course, as for other forms, you can also customise it by specifying + various standard form parameters on selection, overriding, or + adding/removing fields in selected instances. + """ + __regid__ = 'edition' + + cwtarget = 'eformframe' + cssclass = 'entityForm' + copy_nav_params = True + form_buttons = [fw.SubmitButton(), + fw.Button(stdmsgs.BUTTON_APPLY, cwaction='apply'), + fw.Button(stdmsgs.BUTTON_CANCEL, + {'class': fw.Button.css_class + ' cwjs-edition-cancel'})] + # for attributes selection when searching in uicfg.autoform_section + formtype = 'main' + # set this to a list of [(relation, role)] if you want to explictily tell + # which relations should be edited + display_fields = None + # action on the form tag + _default_form_action_path = 'validateform' + + @iclassmethod + def field_by_name(cls_or_self, name, role=None, eschema=None): + """return field with the given name and role. If field is not explicitly + defined for the form but `eclass` is specified, guess_field will be + called. + """ + try: + return super(AutomaticEntityForm, cls_or_self).field_by_name(name, role, eschema) + except f.FieldNotFound: + if name == '_cw_generic_field' and not isinstance(cls_or_self, type): + return cls_or_self._generic_relations_field() + raise + + # base automatic entity form methods ####################################### + + def __init__(self, *args, **kwargs): + super(AutomaticEntityForm, self).__init__(*args, **kwargs) + self.uicfg_afs = self._cw.vreg['uicfg'].select( + 'autoform_section', self._cw, entity=self.edited_entity) + entity = self.edited_entity + if entity.has_eid(): + entity.complete() + for rtype, role in self.editable_attributes(): + try: + self.field_by_name(str(rtype), role) + continue # explicitly specified + except f.FieldNotFound: + # has to be guessed + try: + field = self.field_by_name(str(rtype), role, + eschema=entity.e_schema) + self.fields.append(field) + except f.FieldNotFound: + # meta attribute such as <attr>_format + continue + if self.fieldsets_in_order: + fsio = list(self.fieldsets_in_order) + else: + fsio = [None] + self.fieldsets_in_order = fsio + # add fields for relation whose target should have an inline form + for formview in self.inlined_form_views(): + field = self._inlined_form_view_field(formview) + self.fields.append(field) + if not field.fieldset in fsio: + fsio.append(field.fieldset) + if self.formtype == 'main': + # add the generic relation field if necessary + if entity.has_eid() and ( + self.display_fields is None or + '_cw_generic_field' in self.display_fields): + try: + field = self.field_by_name('_cw_generic_field') + except f.FieldNotFound: + # no editable relation + pass + else: + self.fields.append(field) + if not field.fieldset in fsio: + fsio.append(field.fieldset) + self.maxrelitems = self._cw.property_value('navigation.related-limit') + self.force_display = bool(self._cw.form.get('__force_display')) + fnum = len(self.fields) + self.fields.sort(key=lambda f: 
f.order is None and fnum or f.order) + + @property + def related_limit(self): + if self.force_display: + return None + return self.maxrelitems + 1 + + # autoform specific fields ################################################# + + def _generic_relations_field(self): + srels_by_cat = self.editable_relations() + if not srels_by_cat: + raise f.FieldNotFound('_cw_generic_field') + fieldset = 'This %s:' % self.edited_entity.e_schema + return GenericRelationsField(self.editable_relations(), + fieldset=fieldset, label=None) + + def _inlined_form_view_field(self, view): + # XXX allow more customization + kwargs = self.uicfg_affk.etype_get(self.edited_entity.e_schema, + view.rtype, view.role, view.etype) + if kwargs is None: + kwargs = {} + return InlinedFormField(view=view, **kwargs) + + # methods mapping edited entity relations to fields in the form ############ + + def _relations_by_section(self, section, permission='add', strict=False): + """return a list of (relation schema, target schemas, role) matching + given category(ies) and permission + """ + return self.uicfg_afs.relations_by_section( + self.edited_entity, self.formtype, section, permission, strict) + + def editable_attributes(self, strict=False): + """return a list of (relation schema, role) to edit for the entity""" + if self.display_fields is not None: + schema = self._cw.vreg.schema + return [(schema[rtype], role) for rtype, role in self.display_fields] + if self.edited_entity.has_eid() and not self.edited_entity.cw_has_perm('update'): + return [] + action = 'update' if self.edited_entity.has_eid() else 'add' + return [(rtype, role) for rtype, _, role in self._relations_by_section( + 'attributes', action, strict)] + + def editable_relations(self): + """return a sorted list of (relation's label, relation'schema, role) for + relations in the 'relations' section + """ + result = [] + for rschema, _, role in self._relations_by_section('relations', + strict=True): + result.append( (rschema.display_name(self.edited_entity._cw, role, + self.edited_entity.cw_etype), + rschema, role) ) + return sorted(result) + + def inlined_relations(self): + """return a list of (relation schema, target schemas, role) matching + given category(ies) and permission + """ + return self._relations_by_section('inlined') + + # inlined forms control #################################################### + + def inlined_form_views(self): + """compute and return list of inlined form views (hosting the inlined + form object) + """ + allformviews = [] + entity = self.edited_entity + for rschema, ttypes, role in self.inlined_relations(): + # show inline forms only if there's one possible target type + # for rschema + if len(ttypes) != 1: + self.warning('entity related by the %s relation should have ' + 'inlined form but there is multiple target types, ' + 'dunno what to do', rschema) + continue + tschema = ttypes[0] + ttype = tschema.type + formviews = list(self.inline_edition_form_view(rschema, ttype, role)) + card = rschema.role_rdef(entity.e_schema, ttype, role).role_cardinality(role) + # there is no related entity and we need at least one: we need to + # display one explicit inline-creation view + if self.should_display_inline_creation_form(rschema, formviews, card): + formviews += self.inline_creation_form_view(rschema, ttype, role) + # we can create more than one related entity, we thus display a link + # to add new related entities + if self.must_display_add_new_relation_link(rschema, role, tschema, + ttype, formviews, card): + addnewlink = 
self._cw.vreg['views'].select( + 'inline-addnew-link', self._cw, + etype=ttype, rtype=rschema, role=role, card=card, + peid=self.edited_entity.eid, + petype=self.edited_entity.e_schema, pform=self) + formviews.append(addnewlink) + allformviews += formviews + return allformviews + + def should_display_inline_creation_form(self, rschema, existant, card): + """return true if a creation form should be inlined + + by default true if there is no related entity and we need at least one + """ + return not existant and card in '1+' + + def should_display_add_new_relation_link(self, rschema, existant, card): + """return true if we should add a link to add a new creation form + (through ajax call) + + by default true if there is no related entity or if the relation has + multiple cardinality + """ + return not existant or card in '+*' + + def must_display_add_new_relation_link(self, rschema, role, tschema, + ttype, existant, card): + """return true if we must add a link to add a new creation form + (through ajax call) + + by default true if there is no related entity or if the relation has + multiple cardinality and it is permitted to add the inlined object and + relation. + """ + return (self.should_display_add_new_relation_link( + rschema, existant, card) and + self.check_inlined_rdef_permissions( + rschema, role, tschema, ttype)) + + def check_inlined_rdef_permissions(self, rschema, role, tschema, ttype): + """return true if permissions are granted on the inlined object and + relation""" + if not tschema.has_perm(self._cw, 'add'): + return False + entity = self.edited_entity + rdef = entity.e_schema.rdef(rschema, role, ttype) + if entity.has_eid(): + if role == 'subject': + rdefkwargs = {'fromeid': entity.eid} + else: + rdefkwargs = {'toeid': entity.eid} + return rdef.has_perm(self._cw, 'add', **rdefkwargs) + return rdef.may_have_permission('add', self._cw) + + + def should_hide_add_new_relation_link(self, rschema, card): + """return true if once an inlined creation form is added, the 'add new' + link should be hidden + + by default true if the relation has single cardinality + """ + return card in '1?' 
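
Note (illustration only, not part of the changeset): the inline-form hooks above are driven by the `autoform_section` uicfg registry that this module configures further down (`_AFS.tag_subject_of(..., 'main', 'inlined')`). A minimal sketch of how a cube would opt one of its own relations into an inlined creation form, assuming a hypothetical `Ticket attachment File` relation (all three names are made up for the example):

    # sketch only: 'Ticket', 'attachment' and 'File' are hypothetical schema names
    from cubicweb.web.views import uicfg

    _afs = uicfg.autoform_section
    # render related File entities as inlined sub-forms in Ticket's main edition form
    _afs.tag_subject_of(('Ticket', 'attachment', 'File'), 'main', 'inlined')
    # keep the reverse direction out of the generic relations widget
    _afs.tag_object_of(('Ticket', 'attachment', 'File'), 'main', 'hidden')

Whether an empty inline creation form or an "add new" link is then displayed is decided by the cardinality checks above (`card in '1+'` for forcing a creation form, `card in '+*'` for offering the add link).
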
+ + def inline_edition_form_view(self, rschema, ttype, role): + """yield inline form views for already related entities through the + given relation + """ + entity = self.edited_entity + related = entity.has_eid() and entity.related(rschema, role) + if related: + vvreg = self._cw.vreg['views'] + # display inline-edition view for all existing related entities + for i, relentity in enumerate(related.entities()): + if relentity.cw_has_perm('update'): + yield vvreg.select('inline-edition', self._cw, + rset=related, row=i, col=0, + etype=ttype, rtype=rschema, role=role, + peid=entity.eid, pform=self) + + def inline_creation_form_view(self, rschema, ttype, role): + """yield inline form views to a newly related (hence created) entity + through the given relation + """ + try: + yield self._cw.vreg['views'].select('inline-creation', self._cw, + etype=ttype, rtype=rschema, role=role, + peid=self.edited_entity.eid, + petype=self.edited_entity.e_schema, + pform=self) + except NoSelectableObject: + # may be raised if user doesn't have the permission to add ttype entities (no checked + # earlier) or if there is some custom selector on the view + pass + + +## default form ui configuration ############################################## + +_AFS = uicfg.autoform_section +# use primary and not generated for eid since it has to be an hidden +_AFS.tag_attribute(('*', 'eid'), 'main', 'hidden') +_AFS.tag_attribute(('*', 'eid'), 'muledit', 'attributes') +_AFS.tag_attribute(('*', 'description'), 'main', 'attributes') +_AFS.tag_attribute(('*', 'has_text'), 'main', 'hidden') +_AFS.tag_subject_of(('*', 'in_state', '*'), 'main', 'hidden') +for rtype in ('creation_date', 'modification_date', 'cwuri', + 'owned_by', 'created_by', 'cw_source'): + _AFS.tag_subject_of(('*', rtype, '*'), 'main', 'metadata') + +_AFS.tag_subject_of(('*', 'by_transition', '*'), 'main', 'attributes') +_AFS.tag_subject_of(('*', 'by_transition', '*'), 'muledit', 'attributes') +_AFS.tag_object_of(('*', 'by_transition', '*'), 'main', 'hidden') +_AFS.tag_object_of(('*', 'from_state', '*'), 'main', 'hidden') +_AFS.tag_object_of(('*', 'to_state', '*'), 'main', 'hidden') +_AFS.tag_subject_of(('*', 'wf_info_for', '*'), 'main', 'attributes') +_AFS.tag_subject_of(('*', 'wf_info_for', '*'), 'muledit', 'attributes') +_AFS.tag_object_of(('*', 'wf_info_for', '*'), 'main', 'hidden') +_AFS.tag_attribute(('CWEType', 'final'), 'main', 'hidden') +_AFS.tag_attribute(('CWRType', 'final'), 'main', 'hidden') +_AFS.tag_attribute(('CWUser', 'firstname'), 'main', 'attributes') +_AFS.tag_attribute(('CWUser', 'surname'), 'main', 'attributes') +_AFS.tag_attribute(('CWUser', 'last_login_time'), 'main', 'metadata') +_AFS.tag_subject_of(('CWUser', 'in_group', '*'), 'main', 'attributes') +_AFS.tag_subject_of(('CWUser', 'in_group', '*'), 'muledit', 'attributes') +_AFS.tag_subject_of(('*', 'primary_email', '*'), 'main', 'relations') +_AFS.tag_subject_of(('*', 'use_email', '*'), 'main', 'inlined') +_AFS.tag_subject_of(('CWRelation', 'relation_type', '*'), 'main', 'inlined') +_AFS.tag_subject_of(('CWRelation', 'from_entity', '*'), 'main', 'inlined') +_AFS.tag_subject_of(('CWRelation', 'to_entity', '*'), 'main', 'inlined') + +_AFFK = uicfg.autoform_field_kwargs +_AFFK.tag_attribute(('RQLExpression', 'expression'), + {'widget': fw.TextInput}) +_AFFK.tag_subject_of(('TrInfo', 'wf_info_for', '*'), + {'widget': fw.HiddenInput}) + +def registration_callback(vreg): + global etype_relation_field + + def etype_relation_field(etype, rtype, role='subject'): + try: + eschema = 
vreg.schema.eschema(etype) + return AutomaticEntityForm.field_by_name(rtype, role, eschema) + except (KeyError, f.FieldNotFound): + # catch KeyError raised when etype/rtype not found in schema + AutomaticEntityForm.error('field for %s %s may not be found in schema' % (rtype, role)) + return None + + vreg.register_all(globals().values(), __name__) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/basecomponents.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/basecomponents.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,228 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""Bases HTML components: + +* the rql input form +* the logged user link +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from logilab.mtconverter import xml_escape +from logilab.common.registry import yes +from logilab.common.deprecation import class_renamed +from rql import parse + +from cubicweb.predicates import (match_form_params, match_context, + multi_etypes_rset, configuration_values, + anonymous_user, authenticated_user) +from cubicweb.schema import display_name +from cubicweb.utils import wrap_on_write +from cubicweb.uilib import toggle_action +from cubicweb.web import component +from cubicweb.web.htmlwidgets import MenuWidget, PopupBoxMenu + +VISIBLE_PROP_DEF = { + _('visible'): dict(type='Boolean', default=True, + help=_('display the component or not')), + } + +class RQLInputForm(component.Component): + """build the rql input form, usually displayed in the header""" + __regid__ = 'rqlinput' + cw_property_defs = VISIBLE_PROP_DEF + visible = False + + def call(self, view=None): + req = self._cw + if hasattr(view, 'filter_box_context_info'): + rset = view.filter_box_context_info()[0] + else: + rset = self.cw_rset + # display multilines query as one line + rql = rset is not None and rset.printable_rql() or req.form.get('rql', '') + rql = rql.replace(u"\n", u" ") + rql_suggestion_comp = self._cw.vreg['components'].select_or_none('rql.suggestions', self._cw) + if rql_suggestion_comp is not None: + # enable autocomplete feature only if the rql + # suggestions builder is available + self._cw.add_css('jquery.ui.css') + self._cw.add_js(('cubicweb.ajax.js', 'jquery.ui.js')) + self._cw.add_onload('$("#rql").autocomplete({source: "%s"});' + % (req.build_url('json', fname='rql_suggest'))) + self.w(u'''<div id="rqlinput" class="%s"><form action="%s"><fieldset> +<input type="text" id="rql" name="rql" value="%s" title="%s" tabindex="%s" accesskey="q" class="searchField" /> +''' % (not self.cw_propval('visible') and 'hidden' or '', + req.build_url('view'), xml_escape(rql), req._('full text or RQL query'), req.next_tabindex())) + if req.search_state[0] != 'normal': + self.w(u'<input type="hidden" 
name="__mode" value="%s"/>' + % ':'.join(req.search_state[1])) + self.w(u'</fieldset></form></div>') + + + +class HeaderComponent(component.CtxComponent): # XXX rename properly along with related context + """if the user is the anonymous user, build a link to login else display a menu + with user'action (preference, logout, etc...) + """ + __abstract__ = True + cw_property_defs = component.override_ctx( + component.CtxComponent, + vocabulary=['header-center', 'header-left', 'header-right', ]) + # don't want user to hide this component using an cwproperty + site_wide = True + context = _('header-center') + + +class ApplLogo(HeaderComponent): + """build the instance logo, usually displayed in the header""" + __regid__ = 'logo' + __select__ = yes() # no need for a cnx + order = -1 + context = _('header-left') + + def render(self, w): + w(u'<a id="logo" href="%s"></a>' % self._cw.base_url()) + + +class ApplicationName(HeaderComponent): + """display the instance name""" + __regid__ = 'appliname' + + # XXX support kwargs for compat with other components which gets the view as + # argument + def render(self, w, **kwargs): + title = self._cw.property_value('ui.site-title') + if title: + w(u'<span id="appliName"><a href="%s">%s</a></span>' % ( + self._cw.base_url(), xml_escape(title))) + + +class CookieLoginComponent(HeaderComponent): + __regid__ = 'anonuserlink' + __select__ = (HeaderComponent.__select__ & anonymous_user() + & configuration_values('auth-mode', 'cookie')) + context = 'header-right' + loginboxid = 'popupLoginBox' + _html = u"""<a class="logout icon-login" title="%s" href="javascript: +cw.htmlhelpers.popupLoginBox('%s', '__login');">%s</a>""" + + def render(self, w): + # XXX bw compat, though should warn about subclasses redefining call + self.w = w + self.call() + + def call(self): + self._cw.add_css('cubicweb.pictograms.css') + self.w(self._html % (self._cw._('login / password'), + self.loginboxid, self._cw._('i18n_login_popup'))) + self._cw.view('logform', rset=self.cw_rset, id=self.loginboxid, + klass='%s hidden' % self.loginboxid, title=False, + showmessage=False, w=self.w) + + +class HTTPLoginComponent(CookieLoginComponent): + __select__ = (HeaderComponent.__select__ & anonymous_user() + & configuration_values('auth-mode', 'http')) + + def render(self, w): + # this redirects to the 'login' controller which in turn + # will raise a 401/Unauthorized + req = self._cw + w(u'[<a class="logout" title="%s" href="%s">%s</a>]' + % (req._('login / password'), req.build_url('login'), req._('login'))) + + +_UserLink = class_renamed('_UserLink', HeaderComponent) +AnonUserLink = class_renamed('AnonUserLink', CookieLoginComponent) +AnonUserLink.__abstract__ = True +AnonUserLink.__select__ &= yes(1) + + +class AnonUserStatusLink(HeaderComponent): + __regid__ = 'userstatus' + __select__ = anonymous_user() + context = _('header-right') + order = HeaderComponent.order - 10 + + def render(self, w): + pass + +class AuthenticatedUserStatus(AnonUserStatusLink): + __select__ = authenticated_user() + + def render(self, w): + # display useractions and siteactions + self._cw.add_css('cubicweb.pictograms.css') + actions = self._cw.vreg['actions'].possible_actions(self._cw, rset=self.cw_rset) + box = MenuWidget('', 'userActionsBox', _class='', islist=False) + menu = PopupBoxMenu(self._cw.user.login, isitem=False, link_class='icon-user') + box.append(menu) + for action in actions.get('useractions', ()): + menu.append(self.action_link(action)) + if actions.get('useractions') and actions.get('siteactions'): + 
menu.append(self.separator()) + for action in actions.get('siteactions', ()): + menu.append(self.action_link(action)) + box.render(w=w) + + +class ApplicationMessage(component.Component): + """display messages given using the __message/_cwmsgid parameter into a + special div section + """ + __select__ = yes() + __regid__ = 'applmessages' + # don't want user to hide this component using a cwproperty + cw_property_defs = {} + + def call(self, msg=None): + if msg is None: + msg = self._cw.message # XXX don't call self._cw.message twice + self.w(u'<div id="appMsg" onclick="%s" class="%s">\n' % + (toggle_action('appMsg'), (msg and ' ' or 'hidden'))) + self.w(u'<div class="message" id="%s">%s</div>' % (self.domid, msg)) + self.w(u'</div>') + + +# contextual components ######################################################## + + +class MetaDataComponent(component.EntityCtxComponent): + __regid__ = 'metadata' + context = 'navbottom' + order = 1 + + def render_body(self, w): + self.entity.view('metadata', w=w) + + +class SectionLayout(component.Layout): + __select__ = match_context('navtop', 'navbottom', + 'navcontenttop', 'navcontentbottom') + cssclass = 'section' + + def render(self, w): + if self.init_rendering(): + view = self.cw_extra_kwargs['view'] + w(u'<div class="%s %s" id="%s">' % (self.cssclass, view.cssclass, + view.domid)) + with wrap_on_write(w, '<h4>') as wow: + view.render_title(wow) + view.render_body(w) + w(u'</div>\n') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/basecontrollers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/basecontrollers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,302 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""Set of base controllers, which are directly plugged into the application +object to handle publication. 
+""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from six import text_type + +from logilab.common.deprecation import deprecated + +from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError, + AuthenticationError, UndoTransactionException, + Forbidden) +from cubicweb.utils import json_dumps +from cubicweb.predicates import (authenticated_user, anonymous_user, + match_form_params) +from cubicweb.web import Redirect, RemoteCallFailed +from cubicweb.web.controller import Controller, append_url_params +from cubicweb.web.views import vid_from_rset +import cubicweb.transaction as tx + +@deprecated('[3.15] jsonize is deprecated, use AjaxFunction appobjects instead') +def jsonize(func): + """decorator to sets correct content_type and calls `json_dumps` on + results + """ + def wrapper(self, *args, **kwargs): + self._cw.set_content_type('application/json') + return json_dumps(func(self, *args, **kwargs)) + wrapper.__name__ = func.__name__ + return wrapper + +@deprecated('[3.15] xhtmlize is deprecated, use AjaxFunction appobjects instead') +def xhtmlize(func): + """decorator to sets correct content_type and calls `xmlize` on results""" + def wrapper(self, *args, **kwargs): + self._cw.set_content_type(self._cw.html_content_type()) + result = func(self, *args, **kwargs) + return ''.join((u'<div>', result.strip(), + u'</div>')) + wrapper.__name__ = func.__name__ + return wrapper + +@deprecated('[3.15] check_pageid is deprecated, use AjaxFunction appobjects instead') +def check_pageid(func): + """decorator which checks the given pageid is found in the + user's session data + """ + def wrapper(self, *args, **kwargs): + data = self._cw.session.data.get(self._cw.pageid) + if data is None: + raise RemoteCallFailed(self._cw._('pageid-not-found')) + return func(self, *args, **kwargs) + return wrapper + + +class LoginController(Controller): + __regid__ = 'login' + __select__ = anonymous_user() + + def publish(self, rset=None): + """log in the instance""" + if self._cw.vreg.config['auth-mode'] == 'http': + # HTTP authentication + raise AuthenticationError() + else: + # Cookie authentication + return self.appli.need_login_content(self._cw) + +class LoginControllerForAuthed(Controller): + __regid__ = 'login' + __select__ = ~anonymous_user() + + def publish(self, rset=None): + """log in the instance""" + path = self._cw.form.get('postlogin_path', '') + # Redirect expects a URL, not a path. 
Also path may contain a query + # string, hence should not be given to _cw.build_url() + raise Redirect(self._cw.base_url() + path) + + +class LogoutController(Controller): + __regid__ = 'logout' + + def publish(self, rset=None): + """logout from the instance""" + return self.appli.session_handler.logout(self._cw, self.goto_url()) + + def goto_url(self): + # * in http auth mode, url will be ignored + # * in cookie mode redirecting to the index view is enough : either + # anonymous connection is allowed and the page will be displayed or + # we'll be redirected to the login form + msg = self._cw._('you have been logged out') + return self._cw.build_url('view', vid='loggedout') + + +class ViewController(Controller): + """standard entry point : + - build result set + - select and call main template + """ + __regid__ = 'view' + template = 'main-template' + + def publish(self, rset=None): + """publish a request, returning an encoded string""" + view, rset = self._select_view_and_rset(rset) + view.set_http_cache_headers() + if self._cw.is_client_cache_valid(): + return b'' + template = self.appli.main_template_id(self._cw) + return self._cw.vreg['views'].main_template(self._cw, template, + rset=rset, view=view) + + def _select_view_and_rset(self, rset): + req = self._cw + if rset is None and not hasattr(req, '_rql_processed'): + req._rql_processed = True + if req.cnx: + rset = self.process_rql() + else: + rset = None + vid = req.form.get('vid') or vid_from_rset(req, rset, self._cw.vreg.schema) + try: + view = self._cw.vreg['views'].select(vid, req, rset=rset) + except ObjectNotFound: + self.warning("the view %s could not be found", vid) + req.set_message(req._("The view %s could not be found") % vid) + vid = vid_from_rset(req, rset, self._cw.vreg.schema) + view = self._cw.vreg['views'].select(vid, req, rset=rset) + except NoSelectableObject: + if rset: + req.set_message(req._("The view %s can not be applied to this query") % vid) + else: + req.set_message(req._("You have no access to this view or it can not " + "be used to display the current data.")) + vid = req.form.get('fallbackvid') or vid_from_rset(req, rset, req.vreg.schema) + view = req.vreg['views'].select(vid, req, rset=rset) + return view, rset + + def execute_linkto(self, eid=None): + """XXX __linkto parameter may cause security issue + + defined here since custom application controller inheriting from this + one use this method? + """ + req = self._cw + if not '__linkto' in req.form: + return + if eid is None: + eid = int(req.form['eid']) + for linkto in req.list_form_param('__linkto', pop=True): + rtype, eids, target = linkto.split(':') + assert target in ('subject', 'object') + eids = eids.split('_') + if target == 'subject': + rql = 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype + else: + rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype + for teid in eids: + req.execute(rql, {'x': eid, 'y': int(teid)}) + + +def _validation_error(req, ex): + req.cnx.rollback() + ex.translate(req._) # translate messages using ui language + # XXX necessary to remove existant validation error? 
+ # imo (syt), it's not necessary + req.session.data.pop(req.form.get('__errorurl'), None) + foreid = ex.entity + eidmap = req.data.get('eidmap', {}) + for var, eid in eidmap.items(): + if foreid == eid: + foreid = var + break + return (foreid, ex.errors) + + +def _validate_form(req, vreg): + # XXX should use the `RemoteCallFailed` mechanism + try: + ctrl = vreg['controllers'].select('edit', req=req) + except NoSelectableObject: + return (False, {None: req._('not authorized')}, None) + try: + ctrl.publish(None) + except ValidationError as ex: + return (False, _validation_error(req, ex), ctrl._edited_entity) + except Redirect as ex: + try: + txuuid = req.cnx.commit() # ValidationError may be raised on commit + except ValidationError as ex: + return (False, _validation_error(req, ex), ctrl._edited_entity) + except Exception as ex: + req.cnx.rollback() + req.exception('unexpected error while validating form') + return (False, str(ex).decode('utf-8'), ctrl._edited_entity) + else: + if txuuid is not None: + req.data['last_undoable_transaction'] = txuuid + # complete entity: it can be used in js callbacks where we might + # want every possible information + if ctrl._edited_entity: + ctrl._edited_entity.complete() + return (True, ex.location, ctrl._edited_entity) + except Exception as ex: + req.cnx.rollback() + req.exception('unexpected error while validating form') + return (False, text_type(ex), ctrl._edited_entity) + return (False, '???', None) + + +class FormValidatorController(Controller): + __regid__ = 'validateform' + + def response(self, domid, status, args, entity): + callback = str(self._cw.form.get('__onsuccess', 'null')) + errback = str(self._cw.form.get('__onfailure', 'null')) + cbargs = str(self._cw.form.get('__cbargs', 'null')) + self._cw.set_content_type('text/html') + jsargs = json_dumps((status, args, entity)) + return """<script type="text/javascript"> + window.parent.handleFormValidationResponse('%s', %s, %s, %s, %s); +</script>""" % (domid, callback, errback, jsargs, cbargs) + + def publish(self, rset=None): + self._cw.ajax_request = True + # XXX unclear why we have a separated controller here vs + # js_validate_form on the json controller + status, args, entity = _validate_form(self._cw, self._cw.vreg) + domid = self._cw.form.get('__domid', 'entityForm') + return self.response(domid, status, args, entity).encode(self._cw.encoding) + + +class JSonController(Controller): + __regid__ = 'json' + + def publish(self, rset=None): + warn('[3.15] JSONController is deprecated, use AjaxController instead', + DeprecationWarning) + ajax_controller = self._cw.vreg['controllers'].select('ajax', self._cw, appli=self.appli) + return ajax_controller.publish(rset) + + +class MailBugReportController(Controller): + __regid__ = 'reportbug' + __select__ = match_form_params('description') + + def publish(self, rset=None): + req = self._cw + desc = req.form['description'] + # The description is generated and signed by cubicweb itself, check + # description's signature so we don't want to send spam here + sign = req.form.get('__signature', '') + if not (sign and req.vreg.config.check_text_sign(desc, sign)): + raise Forbidden('Invalid content') + self.sendmail(req.vreg.config['submit-mail'], + req._('%s error report') % req.vreg.config.appid, + desc) + raise Redirect(req.build_url(__message=req._('bug report sent'))) + + +class UndoController(Controller): + __regid__ = 'undo' + __select__ = authenticated_user() & match_form_params('txuuid') + + def publish(self, rset=None): + txuuid = 
self._cw.form['txuuid'] + try: + self._cw.cnx.undo_transaction(txuuid) + except UndoTransactionException as exc: + errors = exc.errors + #This will cause a rollback in main_publish + raise ValidationError(None, {None: '\n'.join(errors)}) + else : + self.redirect() # Will raise Redirect + + def redirect(self, msg=None): + req = self._cw + msg = msg or req._("transaction undone") + self._redirect({'_cwmsgid': req.set_redirect_message(msg)}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/basetemplates.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/basetemplates.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,530 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""default templates for CubicWeb web client""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import class_renamed +from logilab.common.registry import objectify_predicate +from logilab.common.decorators import classproperty + +from cubicweb.predicates import match_kwargs, no_cnx, anonymous_user +from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW, StartupView +from cubicweb.utils import UStringIO +from cubicweb.schema import display_name +from cubicweb.web import component, formfields as ff, formwidgets as fw +from cubicweb.web.views import forms + +# main templates ############################################################## + +class LogInOutTemplate(MainTemplate): + + def call(self): + self.set_request_content_type() + w = self.w + self.write_doctype() + self.template_header('text/html', self._cw._('login_action')) + w(u'<body>\n') + self.content(w) + w(u'</body>') + + def template_header(self, content_type, view=None, page_title='', additional_headers=()): + w = self.whead + # explictly close the <base> tag to avoid IE 6 bugs while browsing DOM + w(u'<base href="%s"></base>' % xml_escape(self._cw.base_url())) + w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n' + % (content_type, self._cw.encoding)) + w(NOINDEX) + w(NOFOLLOW) + w(u'\n'.join(additional_headers) + u'\n') + self.wview('htmlheader', rset=self.cw_rset) + w(u'<title>%s\n' % xml_escape(page_title)) + + def content(self): + raise NotImplementedError() + + +class LogInTemplate(LogInOutTemplate): + __regid__ = 'login' + __select__ = anonymous_user() + title = 'log in' + + def content(self, w): + self.wview('logform', rset=self.cw_rset, id='loginBox', klass='') + + +class LoggedOutTemplate(StartupView): + __regid__ = 'loggedout' + __select__ = anonymous_user() + title = 'logged out' + + def call(self): + msg = self._cw._('you have been logged out') + if self._cw.cnx: + comp = self._cw.vreg['components'].select('applmessages', self._cw) + 
comp.render(w=self.w, msg=msg) + self.wview('index') + else: + self.w(u'
          %s
          ' % msg) + + +@objectify_predicate +def modal_view(cls, req, rset, *args, **kwargs): + if req.form.get('__modal', None): + return 1 + +@objectify_predicate +def templatable_view(cls, req, rset, *args, **kwargs): + view = kwargs.pop('view', None) + if view is None: + return 1 + if view.binary: + return 0 + if '__notemplate' in req.form: + return 0 + return view.templatable + + +class NonTemplatableViewTemplate(MainTemplate): + """main template for any non templatable views (xml, binaries, etc.)""" + __regid__ = 'main-template' + __select__ = ~templatable_view() + + def call(self, view): + view.set_request_content_type() + view.set_stream() + if (('__notemplate' in self._cw.form) + and view.templatable + and view.content_type == self._cw.html_content_type()): + view.w(u'
          ') + view.render() + view.w(u'
          ') + else: + view.render() + # have to replace our stream by view's stream (which may be a binary + # stream) + self._stream = view._stream + + +class ModalMainTemplate(MainTemplate): + """ a no-decoration main template for standard views + that typically live in a modal context """ + __regid__ = 'main-template' + __select__ = templatable_view() & modal_view() + + def call(self, view): + view.set_request_content_type() + view.render(w=self.w) + + +class TheMainTemplate(MainTemplate): + """default main template : + + - call header / footer templates + """ + __regid__ = 'main-template' + __select__ = templatable_view() + + def call(self, view): + self.set_request_content_type() + self.template_header(self.content_type, view) + w = self.w + w(u'
          \n') + vtitle = self._cw.form.get('vtitle') + if vtitle: + w(u'
          %s
          \n' % xml_escape(vtitle)) + # display entity type restriction component + etypefilter = self._cw.vreg['components'].select_or_none( + 'etypenavigation', self._cw, rset=self.cw_rset) + if etypefilter and etypefilter.cw_propval('visible'): + etypefilter.render(w=w) + nav_html = UStringIO() + if view and not view.handle_pagination: + view.paginate(w=nav_html.write) + w(nav_html.getvalue()) + w(u'
          \n') + view.render(w=w) + w(u'
          \n') # close id=contentmain + w(nav_html.getvalue()) + w(u'
          \n') # closes id=pageContent + self.template_footer(view) + + def template_header(self, content_type, view=None, page_title='', additional_headers=()): + page_title = page_title or view.page_title() + additional_headers = additional_headers or view.html_headers() + self.template_html_header(content_type, page_title, additional_headers) + self.template_body_header(view) + + def template_html_header(self, content_type, page_title, additional_headers=()): + w = self.whead + lang = self._cw.lang + self.write_doctype() + self._cw.html_headers.define_var('BASE_URL', self._cw.base_url()) + self._cw.html_headers.define_var('DATA_URL', self._cw.datadir_url) + w(u'\n' + % (content_type, self._cw.encoding)) + w(u'\n'.join(additional_headers) + u'\n') + self.wview('htmlheader', rset=self.cw_rset) + if page_title: + w(u'%s\n' % xml_escape(page_title)) + + def template_body_header(self, view): + w = self.w + w(u'\n') + self.wview('header', rset=self.cw_rset, view=view) + w(u'
          \n') + self.nav_column(view, 'left') + w(u'\n') + self.nav_column(view, 'right') + self.w(u'
          \n') + components = self._cw.vreg['components'] + rqlcomp = components.select_or_none('rqlinput', self._cw, rset=self.cw_rset) + if rqlcomp: + rqlcomp.render(w=self.w, view=view) + msgcomp = components.select_or_none('applmessages', self._cw, rset=self.cw_rset) + if msgcomp: + msgcomp.render(w=self.w) + self.content_header(view) + + def template_footer(self, view=None): + self.content_footer(view) + self.w(u'
          \n') + self.wview('footer', rset=self.cw_rset) + self.w(u'') + + def nav_column(self, view, context): + boxes = list(self._cw.vreg['ctxcomponents'].poss_visible_objects( + self._cw, rset=self.cw_rset, view=view, context=context)) + if boxes: + getlayout = self._cw.vreg['components'].select + self.w(u'\n') + + def content_header(self, view=None): + """by default, display informal messages in content header""" + self.wview('contentheader', rset=self.cw_rset, view=view) + + def content_footer(self, view=None): + self.wview('contentfooter', rset=self.cw_rset, view=view) + + +class ErrorTemplate(TheMainTemplate): + """fallback template if an internal error occurred during displaying the main + template. This template may be called for authentication error, which means + that req.cnx and req.user may not be set. + """ + __regid__ = 'error-template' + + def call(self): + """display an unexpected error""" + self.set_request_content_type() + self._cw.reset_headers() + view = self._cw.vreg['views'].select('error', self._cw, rset=self.cw_rset) + self.template_header(self.content_type, view, self._cw._('an error occurred'), + [NOINDEX, NOFOLLOW]) + view.render(w=self.w) + self.template_footer(view) + + def template_header(self, content_type, view=None, page_title='', additional_headers=()): + w = self.whead + lang = self._cw.lang + self.write_doctype() + w(u'\n' + % (content_type, self._cw.encoding)) + w(u'\n'.join(additional_headers)) + self.wview('htmlheader', rset=self.cw_rset) + w(u'%s\n' % xml_escape(page_title)) + self.w(u'\n') + + def template_footer(self, view=None): + self.w(u'') + + +class SimpleMainTemplate(TheMainTemplate): + + __regid__ = 'main-no-top' + + def template_header(self, content_type, view=None, page_title='', additional_headers=()): + page_title = page_title or view.page_title() + additional_headers = additional_headers or view.html_headers() + whead = self.whead + lang = self._cw.lang + self.write_doctype() + whead(u'\n' + % (content_type, self._cw.encoding)) + whead(u'\n'.join(additional_headers) + u'\n') + self.wview('htmlheader', rset=self.cw_rset) + w = self.w + whead(u'%s\n' % xml_escape(page_title)) + w(u'\n') + w(u'
          ') + w(u'\n') + w(u'') + w(u'' % (entity.cw_row % 2 and u'even' or u'odd')) + # XXX turn this into a widget used on the eid field + w(u'' % checkbox('eid', entity.eid, + checked=qeid in values)) + for field in fields: + error = form.field_error(field) + if error: + w(u'\n') + w(u'') + else: + self._main_display_fields = fields + + +class EntityFormRenderer(BaseFormRenderer): + """This is the 'default' renderer for entity's form. + + You can still use form_renderer_id = 'base' if you want base FormRenderer + layout even when selected for an entity. + """ + __regid__ = 'default' + # needs some additional points in some case (XXX explain cases) + __select__ = is_instance('Any') & yes() + + _options = FormRenderer._options + ('main_form_title',) + main_form_title = _('main informations') + + def open_form(self, form, values): + attrs_fs_label = '' + if self.main_form_title: + attrs_fs_label += ('
          %s
          ' + % self._cw._(self.main_form_title)) + attrs_fs_label += '
          ' + return attrs_fs_label + super(EntityFormRenderer, self).open_form(form, values) + + def close_form(self, form, values): + """seems dumb but important for consistency w/ close form, and necessary + for form renderers overriding open_form to use something else or more than + and
          + """ + return super(EntityFormRenderer, self).close_form(form, values) + '
          ' + + def render_buttons(self, w, form): + if len(form.form_buttons) == 3: + w("""
          ') + + def topleft_header(self): + logo = self._cw.vreg['components'].select_or_none('logo', self._cw, + rset=self.cw_rset) + if logo and logo.cw_propval('visible'): + w = self.w + w(u'\n') + w(u'\n') + w(u'\n') + + +# page parts templates ######################################################## + +class HTMLHeader(View): + """default html headers""" + __regid__ = 'htmlheader' + + def call(self, **kwargs): + self.favicon() + self.stylesheets() + self.javascripts() + self.alternates() + + def favicon(self): + favicon = self._cw.uiprops.get('FAVICON', None) + if favicon: + self.whead(u'\n' % favicon) + + def stylesheets(self): + req = self._cw + add_css = req.add_css + for css in req.uiprops['STYLESHEETS']: + add_css(css, localfile=False) + for css in req.uiprops['STYLESHEETS_PRINT']: + add_css(css, u'print', localfile=False) + for css in req.uiprops['STYLESHEETS_IE']: + add_css(css, localfile=False, ieonly=True) + + def javascripts(self): + for jscript in self._cw.uiprops['JAVASCRIPTS']: + self._cw.add_js(jscript, localfile=False) + + def alternates(self): + urlgetter = self._cw.vreg['components'].select_or_none('rss_feed_url', + self._cw, rset=self.cw_rset) + if urlgetter is not None: + self.whead(u'\n' + % xml_escape(urlgetter.feed_url())) + + +class HTMLPageHeader(View): + """default html page header""" + __regid__ = 'header' + main_cell_components = ('appliname', 'breadcrumbs') + headers = (('headtext', 'header-left'), + ('header-center', 'header-center'), + ('header-right', 'header-right') + ) + + def call(self, view, **kwargs): + self.main_header(view) + self.w(u'
          ') + self.state_header() + self.w(u'
          ') + + def main_header(self, view): + """build the top menu with authentification info and the rql box""" + w = self.w + w(u'\n') + for colid, context in self.headers: + w(u'') + w(u'\n') + + def state_header(self): + state = self._cw.search_state + if state[0] == 'normal': + return + _ = self._cw._ + value = self._cw.view('oneline', self._cw.eid_rset(state[1][1])) + msg = ' '.join((_("searching for"), + display_name(self._cw, state[1][3]), + _("to associate with"), value, + _("by relation"), '"', + display_name(self._cw, state[1][2], state[1][0]), + '"')) + return self.w(u'
          %s
          ' % msg) + + +class HTMLPageFooter(View): + """default html page footer: include footer actions""" + __regid__ = 'footer' + + def call(self, **kwargs): + self.w(u'') + + def footer_content(self): + actions = self._cw.vreg['actions'].possible_actions(self._cw, + rset=self.cw_rset) + footeractions = actions.get('footer', ()) + for i, action in enumerate(footeractions): + self.w(u'%s' % (action.url(), + self._cw._(action.title))) + if i < (len(footeractions) - 1): + self.w(u' | ') + +class HTMLContentHeader(View): + """default html page content header: + * include message component if selectable for this request + * include selectable content navigation components + """ + __regid__ = 'contentheader' + + def call(self, view, **kwargs): + """by default, display informal messages in content header""" + components = self._cw.vreg['ctxcomponents'].poss_visible_objects( + self._cw, rset=self.cw_rset, view=view, context='navtop') + if components: + self.w(u'
          ') + for comp in components: + comp.render(w=self.w, view=view) + self.w(u'
          ') + + +class HTMLContentFooter(View): + """default html page content footer: include selectable content navigation + components + """ + __regid__ = 'contentfooter' + + def call(self, view, **kwargs): + components = self._cw.vreg['ctxcomponents'].poss_visible_objects( + self._cw, rset=self.cw_rset, view=view, context='navbottom') + if components: + self.w(u'
          ') + for comp in components: + comp.render(w=self.w, view=view) + self.w(u'
          ') + +class BaseLogForm(forms.FieldsForm): + """Abstract Base login form to be used by any login form + """ + __abstract__ = True + + __regid__ = 'logform' + domid = 'loginForm' + needs_css = ('cubicweb.login.css',) + + onclick_base = "javascript: cw.htmlhelpers.popupLoginBox('%s', '%s');" + onclick_args = (None, None) + + @classproperty + def form_buttons(cls): + # we use a property because sub class will need to define their own onclick_args. + # Therefor we can't juste make the string formating when instanciating this class + onclick = cls.onclick_base % cls.onclick_args + form_buttons = [fw.SubmitButton(label=_('log in'), + attrs={'class': 'loginButton'}), + fw.ResetButton(label=_('cancel'), + attrs={'class': 'loginButton', + 'onclick': onclick}),] + ## Can't shortcut next access because __dict__ is a "dictproxy" which + ## does not support items assignement. + # cls.__dict__['form_buttons'] = form_buttons + return form_buttons + + def form_action(self): + if self.action is None: + # reuse existing redirection if it exists + target = self._cw.form.get('postlogin_path', + self._cw.relative_path()) + url_args = {} + if target and target != '/': + url_args['postlogin_path'] = target + return self._cw.build_url('login', __secure__=True, **url_args) + return super(BaseLogForm, self).form_action() + +class LogForm(BaseLogForm): + """Simple login form that send username and password + """ + __regid__ = 'logform' + domid = 'loginForm' + needs_css = ('cubicweb.login.css',) + # XXX have to recall fields name since python is mangling __login/__password + __login = ff.StringField('__login', widget=fw.TextInput({'class': 'data'})) + __password = ff.StringField('__password', label=_('password'), + widget=fw.PasswordSingleInput({'class': 'data'})) + + onclick_args = ('popupLoginBox', '__login') + + +class LogFormView(View): + # XXX an awful lot of hardcoded assumptions there + # makes it unobvious to reuse/specialize + __regid__ = 'logform' + __select__ = match_kwargs('id', 'klass') + + title = 'log in' + + def call(self, id, klass, title=True, showmessage=True): + w = self.w + w(u'
          ' % (id, klass)) + if title: + stitle = self._cw.property_value('ui.site-title') + if stitle: + stitle = xml_escape(stitle) + else: + stitle = u' ' + w(u'
          %s
          ' % stitle) + w(u'
          \n') + if showmessage and self._cw.message: + w(u'
          %s
          \n' % self._cw.message) + config = self._cw.vreg.config + if config['auth-mode'] != 'http': + self.login_form(id) # Cookie authentication + w(u'
          ') + w(u'
          \n') + + def login_form(self, id): + cw = self._cw + form = cw.vreg['forms'].select('logform', cw) + if cw.vreg.config['allow-email-login']: + label = cw._('login or email') + else: + label = cw.pgettext('CWUser', 'login') + form.field_by_name('__login').label = label + form.render(w=self.w, table_class='', display_progress_div=False) + cw.html_headers.add_onload('jQuery("#__login:visible").focus()') + +LogFormTemplate = class_renamed('LogFormTemplate', LogFormView) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/baseviews.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/baseviews.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,644 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +HTML views +~~~~~~~~~~ + +Special views +````````````` + +.. autoclass:: NullView +.. autoclass:: NoResultView +.. autoclass:: FinalView + + +Base entity views +````````````````` + +.. autoclass:: InContextView +.. autoclass:: OutOfContextView +.. autoclass:: OneLineView + +Those are used to display a link to an entity, whose label depends on the entity +having to be displayed in or out of context (of another entity): some entities +make sense in the context of another entity. For instance, the `Version` of a +`Project` in forge. So one may expect that 'incontext' will be called when +display a version from within the context of a project, while 'outofcontext"' +will be called in other cases. In our example, the 'incontext' view of the +version would be something like '0.1.2', while the 'outofcontext' view would +include the project name, e.g. 'baz 0.1.2' (since only a version number without +the associated project doesn't make sense if you don't know yet that you're +talking about the famous 'baz' project. |cubicweb| tries to make guess and call +'incontext'/'outofcontext' nicely. When it can't know, the 'oneline' view should +be used. + + +List entity views +````````````````` + +.. autoclass:: ListView +.. autoclass:: SimpleListView +.. autoclass:: SameETypeListView +.. autoclass:: CSVView + +Those list views can be given a 'subvid' arguments, telling the view to use of +each item in the list. When not specified, the value of the 'redirect_vid' +attribute of :class:`ListItemView` (for 'listview') or of +:class:`SimpleListView` will be used. This default to 'outofcontext' for 'list' +/ 'incontext' for 'simplelist' + + +Text entity views +~~~~~~~~~~~~~~~~~ + +Basic HTML view have some variants to be used when generating raw text, not HTML +(for notifications for instance). Also, as explained above, some of the HTML +views use those text views as a basis. + +.. autoclass:: TextView +.. autoclass:: InContextTextView +.. 
autoclass:: OutOfContextView +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from datetime import timedelta +from warnings import warn + +from six.moves import range + +from rql import nodes + +from logilab.mtconverter import TransformError, xml_escape +from logilab.common.registry import yes + +from cubicweb import NoSelectableObject, tags +from cubicweb.predicates import empty_rset, one_etype_rset, match_kwargs +from cubicweb.schema import display_name +from cubicweb.view import EntityView, AnyRsetView, View +from cubicweb.uilib import cut +from cubicweb.web.views import calendar + + +class NullView(AnyRsetView): + """:__regid__: *null* + + This view is the default view used when nothing needs to be rendered. It is + always applicable and is usually used as fallback view when calling + :meth:`_cw.view` to display nothing if the result set is empty. + """ + __regid__ = 'null' + __select__ = yes() + def call(self, **kwargs): + pass + cell_call = call + + +class NoResultView(View): + """:__regid__: *noresult* + + This view is the default view to be used when no result has been found + (i.e. empty result set). + + It's usually used as fallback view when calling :meth:`_cw.view` to display + "no results" if the result set is empty. + """ + __regid__ = 'noresult' + __select__ = empty_rset() + + def call(self, **kwargs): + self.w(u'
          %s
          \n' + % self._cw._('No result matching query')) + + +class FinalView(AnyRsetView): + """:__regid__: *final* + + Display the value of a result set cell with minimal transformations + (i.e. you'll get a number for entities). It is applicable on any result set, + though usually dedicated for cells containing an attribute's value. + """ + __regid__ = 'final' + + def cell_call(self, row, col, props=None, format='text/html'): + value = self.cw_rset.rows[row][col] + if value is None: + self.w(u'') + return + etype = self.cw_rset.description[row][col] + if etype == 'String': + entity, rtype = self.cw_rset.related_entity(row, col) + if entity is not None: + # call entity's printable_value which may have more information + # about string format & all + self.w(entity.printable_value(rtype, value, format=format)) + return + value = self._cw.printable_value(etype, value, props) + if etype in ('Time', 'Interval'): + self.w(value.replace(' ', ' ')) + else: + self.wdata(value) + + +class InContextView(EntityView): + """:__regid__: *incontext* + + This view is used when the entity should be considered as displayed in its + context. By default it produces the result of ``entity.dc_title()`` wrapped in a + link leading to the primary view of the entity. + """ + __regid__ = 'incontext' + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + desc = cut(entity.dc_description(), 50) + self.w(u'%s' % ( + xml_escape(entity.absolute_url()), xml_escape(desc), + xml_escape(entity.dc_title()))) + +class OutOfContextView(EntityView): + """:__regid__: *outofcontext* + + This view is used when the entity should be considered as displayed out of + its context. By default it produces the result of ``entity.dc_long_title()`` + wrapped in a link leading to the primary view of the entity. + """ + __regid__ = 'outofcontext' + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + desc = cut(entity.dc_description(), 50) + self.w(u'%s' % ( + xml_escape(entity.absolute_url()), xml_escape(desc), + xml_escape(entity.dc_long_title()))) + + +class OneLineView(EntityView): + """:__regid__: *oneline* + + This view is used when we can't tell if the entity should be considered as + displayed in or out of context. By default it produces the result of the + `text` view in a link leading to the primary view of the entity. + """ + __regid__ = 'oneline' + title = _('oneline') + + def cell_call(self, row, col, **kwargs): + """the one line view for an entity: linked text view + """ + entity = self.cw_rset.get_entity(row, col) + desc = cut(entity.dc_description(), 50) + title = cut(entity.dc_title(), + self._cw.property_value('navigation.short-line-size')) + self.w(u'%s' % ( + xml_escape(entity.absolute_url()), xml_escape(desc), + xml_escape(title))) + + +# text views ################################################################### + +class TextView(EntityView): + """:__regid__: *text* + + This is the simplest text view for an entity. By default it returns the + result of the entity's `dc_title()` method, which is cut to fit the + `navigation.short-line-size` property if necessary. + """ + __regid__ = 'text' + title = _('text') + content_type = 'text/plain' + + def call(self, **kwargs): + """The view is called for an entire result set, by default loop other + rows of the result set and call the same view on the particular row. + + Subclasses views that are applicable on None result sets will have to + override this method. 
+ """ + rset = self.cw_rset + if rset is None: + raise NotImplementedError(self) + for i in range(len(rset)): + self.wview(self.__regid__, rset, row=i, **kwargs) + if len(rset) > 1: + self.w(u"\n") + + def cell_call(self, row, col=0, **kwargs): + entity = self.cw_rset.get_entity(row, col) + self.w(cut(entity.dc_title(), + self._cw.property_value('navigation.short-line-size'))) + + +class InContextTextView(TextView): + """:__regid__: *textincontext* + + Similar to the `text` view, but called when an entity is considered in + context (see description of incontext HTML view for more information on + this). By default it displays what's returned by the `dc_title()` method of + the entity. + """ + __regid__ = 'textincontext' + title = None # not listed as a possible view + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + self.w(entity.dc_title()) + + +class OutOfContextTextView(InContextTextView): + """:__regid__: *textoutofcontext* + + Similar to the `text` view, but called when an entity is considered out of + context (see description of outofcontext HTML view for more information on + this). By default it displays what's returned by the `dc_long_title()` + method of the entity. + """ + __regid__ = 'textoutofcontext' + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + self.w(entity.dc_long_title()) + + +# list views ################################################################## + +class ListView(EntityView): + """:__regid__: *list* + + This view displays a list of entities by creating a HTML list (`
`<ul>`) and
+    call the view `listitem` for each entity of the result set. The 'list' view
+    will generate HTML like:
+
+    .. sourcecode:: html
+
+      <ul class="section">
+        <li>"result of 'subvid' view for a row</li>
+        ...
+      </ul>
            + + If you wish to use a different view for each entity, either subclass and + change the :attr:`item_vid` class attribute or specify a `subvid` argument + when calling this view. + """ + __regid__ = 'list' + title = _('list') + item_vid = 'listitem' + + def call(self, klass=None, title=None, subvid=None, listid=None, **kwargs): + """display a list of entities by calling their view + + :param listid: the DOM id to use for the root element + """ + # XXX much of the behaviour here should probably be outside this view + if subvid is None and 'subvid' in self._cw.form: + subvid = self._cw.form.pop('subvid') # consume it + if listid: + listid = u' id="%s"' % listid + else: + listid = u'' + if title: + self.w(u'

            %s

            \n' % (listid, klass or 'section', title)) + self.w(u'
              \n') + else: + self.w(u'\n' % (listid, klass or 'section')) + for i in range(self.cw_rset.rowcount): + self.cell_call(row=i, col=0, vid=subvid, klass=klass, **kwargs) + self.w(u'
            \n') + if title: + self.w(u'\n') + + def cell_call(self, row, col=0, vid=None, klass=None, **kwargs): + self.w(u'
          • ') + self.wview(self.item_vid, self.cw_rset, row=row, col=col, vid=vid, **kwargs) + self.w(u'
          • \n') + + +class ListItemView(EntityView): + __regid__ = 'listitem' + + @property + def redirect_vid(self): + if self._cw.search_state[0] == 'normal': + return 'outofcontext' + return 'outofcontext-search' + + def cell_call(self, row, col, vid=None, **kwargs): + if not vid: + vid = self.redirect_vid + try: + self.wview(vid, self.cw_rset, row=row, col=col, **kwargs) + except NoSelectableObject: + if vid == self.redirect_vid: + raise + self.wview(self.redirect_vid, self.cw_rset, row=row, col=col, **kwargs) + + +class SimpleListView(ListItemView): + """:__regid__: *simplelist* + + Similar to :class:~cubicweb.web.views.baseviews.ListView but using '
<div>'
+    instead of '<ul>'. It relies on '<div>' behaviour to separate items. HTML will
+    look like
+
+    .. sourcecode:: html
+
+      <div class="section">"result of 'subvid' view for a row</div>
+      ...
+
+    It relies on base :class:`~cubicweb.view.View` class implementation of the
+    :meth:`call` method to insert those <div>
              . + """ + __regid__ = 'simplelist' + redirect_vid = 'incontext' + + def call(self, subvid=None, **kwargs): + """display a list of entities by calling their view + + :param listid: the DOM id to use for the root element + """ + if subvid is None and 'vid' in kwargs: + warn("should give a 'subvid' argument instead of 'vid'", + DeprecationWarning, stacklevel=2) + else: + kwargs['vid'] = subvid + return super(SimpleListView, self).call(**kwargs) + + +class SameETypeListView(EntityView): + """:__regid__: *sameetypelist* + + This view displays a list of entities of the same type, in HTML section + ('
              ') and call the view `sameetypelistitem` for each entity of the + result set. It's designed to get a more adapted global list when displayed + entities are all of the same type (for instance, display gallery if there + are only images entities). + """ + __regid__ = 'sameetypelist' + __select__ = EntityView.__select__ & one_etype_rset() + item_vid = 'sameetypelistitem' + + @property + def title(self): + etype = next(iter(self.cw_rset.column_types(0))) + return display_name(self._cw, etype, form='plural') + + def call(self, **kwargs): + """display a list of entities by calling their view""" + showtitle = kwargs.pop('showtitle', not 'vtitle' in self._cw.form) + if showtitle: + self.w(u'

              %s

' % self.title)
+        super(SameETypeListView, self).call(**kwargs)
+
+    def cell_call(self, row, col=0, **kwargs):
+        self.wview(self.item_vid, self.cw_rset, row=row, col=col, **kwargs)
+
+
+class SameETypeListItemView(EntityView):
+    __regid__ = 'sameetypelistitem'
+
+    def cell_call(self, row, col, **kwargs):
+        self.wview('listitem', self.cw_rset, row=row, col=col, **kwargs)
+
+
+class CSVView(SimpleListView):
+    """:__regid__: *csv*
+
+    This view displays each entity in a comma-separated list. It is NOT related
+    to the well-known text file format.
+    """
+    __regid__ = 'csv'
+    redirect_vid = 'incontext'
+    separator = u', '
+
+    def call(self, subvid=None, **kwargs):
+        kwargs['vid'] = subvid
+        rset = self.cw_rset
+        for i in range(len(rset)):
+            self.cell_call(i, 0, **kwargs)
+            if i < rset.rowcount-1:
+                self.w(self.separator)
+
+
+# XXX to be documented views ###################################################
+
+class MetaDataView(EntityView):
+    """paragraph view of some metadata"""
+    __regid__ = 'metadata'
+    show_eid = True
+
+    def cell_call(self, row, col):
+        _ = self._cw._
+        entity = self.cw_rset.get_entity(row, col)
+        self.w(u'
              ') + if self.show_eid: + self.w(u'%s #%s - ' % (entity.dc_type(), entity.eid)) + if entity.modification_date != entity.creation_date: + self.w(u'%s ' % _('latest update on')) + self.w(u'%s, ' + % self._cw.format_date(entity.modification_date)) + # entities from external source may not have a creation date (eg ldap) + if entity.creation_date: + self.w(u'%s ' % _('created on')) + self.w(u'%s' + % self._cw.format_date(entity.creation_date)) + if entity.creator: + if entity.creation_date: + self.w(u' %s ' % _('by')) + else: + self.w(u' %s ' % _('created_by')) + self.w(u'%s' % entity.creator.name()) + meta = entity.cw_metainformation() + if meta['source']['uri'] != 'system': + self.w(u' (%s' % _('cw_source')) + self.w(u' %s)' % meta['source']['uri']) + self.w(u'
              ') + + +class TreeItemView(ListItemView): + __regid__ = 'treeitem' + + def cell_call(self, row, col): + self.wview('incontext', self.cw_rset, row=row, col=col) + + +class TextSearchResultView(EntityView): + """this view is used to display full-text search + + It tries to highlight part of data where the search word appears. + + XXX: finish me (fixed line width, fixed number of lines, CSS, etc.) + """ + __regid__ = 'tsearch' + + def cell_call(self, row, col, **kwargs): + entity = self.cw_rset.complete_entity(row, col) + self.w(entity.view('incontext')) + searched = self.cw_rset.searched_text() + if searched is None: + return + searched = searched.lower() + highlighted = '%s' % searched + for attr in entity.e_schema.indexable_attributes(): + try: + value = xml_escape(entity.printable_value(attr, format='text/plain').lower()) + except TransformError as ex: + continue + except Exception: + continue + if searched in value: + contexts = [] + for ctx in value.split(searched): + if len(ctx) > 30: + contexts.append(u'...' + ctx[-30:]) + else: + contexts.append(ctx) + value = u'\n' + highlighted.join(contexts) + self.w(value.replace('\n', '
<br/>'))
+
+
+class TooltipView(EntityView):
+    """An entity view used in a tooltip"""
+    __regid__ = 'tooltip'
+    def cell_call(self, row, col):
+        self.wview('oneline', self.cw_rset, row=row, col=col)
+
+
+class GroupByView(EntityView):
+    """grouped view of a result set. The `group_key` method returns the group
+    key of an entity (a string or a tuple of strings).
+
+    For each group, display a link to entities of this group by generating a url
+    like <basepath>/<key> or <basepath>/<key1>/<key2>.
+    """
+    __abstract__ = True
+    __select__ = EntityView.__select__ & match_kwargs('basepath')
+    entity_attribute = None
+    reversed = False
+
+    def index_url(self, basepath, key, **kwargs):
+        if isinstance(key, (list, tuple)):
+            key = '/'.join(key)
+        return self._cw.build_url('%s/%s' % (basepath, key),
+                                  **kwargs)
+
+    def index_link(self, basepath, key, items):
+        url = self.index_url(basepath, key)
+        if isinstance(key, (list, tuple)):
+            key = ' '.join(key)
+        return tags.a(key, href=url)
+
+    def group_key(self, entity, **kwargs):
+        value = getattr(entity, self.entity_attribute)
+        if callable(value):
+            value = value()
+        return value
+
+    def call(self, basepath, maxentries=None, **kwargs):
+        index = {}
+        for entity in self.cw_rset.entities():
+            index.setdefault(self.group_key(entity, **kwargs), []).append(entity)
+        displayed = sorted(index)
+        if self.reversed:
+            displayed = reversed(displayed)
+        if maxentries is None:
+            needmore = False
+        else:
+            needmore = len(index) > maxentries
+            displayed = tuple(displayed)[:maxentries]
+        w = self.w
+        w(u'
                ') + for key in displayed: + if key: + w(u'
              • %s
              • \n' % + self.index_link(basepath, key, index[key])) + if needmore: + url = self._cw.build_url('view', vid=self.__regid__, + rql=self.cw_rset.printable_rql()) + w( u'
              • %s
              • \n' % tags.a(u'[%s]' % self._cw._('see more'), + href=url)) + w(u'
              \n') + + +class ArchiveView(GroupByView): + """archive view of a result set. Links to months are built using a basepath + parameters, eg using url like // + """ + __regid__ = 'cw.archive.by_date' + entity_attribute = 'creation_date' + reversed = True + + def group_key(self, entity, **kwargs): + value = super(ArchiveView, self).group_key(entity, **kwargs) + return '%04d' % value.year, '%02d' % value.month + + def index_link(self, basepath, key, items): + """represent a single month entry""" + year, month = key + label = u'%s %s [%s]' % (self._cw._(calendar.MONTHNAMES[int(month)-1]), + year, len(items)) + etypes = set(entity.cw_etype for entity in items) + vtitle = '%s %s' % (', '.join(display_name(self._cw, etype, 'plural') + for etype in etypes), + label) + title = self._cw._('archive for %(month)s/%(year)s') % { + 'month': month, 'year': year} + url = self.index_url(basepath, key, vtitle=vtitle) + return tags.a(label, href=url, title=title) + + +class AuthorView(GroupByView): + """author view of a result set. Links to month are built using a basepath + parameters, eg using url like / + """ + __regid__ = 'cw.archive.by_author' + entity_attribute = 'creator' + + def group_key(self, entity, **kwargs): + value = super(AuthorView, self).group_key(entity, **kwargs) + if value: + return (value.name(), value.login) + return (None, None) + + def index_link(self, basepath, key, items): + if key[0] is None: + return + label = u'%s [%s]' % (key[0], len(items)) + etypes = set(entity.cw_etype for entity in items) + vtitle = self._cw._('%(etype)s by %(author)s') % { + 'etype': ', '.join(display_name(self._cw, etype, 'plural') + for etype in etypes), + 'author': label} + url = self.index_url(basepath, key[1], vtitle=vtitle) + title = self._cw._('archive for %(author)s') % {'author': key[0]} + return tags.a(label, href=url, title=title) + + +# bw compat #################################################################### + +from logilab.common.deprecation import class_moved, class_deprecated + +from cubicweb.web.views import boxes, xmlrss, primary, tableview +PrimaryView = class_moved(primary.PrimaryView) +SideBoxView = class_moved(boxes.SideBoxView) +XmlView = class_moved(xmlrss.XMLView) +XmlItemView = class_moved(xmlrss.XMLItemView) +XmlRsetView = class_moved(xmlrss.XMLRsetView) +RssView = class_moved(xmlrss.RSSView) +RssItemView = class_moved(xmlrss.RSSItemView) +TableView = class_moved(tableview.TableView) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/bookmark.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/bookmark.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,140 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
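The `GroupByView` base class above only needs an `entity_attribute` (and, when the raw value is not directly usable as a key, a `group_key` override) to build its index, which is how `ArchiveView` and `AuthorView` work. A minimal sketch of such a subclass, assuming a hypothetical `by_etype` view id that groups entities by their type (none of these names are part of this changeset):

.. sourcecode:: python

  from cubicweb.web.views.baseviews import GroupByView

  class ByETypeView(GroupByView):
      """hypothetical grouped view: one index entry per entity type"""
      __regid__ = 'by_etype'          # illustrative view id
      entity_attribute = 'cw_etype'   # plain string attribute, no group_key override needed

  # usage sketch -- the 'basepath' kwarg is mandatory (match_kwargs('basepath')):
  #   self._cw.view('by_etype', rset, basepath='myentities')

As with `ArchiveView`, the link rendered for each group points to a `<basepath>/<key>` url built by `index_url()`.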
+"""Primary view for bookmarks + user's bookmarks box""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from logilab.mtconverter import xml_escape + +from cubicweb import Unauthorized +from cubicweb.predicates import is_instance, one_line_rset +from cubicweb.web import action, component, htmlwidgets, formwidgets as fw +from cubicweb.web.views import uicfg, primary +from cubicweb.web.views.ajaxcontroller import ajaxfunc + +_abaa = uicfg.actionbox_appearsin_addmenu +_abaa.tag_subject_of(('*', 'bookmarked_by', '*'), False) +_abaa.tag_object_of(('*', 'bookmarked_by', '*'), False) + +_afs = uicfg.autoform_section +_afs.tag_object_of(('*', 'bookmarked_by', 'CWUser'), 'main', 'metadata') +_afs.tag_attribute(('Bookmark', 'path'), 'main', 'attributes') +_afs.tag_attribute(('Bookmark', 'path'), 'muledit', 'attributes') + +_affk = uicfg.autoform_field_kwargs +_affk.tag_attribute(('Bookmark', 'path'), {'widget': fw.EditableURLWidget}) + + +class FollowAction(action.Action): + __regid__ = 'follow' + __select__ = one_line_rset() & is_instance('Bookmark') + + title = _('follow') + category = 'mainactions' + + def url(self): + return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).actual_url() + + +class BookmarkPrimaryView(primary.PrimaryView): + __select__ = is_instance('Bookmark') + + def cell_call(self, row, col): + """the primary view for bookmark entity""" + entity = self.cw_rset.complete_entity(row, col) + self.w(u' ') + self.w(u"") + self.w(u"%s : %s" % (self._cw._('Bookmark'), xml_escape(entity.title))) + self.w(u"") + self.w(u'

              ' % ( + xml_escape(entity.actual_url()))) + self.w(u'') + self.w(u'

              %s%s

              ' % (self._cw._('Used by:'), ', '.join(xml_escape(u.name()) + for u in entity.bookmarked_by))) + self.w(u'
              ') + + +class BookmarksBox(component.CtxComponent): + """display a box containing all user's bookmarks""" + __regid__ = 'bookmarks_box' + + title = _('bookmarks') + order = 40 + rql = ('Any B,T,P ORDERBY lower(T) ' + 'WHERE B is Bookmark,B title T, B path P, B bookmarked_by U, ' + 'U eid %(x)s') + + def init_rendering(self): + ueid = self._cw.user.eid + self.bookmarks_rset = self._cw.execute(self.rql, {'x': ueid}) + rschema = self._cw.vreg.schema.rschema('bookmarked_by') + eschema = self._cw.vreg.schema.eschema('Bookmark') + self.can_delete = rschema.has_perm(self._cw, 'delete', toeid=ueid) + self.can_edit = (eschema.has_perm(self._cw, 'add') and + rschema.has_perm(self._cw, 'add', toeid=ueid)) + if not self.bookmarks_rset and not self.can_edit: + raise component.EmptyComponent() + self.items = [] + + def render_body(self, w): + ueid = self._cw.user.eid + req = self._cw + if self.can_delete: + req.add_js('cubicweb.ajax.js') + for bookmark in self.bookmarks_rset.entities(): + label = self.link(bookmark.title, bookmark.action_url()) + if self.can_delete: + dlink = u'[-]' % ( + bookmark.eid, req._('delete this bookmark')) + label = '
              %s %s
              ' % (dlink, label) + self.append(label) + if self.can_edit: + menu = htmlwidgets.BoxMenu(req._('manage bookmarks')) + linkto = 'bookmarked_by:%s:subject' % ueid + # use a relative path so that we can move the instance without + # loosing bookmarks + path = req.relative_path() + # XXX if vtitle specified in params, extract it and use it as + # default value for bookmark's title + url = req.vreg['etypes'].etype_class('Bookmark').cw_create_url( + req, __linkto=linkto, path=path) + menu.append(self.link(req._('bookmark this page'), url)) + if self.bookmarks_rset: + if req.user.is_in_group('managers'): + bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, U eid %s' % ueid + erset = self.bookmarks_rset + else: + # we can't edit shared bookmarks we don't own + bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s' + erset = req.execute(bookmarksrql, {'x': ueid}, + build_descr=False) + bookmarksrql %= {'x': ueid} + if erset: + url = req.build_url(vid='muledit', rql=bookmarksrql) + menu.append(self.link(req._('edit bookmarks'), url)) + url = req.user.absolute_url(vid='xaddrelation', rtype='bookmarked_by', + target='subject') + menu.append(self.link(req._('pick existing bookmarks'), url)) + self.append(menu) + self.render_items(w) + +@ajaxfunc +def delete_bookmark(self, beid): + rql = 'DELETE B bookmarked_by U WHERE B eid %(b)s, U eid %(u)s' + self._cw.execute(rql, {'b': int(beid), 'u' : self._cw.user.eid}) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/boxes.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/boxes.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,279 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Generic boxes for CubicWeb web client: + +* actions box +* search box + +Additional boxes (disabled by default): +* schema box +* possible views box +* startup views box +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from six import text_type, add_metaclass + +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import class_deprecated + +from cubicweb import Unauthorized +from cubicweb.predicates import (match_user_groups, match_kwargs, + non_final_entity, nonempty_rset, + match_context, contextual) +from cubicweb.utils import wrap_on_write +from cubicweb.view import EntityView +from cubicweb.schema import display_name +from cubicweb.web import component, box, htmlwidgets + +# XXX bw compat, some cubes import this class from here +BoxTemplate = box.BoxTemplate +BoxHtml = htmlwidgets.BoxHtml + +class EditBox(component.CtxComponent): + """ + box with all actions impacting the entity displayed: edit, copy, delete + change state, add related entities... 
+ """ + __regid__ = 'edit_box' + + title = _('actions') + order = 2 + contextual = True + __select__ = component.CtxComponent.__select__ & non_final_entity() + + def init_rendering(self): + super(EditBox, self).init_rendering() + _ = self._cw._ + self._menus_in_order = [] + self._menus_by_id = {} + # build list of actions + actions = self._cw.vreg['actions'].possible_actions(self._cw, self.cw_rset, + **self.cw_extra_kwargs) + other_menu = self._get_menu('moreactions', _('more actions')) + for category, defaultmenu in (('mainactions', self), + ('moreactions', other_menu), + ('addrelated', None)): + for action in actions.get(category, ()): + if action.submenu: + menu = self._get_menu(action.submenu) + else: + menu = defaultmenu + action.fill_menu(self, menu) + # if we've nothing but actions in the other_menu, add them directly into the box + if not self.items and len(self._menus_by_id) == 1 and not other_menu.is_empty(): + self.items = other_menu.items + else: # ensure 'more actions' menu appears last + self._menus_in_order.remove(other_menu) + self._menus_in_order.append(other_menu) + for submenu in self._menus_in_order: + self.add_submenu(self, submenu) + if not self.items: + raise component.EmptyComponent() + + def render_title(self, w): + title = self._cw._(self.title) + if self.cw_rset: + etypes = self.cw_rset.column_types(0) + if len(etypes) == 1: + plural = self.cw_rset.rowcount > 1 and 'plural' or '' + etypelabel = display_name(self._cw, next(iter(etypes)), plural) + title = u'%s - %s' % (title, etypelabel.lower()) + w(title) + + def render_body(self, w): + self.render_items(w) + + def _get_menu(self, id, title=None, label_prefix=None): + try: + return self._menus_by_id[id] + except KeyError: + if title is None: + title = self._cw._(id) + self._menus_by_id[id] = menu = htmlwidgets.BoxMenu(title) + menu.label_prefix = label_prefix + self._menus_in_order.append(menu) + return menu + + def add_submenu(self, box, submenu, label_prefix=None): + appendanyway = getattr(submenu, 'append_anyway', False) + if len(submenu.items) == 1 and not appendanyway: + boxlink = submenu.items[0] + if submenu.label_prefix: + # XXX iirk + if hasattr(boxlink, 'label'): + boxlink.label = u'%s %s' % (submenu.label_prefix, boxlink.label) + else: + boxlink = u'%s %s' % (submenu.label_prefix, boxlink) + box.append(boxlink) + elif submenu.items: + box.append(submenu) + elif appendanyway: + box.append(xml_escape(submenu.label)) + + +class SearchBox(component.CtxComponent): + """display a box with a simple search form""" + __regid__ = 'search_box' + + title = _('search') + order = 0 + formdef = u"""
              +
              + + + + + +
              +
              """ + + def render_title(self, w): + w(u"""%s""" + % self._cw._(self.title)) + + def render_body(self, w): + if self._cw.form.pop('__fromsearchbox', None): + rql = self._cw.form.get('rql', '') + else: + rql = '' + tabidx1 = self._cw.next_tabindex() + tabidx2 = self._cw.next_tabindex() + w(self.formdef % {'action': self._cw.build_url('view'), + 'value': xml_escape(rql), + 'id': self.cw_extra_kwargs.get('domid', 'tsearch'), + 'tabindex1': tabidx1, + 'tabindex2': tabidx2}) + + +# boxes disabled by default ################################################### + +class PossibleViewsBox(component.CtxComponent): + """display a box containing links to all possible views""" + __regid__ = 'possible_views_box' + + contextual = True + title = _('possible views') + order = 10 + visible = False # disabled by default + + def init_rendering(self): + self.views = [v for v in self._cw.vreg['views'].possible_views(self._cw, + rset=self.cw_rset) + if v.category != 'startupview'] + if not self.views: + raise component.EmptyComponent() + self.items = [] + + def render_body(self, w): + for category, views in box.sort_by_category(self.views): + menu = htmlwidgets.BoxMenu(self._cw._(category), ident=category) + for view in views: + menu.append(self.action_link(view)) + self.append(menu) + self.render_items(w) + + +class StartupViewsBox(PossibleViewsBox): + """display a box containing links to all startup views""" + __regid__ = 'startup_views_box' + + contextual = False + title = _('startup views') + order = 70 + visible = False # disabled by default + + def init_rendering(self): + self.views = [v for v in self._cw.vreg['views'].possible_views(self._cw) + if v.category == 'startupview'] + if not self.views: + raise component.EmptyComponent() + self.items = [] + + +class RsetBox(component.CtxComponent): + """helper view class to display an rset in a sidebox""" + __select__ = nonempty_rset() & match_kwargs('title', 'vid') + __regid__ = 'rsetbox' + cw_property_defs = {} + context = 'incontext' + + @property + def domid(self): + return super(RsetBox, self).domid + text_type(abs(id(self))) + text_type(abs(id(self.cw_rset))) + + def render_title(self, w): + w(self.cw_extra_kwargs['title']) + + def render_body(self, w): + if 'dispctrl' in self.cw_extra_kwargs: + # XXX do not modify dispctrl! + self.cw_extra_kwargs['dispctrl'].setdefault('subvid', 'outofcontext') + self.cw_extra_kwargs['dispctrl'].setdefault('use_list_limit', 1) + self._cw.view(self.cw_extra_kwargs['vid'], self.cw_rset, w=w, + initargs=self.cw_extra_kwargs) + + # helper classes ############################################################## + +@add_metaclass(class_deprecated) +class SideBoxView(EntityView): + """helper view class to display some entities in a sidebox""" + __deprecation_warning__ = '[3.10] SideBoxView is deprecated, use RsetBox instead (%(cls)s)' + + __regid__ = 'sidebox' + + def call(self, title=u'', **kwargs): + """display a list of entities by calling their view""" + if 'dispctrl' in self.cw_extra_kwargs: + # XXX do not modify dispctrl! 
+ self.cw_extra_kwargs['dispctrl'].setdefault('subvid', 'outofcontext') + self.cw_extra_kwargs['dispctrl'].setdefault('use_list_limit', 1) + if title: + self.cw_extra_kwargs['title'] = title + self.cw_extra_kwargs.setdefault('context', 'incontext') + box = self._cw.vreg['ctxcomponents'].select( + 'rsetbox', self._cw, rset=self.cw_rset, vid='autolimited', + **self.cw_extra_kwargs) + box.render(self.w) + + +class ContextualBoxLayout(component.Layout): + __select__ = match_context('incontext', 'left', 'right') & contextual() + # predefined class in cubicweb.css: contextualBox | contextFreeBox + cssclass = 'contextualBox' + + def render(self, w): + if self.init_rendering(): + view = self.cw_extra_kwargs['view'] + w(u'
              ' % (self.cssclass, view.cssclass, + view.domid)) + with wrap_on_write(w, u'
              ', + u'
              ') as wow: + view.render_title(wow) + w(u'
              ') + view.render_body(w) + # boxFooter div is a CSS place holder (for shadow for example) + w(u'
              \n') + + +class ContextFreeBoxLayout(ContextualBoxLayout): + __select__ = match_context('incontext', 'left', 'right') & ~contextual() + cssclass = 'contextFreeBox' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/calendar.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/calendar.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,244 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""html calendar views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import copy +from datetime import timedelta + +from logilab.mtconverter import xml_escape +from logilab.common.date import todatetime + +from cubicweb.utils import json_dumps, make_uid +from cubicweb.predicates import adaptable +from cubicweb.view import EntityView, EntityAdapter + +# useful constants & functions ################################################ + +ONEDAY = timedelta(1) + +WEEKDAYS = (_("monday"), _("tuesday"), _("wednesday"), _("thursday"), + _("friday"), _("saturday"), _("sunday")) +MONTHNAMES = ( _('january'), _('february'), _('march'), _('april'), _('may'), + _('june'), _('july'), _('august'), _('september'), _('october'), + _('november'), _('december') + ) + + +class ICalendarableAdapter(EntityAdapter): + __needs_bw_compat__ = True + __regid__ = 'ICalendarable' + __abstract__ = True + + @property + def start(self): + """return start date""" + raise NotImplementedError + + @property + def stop(self): + """return stop date""" + raise NotImplementedError + + +# Calendar views ############################################################## + +try: + from vobject import iCalendar + + class iCalView(EntityView): + """A calendar view that generates a iCalendar file (RFC 2445) + + Does apply to ICalendarable compatible entities + """ + __select__ = adaptable('ICalendarable') + paginable = False + content_type = 'text/calendar' + title = _('iCalendar') + templatable = False + __regid__ = 'ical' + + def call(self): + ical = iCalendar() + for i in range(len(self.cw_rset.rows)): + task = self.cw_rset.complete_entity(i, 0) + event = ical.add('vevent') + event.add('summary').value = task.dc_title() + event.add('description').value = task.dc_description() + icalendarable = task.cw_adapt_to('ICalendarable') + if icalendarable.start: + event.add('dtstart').value = icalendarable.start + if icalendarable.stop: + event.add('dtend').value = icalendarable.stop + + buff = ical.serialize() + if not isinstance(buff, unicode): + buff = unicode(buff, self._cw.encoding) + self.w(buff) + +except ImportError: + pass + +class hCalView(EntityView): + """A calendar view that generates a hCalendar file + + Does apply to ICalendarable compatible entities + """ + __regid__ = 'hcal' + __select__ = adaptable('ICalendarable') + paginable = False + title = 
_('hCalendar') + #templatable = False + + def call(self): + self.w(u'
              ') + for i in range(len(self.cw_rset.rows)): + task = self.cw_rset.complete_entity(i, 0) + self.w(u'
              ') + self.w(u'

              %s

              ' % xml_escape(task.dc_title())) + self.w(u'
              %s
              ' + % task.dc_description(format='text/html')) + icalendarable = task.cw_adapt_to('ICalendarable') + if icalendarable.start: + self.w(u'%s' + % (icalendarable.start.isoformat(), + self._cw.format_date(icalendarable.start))) + if icalendarable.stop: + self.w(u'%s' + % (icalendarable.stop.isoformat(), + self._cw.format_date(icalendarable.stop))) + self.w(u'
              ') + self.w(u'
              ') + + +class CalendarItemView(EntityView): + __regid__ = 'calendaritem' + + def cell_call(self, row, col, dates=False): + task = self.cw_rset.complete_entity(row, 0) + task.view('oneline', w=self.w) + if dates: + icalendarable = task.cw_adapt_to('ICalendarable') + if icalendarable.start and icalendarable.stop: + self.w('
              %s' % self._cw._('from %(date)s') + % {'date': self._cw.format_date(icalendarable.start)}) + self.w('
              %s' % self._cw._('to %(date)s') + % {'date': self._cw.format_date(icalendarable.stop)}) + else: + self.w('
              %s'%self._cw.format_date(icalendarable.start + or icalendarable.stop)) + + +class _TaskEntry(object): + def __init__(self, task, color, index=0): + self.task = task + self.color = color + self.index = index + self.length = 1 + icalendarable = task.cw_adapt_to('ICalendarable') + self.start = icalendarable.start + self.stop = icalendarable.stop + + def in_working_hours(self): + """predicate returning True is the task is in working hours""" + if todatetime(self.start).hour > 7 and todatetime(self.stop).hour < 20: + return True + return False + + def is_one_day_task(self): + return self.start and self.stop and self.start.isocalendar() == self.stop.isocalendar() + + +class CalendarView(EntityView): + __regid__ = 'calendar' + __select__ = adaptable('ICalendarable') + + paginable = False + title = _('calendar') + + fullcalendar_options = { + 'firstDay': 1, + 'firstHour': 8, + 'defaultView': 'month', + 'editable': True, + 'header': {'left': 'prev,next today', + 'center': 'title', + 'right': 'month,agendaWeek,agendaDay', + }, + } + + def call(self): + self._cw.add_css(('fullcalendar.css', 'cubicweb.calendar.css')) + self._cw.add_js(('jquery.ui.js', 'fullcalendar.min.js', 'jquery.qtip.min.js', 'fullcalendar.locale.js')) + self.calendar_id = 'cal' + make_uid('uid') + self.add_onload() + # write calendar div to load jquery fullcalendar object + self.w(u'
              ' % self.calendar_id) + + def add_onload(self): + fullcalendar_options = self.fullcalendar_options.copy() + fullcalendar_options['events'] = self.get_events() + # i18n + # js callback to add a tooltip and to put html in event's title + js = """ + var options = $.fullCalendar.regional('%s', %s); + options.eventRender = function(event, $element) { + // add a tooltip for each event + var div = '
              '+ event.description+ '
              '; + $element.append(div); + // allow to have html tags in event's title + $element.find('span.fc-event-title').html($element.find('span.fc-event-title').text()); + }; + $("#%s").fullCalendar(options); + """ #" + self._cw.add_onload(js % (self._cw.lang, json_dumps(fullcalendar_options), self.calendar_id)) + + def get_events(self): + events = [] + for entity in self.cw_rset.entities(): + icalendarable = entity.cw_adapt_to('ICalendarable') + if not (icalendarable.start and icalendarable.stop): + continue + start_date = icalendarable.start or icalendarable.stop + event = {'eid': entity.eid, + 'title': entity.view('calendaritem'), + 'url': xml_escape(entity.absolute_url()), + 'className': 'calevent', + 'description': entity.view('tooltip'), + } + event['start'] = start_date.strftime('%Y-%m-%dT%H:%M') + event['allDay'] = True + if icalendarable.stop: + event['end'] = icalendarable.stop.strftime('%Y-%m-%dT%H:%M') + event['allDay'] = False + events.append(event) + return events + +class OneMonthCal(CalendarView): + __regid__ = 'onemonthcal' + + title = _('one month') + +class OneWeekCal(CalendarView): + __regid__ = 'oneweekcal' + + title = _('one week') + fullcalendar_options = CalendarView.fullcalendar_options.copy() + fullcalendar_options['defaultView'] = 'agendaWeek' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/csvexport.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/csvexport.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,107 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""csv export views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six import PY2 +from six.moves import range + +from cubicweb.schema import display_name +from cubicweb.predicates import any_rset, empty_rset +from cubicweb.uilib import UnicodeCSVWriter +from cubicweb.view import EntityView, AnyRsetView + +class CSVMixIn(object): + """mixin class for CSV views""" + templatable = False + content_type = "text/comma-separated-values" + binary = PY2 # python csv module is unicode aware in py3k + csv_params = {'dialect': 'excel', + 'quotechar': '"', + 'delimiter': ';', + 'lineterminator': '\n'} + + def set_request_content_type(self): + """overriden to set a .csv filename""" + self._cw.set_content_type(self.content_type, filename='cubicwebexport.csv') + + def csvwriter(self, **kwargs): + params = self.csv_params.copy() + params.update(kwargs) + return UnicodeCSVWriter(self.w, self._cw.encoding, **params) + + +class CSVRsetView(CSVMixIn, AnyRsetView): + """dumps raw result set in CSV""" + __regid__ = 'csvexport' + __select__ = any_rset() + title = _('csv export') + + def call(self): + writer = self.csvwriter() + writer.writerow(self.columns_labels()) + rset, descr = self.cw_rset, self.cw_rset.description + eschema = self._cw.vreg.schema.eschema + for rowindex, row in enumerate(rset): + csvrow = [] + for colindex, val in enumerate(row): + etype = descr[rowindex][colindex] + if val is not None and not eschema(etype).final: + # csvrow.append(val) # val is eid in that case + content = self._cw.view('textincontext', rset, + row=rowindex, col=colindex) + else: + content = self._cw.view('final', rset, + format='text/plain', + row=rowindex, col=colindex) + csvrow.append(content) + writer.writerow(csvrow) + + +class CSVEntityView(CSVMixIn, EntityView): + """dumps rset's entities (with full set of attributes) in CSV + + the generated CSV file will have a table per entity type found in the + resultset. ('table' here only means empty lines separation between table + contents) + """ + __regid__ = 'ecsvexport' + __select__ = EntityView.__select__ | empty_rset() + title = _('csv export (entities)') + + def call(self): + req = self._cw + rows_by_type = {} + writer = self.csvwriter() + rowdef_by_type = {} + for index in range(len(self.cw_rset)): + entity = self.cw_rset.complete_entity(index) + if entity.e_schema not in rows_by_type: + rowdef_by_type[entity.e_schema] = [rs for rs, at in entity.e_schema.attribute_definitions() + if at != 'Bytes'] + rows_by_type[entity.e_schema] = [[display_name(req, rschema.type) + for rschema in rowdef_by_type[entity.e_schema]]] + rows = rows_by_type[entity.e_schema] + rows.append([entity.printable_value(rs.type, format='text/plain') + for rs in rowdef_by_type[entity.e_schema]]) + for rows in rows_by_type.values(): + writer.writerows(rows) + # use two empty lines as separator + writer.writerows([[], []]) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/cwproperties.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/cwproperties.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,442 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Specific views for CWProperty (eg site/user preferences""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from logilab.mtconverter import xml_escape + +from logilab.common.decorators import cached + +from cubicweb import UnknownProperty +from cubicweb.predicates import (one_line_rset, none_rset, is_instance, + match_user_groups, logged_user_in_rset) +from cubicweb.view import StartupView +from cubicweb.web import stdmsgs +from cubicweb.web.form import FormViewMixIn +from cubicweb.web.formfields import FIELDS, StringField +from cubicweb.web.formwidgets import (Select, TextInput, Button, SubmitButton, + FieldWidget) +from cubicweb.web.views import uicfg, primary, formrenderers, editcontroller +from cubicweb.web.views.ajaxcontroller import ajaxfunc + +uicfg.primaryview_section.tag_object_of(('*', 'for_user', '*'), 'hidden') + +# some string we want to be internationalizable for nicer display of property +# groups +_('navigation') +_('ui') +_('boxes') +_('components') +_('ctxcomponents') +_('navigation.combobox-limit') +_('navigation.page-size') +_('navigation.related-limit') +_('navigation.short-line-size') +_('ui.date-format') +_('ui.datetime-format') +_('ui.default-text-format') +_('ui.fckeditor') +_('ui.float-format') +_('ui.language') +_('ui.time-format') +_('open all') +_('ui.main-template') +_('ui.site-title') +_('ui.encoding') +_('category') + + +def make_togglable_link(nodeid, label): + """builds a HTML link that switches the visibility & remembers it""" + return u'%s' % ( + nodeid, label) + +def css_class(someclass): + return someclass and 'class="%s"' % someclass or '' + + +class CWPropertyPrimaryView(primary.PrimaryView): + __select__ = is_instance('CWProperty') + skip_none = False + + +class SystemCWPropertiesForm(FormViewMixIn, StartupView): + """site-wide properties edition form""" + __regid__ = 'systempropertiesform' + __select__ = none_rset() & match_user_groups('managers') + form_buttons = [SubmitButton()] + + title = _('site configuration') + category = 'startupview' + + def linkable(self): + return True + + def url(self): + """return the url associated with this view. We can omit rql here""" + return self._cw.build_url('view', vid=self.__regid__) + + def _cookie_name(self, somestr): + return str('%s_property_%s' % (self._cw.vreg.config.appid, somestr)) + + def _group_status(self, group, default=u'hidden'): + """return css class name 'hidden' (collapsed), or '' (open)""" + cookies = self._cw.get_cookie() + cookiename = self._cookie_name(group) + cookie = cookies.get(cookiename) + if cookie is None: + self._cw.set_cookie(cookiename, default, maxage=None) + status = default + else: + status = cookie.value + return status + + def call(self, **kwargs): + self._cw.add_js(('cubicweb.preferences.js', + 'cubicweb.edition.js', 'cubicweb.ajax.js')) + self._cw.add_css('cubicweb.preferences.css') + values = self.defined_keys + mainopts, groupedopts = self.group_properties() + # precompute all forms first to consume error message + mainforms, groupedforms = self.build_forms(mainopts, groupedopts) + _ = self._cw._ + self.w(u'

              %s

              \n' % _(self.title)) + for label, group, form in sorted((_(g), g, f) + for g, f in mainforms.items()): + self.wrap_main_form(group, label, form) + for label, group, objects in sorted((_(g), g, o) + for g, o in groupedforms.items()): + self.wrap_grouped_form(group, label, objects) + + @property + @cached + def cwprops_rset(self): + return self._cw.execute('Any P,K,V WHERE P is CWProperty, P pkey K, ' + 'P value V, NOT P for_user U') + + @property + def defined_keys(self): + values = {} + for i, entity in enumerate(self.cwprops_rset.entities()): + values[entity.pkey] = i + return values + + def group_properties(self): + mainopts, groupedopts = {}, {} + vreg = self._cw.vreg + # "self._regid__=='systempropertiesform'" to skip site wide properties on + # user's preference but not site's configuration + for key in vreg.user_property_keys(self.__regid__=='systempropertiesform'): + parts = key.split('.') + if parts[0] in vreg and len(parts) >= 3: + # appobject configuration + reg = parts[0] + propid = parts[-1] + oid = '.'.join(parts[1:-1]) + groupedopts.setdefault(reg, {}).setdefault(oid, []).append(key) + else: + mainopts.setdefault(parts[0], []).append(key) + return mainopts, groupedopts + + def build_forms(self, mainopts, groupedopts): + mainforms, groupedforms = {}, {} + for group, keys in mainopts.items(): + mainforms[group] = self.form(group, keys, False) + for group, objects in groupedopts.items(): + groupedforms[group] = {} + for oid, keys in objects.items(): + groupedforms[group][oid] = self.form(group + '_' + oid, keys, True) + return mainforms, groupedforms + + def entity_for_key(self, key): + values = self.defined_keys + if key in values: + entity = self.cwprops_rset.get_entity(values[key], 0) + else: + entity = self._cw.vreg['etypes'].etype_class('CWProperty')(self._cw) + entity.eid = next(self._cw.varmaker) + entity.cw_attr_cache['pkey'] = key + entity.cw_attr_cache['value'] = self._cw.vreg.property_value(key) + return entity + + def form(self, formid, keys, splitlabel=False): + form = self._cw.vreg['forms'].select( + 'composite', self._cw, domid=formid, action=self._cw.build_url(), + form_buttons=self.form_buttons, + onsubmit="return validatePrefsForm('%s')" % formid, + submitmsg=self._cw._('changes applied')) + path = self._cw.relative_path() + if '?' in path: + path, params = path.split('?', 1) + form.add_hidden('__redirectparams', params) + form.add_hidden('__redirectpath', path) + for key in keys: + self.form_row(form, key, splitlabel) + renderer = self._cw.vreg['formrenderers'].select('cwproperties', self._cw, + display_progress_div=False) + data = [] + form.render(w=data.append, renderer=renderer) + return u'\n'.join(data) + + def form_row(self, form, key, splitlabel): + entity = self.entity_for_key(key) + if splitlabel: + label = key.split('.')[-1] + else: + label = key + subform = self._cw.vreg['forms'].select('base', self._cw, entity=entity, + mainform=False) + subform.append_field(PropertyValueField(name='value', label=label, role='subject', + eidparam=True)) + subform.add_hidden('pkey', key, eidparam=True, role='subject') + form.add_subform(subform) + return subform + + def wrap_main_form(self, group, label, form): + status = css_class(self._group_status(group)) + self.w(u'
              %s
              \n' % + (make_togglable_link('fieldset_' + group, label))) + self.w(u'
              ' % (group, status)) + self.w(u'
              ') + self.w(form) + self.w(u'
              ') + + def wrap_grouped_form(self, group, label, objects): + status = css_class(self._group_status(group)) + self.w(u'
              %s
              \n' % + (make_togglable_link('fieldset_' + group, label))) + self.w(u'
              ' % (group, status)) + sorted_objects = sorted((self._cw.__('%s_%s' % (group, o)), o, f) + for o, f in objects.items()) + for label, oid, form in sorted_objects: + self.wrap_object_form(group, oid, label, form) + self.w(u'
              ') + + def wrap_object_form(self, group, oid, label, form): + w = self.w + w(u'
              ') + w(u'''') + docmsgid = '%s_%s_description' % (group, oid) + doc = self._cw._(docmsgid) + if doc != docmsgid: + w(u'
              %s
              ' % xml_escape(doc).capitalize()) + w(u'
              ') + w(u'') + + +class CWPropertiesForm(SystemCWPropertiesForm): + """user's preferences properties edition form""" + __regid__ = 'propertiesform' + __select__ = ( + (none_rset() & match_user_groups('users','managers')) + | (one_line_rset() & match_user_groups('users') & logged_user_in_rset()) + | (one_line_rset() & match_user_groups('managers') & is_instance('CWUser')) + ) + + title = _('user preferences') + + @property + def user(self): + if self.cw_rset is None: + return self._cw.user + return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + + @property + @cached + def cwprops_rset(self): + return self._cw.execute('Any P,K,V WHERE P is CWProperty, P pkey K, P value V,' + 'P for_user U, U eid %(x)s', {'x': self.user.eid}) + + def form_row(self, form, key, splitlabel): + subform = super(CWPropertiesForm, self).form_row(form, key, splitlabel) + # if user is in the managers group and the property is being created, + # we have to set for_user explicitly + if not subform.edited_entity.has_eid() and self.user.matching_groups('managers'): + subform.add_hidden('for_user', self.user.eid, eidparam=True, role='subject') + return subform + +# cwproperty form objects ###################################################### + +class PlaceHolderWidget(FieldWidget): + + def render(self, form, field, renderer): + domid = field.dom_id(form) + # empty span as well else html validation fail (label is refering to + # this id) + return '
              %s
              ' % ( + domid, domid, form._cw._('select a key first')) + + +class NotEditableWidget(FieldWidget): + def __init__(self, value, msg=None): + self.value = value + self.msg = msg + + def render(self, form, field, renderer): + domid = field.dom_id(form) + value = '%s' % (domid, self.value) + if self.msg: + value += '
              %s
              ' % self.msg + return value + + +class PropertyKeyField(StringField): + """specific field for CWProperty.pkey to set the value widget according to + the selected key + """ + widget = Select + + def render(self, form, renderer): + wdg = self.get_widget(form) + # pylint: disable=E1101 + wdg.attrs['tabindex'] = form._cw.next_tabindex() + wdg.attrs['onchange'] = "javascript:setPropValueWidget('%s', %s)" % ( + form.edited_entity.eid, form._cw.next_tabindex()) + return wdg.render(form, self, renderer) + + def vocabulary(self, form): + entity = form.edited_entity + _ = form._cw._ + if entity.has_eid(): + return [(_(entity.pkey), entity.pkey)] + choices = entity._cw.vreg.user_property_keys() + return [(u'', u'')] + sorted(zip((_(v) for v in choices), choices)) + + +class PropertyValueField(StringField): + """specific field for CWProperty.value which will be different according to + the selected key type and vocabulary information + """ + widget = PlaceHolderWidget + + def render(self, form, renderer=None, tabindex=None): + wdg = self.get_widget(form) + if tabindex is not None: + wdg.attrs['tabindex'] = tabindex + return wdg.render(form, self, renderer) + + def form_init(self, form): + entity = form.edited_entity + if not (entity.has_eid() or 'pkey' in entity.cw_attr_cache): + # no key set yet, just include an empty div which will be filled + # on key selection + return + try: + pdef = form._cw.vreg.property_info(entity.pkey) + except UnknownProperty as ex: + form.warning('%s (you should probably delete that property ' + 'from the database)', ex) + msg = form._cw._('you should probably delete that property') + self.widget = NotEditableWidget(entity.printable_value('value'), + '%s (%s)' % (msg, ex)) + return + if entity.pkey.startswith('system.'): + msg = form._cw._('value associated to this key is not editable ' + 'manually') + self.widget = NotEditableWidget(entity.printable_value('value'), msg) + # XXX race condition when used from CWPropertyForm, should not rely on + # instance attributes + self.value = pdef['default'] + self.help = pdef['help'] + vocab = pdef['vocabulary'] + if vocab is not None: + if callable(vocab): + # list() just in case its a generator function + self.choices = list(vocab()) + else: + self.choices = vocab + wdg = Select() + elif pdef['type'] == 'String': # else we'll get a TextArea by default + wdg = TextInput() + else: + field = FIELDS[pdef['type']]() + wdg = field.widget + if pdef['type'] == 'Boolean': + self.choices = field.vocabulary(form) + self.widget = wdg + + +class CWPropertiesFormRenderer(formrenderers.FormRenderer): + """specific renderer for properties""" + __regid__ = 'cwproperties' + + def open_form(self, form, values): + err = '
              ' + return super(CWPropertiesFormRenderer, self).open_form(form, values) + err + + def _render_fields(self, fields, w, form): + for field in fields: + w(u'
              \n') + if self.display_label: + w(u'%s' % self.render_label(form, field)) + error = form.field_error(field) + if error: + w(u'%s' % error) + w(u'%s' % self.render_help(form, field)) + w(u'
              ') + w(field.render(form, self)) + w(u'
              ') + w(u'
              ') + + def render_buttons(self, w, form): + w(u'
              \n') + for button in form.form_buttons: + w(u'%s\n' % button.render(form)) + w(u'
              ') + + +class CWPropertyIEditControlAdapter(editcontroller.IEditControlAdapter): + __select__ = is_instance('CWProperty') + + def after_deletion_path(self): + """return (path, parameters) which should be used as redirect + information when this entity is being deleted + """ + return 'view', {} + + +@ajaxfunc(output_type='xhtml') +def prop_widget(self, propkey, varname, tabindex=None): + """specific method for CWProperty handling""" + entity = self._cw.vreg['etypes'].etype_class('CWProperty')(self._cw) + entity.eid = varname + entity.pkey = propkey + form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity) + form.build_context() + vfield = form.field_by_name('value', 'subject') + renderer = formrenderers.FormRenderer(self._cw) + return vfield.render(form, renderer, tabindex=tabindex) \ + + renderer.render_help(form, vfield) + +_afs = uicfg.autoform_section +_afs.tag_subject_of(('*', 'for_user', '*'), 'main', 'hidden') +_afs.tag_object_of(('*', 'for_user', '*'), 'main', 'hidden') +_aff = uicfg.autoform_field +_aff.tag_attribute(('CWProperty', 'pkey'), PropertyKeyField) +_aff.tag_attribute(('CWProperty', 'value'), PropertyValueField) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/cwsources.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/cwsources.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,516 @@ +# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Specific views for data sources and related entities (eg CWSource, +CWSourceHostConfig, CWSourceSchemaConfig). 
+""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import logging +from itertools import repeat + +from six.moves import range + +from logilab.mtconverter import xml_escape +from logilab.common.decorators import cachedproperty + +from cubicweb import Unauthorized, tags +from cubicweb.utils import make_uid +from cubicweb.predicates import (is_instance, score_entity, has_related_entities, + match_user_groups, match_kwargs, match_view, one_line_rset) +from cubicweb.view import EntityView, StartupView +from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name +from cubicweb.web import Redirect, formwidgets as wdgs, facet, action +from cubicweb.web.views import add_etype_button +from cubicweb.web.views import (uicfg, tabs, actions, ibreadcrumbs, navigation, + tableview, pyviews) + + +_abaa = uicfg.actionbox_appearsin_addmenu +# there are explicit 'add' buttons for those +_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_schema', '*'), False) +_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_for_source', '*'), False) +_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_host_config_of', '*'), False) +_abaa.tag_object_of(('CWDataImport', 'cw_import_of', '*'), False) + +_afs = uicfg.autoform_section +_afs.tag_attribute(('CWSource', 'latest_retrieval'), 'main', 'hidden') +_afs.tag_attribute(('CWSource', 'in_synchronization'), 'main', 'hidden') +_afs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'main', 'hidden') + +_affk = uicfg.autoform_field_kwargs +_affk.tag_attribute(('CWSource', 'parser'), {'widget': wdgs.TextInput}) + +# source primary views ######################################################### + +_pvs = uicfg.primaryview_section +_pvs.tag_attribute(('CWSource', 'name'), 'hidden') +_pvs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'hidden') +_pvs.tag_object_of(('*', 'cw_host_config_of', 'CWSource'), 'hidden') + +_pvdc = uicfg.primaryview_display_ctrl +_pvdc.tag_attribute(('CWSource', 'type'), {'vid': 'attribute'})# disable reledit + +_rc = uicfg.reledit_ctrl +_rc.tag_attribute(('CWSource', 'config'), {'rvid': 'verbatimattr'}) +_rc.tag_attribute(('CWSourceHostConfig', 'config'), {'rvid': 'verbatimattr'}) +_rc.tag_attribute(('CWSourceSchemaConfig', 'options'), {'rvid': 'verbatimattr'}) + + +class CWSourcePrimaryView(tabs.TabbedPrimaryView): + __select__ = is_instance('CWSource') + tabs = [_('cwsource-main'), _('cwsource-mapping'), _('cwsource-imports')] + default_tab = 'cwsource-main' + + +class CWSourceMainTab(tabs.PrimaryTab): + __regid__ = 'cwsource-main' + __select__ = is_instance('CWSource') + + def render_entity_attributes(self, entity): + super(CWSourceMainTab, self).render_entity_attributes(entity) + self.w(add_etype_button(self._cw, 'CWSourceHostConfig', + __linkto='cw_host_config_of:%s:subject' % entity.eid, + __redirectpath=entity.rest_path())) + try: + hostconfig = self._cw.execute( + 'Any X, XC, XH WHERE X cw_host_config_of S, S eid %(s)s, ' + 'X config XC, X match_host XH', {'s': entity.eid}) + except Unauthorized: + pass + else: + if hostconfig: + self.w(u'

<h3>%s</h3>
              ' % self._cw._('CWSourceHostConfig_plural')) + self._cw.view('table', hostconfig, w=self.w, + displaycols=list(range(2)), + cellvids={1: 'editable-final'}) + + +MAPPED_SOURCE_TYPES = set( ('datafeed',) ) + +class CWSourceMappingTab(EntityView): + __regid__ = 'cwsource-mapping' + __select__ = (is_instance('CWSource') + & match_user_groups('managers') + & score_entity(lambda x:x.type in MAPPED_SOURCE_TYPES)) + + def entity_call(self, entity): + _ = self._cw._ + self.w('

<h3>%s</h3>
              ' % _('Entity and relation supported by this source')) + self.w(add_etype_button(self._cw, 'CWSourceSchemaConfig', + __linkto='cw_for_source:%s:subject' % entity.eid)) + self.w(u'
              ') + rset = self._cw.execute( + 'Any X, SCH, XO ORDERBY ET WHERE X options XO, X cw_for_source S, S eid %(s)s, ' + 'X cw_schema SCH, SCH is ET', {'s': entity.eid}) + self.wview('table', rset, 'noresult') + checker = MappingChecker(entity) + checker.check() + if (checker.errors or checker.warnings or checker.infos): + self.w('

<h2>%s</h2>
              ' % _('Detected problems')) + errors = zip(repeat(_('error')), checker.errors) + warnings = zip(repeat(_('warning')), checker.warnings) + infos = zip(repeat(_('warning')), checker.infos) + self.wview('pyvaltable', pyvalue=errors + warnings + infos) + + +class MappingChecker(object): + def __init__(self, cwsource): + self.cwsource = cwsource + self.errors = [] + self.warnings = [] + self.infos = [] + self.schema = cwsource._cw.vreg.schema + + def init(self): + # supported entity types + self.sentities = set() + # supported relations + self.srelations = {} + # avoid duplicated messages + self.seen = set() + # first get mapping as dict/sets + for schemacfg in self.cwsource.reverse_cw_for_source: + self.init_schemacfg(schemacfg) + + def init_schemacfg(self, schemacfg): + cwerschema = schemacfg.schema + if cwerschema.__regid__ == 'CWEType': + self.sentities.add(cwerschema.name) + elif cwerschema.__regid__ == 'CWRType': + assert not cwerschema.name in self.srelations + self.srelations[cwerschema.name] = None + else: # CWAttribute/CWRelation + self.srelations.setdefault(cwerschema.rtype.name, []).append( + (cwerschema.stype.name, cwerschema.otype.name) ) + self.sentities.add(cwerschema.stype.name) + self.sentities.add(cwerschema.otype.name) + + def check(self): + self.init() + error = self.errors.append + warning = self.warnings.append + info = self.infos.append + for etype in self.sentities: + eschema = self.schema[etype] + for rschema, ttypes, role in eschema.relation_definitions(): + if rschema in META_RTYPES: + continue + ttypes = [ttype for ttype in ttypes if ttype in self.sentities] + if not rschema in self.srelations: + for ttype in ttypes: + rdef = rschema.role_rdef(etype, ttype, role) + self.seen.add(rdef) + if rdef.role_cardinality(role) in '1+': + error(_('relation %(type)s with %(etype)s as %(role)s ' + 'and target type %(target)s is mandatory but ' + 'not supported') % + {'rtype': rschema, 'etype': etype, 'role': role, + 'target': ttype}) + elif ttype in self.sentities: + warning(_('%s could be supported') % rdef) + elif not ttypes: + warning(_('relation %(rtype)s with %(etype)s as %(role)s is ' + 'supported but no target type supported') % + {'rtype': rschema, 'role': role, 'etype': etype}) + for rtype, rdefs in self.srelations.items(): + if rdefs is None: + rschema = self.schema[rtype] + for subj, obj in rschema.rdefs: + if subj in self.sentities and obj in self.sentities: + break + else: + error(_('relation %s is supported but none of its definitions ' + 'matches supported entities') % rtype) + self.custom_check() + + def custom_check(self): + pass + + + +class CWSourceImportsTab(EntityView): + __regid__ = 'cwsource-imports' + __select__ = (is_instance('CWSource') + & has_related_entities('cw_import_of', 'object')) + + def entity_call(self, entity): + rset = self._cw.execute('Any X, XST, XET, XS ORDERBY XST DESC WHERE ' + 'X cw_import_of S, S eid %(s)s, X status XS, ' + 'X start_timestamp XST, X end_timestamp XET', + {'s': entity.eid}) + self._cw.view('cw.imports-table', rset, w=self.w) + + +class CWImportsTable(tableview.EntityTableView): + __regid__ = 'cw.imports-table' + __select__ = is_instance('CWDataImport') + columns = ['import', 'start_timestamp', 'end_timestamp'] + column_renderers = {'import': tableview.MainEntityColRenderer()} + layout_args = {'display_filter': 'top'} + + +class CWSourceSyncAction(action.Action): + __regid__ = 'cw.source-sync' + __select__ = (action.Action.__select__ & match_user_groups('managers') + & one_line_rset() & 
is_instance('CWSource') + & score_entity(lambda x: x.name != 'system')) + + title = _('synchronize') + category = 'mainactions' + order = 20 + + def url(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return entity.absolute_url(vid=self.__regid__) + + +class CWSourceSyncView(EntityView): + __regid__ = 'cw.source-sync' + __select__ = (match_user_groups('managers') + & one_line_rset() & is_instance('CWSource') + & score_entity(lambda x: x.name != 'system')) + + title = _('synchronize') + + def entity_call(self, entity): + self._cw.call_service('source-sync', source_eid=entity.eid) + msg = self._cw._('Source has been synchronized') + url = entity.absolute_url(tab='cwsource-imports', __message=msg) + raise Redirect(url) + + + + +# sources management view ###################################################### + +class ManageSourcesAction(actions.ManagersAction): + __regid__ = 'cwsource' + title = _('data sources') + category = 'manage' + order = 100 + + +class CWSourcesManagementView(StartupView): + __regid__ = 'cw.sources-management' + rql = ('Any S,ST,SP,SD,SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST, ' + 'S latest_retrieval SD, S parser SP') + title = _('data sources management') + + def call(self, **kwargs): + self.w('

<h1>%s</h1>
              ' % self._cw._(self.title)) + self.w(add_etype_button(self._cw, 'CWSource')) + self.w(u'
              ') + self.wview('cw.sources-table', self._cw.execute(self.rql)) + + +class CWSourcesTable(tableview.EntityTableView): + __regid__ = 'cw.sources-table' + __select__ = is_instance('CWSource') + columns = ['source', 'type', 'parser', 'latest_retrieval', 'latest_import'] + + class LatestImportColRenderer(tableview.EntityTableColRenderer): + def render_cell(self, w, rownum): + entity = self.entity(rownum) + rset = self._cw.execute('Any X,XS,XST ORDERBY XST DESC LIMIT 1 WHERE ' + 'X cw_import_of S, S eid %(s)s, X status XS, ' + 'X start_timestamp XST', {'s': entity.eid}) + if rset: + self._cw.view('incontext', rset, row=0, w=w) + else: + w(self.empty_cell_content) + + column_renderers = { + 'source': tableview.MainEntityColRenderer(), + 'latest_import': LatestImportColRenderer(header=_('latest import'), + sortable=False) + } + +# datafeed source import ####################################################### + +REVERSE_SEVERITIES = { + logging.DEBUG : _('DEBUG'), + logging.INFO : _('INFO'), + logging.WARNING : _('WARNING'), + logging.ERROR : _('ERROR'), + logging.FATAL : _('FATAL') +} + + +def log_to_table(req, rawdata): + data = [] + for msg_idx, msg in enumerate(rawdata.split('
              ')): + record = msg.strip() + if not record: + continue + try: + severity, url, line, msg = record.split('\t', 3) + except ValueError: + req.warning('badly formated log %s' % record) + url = line = u'' + severity = logging.DEBUG + msg = record + data.append( (severity, url, line, msg) ) + return data + + +class LogTableLayout(tableview.TableLayout): + __select__ = match_view('cw.log.table') + needs_js = tableview.TableLayout.needs_js + ('cubicweb.log.js',) + needs_css = tableview.TableLayout.needs_css + ('cubicweb.log.css',) + columns_css = { + 0: 'logSeverity', + 1: 'logPath', + 2: 'logLine', + 3: 'logMsg', + } + + def render_table(self, w, actions, paginate): + default_level = self.view.cw_extra_kwargs['default_level'] + if default_level != 'Debug': + self._cw.add_onload('$("select.logFilter").val("%s").change();' + % self._cw.form.get('logLevel', default_level)) + w(u'\n
              ') + w(u'' % self._cw._(u'Message threshold')) + w(u'') + w(u'
              ') + super(LogTableLayout, self).render_table(w, actions, paginate) + + def table_attributes(self): + attrs = super(LogTableLayout, self).table_attributes() + attrs['id'] = 'table'+self.view.domid + return attrs + + def row_attributes(self, rownum): + attrs = super(LogTableLayout, self).row_attributes(rownum) + attrs['id'] = 'log_msg_%i' % rownum + severityname = REVERSE_SEVERITIES[int(self.view.pyvalue[rownum][0])] + attrs['class'] = 'log%s' % severityname.capitalize() + return attrs + + def cell_attributes(self, rownum, colnum, colid): + attrs = super(LogTableLayout, self).cell_attributes(rownum, colnum, colid) + attrs['class'] = self.columns_css[colnum] + return attrs + + +class LogTable(pyviews.PyValTableView): + __regid__ = 'cw.log.table' + headers = [_('severity'), _('url'), _('line'), _('message')] + + @cachedproperty + def domid(self): + return make_uid('logTable') + + class SeverityRenderer(pyviews.PyValTableColRenderer): + def render_cell(self, w, rownum): + severity = self.data[rownum][0] + w(u'''' + u' %(severity)s' % { + 'severity': self._cw._(REVERSE_SEVERITIES[int(severity)]), + 'title': self._cw._('permalink to this message'), + 'msg_id': 'log_msg_%i' % rownum, + }) + def sortvalue(self, rownum): + return int(self.data[rownum][0]) + + class URLRenderer(pyviews.PyValTableColRenderer): + def render_cell(self, w, rownum): + url = self.data[rownum][1] + if url and url.startswith('http'): + url = tags.a(url, href=url) + w(url or u' ') + + class LineRenderer(pyviews.PyValTableColRenderer): + def render_cell(self, w, rownum): + line = self.data[rownum][2] + w(line or u' ') + + class MessageRenderer(pyviews.PyValTableColRenderer): + snip_over = 7 + def render_cell(self, w, rownum): + msg = self.data[rownum][3] + lines = msg.splitlines() + if len(lines) <= self.snip_over: + w(u'
              %s
              ' % msg) + else: + # The make_uid argument has no specific meaning here. + div_snip_id = make_uid(u'log_snip_') + div_full_id = make_uid(u'log_full_') + divs_id = (div_snip_id, div_full_id) + snip = u'\n'.join((lines[0], lines[1], + u' ...', + u' %i more lines [double click to expand]' % (len(lines)-4), + u' ...', + lines[-2], lines[-1])) + divs = ( + (div_snip_id, snip, u'expand', "class='collapsed'"), + (div_full_id, msg, u'collapse', "class='hidden'") + ) + for div_id, content, button, h_class in divs: + text = self._cw._(button) + js = u"toggleVisibility('%s'); toggleVisibility('%s');" % divs_id + w(u'
              ' % (div_id, h_class)) + w(u'
              ' % (js, text))
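+                    # the expand/collapse control written just above embeds the toggleVisibility() calls for both divs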
              +                    w(content)
              +                    w(u'
              ') + w(u'
              ') + + column_renderers = {0: SeverityRenderer(), + 1: URLRenderer(sortable=False), + 2: LineRenderer(sortable=False), + 3: MessageRenderer(sortable=False), + } + + +class DataFeedSourceDataImport(EntityView): + __select__ = EntityView.__select__ & match_kwargs('rtype') + __regid__ = 'cw.formated_log' + + def cell_call(self, row, col, rtype, loglevel='Info', **kwargs): + if 'dispctrl' in self.cw_extra_kwargs: + loglevel = self.cw_extra_kwargs['dispctrl'].get('loglevel', loglevel) + entity = self.cw_rset.get_entity(row, col) + value = getattr(entity, rtype) + if value: + self._cw.view('cw.log.table', pyvalue=log_to_table(self._cw, value), + default_level=loglevel, w=self.w) + else: + self.w(self._cw._('no log to display')) + + +_pvs.tag_attribute(('CWDataImport', 'log'), 'relations') +_pvdc.tag_attribute(('CWDataImport', 'log'), {'vid': 'cw.formated_log'}) +_pvs.tag_subject_of(('CWDataImport', 'cw_import_of', '*'), 'hidden') # in breadcrumbs +_pvs.tag_object_of(('*', 'cw_import_of', 'CWSource'), 'hidden') # in dedicated tab + + +class CWDataImportIPrevNextAdapter(navigation.IPrevNextAdapter): + __select__ = is_instance('CWDataImport') + + def next_entity(self): + if self.entity.start_timestamp is not None: + # add NOT X eid %(e)s because > may not be enough + rset = self._cw.execute( + 'Any X,XSTS ORDERBY 2 LIMIT 1 WHERE X is CWDataImport, ' + 'X cw_import_of S, S eid %(s)s, NOT X eid %(e)s, ' + 'X start_timestamp XSTS, X start_timestamp > %(sts)s', + {'sts': self.entity.start_timestamp, + 'e': self.entity.eid, + 's': self.entity.cwsource.eid}) + if rset: + return rset.get_entity(0, 0) + + def previous_entity(self): + if self.entity.start_timestamp is not None: + # add NOT X eid %(e)s because < may not be enough + rset = self._cw.execute( + 'Any X,XSTS ORDERBY 2 DESC LIMIT 1 WHERE X is CWDataImport, ' + 'X cw_import_of S, S eid %(s)s, NOT X eid %(e)s, ' + 'X start_timestamp XSTS, X start_timestamp < %(sts)s', + {'sts': self.entity.start_timestamp, + 'e': self.entity.eid, + 's': self.entity.cwsource.eid}) + if rset: + return rset.get_entity(0, 0) + +class CWDataImportStatusFacet(facet.AttributeFacet): + __regid__ = 'datafeed.dataimport.status' + __select__ = is_instance('CWDataImport') + rtype = 'status' + + +# breadcrumbs configuration #################################################### + +class CWsourceConfigIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('CWSourceHostConfig', 'CWSourceSchemaConfig') + def parent_entity(self): + return self.entity.cwsource + +class CWDataImportIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('CWDataImport') + def parent_entity(self): + return self.entity.cw_import_of[0] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/cwuser.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/cwuser.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,258 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Specific views for users and groups""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from hashlib import sha1 # pylint: disable=E0611 + +from six import text_type +from six.moves import range + +from logilab.mtconverter import xml_escape + +from cubicweb import tags +from cubicweb.schema import display_name +from cubicweb.predicates import one_line_rset, is_instance, match_user_groups +from cubicweb.view import EntityView, StartupView +from cubicweb.web import action, formwidgets +from cubicweb.web.views import uicfg, tabs, tableview, actions, add_etype_button + +_pvs = uicfg.primaryview_section +_pvs.tag_attribute(('CWUser', 'login'), 'hidden') + +_affk = uicfg.autoform_field_kwargs +_affk.tag_subject_of(('CWUser', 'in_group', 'CWGroup'), + {'widget': formwidgets.InOutWidget}) + +class UserPreferencesEntityAction(action.Action): + __regid__ = 'prefs' + __select__ = (one_line_rset() & is_instance('CWUser') & + match_user_groups('owners', 'managers')) + + title = _('preferences') + category = 'mainactions' + + def url(self): + user = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return user.absolute_url(vid='propertiesform') + + +class FoafView(EntityView): + __regid__ = 'foaf' + __select__ = is_instance('CWUser') + + title = _('foaf') + templatable = False + content_type = 'text/xml' + + def call(self): + self.w(u''' + '''% self._cw.encoding) + for i in range(self.cw_rset.rowcount): + self.cell_call(i, 0) + self.w(u'\n') + + def entity_call(self, entity, **kwargs): + entity.complete() + # account + self.w(u'\n' % entity.absolute_url()) + self.w(u' %s\n' % entity.login) + self.w(u'\n') + # person + self.w(u'\n' % entity.absolute_url()) + self.w(u' \n' % entity.absolute_url()) + if entity.surname: + self.w(u'%s\n' + % xml_escape(entity.surname)) + if entity.firstname: + self.w(u'%s\n' + % xml_escape(entity.firstname)) + emailaddr = entity.cw_adapt_to('IEmailable').get_email() + if emailaddr: + self.w(u'%s\n' + % sha1(emailaddr.encode('utf-8')).hexdigest()) + self.w(u'\n') + + +# group views ################################################################## + +_pvs.tag_attribute(('CWGroup', 'name'), 'hidden') +_pvs.tag_subject_of(('CWGroup', 'read_permission', '*'), 'relations') +_pvs.tag_subject_of(('CWGroup', 'add_permission', '*'), 'relations') +_pvs.tag_subject_of(('CWGroup', 'delete_permission', '*'), 'relations') +_pvs.tag_subject_of(('CWGroup', 'update_permission', '*'), 'relations') +_pvs.tag_object_of(('CWUser', 'in_group', 'CWGroup'), 'hidden') +_pvs.tag_object_of(('*', 'require_group', 'CWGroup'), 'hidden') + + +class CWGroupPrimaryView(tabs.TabbedPrimaryView): + __select__ = is_instance('CWGroup') + tabs = [_('cwgroup-main'), _('cwgroup-permissions')] + default_tab = 'cwgroup-main' + + +class CWGroupMainTab(tabs.PrimaryTab): + __regid__ = 'cwgroup-main' + __select__ = tabs.PrimaryTab.__select__ & is_instance('CWGroup') + + def render_entity_attributes(self, entity): + rset = self._cw.execute( + 'Any U, FN, LN, CD, LL ORDERBY L WHERE U in_group G, ' + 'U login L, U firstname FN, U surname LN, U creation_date CD, ' + 'U last_login_time LL, G eid %(x)s', {'x': entity.eid}) + self.wview('cwgroup.users', rset, 'null') + +class CWGroupUsersTable(tableview.RsetTableView): + __regid__ = 'cwgroup.users' + __select__ = is_instance('CWUser') + headers = (_(u'user'), 
_(u'first name'), _(u'last name'), + _(u'creation date'), _(u'last login time')) + layout_args = {'display_filter': 'top'} + finalvid = 'editable-final' + + +class CWGroupPermTab(EntityView): + __regid__ = 'cwgroup-permissions' + __select__ = is_instance('CWGroup') + + def entity_call(self, entity): + self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css')) + access_types = ('read', 'delete', 'add', 'update') + w = self.w + objtype_access = {'CWEType': ('read', 'delete', 'add', 'update'), + 'CWRelation': ('add', 'delete')} + rql_cwetype = 'DISTINCT Any X WHERE X %s_permission CWG, X is CWEType, ' \ + 'CWG eid %%(e)s' + rql_cwrelation = 'DISTINCT Any RT WHERE X %s_permission CWG, X is CWRelation, ' \ + 'X relation_type RT, CWG eid %%(e)s' + self.render_objtype_access(entity, 'CWEType', objtype_access, rql_cwetype) + self.render_objtype_access(entity, 'CWRelation', objtype_access, rql_cwrelation) + + def render_objtype_access(self, entity, objtype, objtype_access, rql): + self.w(u'

<h4>%s</h4>
              ' % self._cw._(objtype)) + for access_type in objtype_access[objtype]: + rset = self._cw.execute(rql % access_type, {'e': entity.eid}) + if rset: + self.w(u'
              %s:
              ' % self._cw.__(access_type + '_permission')) + self.w(u'
              %s

              ' % self._cw.view('csv', rset, 'null')) + + +class CWGroupInContextView(EntityView): + __regid__ = 'incontext' + __select__ = is_instance('CWGroup') + + def entity_call(self, entity, **kwargs): + entity.complete() + self.w(u'%s' % ( + entity.absolute_url(), xml_escape(entity.name), + entity.printable_value('name'))) + + +# user / groups management views ############################################### + +class ManageUsersAction(actions.ManagersAction): + __regid__ = 'cwuser' # see rewrite rule /cwuser + title = _('users and groups') + category = 'manage' + + +class UsersAndGroupsManagementView(tabs.TabsMixin, StartupView): + __regid__ = 'cw.users-and-groups-management' + __select__ = StartupView.__select__ & match_user_groups('managers') + title = _('Users and groups management') + tabs = [_('cw.users-management'), _('cw.groups-management'),] + default_tab = 'cw.users-management' + + def call(self, **kwargs): + """The default view representing the instance's management""" + self.w(u'

<h1>%s</h1>
              ' % self._cw._(self.title)) + self.render_tabs(self.tabs, self.default_tab) + + +class CWUserManagementView(StartupView): + __regid__ = 'cw.users-management' + __select__ = StartupView.__select__ & match_user_groups('managers') + cache_max_age = 0 # disable caching + # XXX one could wish to display for instance only user's firstname/surname + # for non managers but filtering out NULL caused crash with an ldapuser + # source. The ldapuser source has been dropped and this code can be updated. + rql = ('Any U,US,F,S,U,UAA,UDS, L,UAA,USN,UDSN ORDERBY L WHERE U is CWUser, ' + 'U login L, U firstname F, U surname S, ' + 'U in_state US, US name USN, ' + 'U primary_email UA?, UA address UAA, ' + 'U cw_source UDS, US name UDSN') + + def call(self, **kwargs): + self.w(add_etype_button(self._cw, 'CWUser')) + self.w(u'
              ') + self.wview('cw.users-table', self._cw.execute(self.rql)) + + +class CWUsersTable(tableview.EntityTableView): + __regid__ = 'cw.users-table' + __select__ = is_instance('CWUser') + columns = ['user', 'in_state', 'firstname', 'surname', + 'in_group', 'primary_email', 'cw_source'] + layout_args = {'display_filter': 'top'} + finalvid = 'editable-final' + + column_renderers = { + 'user': tableview.EntityTableColRenderer( + renderfunc=lambda w,x: w(tags.a(x.login, href=x.absolute_url())), + sortfunc=lambda x: x.login), + 'in_state': tableview.EntityTableColRenderer( + renderfunc=lambda w,x: w(x.cw_adapt_to('IWorkflowable').printable_state), + sortfunc=lambda x: x.cw_adapt_to('IWorkflowable').printable_state), + 'in_group': tableview.EntityTableColRenderer( + renderfunc=lambda w,x: x.view('reledit', rtype='in_group', role='subject', w=w)), + 'primary_email': tableview.RelatedEntityColRenderer( + getrelated=lambda x:x.primary_email and x.primary_email[0] or None), + 'cw_source': tableview.RelatedEntityColRenderer( + getrelated=lambda x: x.cw_source[0]), + } + + +class CWGroupsManagementView(StartupView): + __regid__ = 'cw.groups-management' + __select__ = StartupView.__select__ & match_user_groups('managers') + cache_max_age = 0 # disable caching + rql = ('Any G,GN ORDERBY GN WHERE G is CWGroup, G name GN, NOT G name "owners"') + + def call(self, **kwargs): + self.w(add_etype_button(self._cw, 'CWGroup')) + self.w(u'
              ') + self.wview('cw.groups-table', self._cw.execute(self.rql)) + + +class CWGroupsTable(tableview.EntityTableView): + __regid__ = 'cw.groups-table' + __select__ = is_instance('CWGroup') + columns = ['group', 'nb_users'] + layout_args = {'display_filter': 'top'} + + column_renderers = { + 'group': tableview.MainEntityColRenderer(), + 'nb_users': tableview.EntityTableColRenderer( + header=_('num. users'), + renderfunc=lambda w,x: w(text_type(x.num_users())), + sortfunc=lambda x: x.num_users()), + } diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/debug.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/debug.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,190 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""management and error screens""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from time import strftime, localtime + +from six import text_type + +from logilab.mtconverter import xml_escape + +from cubicweb.predicates import none_rset, match_user_groups +from cubicweb.view import StartupView +from cubicweb.web.views import actions, tabs + +def dict_to_html(w, dict): + # XHTML doesn't allow emtpy
<ul> nodes
+    if dict:
+        w(u'<ul>')
+        for key in sorted(dict):
+            w(u'<li>%s: %s</li>' % (
+                xml_escape(str(key)), xml_escape(repr(dict[key]))))
+        w(u'</ul>
                ') + + +class SiteInfoAction(actions.ManagersAction): + __regid__ = 'siteinfo' + __select__ = match_user_groups('users','managers') + title = _('Site information') + category = 'manage' + order = 1000 + + +class SiteInfoView(tabs.TabsMixin, StartupView): + __regid__ = 'siteinfo' + title = _('Site information') + tabs = [_('info'), _('registry'), _('gc')] + default_tab = 'info' + + def call(self, **kwargs): + """The default view representing the instance's management""" + self.w(u'

<h1>%s</h1>
                ' % self._cw._(self.title)) + self.render_tabs(self.tabs, self.default_tab) + + +class ProcessInformationView(StartupView): + """display various web server /repository information""" + __regid__ = 'info' + __select__ = none_rset() & match_user_groups('managers', 'users') + + title = _('server information') + cache_max_age = 0 + + def call(self, **kwargs): + req = self._cw + dtformat = req.property_value('ui.datetime-format') + _ = req._ + w = self.w + repo = req.cnx.repo + # generic instance information + w(u'

<h2>%s</h2>
                ' % _('Instance')) + pyvalue = ((_('config type'), self._cw.vreg.config.name), + (_('config mode'), self._cw.vreg.config.mode), + (_('instance home'), self._cw.vreg.config.apphome)) + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) + vcconf = repo.get_versions() + w(u'

<h3>%s</h3>
                ' % _('versions configuration')) + missing = _('no version information') + pyvalue = [('CubicWeb', vcconf.get('cubicweb', missing))] + pyvalue += [(cube, vcconf.get(cube, missing)) + for cube in sorted(self._cw.vreg.config.cubes())] + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) + # repository information + w(u'

<h2>%s</h2>
                ' % _('Repository')) + w(u'

<h3>%s</h3>
                ' % _('resources usage')) + stats = self._cw.call_service('repo_stats') + stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks']) + stats['threads'] = ', '.join(sorted(stats['threads'])) + for k in stats: + if k in ('extid_cache_size', 'type_source_cache_size'): + continue + if k.endswith('_cache_size'): + stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize']) + def format_stat(sname, sval): + return '%s %s' % (xml_escape(text_type(sval)), + sname.endswith('percent') and '%' or '') + pyvalue = [(sname, format_stat(sname, sval)) + for sname, sval in sorted(stats.items())] + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) + # open repo sessions + if req.cnx.is_repo_in_memory and req.user.is_in_group('managers'): + w(u'

<h3>%s</h3>
                ' % _('opened sessions')) + sessions = repo._sessions.values() + if sessions: + w(u'
<ul>')
+                for session in sessions:
+                    w(u'<li>%s (%s: %s)<br/>' % (
+                        xml_escape(text_type(session)),
+                        _('last usage'),
+                        strftime(dtformat, localtime(session.timestamp))))
+                    dict_to_html(w, session.data)
+                    w(u'</li>')
+                w(u'</ul>')
+            else:
+                w(u'

<p>%s</p>
                ' % _('no repository sessions found')) + # web server information + w(u'

<h2>%s</h2>
                ' % _('Web server')) + pyvalue = ((_('base url'), req.base_url()), + (_('data directory url'), req.datadir_url)) + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) + from cubicweb.web.application import SESSION_MANAGER + if SESSION_MANAGER is not None and req.user.is_in_group('managers'): + sessions = SESSION_MANAGER.current_sessions() + w(u'

<h3>%s</h3>
                ' % _('opened web sessions')) + if sessions: + w(u'
<ul>')
+                for session in sessions:
+                    last_usage_time = session.mtime
+                    w(u'<li>%s (%s: %s)<br/>' % (
+                        session.sessionid,
+                        _('last usage'),
+                        strftime(dtformat, localtime(last_usage_time))))
+                    dict_to_html(w, session.data)
+                    w(u'</li>')
+                w(u'</ul>')
+            else:
+                w(u'

<p>%s</p>
                ' % _('no web sessions found')) + + + +class RegistryView(StartupView): + """display vregistry content""" + __regid__ = 'registry' + __select__ = StartupView.__select__ & match_user_groups('managers') + title = _('registry') + cache_max_age = 0 + + def call(self, **kwargs): + self.w(u'

<h1>%s</h1>
                ' % self._cw._("Registry's content")) + keys = sorted(self._cw.vreg) + url = xml_escape(self._cw.url()) + self.w(u'

<p>%s</p>\n' % ' - '.join('<a href="%s#%s">%s</a>'
+                                           % (url, key, key) for key in keys))
+        for key in keys:
+            if key in ('boxes', 'contentnavigation'): # those are bw compat registries
+                continue
+            self.w(u'

<h2 id="%s">%s</h2>
                ' % (key, key)) + if self._cw.vreg[key]: + values = sorted(self._cw.vreg[key].items()) + self.wview('pyvaltable', pyvalue=[(key, xml_escape(repr(val))) + for key, val in values]) + else: + self.w(u'

<p>Empty</p>
                \n') + + +class GCView(StartupView): + """display garbage collector information""" + __regid__ = 'gc' + __select__ = StartupView.__select__ & match_user_groups('managers') + title = _('memory leak debugging') + cache_max_age = 0 + + def call(self, **kwargs): + stats = self._cw.call_service('repo_gc_stats') + self.w(u'

<h2>%s</h2>
                ' % _('Garbage collection information')) + self.w(u'

<h3>%s</h3>
                ' % self._cw._('Looked up classes')) + self.wview('pyvaltable', pyvalue=stats['lookupclasses']) + self.w(u'

<h3>%s</h3>
                ' % self._cw._('Most referenced classes')) + self.wview('pyvaltable', pyvalue=stats['referenced']) + if stats['unreachable']: + self.w(u'

<h3>%s</h3>
                ' % self._cw._('Unreachable objects')) + values = [xml_escape(val) for val in stats['unreachable']] + self.wview('pyvallist', pyvalue=values) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/dotgraphview.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/dotgraphview.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,74 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""some basic stuff to build dot generated graph images""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import tempfile +import os +import codecs + +from logilab.mtconverter import xml_escape +from logilab.common.graph import GraphGenerator, DotBackend + +from cubicweb.view import EntityView +from cubicweb.utils import make_uid + +class DotGraphView(EntityView): + __abstract__ = True + backend_class = DotBackend + backend_kwargs = {'ratio': 'compress', 'size': '30,10'} + + def cell_call(self, row, col): + if 'MSIE 8' in self._cw.useragent(): + return + entity = self.cw_rset.get_entity(row, col) + visitor = self.build_visitor(entity) + prophdlr = self.build_dotpropshandler() + graphname = 'dotgraph%s' % str(entity.eid) + generator = GraphGenerator(self.backend_class(graphname, None, + **self.backend_kwargs)) + # image file + fd, tmpfile = tempfile.mkstemp('.svg') + os.close(fd) + generator.generate(visitor, prophdlr, tmpfile) + with codecs.open(tmpfile, 'rb', encoding='utf-8') as svgfile: + self.w(svgfile.read()) + + def build_visitor(self, entity): + raise NotImplementedError + + def build_dotpropshandler(self): + return DotPropsHandler(self._cw) + + +class DotPropsHandler(object): + def __init__(self, req): + self._ = req._ + + def node_properties(self, entity): + """return default DOT drawing options for a state or transition""" + return {'label': entity.dc_long_title(), + 'href': entity.absolute_url(), + 'fontname': 'Courier', 'fontsize': 10, 'shape':'box', + } + + def edge_properties(self, transition, fromstate, tostate): + return {'label': '', 'dir': 'forward', + 'color': 'black', 'style': 'filled'} diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/edit_attributes.pt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/edit_attributes.pt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,13 @@ + + + + + +
                attrname +
                error message if any
                +
                widget (input, textarea, etc.)
                +
                format help if any
                +
                diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/editcontroller.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/editcontroller.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,395 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""The edit controller, automatically handling entity form submitting""" + +__docformat__ = "restructuredtext en" + +from warnings import warn +from collections import defaultdict + +from datetime import datetime + +from six import text_type + +from logilab.common.deprecation import deprecated +from logilab.common.graph import ordered_nodes + +from rql.utils import rqlvar_maker + +from cubicweb import _, Binary, ValidationError +from cubicweb.view import EntityAdapter +from cubicweb.predicates import is_instance +from cubicweb.web import (INTERNAL_FIELD_VALUE, RequestError, NothingToEdit, + ProcessFormError) +from cubicweb.web.views import basecontrollers, autoform + + +class IEditControlAdapter(EntityAdapter): + __regid__ = 'IEditControl' + __select__ = is_instance('Any') + + def __init__(self, _cw, **kwargs): + if self.__class__ is not IEditControlAdapter: + warn('[3.14] IEditControlAdapter is deprecated, override EditController' + ' using match_edited_type or match_form_id selectors for example.', + DeprecationWarning) + super(IEditControlAdapter, self).__init__(_cw, **kwargs) + + def after_deletion_path(self): + """return (path, parameters) which should be used as redirect + information when this entity is being deleted + """ + parent = self.entity.cw_adapt_to('IBreadCrumbs').parent_entity() + if parent is not None: + return parent.rest_path(), {} + return str(self.entity.e_schema).lower(), {} + + def pre_web_edit(self): + """callback called by the web editcontroller when an entity will be + created/modified, to let a chance to do some entity specific stuff. + + Do nothing by default. 
+ """ + pass + + +def valerror_eid(eid): + try: + return int(eid) + except (ValueError, TypeError): + return eid + +class RqlQuery(object): + def __init__(self): + self.edited = [] + self.restrictions = [] + self.kwargs = {} + + def __repr__(self): + return ('Query ' % ( + self.edited, self.restrictions, self.kwargs)) + + def insert_query(self, etype): + if self.edited: + rql = 'INSERT %s X: %s' % (etype, ','.join(self.edited)) + else: + rql = 'INSERT %s X' % etype + if self.restrictions: + rql += ' WHERE %s' % ','.join(self.restrictions) + return rql + + def update_query(self, eid): + varmaker = rqlvar_maker() + var = next(varmaker) + while var in self.kwargs: + var = next(varmaker) + rql = 'SET %s WHERE X eid %%(%s)s' % (','.join(self.edited), var) + if self.restrictions: + rql += ', %s' % ','.join(self.restrictions) + self.kwargs[var] = eid + return rql + + def set_attribute(self, attr, value): + self.kwargs[attr] = value + self.edited.append('X %s %%(%s)s' % (attr, attr)) + + def set_inlined(self, relation, value): + self.kwargs[relation] = value + self.edited.append('X %s %s' % (relation, relation.upper())) + self.restrictions.append('%s eid %%(%s)s' % (relation.upper(), relation)) + + +class EditController(basecontrollers.ViewController): + __regid__ = 'edit' + + def publish(self, rset=None): + """edit / create / copy / delete entity / relations""" + for key in self._cw.form: + # There should be 0 or 1 action + if key.startswith('__action_'): + cbname = key[1:] + try: + callback = getattr(self, cbname) + except AttributeError: + raise RequestError(self._cw._('invalid action %r' % key)) + else: + return callback() + self._default_publish() + self.reset() + + def _ordered_formparams(self): + """ Return form parameters dictionaries for each edited entity. + + We ensure that entities can be created in this order accounting for + mandatory inlined relations. + """ + req = self._cw + graph = {} + get_rschema = self._cw.vreg.schema.rschema + # minparams = 2, because at least __type and eid are needed + values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2)) + for eid in req.edited_eids()) + # iterate over all the edited entities + for eid, values in values_by_eid.items(): + # add eid to the dependency graph + graph.setdefault(eid, set()) + # search entity's edited fields for mandatory inlined relation + for param in values['_cw_entity_fields'].split(','): + try: + rtype, role = param.split('-') + except ValueError: + # e.g. 
param='__type' + continue + rschema = get_rschema(rtype) + if rschema.inlined: + for target in rschema.targets(values['__type'], role): + rdef = rschema.role_rdef(values['__type'], target, role) + # if cardinality is 1 and if the target entity is being + # simultaneously edited, the current entity must be + # created before the target one + if rdef.cardinality[0 if role == 'subject' else 1] == '1': + # use .get since param may be unspecified (though it will usually lead + # to a validation error later) + target_eid = values.get(param) + if target_eid in values_by_eid: + # add dependency from the target entity to the + # current one + if role == 'object': + graph.setdefault(target_eid, set()).add(eid) + else: + graph.setdefault(eid, set()).add(target_eid) + break + for eid in reversed(ordered_nodes(graph)): + yield values_by_eid[eid] + + def _default_publish(self): + req = self._cw + self.errors = [] + self.relations_rql = [] + form = req.form + # so we're able to know the main entity from the repository side + if '__maineid' in form: + req.transaction_data['__maineid'] = form['__maineid'] + # no specific action, generic edition + self._to_create = req.data['eidmap'] = {} + # those two data variables are used to handle relation from/to entities + # which doesn't exist at time where the entity is edited and that + # deserves special treatment + req.data['pending_inlined'] = defaultdict(set) + req.data['pending_others'] = set() + try: + for formparams in self._ordered_formparams(): + eid = self.edit_entity(formparams) + except (RequestError, NothingToEdit) as ex: + if '__linkto' in req.form and 'eid' in req.form: + self.execute_linkto() + elif not ('__delete' in req.form or '__insert' in req.form): + raise ValidationError(None, {None: text_type(ex)}) + # all pending inlined relations to newly created entities have been + # treated now (pop to ensure there are no attempt to add new ones) + pending_inlined = req.data.pop('pending_inlined') + assert not pending_inlined, pending_inlined + # handle all other remaining relations now + for form_, field in req.data.pop('pending_others'): + self.handle_formfield(form_, field) + # then execute rql to set all relations + for querydef in self.relations_rql: + self._cw.execute(*querydef) + # XXX this processes *all* pending operations of *all* entities + if '__delete' in req.form: + todelete = req.list_form_param('__delete', req.form, pop=True) + if todelete: + autoform.delete_relations(self._cw, todelete) + self._cw.remove_pending_operations() + if self.errors: + errors = dict((f.name, text_type(ex)) for f, ex in self.errors) + raise ValidationError(valerror_eid(form.get('__maineid')), errors) + + def _insert_entity(self, etype, eid, rqlquery): + rql = rqlquery.insert_query(etype) + try: + entity = self._cw.execute(rql, rqlquery.kwargs).get_entity(0, 0) + neweid = entity.eid + except ValidationError as ex: + self._to_create[eid] = ex.entity + if self._cw.ajax_request: # XXX (syt) why? 
+ ex.entity = eid + raise + self._to_create[eid] = neweid + return neweid + + def _update_entity(self, eid, rqlquery): + self._cw.execute(rqlquery.update_query(eid), rqlquery.kwargs) + + def edit_entity(self, formparams, multiple=False): + """edit / create / copy an entity and return its eid""" + req = self._cw + etype = formparams['__type'] + entity = req.vreg['etypes'].etype_class(etype)(req) + entity.eid = valerror_eid(formparams['eid']) + is_main_entity = req.form.get('__maineid') == formparams['eid'] + # let a chance to do some entity specific stuff + entity.cw_adapt_to('IEditControl').pre_web_edit() + # create a rql query from parameters + rqlquery = RqlQuery() + # process inlined relations at the same time as attributes + # this will generate less rql queries and might be useful in + # a few dark corners + if is_main_entity: + formid = req.form.get('__form_id', 'edition') + else: + # XXX inlined forms formid should be saved in a different formparams entry + # inbetween, use cubicweb standard formid for inlined forms + formid = 'edition' + form = req.vreg['forms'].select(formid, req, entity=entity) + eid = form.actual_eid(entity.eid) + editedfields = formparams['_cw_entity_fields'] + form.formvalues = {} # init fields value cache + for field in form.iter_modified_fields(editedfields, entity): + self.handle_formfield(form, field, rqlquery) + # if there are some inlined field which were waiting for this entity's + # creation, add relevant data to the rqlquery + for form_, field in req.data['pending_inlined'].pop(entity.eid, ()): + rqlquery.set_inlined(field.name, form_.edited_entity.eid) + if self.errors: + errors = dict((f.role_name(), text_type(ex)) for f, ex in self.errors) + raise ValidationError(valerror_eid(entity.eid), errors) + if eid is None: # creation or copy + entity.eid = eid = self._insert_entity(etype, formparams['eid'], rqlquery) + elif rqlquery.edited: # edition of an existant entity + self.check_concurrent_edition(formparams, eid) + self._update_entity(eid, rqlquery) + if is_main_entity: + self.notify_edited(entity) + if '__delete' in formparams: + # XXX deprecate? 
+ todelete = req.list_form_param('__delete', formparams, pop=True) + autoform.delete_relations(req, todelete) + if '__cloned_eid' in formparams: + entity.copy_relations(int(formparams['__cloned_eid'])) + if is_main_entity: # only execute linkto for the main entity + self.execute_linkto(entity.eid) + return eid + + def handle_formfield(self, form, field, rqlquery=None): + eschema = form.edited_entity.e_schema + try: + for field, value in field.process_posted(form): + if not ( + (field.role == 'subject' and field.name in eschema.subjrels) + or + (field.role == 'object' and field.name in eschema.objrels)): + continue + rschema = self._cw.vreg.schema.rschema(field.name) + if rschema.final: + rqlquery.set_attribute(field.name, value) + else: + if form.edited_entity.has_eid(): + origvalues = set(entity.eid for entity in form.edited_entity.related(field.name, field.role, entities=True)) + else: + origvalues = set() + if value is None or value == origvalues: + continue # not edited / not modified / to do later + if rschema.inlined and rqlquery is not None and field.role == 'subject': + self.handle_inlined_relation(form, field, value, origvalues, rqlquery) + elif form.edited_entity.has_eid(): + self.handle_relation(form, field, value, origvalues) + else: + form._cw.data['pending_others'].add( (form, field) ) + except ProcessFormError as exc: + self.errors.append((field, exc)) + + def handle_inlined_relation(self, form, field, values, origvalues, rqlquery): + """handle edition for the (rschema, x) relation of the given entity + """ + if values: + rqlquery.set_inlined(field.name, next(iter(values))) + elif form.edited_entity.has_eid(): + self.handle_relation(form, field, values, origvalues) + + def handle_relation(self, form, field, values, origvalues): + """handle edition for the (rschema, x) relation of the given entity + """ + etype = form.edited_entity.e_schema + rschema = self._cw.vreg.schema.rschema(field.name) + if field.role == 'subject': + desttype = rschema.objects(etype)[0] + card = rschema.rdef(etype, desttype).cardinality[0] + subjvar, objvar = 'X', 'Y' + else: + desttype = rschema.subjects(etype)[0] + card = rschema.rdef(desttype, etype).cardinality[1] + subjvar, objvar = 'Y', 'X' + eid = form.edited_entity.eid + if field.role == 'object' or not rschema.inlined or not values: + # this is not an inlined relation or no values specified, + # explicty remove relations + rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( + subjvar, rschema, objvar) + for reid in origvalues.difference(values): + self.relations_rql.append((rql, {'x': eid, 'y': reid})) + seteids = values.difference(origvalues) + if seteids: + rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( + subjvar, rschema, objvar) + for reid in seteids: + self.relations_rql.append((rql, {'x': eid, 'y': reid})) + + def delete_entities(self, eidtypes): + """delete entities from the repository""" + redirect_info = set() + eidtypes = tuple(eidtypes) + for eid, etype in eidtypes: + entity = self._cw.entity_from_eid(eid, etype) + path, params = entity.cw_adapt_to('IEditControl').after_deletion_path() + redirect_info.add( (path, tuple(params.items())) ) + entity.cw_delete() + if len(redirect_info) > 1: + # In the face of ambiguity, refuse the temptation to guess. 
+ self._after_deletion_path = 'view', () + else: + self._after_deletion_path = next(iter(redirect_info)) + if len(eidtypes) > 1: + self._cw.set_message(self._cw._('entities deleted')) + else: + self._cw.set_message(self._cw._('entity deleted')) + + + def check_concurrent_edition(self, formparams, eid): + req = self._cw + try: + form_ts = datetime.utcfromtimestamp(float(formparams['__form_generation_time'])) + except KeyError: + # Backward and tests compatibility : if no timestamp consider edition OK + return + if req.execute("Any X WHERE X modification_date > %(fts)s, X eid %(eid)s", + {'eid': eid, 'fts': form_ts}): + # We only mark the message for translation but the actual + # translation will be handled by the Validation mechanism... + msg = _("Entity %(eid)s has changed since you started to edit it." + " Reload the page and reapply your changes.") + # ... this is why we pass the formats' dict as a third argument. + raise ValidationError(eid, {None: msg}, {'eid' : eid}) + + def _action_apply(self): + self._default_publish() + self.reset() + + def _action_delete(self): + self.delete_entities(self._cw.edited_eids(withtype=True)) + return self.reset() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/editforms.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/editforms.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,272 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
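The edit controller's `_ordered_formparams()` above yields the edited entities in an order compatible with their mandatory inlined relations: it builds a dependency graph of eids and walks logilab's `ordered_nodes()` result in reverse. Below is a minimal standalone sketch of that dependency-ordering idea, assuming an acyclic graph where `graph[eid]` holds the eids that must exist before `eid`, and substituting a plain depth-first topological sort for `ordered_nodes()`; the eids and graph shape are illustrative only, not taken from the patch.

    # Illustrative sketch, not the CubicWeb implementation: graph[eid] is the set
    # of eids that must be inserted before eid (e.g. its mandatory inlined targets).
    def creation_order(graph):
        """Return eids so that every eid comes after the eids it depends on."""
        done = set()
        order = []

        def visit(eid):
            if eid in done:
                return
            done.add(eid)
            for dep in graph.get(eid, ()):
                visit(dep)  # insert dependencies first
            order.append(eid)

        for eid in graph:
            visit(eid)
        return order

    # 'A' carries a mandatory inlined relation to 'B', so 'B' must be created first.
    print(creation_order({'A': {'B'}, 'B': set(), 'C': set()}))  # ['B', 'A', 'C']
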
+"""Set of HTML automatic forms to create, delete, copy or edit a single entity +or a list of entities of the same type +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from copy import copy + +from six.moves import range + +from logilab.mtconverter import xml_escape +from logilab.common.decorators import cached +from logilab.common.registry import yes +from logilab.common.deprecation import class_moved + +from cubicweb import tags +from cubicweb.predicates import (match_kwargs, one_line_rset, non_final_entity, + specified_etype_implements, is_instance) +from cubicweb.view import EntityView +from cubicweb.schema import display_name +from cubicweb.web import stdmsgs, eid_param, \ + formfields as ff, formwidgets as fw +from cubicweb.web.form import FormViewMixIn, FieldNotFound +from cubicweb.web.views import uicfg, forms, reledit + +_pvdc = uicfg.primaryview_display_ctrl + + +class DeleteConfForm(forms.CompositeForm): + __regid__ = 'deleteconf' + # XXX non_final_entity does not implement eclass_selector + __select__ = is_instance('Any') + + domid = 'deleteconf' + copy_nav_params = True + form_buttons = [fw.Button(stdmsgs.BUTTON_DELETE, cwaction='delete'), + fw.Button(stdmsgs.BUTTON_CANCEL, cwaction='cancel')] + + def __init__(self, *args, **kwargs): + super(DeleteConfForm, self).__init__(*args, **kwargs) + done = set() + for entity in self.cw_rset.entities(): + if entity.eid in done: + continue + done.add(entity.eid) + subform = self._cw.vreg['forms'].select('base', self._cw, + entity=entity, + mainform=False) + self.add_subform(subform) + + +class DeleteConfFormView(FormViewMixIn, EntityView): + """form used to confirm deletion of some entities""" + __regid__ = 'deleteconf' + title = _('delete') + # don't use navigation, all entities asked to be deleted should be displayed + # else we will only delete the displayed page + paginable = False + + def call(self, onsubmit=None): + """ask for confirmation before real deletion""" + req, w = self._cw, self.w + _ = req._ + w(u'\n' + % _('this action is not reversible!')) + # XXX above message should have style of a warning + w(u'

<h4>%s</h4>
                \n' % _('Do you want to delete the following element(s)?')) + form = self._cw.vreg['forms'].select(self.__regid__, req, + rset=self.cw_rset, + onsubmit=onsubmit) + w(u'
<ul>\n')
+        for entity in self.cw_rset.entities():
+            # don't use outofcontext view or any other that may contain inline
+            # edition form
+            w(u'<li>%s</li>' % tags.a(entity.view('textoutofcontext'),
+                                      href=entity.absolute_url()))
+        w(u'</ul>
                \n') + form.render(w=self.w) + + +class EditionFormView(FormViewMixIn, EntityView): + """display primary entity edition form""" + __regid__ = 'edition' + # add yes() so it takes precedence over deprecated views in baseforms, + # though not baseforms based customized view + __select__ = one_line_rset() & non_final_entity() & yes() + form_id = 'edition' + + title = _('modification') + + def cell_call(self, row, col, **kwargs): + entity = self.cw_rset.complete_entity(row, col) + self.render_form(entity) + + def render_form(self, entity): + """fetch and render the form""" + self.form_title(entity) + form = self._cw.vreg['forms'].select(self.form_id, self._cw, + entity=entity, + submitmsg=self.submited_message()) + self.init_form(form, entity) + form.render(w=self.w) + + def init_form(self, form, entity): + """customize your form before rendering here""" + pass + + def form_title(self, entity): + """the form view title""" + ptitle = self._cw._(self.title) + self.w(u'
<div class="formTitle"><span>%s %s</span></div>
                ' % ( + entity.dc_type(), ptitle and '(%s)' % ptitle)) + + def submited_message(self): + """return the message that will be displayed on successful edition""" + return self._cw._('entity edited') + + +class CreationFormView(EditionFormView): + """display primary entity creation form""" + __regid__ = 'creation' + __select__ = specified_etype_implements('Any') & yes() + + title = _('creation') + + def call(self, **kwargs): + """creation view for an entity""" + # at this point we know etype is a valid entity type, thanks to our + # selector + etype = kwargs.pop('etype', self._cw.form.get('etype')) + entity = self._cw.vreg['etypes'].etype_class(etype)(self._cw) + entity.eid = next(self._cw.varmaker) + self.render_form(entity) + + def form_title(self, entity): + """the form view title""" + if '__linkto' in self._cw.form: + if isinstance(self._cw.form['__linkto'], list): + # XXX which one should be considered (case: add a ticket to a + # version in jpl) + rtype, linkto_eid, role = self._cw.form['__linkto'][0].split(':') + else: + rtype, linkto_eid, role = self._cw.form['__linkto'].split(':') + linkto_rset = self._cw.eid_rset(linkto_eid) + linkto_type = linkto_rset.description[0][0] + if role == 'subject': + title = self._cw.__('creating %s (%s %s %s %%(linkto)s)' % ( + entity.e_schema, entity.e_schema, rtype, linkto_type)) + else: + title = self._cw.__('creating %s (%s %%(linkto)s %s %s)' % ( + entity.e_schema, linkto_type, rtype, entity.e_schema)) + msg = title % {'linkto' : self._cw.view('incontext', linkto_rset)} + self.w(u'
<div class="formTitle notransform"><span>%s</span></div>
                ' % msg) + else: + super(CreationFormView, self).form_title(entity) + + def url(self): + """return the url associated with this view""" + req = self._cw + return req.vreg["etypes"].etype_class(req.form['etype']).cw_create_url( + req) + + def submited_message(self): + """return the message that will be displayed on successful edition""" + return self._cw._('entity created') + + +class CopyFormView(EditionFormView): + """display primary entity creation form initialized with values from another + entity + """ + __regid__ = 'copy' + + title = _('copy') + warning_message = _('Please note that this is only a shallow copy') + + def render_form(self, entity): + """fetch and render the form""" + # make a copy of entity to avoid altering the entity in the + # request's cache. + entity.complete() + self.newentity = copy(entity) + self.copying = entity + self.newentity.eid = next(self._cw.varmaker) + self.w(u'\n' + % self._cw._(self.warning_message)) + super(CopyFormView, self).render_form(self.newentity) + del self.newentity + + def init_form(self, form, entity): + """customize your form before rendering here""" + super(CopyFormView, self).init_form(form, entity) + if entity.eid == self.newentity.eid: + form.add_hidden(eid_param('__cloned_eid', entity.eid), + self.copying.eid) + for rschema, role in form.editable_attributes(): + if not rschema.final: + # ensure relation cache is filed + rset = self.copying.related(rschema, role) + self.newentity.cw_set_relation_cache(rschema, role, rset) + + def submited_message(self): + """return the message that will be displayed on successful edition""" + return self._cw._('entity copied') + + +class TableEditForm(forms.CompositeForm): + __regid__ = 'muledit' + domid = 'entityForm' + onsubmit = "return validateForm('%s', null);" % domid + form_buttons = [fw.SubmitButton(_('validate modifications on selected items')), + fw.ResetButton(_('revert changes'))] + + def __init__(self, req, rset, **kwargs): + kwargs.setdefault('__redirectrql', rset.printable_rql()) + super(TableEditForm, self).__init__(req, rset=rset, **kwargs) + for row in range(len(self.cw_rset)): + form = self._cw.vreg['forms'].select('edition', self._cw, + rset=self.cw_rset, row=row, + formtype='muledit', + copy_nav_params=False, + mainform=False) + # XXX rely on the EntityCompositeFormRenderer to put the eid input + form.remove_field(form.field_by_name('eid')) + self.add_subform(form) + + +class TableEditFormView(FormViewMixIn, EntityView): + __regid__ = 'muledit' + __select__ = EntityView.__select__ & yes() + title = _('multiple edit') + + def call(self, **kwargs): + """a view to edit multiple entities of the same type the first column + should be the eid + """ + # XXX overriding formvid (eg __form_id) necessary to make work edition: + # the edit controller try to select the form with no rset but + # entity=entity, and use this form to edit the entity. So we want + # edition form there but specifying formvid may have other undesired + # side effect. 
Maybe we should provide another variable optionally + # telling which form the edit controller should select (eg difffers + # between html generation / post handling form) + form = self._cw.vreg['forms'].select(self.__regid__, self._cw, + rset=self.cw_rset, + copy_nav_params=True, + formvid='edition') + form.render(w=self.w) + + +# click and edit handling ('reledit') ########################################## + +ClickAndEditFormView = class_moved(reledit.ClickAndEditFormView) +AutoClickAndEditFormView = class_moved(reledit.AutoClickAndEditFormView) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/editviews.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/editviews.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,128 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Some views used to help to the edition process""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from logilab.common.decorators import cached +from logilab.mtconverter import xml_escape + +from cubicweb.view import EntityView, StartupView +from cubicweb.predicates import (one_line_rset, non_final_entity, + match_search_state) +from cubicweb.web import httpcache +from cubicweb.web.views import baseviews, linksearch_select_url + + +class SearchForAssociationView(EntityView): + """view called by the edition view when the user asks to search for + something to link to the edited eid + """ + __regid__ = 'search-associate' + __select__ = (one_line_rset() & match_search_state('linksearch') + & non_final_entity()) + + title = _('search for association') + + def cell_call(self, row, col): + rset, vid, divid, paginate = self.filter_box_context_info() + self.cw_rset = rset + self.w(u'
                ' % divid) + self.paginate() + self.wview(vid, rset, 'noresult') + self.w(u'
                ') + + @cached + def filter_box_context_info(self): + entity = self.cw_rset.get_entity(0, 0) + role, eid, rtype, etype = self._cw.search_state[1] + assert entity.eid == int(eid) + # the default behaviour is to fetch all unrelated entities and display + # them. Use fetch_order and not fetch_unrelated_order as sort method + # since the latter is mainly there to select relevant items in the combo + # box, it doesn't give interesting result in this context + rql, args = entity.cw_unrelated_rql(rtype, etype, role, + ordermethod='fetch_order', + vocabconstraints=False) + rset = self._cw.execute(rql, args) + return rset, 'list', "search-associate-content", True + + +class OutOfContextSearch(EntityView): + __regid__ = 'outofcontext-search' + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + erset = entity.as_rset() + if self._cw.match_search_state(erset): + self.w(u'%s [...]' % ( + xml_escape(linksearch_select_url(self._cw, erset)), + self._cw._('select this entity'), + xml_escape(entity.view('textoutofcontext')), + xml_escape(entity.absolute_url(vid='primary')), + self._cw._('view detail for this entity'))) + else: + entity.view('outofcontext', w=self.w) + + +class ComboboxView(EntityView): + """the view used in combobox (unrelated entities) + + THIS IS A TEXT VIEW. DO NOT HTML_ESCAPE + """ + __regid__ = 'combobox' + title = None + + def cell_call(self, row, col, **kwargs): + """the combo-box view for an entity: same as text out of context view + by default + """ + self.wview('textoutofcontext', self.cw_rset, row=row, col=col) + + +class EditableFinalView(baseviews.FinalView): + """same as FinalView but enables inplace-edition when possible""" + __regid__ = 'editable-final' + + def cell_call(self, row, col, props=None): + entity, rtype = self.cw_rset.related_entity(row, col) + if entity is not None: + self.w(entity.view('reledit', rtype=rtype)) + else: + super(EditableFinalView, self).cell_call(row, col, props) + +try: + from cubicweb.web import captcha +except ImportError: + # PIL not installed + pass +else: + class CaptchaView(StartupView): + __regid__ = 'captcha' + + http_cache_manager = httpcache.NoHTTPCacheManager + binary = True + templatable = False + content_type = 'image/jpg' + + def call(self): + text, data = captcha.captcha(self._cw.vreg.config['captcha-font-file'], + self._cw.vreg.config['captcha-font-size']) + key = self._cw.form.get('captchakey', 'captcha') + self._cw.session.data[key] = text + self.w(data.read()) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/emailaddress.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/emailaddress.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,146 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
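# --- editor's sketch (not part of the original changeset) --------------------
# The SearchForAssociationView above builds its candidate list with
# entity.cw_unrelated_rql(); a cube view can reuse the same pattern directly.
# 'Ticket', 'concerns' and 'Project' are hypothetical schema names used only
# for illustration.
from cubicweb.predicates import is_instance
from cubicweb.view import EntityView

class UnrelatedProjectsView(EntityView):
    """list projects not yet linked to the displayed ticket (sketch)"""
    __regid__ = 'unrelated-projects'   # hypothetical view id
    __select__ = EntityView.__select__ & is_instance('Ticket')

    def entity_call(self, entity, **kwargs):
        # same call shape as filter_box_context_info(): fetch_order is used
        # because the result is displayed as a list, not put in a combo box
        rql, args = entity.cw_unrelated_rql('concerns', 'Project', 'subject',
                                            ordermethod='fetch_order')
        rset = self._cw.execute(rql, args)
        self.wview('list', rset, 'noresult')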
+"""Specific views for email addresses entities""" + +__docformat__ = "restructuredtext en" + +from logilab.mtconverter import xml_escape + +from cubicweb.schema import display_name +from cubicweb.predicates import is_instance +from cubicweb import Unauthorized +from cubicweb.web.views import uicfg, baseviews, primary, ibreadcrumbs + +_pvs = uicfg.primaryview_section +_pvs.tag_subject_of(('*', 'use_email', '*'), 'attributes') +_pvs.tag_subject_of(('*', 'primary_email', '*'), 'hidden') + +class EmailAddressPrimaryView(primary.PrimaryView): + __select__ = is_instance('EmailAddress') + + def cell_call(self, row, col, skipeids=None): + self.skipeids = skipeids + super(EmailAddressPrimaryView, self).cell_call(row, col) + + def render_entity_attributes(self, entity): + self.w(u'

                ') + entity.view('oneline', w=self.w) + if entity.prefered: + self.w(u' (%s)' % entity.prefered.view('oneline')) + self.w(u'

                ') + try: + persons = entity.reverse_primary_email + except Unauthorized: + persons = [] + if persons: + emailof = persons[0] + self.field(display_name(self._cw, 'primary_email', 'object'), emailof.view('oneline')) + pemaileid = emailof.eid + else: + pemaileid = None + try: + emailof = 'use_email' in self._cw.vreg.schema and entity.reverse_use_email or () + emailof = [e for e in emailof if not e.eid == pemaileid] + except Unauthorized: + emailof = [] + if emailof: + emailofstr = ', '.join(e.view('oneline') for e in emailof) + self.field(display_name(self._cw, 'use_email', 'object'), emailofstr) + + def render_entity_relations(self, entity): + for i, email in enumerate(entity.related_emails(self.skipeids)): + self.w(u'
                ' % (i%2 and 'even' or 'odd')) + email.view('oneline', w=self.w, contexteid=entity.eid) + self.w(u'
                ') + + +class EmailAddressShortPrimaryView(EmailAddressPrimaryView): + __select__ = is_instance('EmailAddress') + __regid__ = 'shortprimary' + title = None # hidden view + + def render_entity_attributes(self, entity): + self.w(u'
                ') + entity.view('oneline', w=self.w) + self.w(u'
                ') + + +class EmailAddressOneLineView(baseviews.OneLineView): + __select__ = is_instance('EmailAddress') + + def entity_call(self, entity, **kwargs): + if entity.reverse_primary_email: + self.w(u'') + if entity.alias: + self.w(u'%s <' % xml_escape(entity.alias)) + self.w('%s' % (xml_escape(entity.absolute_url()), + xml_escape(entity.display_address()))) + if entity.alias: + self.w(u'>\n') + if entity.reverse_primary_email: + self.w(u'') + + +class EmailAddressMailToView(baseviews.OneLineView): + """A one line view that builds a user clickable URL for an email with + 'mailto:'""" + + __regid__ = 'mailto' + __select__ = is_instance('EmailAddress') + + def entity_call(self, entity, **kwargs): + if entity.reverse_primary_email: + self.w(u'') + if entity.alias: + alias = entity.alias + elif entity.reverse_use_email: + alias = entity.reverse_use_email[0].dc_title() + else: + alias = None + if alias: + mailto = "mailto:%s <%s>" % (alias, entity.display_address()) + else: + mailto = "mailto:%s" % entity.display_address() + self.w(u'%s' % (xml_escape(mailto), + xml_escape(entity.display_address()))) + if entity.reverse_primary_email: + self.w(u'') + + +class EmailAddressInContextView(baseviews.InContextView): + __select__ = is_instance('EmailAddress') + + def cell_call(self, row, col, **kwargs): + if self._cw.vreg.config['mangle-emails']: + self.wview('oneline', self.cw_rset, row=row, col=col, **kwargs) + else: + self.wview('mailto', self.cw_rset, row=row, col=col, **kwargs) + + +class EmailAddressTextView(baseviews.TextView): + __select__ = is_instance('EmailAddress') + + def cell_call(self, row, col, **kwargs): + self.w(self.cw_rset.get_entity(row, col).display_address()) + + +class EmailAddressIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('EmailAddress') + + def parent_entity(self): + return self.entity.email_of diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/embedding.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/embedding.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,38 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Objects interacting together to provides the external page embeding +functionality. 
+""" + +from logilab.common.deprecation import class_moved, moved + +try: + from cubes.embed.views import * + + IEmbedableAdapter = class_moved(IEmbedableAdapter, message='[3.17] IEmbedableAdapter moved to cubes.embed.views') + ExternalTemplate = class_moved(ExternalTemplate, message='[3.17] IEmbedableAdapter moved to cubes.embed.views') + EmbedController = class_moved(EmbedController, message='[3.17] IEmbedableAdapter moved to cubes.embed.views') + entity_has_embedable_url = moved('cubes.embed.views', 'entity_has_embedable_url') + EmbedAction = class_moved(EmbedAction, message='[3.17] EmbedAction moved to cubes.embed.views') + replace_href = class_moved(replace_href, message='[3.17] replace_href moved to cubes.embed.views') + embed_external_page = moved('cubes.embed.views', 'embed_external_page') + absolutize_links = class_moved(absolutize_links, message='[3.17] absolutize_links moved to cubes.embed.views') + prefix_links = moved('cubes.embed.views', 'prefix_links') +except ImportError: + from cubicweb.web import LOGGER + LOGGER.warning('[3.17] embedding extracted to cube embed that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/error.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/error.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,40 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Set of HTML errors views. Error view are generally implemented +as startup views and are used for standard error pages (404, 500, etc.) +""" + +__docformat__ = "restructuredtext en" + +from cubicweb.view import StartupView + +class FourOhFour(StartupView): + __regid__ = '404' + + def call(self): + _ = self._cw._ + self.w(u"

                %s

                " % _('this resource does not exist')) + + +class ErrorOccured(StartupView): + __regid__ = '500' + + def call(self): + _ = self._cw._ + self.w(u"

                %s

                " % + _('an error occurred, the request cannot be fulfilled')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/facets.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/facets.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,435 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""the facets box and some basic facets""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from logilab.mtconverter import xml_escape +from logilab.common.decorators import cachedproperty +from logilab.common.registry import objectify_predicate, yes + +from cubicweb import tags +from cubicweb.predicates import (non_final_entity, multi_lines_rset, + match_context_prop, relation_possible) +from cubicweb.utils import json_dumps +from cubicweb.uilib import css_em_num_value +from cubicweb.view import AnyRsetView +from cubicweb.web import component, facet as facetbase +from cubicweb.web.views.ajaxcontroller import ajaxfunc + +def facets(req, rset, context, mainvar=None, **kwargs): + """return the base rql and a list of widgets for facets applying to the + given rset/context (cached version of :func:`_facet`) + + :param req: A :class:`~cubicweb.req.RequestSessionBase` object + :param rset: A :class:`~cubicweb.rset.ResultSet` + :param context: A string that match the ``__regid__`` of a ``FacetFilter`` + :param mainvar: A string that match a select var from the rset + """ + try: + cache = req.__rset_facets + except AttributeError: + cache = req.__rset_facets = {} + try: + return cache[(rset, context, mainvar)] + except KeyError: + facets = _facets(req, rset, context, mainvar, **kwargs) + cache[(rset, context, mainvar)] = facets + return facets + +def _facets(req, rset, context, mainvar, **kwargs): + """return the base rql and a list of widgets for facets applying to the + given rset/context + + :param req: A :class:`~cubicweb.req.RequestSessionBase` object + :param rset: A :class:`~cubicweb.rset.ResultSet` + :param context: A string that match the ``__regid__`` of a ``FacetFilter`` + :param mainvar: A string that match a select var from the rset + """ + ### initialisation + # XXX done by selectors, though maybe necessary when rset has been hijacked + # (e.g. 
contextview_selector matched) + origqlst = rset.syntax_tree() + # union not yet supported + if len(origqlst.children) != 1: + req.debug('facette disabled on union request %s', origqlst) + return None, () + rqlst = origqlst.copy() + select = rqlst.children[0] + filtered_variable, baserql = facetbase.init_facets(rset, select, mainvar) + ### Selection + possible_facets = req.vreg['facets'].poss_visible_objects( + req, rset=rset, rqlst=origqlst, select=select, + context=context, filtered_variable=filtered_variable, **kwargs) + wdgs = [(facet, facet.get_widget()) for facet in possible_facets] + return baserql, [wdg for facet, wdg in wdgs if wdg is not None] + + +@objectify_predicate +def contextview_selector(cls, req, rset=None, row=None, col=None, view=None, + **kwargs): + if view: + try: + getcontext = getattr(view, 'filter_box_context_info') + except AttributeError: + return 0 + rset = getcontext()[0] + if rset is None or rset.rowcount < 2: + return 0 + wdgs = facets(req, rset, cls.__regid__, view=view)[1] + return len(wdgs) + return 0 + +@objectify_predicate +def has_facets(cls, req, rset=None, **kwargs): + if rset is None or rset.rowcount < 2: + return 0 + wdgs = facets(req, rset, cls.__regid__, **kwargs)[1] + return len(wdgs) + + +def filter_hiddens(w, baserql, wdgs, **kwargs): + kwargs['facets'] = ','.join(wdg.facet.__regid__ for wdg in wdgs) + kwargs['baserql'] = baserql + for key, val in kwargs.items(): + w(u'' % ( + key, xml_escape(val))) + + +class FacetFilterMixIn(object): + """Mixin Class to generate Facet Filter Form + + To generate the form, you need to explicitly call the following method: + + .. automethod:: generate_form + + The most useful function to override is: + + .. automethod:: layout_widgets + """ + + needs_js = ['cubicweb.ajax.js', 'cubicweb.facets.js'] + needs_css = ['cubicweb.facets.css'] + + def generate_form(self, w, rset, divid, vid, vidargs=None, mainvar=None, + paginate=False, cssclass='', hiddens=None, **kwargs): + """display a form to filter some view's content + + :param w: Write function + + :param rset: ResultSet to be filtered + + :param divid: Dom ID of the div where the rendering of the view is done. + :type divid: string + + :param vid: ID of the view display in the div + :type vid: string + + :param paginate: Is the view paginated? + :type paginate: boolean + + :param cssclass: Additional css classes to put on the form. + :type cssclass: string + + :param hiddens: other hidden parametters to include in the forms. + :type hiddens: dict from extra keyword argument + """ + # XXX Facet.context property hijacks an otherwise well-behaved + # vocabulary with its own notions + # Hence we whack here to avoid a clash + kwargs.pop('context', None) + baserql, wdgs = facets(self._cw, rset, context=self.__regid__, + mainvar=mainvar, **kwargs) + assert wdgs + self._cw.add_js(self.needs_js) + self._cw.add_css(self.needs_css) + self._cw.html_headers.define_var('facetLoadingMsg', + self._cw._('facet-loading-msg')) + if vidargs is not None: + warn("[3.14] vidargs is deprecated. Maybe you're using some TableView?", + DeprecationWarning, stacklevel=2) + else: + vidargs = {} + vidargs = dict((k, v) for k, v in vidargs.items() if v) + facetargs = xml_escape(json_dumps([divid, vid, paginate, vidargs])) + w(u'
                ' % (divid, cssclass, facetargs)) + w(u'
                ') + if hiddens is None: + hiddens = {} + if mainvar: + hiddens['mainvar'] = mainvar + filter_hiddens(w, baserql, wdgs, **hiddens) + self.layout_widgets(w, self.sorted_widgets(wdgs)) + + # is supposed to submit the form only if there is a single + # input:text field. However most browsers will submit the form + # on anyway if there is an input:submit field. + # + # see: http://www.w3.org/MarkUp/html-spec/html-spec_8.html#SEC8.2 + # + # Firefox 7.0.1 does not submit form on if there is more than a + # input:text field and not input:submit but does it if there is an + # input:submit. + # + # IE 6 or Firefox 2 behave the same way. + w(u'') + # + w(u'
                \n') + w(u'
                \n') + + def sorted_widgets(self, wdgs): + """sort widgets: by default sort by widget height, then according to + widget.order (the original widgets order) + """ + return sorted(wdgs, key=lambda x: 99 * (not x.facet.start_unfolded) or x.height ) + + def layout_widgets(self, w, wdgs): + """layout widgets: by default simply render each of them + (i.e. succession of
                ) + """ + for wdg in wdgs: + wdg.render(w=w) + + +class FilterBox(FacetFilterMixIn, component.CtxComponent): + """filter results of a query""" + __regid__ = 'facet.filterbox' + __select__ = ((non_final_entity() & has_facets()) + | contextview_selector()) # can't use has_facets because of + # contextview mecanism + context = 'left' # XXX doesn't support 'incontext', only 'left' or 'right' + title = _('facet.filters') + visible = True # functionality provided by the search box by default + order = 1 + + bk_linkbox_template = u'
                %s
                ' + + def render_body(self, w, **kwargs): + req = self._cw + rset, vid, divid, paginate = self._get_context() + assert len(rset) > 1 + if vid is None: + vid = req.form.get('vid') + if self.bk_linkbox_template and req.vreg.schema['Bookmark'].has_perm(req, 'add'): + w(self.bookmark_link(rset)) + w(self.focus_link(rset)) + hiddens = {} + for param in ('subvid', 'vtitle'): + if param in req.form: + hiddens[param] = req.form[param] + self.generate_form(w, rset, divid, vid, paginate=paginate, + hiddens=hiddens, **self.cw_extra_kwargs) + + def _get_context(self): + view = self.cw_extra_kwargs.get('view') + context = getattr(view, 'filter_box_context_info', lambda: None)() + if context: + rset, vid, divid, paginate = context + else: + rset = self.cw_rset + vid, divid = None, 'pageContent' + paginate = view and view.paginable + return rset, vid, divid, paginate + + def bookmark_link(self, rset): + req = self._cw + bk_path = u'rql=%s' % req.url_quote(rset.printable_rql()) + if req.form.get('vid'): + bk_path += u'&vid=%s' % req.url_quote(req.form['vid']) + bk_path = u'view?' + bk_path + bk_title = req._('my custom search') + linkto = u'bookmarked_by:%s:subject' % req.user.eid + bkcls = req.vreg['etypes'].etype_class('Bookmark') + bk_add_url = bkcls.cw_create_url(req, path=bk_path, title=bk_title, + __linkto=linkto) + bk_base_url = bkcls.cw_create_url(req, title=bk_title, __linkto=linkto) + bk_link = u'%s' % ( + xml_escape(bk_base_url), xml_escape(bk_add_url), + req._('bookmark this search')) + return self.bk_linkbox_template % bk_link + + def focus_link(self, rset): + return self.bk_linkbox_template % tags.a(self._cw._('focus on this selection'), + href=self._cw.url(), id='focusLink') + +class FilterTable(FacetFilterMixIn, AnyRsetView): + __regid__ = 'facet.filtertable' + __select__ = has_facets() + average_perfacet_uncomputable_overhead = .3 + + def call(self, vid, divid, vidargs=None, cssclass=''): + hiddens = self.cw_extra_kwargs.setdefault('hiddens', {}) + hiddens['fromformfilter'] = '1' + self.generate_form(self.w, self.cw_rset, divid, vid, vidargs=vidargs, + cssclass=cssclass, **self.cw_extra_kwargs) + + @cachedproperty + def per_facet_height_overhead(self): + return (css_em_num_value(self._cw.vreg, 'facet_MarginBottom', .2) + + css_em_num_value(self._cw.vreg, 'facet_Padding', .2) + + self.average_perfacet_uncomputable_overhead) + + def layout_widgets(self, w, wdgs): + """layout widgets: put them in a table where each column should have + sum(wdg.height) < wdg_stack_size. + """ + w(u'
                \n') + widget_queue = [] + queue_height = 0 + wdg_stack_size = facetbase._DEFAULT_FACET_GROUP_HEIGHT + for wdg in wdgs: + height = wdg.height + self.per_facet_height_overhead + if queue_height + height <= wdg_stack_size: + widget_queue.append(wdg) + queue_height += height + continue + w(u'
                ') + for queued in widget_queue: + queued.render(w=w) + w(u'
                ') + widget_queue = [wdg] + queue_height = height + if widget_queue: + w(u'
                ') + for queued in widget_queue: + queued.render(w=w) + w(u'
                ') + w(u'
                \n') + +# python-ajax remote functions used by facet widgets ######################### + +@ajaxfunc(output_type='json') +def filter_build_rql(self, names, values): + form = self._rebuild_posted_form(names, values) + self._cw.form = form + builder = facetbase.FilterRQLBuilder(self._cw) + return builder.build_rql() + +@ajaxfunc(output_type='json') +def filter_select_content(self, facetids, rql, mainvar): + # Union unsupported yet + select = self._cw.vreg.parse(self._cw, rql).children[0] + filtered_variable = facetbase.get_filtered_variable(select, mainvar) + facetbase.prepare_select(select, filtered_variable) + update_map = {} + for fid in facetids: + fobj = facetbase.get_facet(self._cw, fid, select, filtered_variable) + update_map[fid] = fobj.possible_values() + return update_map + + + +# facets ###################################################################### + +class CWSourceFacet(facetbase.RelationFacet): + __regid__ = 'cw_source-facet' + rtype = 'cw_source' + target_attr = 'name' + +class CreatedByFacet(facetbase.RelationFacet): + __regid__ = 'created_by-facet' + rtype = 'created_by' + target_attr = 'login' + +class InGroupFacet(facetbase.RelationFacet): + __regid__ = 'in_group-facet' + rtype = 'in_group' + target_attr = 'name' + +class InStateFacet(facetbase.RelationAttributeFacet): + __regid__ = 'in_state-facet' + rtype = 'in_state' + target_attr = 'name' + + +# inherit from RelationFacet to benefit from its possible_values implementation +class ETypeFacet(facetbase.RelationFacet): + __regid__ = 'etype-facet' + __select__ = yes() + order = 1 + rtype = 'is' + target_attr = 'name' + + @property + def title(self): + return self._cw._('entity type') + + def vocabulary(self): + """return vocabulary for this facet, eg a list of 2-uple (label, value) + """ + etypes = self.cw_rset.column_types(0) + return sorted((self._cw._(etype), etype) for etype in etypes) + + def add_rql_restrictions(self): + """add restriction for this facet into the rql syntax tree""" + value = self._cw.form.get(self.__regid__) + if not value: + return + self.select.add_type_restriction(self.filtered_variable, value) + + def possible_values(self): + """return a list of possible values (as string since it's used to + compare to a form value in javascript) for this facet + """ + select = self.select + select.save_state() + try: + facetbase.cleanup_select(select, self.filtered_variable) + etype_var = facetbase.prepare_vocabulary_select( + select, self.filtered_variable, self.rtype, self.role) + attrvar = select.make_variable() + select.add_selected(attrvar) + select.add_relation(etype_var, 'name', attrvar) + return [etype for _, etype in self.rqlexec(select.as_string())] + finally: + select.recover() + + +class HasTextFacet(facetbase.AbstractFacet): + __select__ = relation_possible('has_text', 'subject') & match_context_prop() + __regid__ = 'has_text-facet' + rtype = 'has_text' + role = 'subject' + order = 0 + + @property + def wdgclass(self): + return facetbase.FacetStringWidget + + @property + def title(self): + return self._cw._('has_text') + + def get_widget(self): + """return the widget instance to use to display this facet + + default implentation expects a .vocabulary method on the facet and + return a combobox displaying this vocabulary + """ + return self.wdgclass(self) + + def add_rql_restrictions(self): + """add restriction for this facet into the rql syntax tree""" + value = self._cw.form.get(self.__regid__) + if not value: + return + 
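# --- editor's sketch (not part of the original changeset) --------------------
# The RelationFacet subclasses above (CWSourceFacet, CreatedByFacet,
# InGroupFacet, ...) follow a purely declarative pattern that cubes can reuse
# for their own schema. 'Ticket', 'concerns' and 'Project' are hypothetical
# names.
from cubicweb.predicates import is_instance
from cubicweb.web import facet as facetbase

class ConcernsFacet(facetbase.RelationFacet):
    """filter tickets on the project they concern (illustrative only)"""
    __regid__ = 'concerns-facet'
    __select__ = facetbase.RelationFacet.__select__ & is_instance('Ticket')
    rtype = 'concerns'        # relation used to restrict the result set
    role = 'subject'          # the filtered entity is the subject of 'concerns'
    target_attr = 'name'      # attribute of the target shown as facet value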
self.select.add_constant_restriction(self.filtered_variable, 'has_text', value, 'String') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/formrenderers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/formrenderers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,546 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +Renderers +--------- + +.. Note:: + Form renderers are responsible to layout a form to HTML. + +Here are the base renderers available: + +.. autoclass:: cubicweb.web.views.formrenderers.FormRenderer +.. autoclass:: cubicweb.web.views.formrenderers.HTableFormRenderer +.. autoclass:: cubicweb.web.views.formrenderers.EntityCompositeFormRenderer +.. autoclass:: cubicweb.web.views.formrenderers.EntityFormRenderer +.. autoclass:: cubicweb.web.views.formrenderers.EntityInlinedFormRenderer + +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from six import text_type + +from logilab.mtconverter import xml_escape +from logilab.common.registry import yes + +from cubicweb import tags, uilib +from cubicweb.appobject import AppObject +from cubicweb.predicates import is_instance +from cubicweb.utils import json_dumps, support_args +from cubicweb.web import eid_param, formwidgets as fwdgs + + +def checkbox(name, value, attrs='', checked=None): + if checked is None: + checked = value + checked = checked and 'checked="checked"' or '' + return u'' % ( + name, value, checked, attrs) + +def field_label(form, field): + if callable(field.label): + return field.label(form, field) + # XXX with 3.6 we can now properly rely on 'if field.role is not None' and + # stop having a tuple for label + if isinstance(field.label, tuple): # i.e. 
needs contextual translation + return form._cw.pgettext(*field.label) + return form._cw._(field.label) + + + +class FormRenderer(AppObject): + """This is the 'default' renderer, displaying fields in a two columns table: + + +--------------+--------------+ + | field1 label | field1 input | + +--------------+--------------+ + | field2 label | field2 input | + +--------------+--------------+ + + +---------+ + | buttons | + +---------+ + """ + __registry__ = 'formrenderers' + __regid__ = 'default' + + _options = ('display_label', 'display_help', + 'display_progress_div', 'table_class', 'button_bar_class', + # add entity since it may be given to select the renderer + 'entity') + display_label = True + display_help = True + display_progress_div = True + table_class = u'attributeForm' + button_bar_class = u'formButtonBar' + + def __init__(self, req=None, rset=None, row=None, col=None, **kwargs): + super(FormRenderer, self).__init__(req, rset=rset, row=row, col=col) + if self._set_options(kwargs): + raise ValueError('unconsumed arguments %s' % kwargs) + + def _set_options(self, kwargs): + for key in self._options: + try: + setattr(self, key, kwargs.pop(key)) + except KeyError: + continue + return kwargs + + # renderer interface ###################################################### + + def render(self, w, form, values): + self._set_options(values) + form.add_media() + data = [] + _w = data.append + _w(self.open_form(form, values)) + self.render_content(_w, form, values) + _w(self.close_form(form, values)) + errormsg = self.error_message(form) + if errormsg: + data.insert(0, errormsg) + # NOTE: we call unicode because `tag` objects may be found within data + # e.g. from the cwtags library + w(''.join(text_type(x) for x in data)) + + def render_content(self, w, form, values): + if self.display_progress_div: + w(u'
                %s
                ' % self._cw._('validating...')) + w(u'\n
                \n') + self.render_fields(w, form, values) + self.render_buttons(w, form) + w(u'\n
                \n') + + def render_label(self, form, field): + if field.label is None: + return u'' + label = field_label(form, field) + attrs = {'for': field.dom_id(form)} + if field.required: + attrs['class'] = 'required' + return tags.label(label, **attrs) + + def render_help(self, form, field): + help = [] + descr = field.help + if callable(descr): + descr = descr(form, field) + if descr: + help.append('
                %s
                ' % self._cw._(descr)) + example = field.example_format(self._cw) + if example: + help.append('
                (%s: %s)
                ' + % (self._cw._('sample format'), example)) + return u' '.join(help) + + # specific methods (mostly to ease overriding) ############################# + + def error_message(self, form): + """return formatted error message + + This method should be called once inlined field errors has been consumed + """ + req = self._cw + errex = form.form_valerror + # get extra errors + if errex is not None: + errormsg = req._('please correct the following errors:') + errors = form.remaining_errors() + if errors: + if len(errors) > 1: + templstr = u'
              • %s
              • \n' + else: + templstr = u' %s\n' + for field, err in errors: + if field is None: + errormsg += templstr % err + else: + errormsg += templstr % '%s: %s' % (req._(field), err) + if len(errors) > 1: + errormsg = '
                  %s
                ' % errormsg + return u'
                %s
                ' % errormsg + return u'' + + def open_form(self, form, values, **attrs): + if form.needs_multipart: + enctype = u'multipart/form-data' + else: + enctype = u'application/x-www-form-urlencoded' + attrs.setdefault('enctype', enctype) + attrs.setdefault('method', 'post') + attrs.setdefault('action', form.form_action() or '#') + if form.domid: + attrs.setdefault('id', form.domid) + if form.onsubmit: + attrs.setdefault('onsubmit', form.onsubmit) + if form.cssstyle: + attrs.setdefault('style', form.cssstyle) + if form.cssclass: + attrs.setdefault('class', form.cssclass) + if form.cwtarget: + attrs.setdefault('target', form.cwtarget) + if not form.autocomplete: + attrs.setdefault('autocomplete', 'off') + return '
                ' % uilib.sgml_attributes(attrs) + + def close_form(self, form, values): + """seems dumb but important for consistency w/ close form, and necessary + for form renderers overriding open_form to use something else or more than + and + """ + out = u'
                ' + if form.cwtarget: + attrs = {'name': form.cwtarget, 'id': form.cwtarget, + 'width': '0px', 'height': '0px', + 'src': 'javascript: void(0);'} + out = (u'\n' % uilib.sgml_attributes(attrs)) + out + return out + + def render_fields(self, w, form, values): + fields = self._render_hidden_fields(w, form) + if fields: + self._render_fields(fields, w, form) + self.render_child_forms(w, form, values) + + def render_child_forms(self, w, form, values): + # render + for childform in getattr(form, 'forms', []): + self.render_fields(w, childform, values) + + def _render_hidden_fields(self, w, form): + fields = form.fields[:] + for field in form.fields: + if not field.is_visible(): + w(field.render(form, self)) + w(u'\n') + fields.remove(field) + return fields + + def _render_fields(self, fields, w, form): + byfieldset = {} + for field in fields: + byfieldset.setdefault(field.fieldset, []).append(field) + if form.fieldsets_in_order: + fieldsets = form.fieldsets_in_order + else: + fieldsets = byfieldset + for fieldset in list(fieldsets): + try: + fields = byfieldset.pop(fieldset) + except KeyError: + self.warning('no such fieldset: %s (%s)', fieldset, form) + continue + w(u'
                \n') + if fieldset: + w(u'%s' % self._cw.__(fieldset)) + w(u'\n' % self.table_class) + for field in fields: + w(u'\n' % (field.name, field.role)) + if self.display_label and field.label is not None: + w(u'\n' % self.render_label(form, field)) + w(u'\n') + w(field.render(form, self)) + w(u'\n') + if error: + self.render_error(w, error) + if self.display_help: + w(self.render_help(form, field)) + w(u'\n') + w(u'
                %s
                \n') + if byfieldset: + self.warning('unused fieldsets: %s', ', '.join(byfieldset)) + + def render_buttons(self, w, form): + if not form.form_buttons: + return + w(u'\n\n' % self.button_bar_class) + for button in form.form_buttons: + w(u'\n' % button.render(form)) + w(u'
                %s
                ') + + def render_error(self, w, err): + """return validation error for widget's field, if any""" + w(u'%s' % err) + + + +class BaseFormRenderer(FormRenderer): + """use form_renderer_id = 'base' if you want base FormRenderer layout even + when selected for an entity + """ + __regid__ = 'base' + + + +class HTableFormRenderer(FormRenderer): + """The 'htable' form renderer display fields horizontally in a table: + + +--------------+--------------+---------+ + | field1 label | field2 label | | + +--------------+--------------+---------+ + | field1 input | field2 input | buttons | + +--------------+--------------+---------+ + """ + __regid__ = 'htable' + + display_help = False + def _render_fields(self, fields, w, form): + w(u'') + w(u'') + for field in fields: + if self.display_label: + w(u'' % self.render_label(form, field)) + if self.display_help: + w(self.render_help(form, field)) + # empty slot for buttons + w(u'') + w(u'') + w(u'') + for field in fields: + error = form.field_error(field) + if error: + w(u'') + w(u'') + w(u'') + w(u'
                %s 
                ') + self.render_error(w, error) + else: + w(u'') + w(field.render(form, self)) + w(u'') + for button in form.form_buttons: + w(button.render(form)) + w(u'
                ') + + def render_buttons(self, w, form): + pass + + +class OneRowTableFormRenderer(FormRenderer): + """The 'htable' form renderer display fields horizontally in a table: + + +--------------+--------------+--------------+--------------+---------+ + | field1 label | field1 input | field2 label | field2 input | buttons | + +--------------+--------------+--------------+--------------+---------+ + """ + __regid__ = 'onerowtable' + + display_help = False + def _render_fields(self, fields, w, form): + w(u'') + w(u'') + for field in fields: + if self.display_label: + w(u'' % self.render_label(form, field)) + if self.display_help: + w(self.render_help(form, field)) + error = form.field_error(field) + if error: + w(u'') + w(u'') + w(u'') + w(u'
                %s') + self.render_error(w, error) + else: + w(u'') + w(field.render(form, self)) + w(u'') + for button in form.form_buttons: + w(button.render(form)) + w(u'
                ') + + def render_buttons(self, w, form): + pass + + +class EntityCompositeFormRenderer(FormRenderer): + """This is a specific renderer for the multiple entities edition form + ('muledit'). + + Each entity form will be displayed in row off a table, with a check box for + each entities to indicate which ones are edited. Those checkboxes should be + automatically updated when something is edited. + """ + __regid__ = 'composite' + + _main_display_fields = None + + def render_fields(self, w, form, values): + if form.parent_form is None: + w(u'') + # get fields from the first subform with something to display (we + # may have subforms with nothing editable that will simply be + # skipped later) + for subform in form.forms: + subfields = [field for field in subform.fields + if field.is_visible()] + if subfields: + break + if subfields: + # main form, display table headers + w(u'') + w(u'' % + tags.input(type='checkbox', + title=self._cw._('toggle check boxes'), + onclick="setCheckboxesState('eid', null, this.checked)")) + for field in subfields: + w(u'' % field_label(form, field)) + w(u'') + super(EntityCompositeFormRenderer, self).render_fields(w, form, values) + if form.parent_form is None: + w(u'
                %s%s
                ') + if self._main_display_fields: + super(EntityCompositeFormRenderer, self)._render_fields( + self._main_display_fields, w, form) + + def _render_fields(self, fields, w, form): + if form.parent_form is not None: + entity = form.edited_entity + values = form.form_previous_values + qeid = eid_param('eid', entity.eid) + cbsetstate = "setCheckboxesState('eid', %s, 'checked')" % \ + xml_escape(json_dumps(entity.eid)) + w(u'
          %s') + self.render_error(w, error) + else: + w(u'') + if isinstance(field.widget, (fwdgs.Select, fwdgs.CheckBox, + fwdgs.Radio)): + field.widget.attrs['onchange'] = cbsetstate + elif isinstance(field.widget, fwdgs.Input): + field.widget.attrs['onkeypress'] = cbsetstate + # XXX else + w(u'
          %s
          ' % field.render(form, self)) + w(u'
          + + + +
          + %s + + %s + %s +
          """ % tuple(button.render(form) for button in form.form_buttons)) + else: + super(EntityFormRenderer, self).render_buttons(w, form) + + +class EntityInlinedFormRenderer(EntityFormRenderer): + """This is a specific renderer for entity's form inlined into another + entity's form. + """ + __regid__ = 'inline' + fieldset_css_class = 'subentity' + + def render_title(self, w, form, values): + w(u'
          ') + w(u'%(title)s ' + '#%(counter)s ' % values) + if values['removejs']: + values['removemsg'] = self._cw._('remove-inlined-entity-form') + w(u'[%(removemsg)s]' + % values) + w(u'
          ') + + def render(self, w, form, values): + form.add_media() + self.open_form(w, form, values) + self.render_title(w, form, values) + # XXX that stinks + # cleanup values + for key in ('title', 'removejs', 'removemsg'): + values.pop(key, None) + self.render_fields(w, form, values) + self.close_form(w, form, values) + + def open_form(self, w, form, values): + try: + w(u'
          ' % values) + except KeyError: + w(u'
          ' % values) + else: + w(u'
          %s
          ' % ( + values['divid'], self._cw._('click on the box to cancel the deletion'))) + w(u'
          ') + + def close_form(self, w, form, values): + w(u'
          ') + + def render_fields(self, w, form, values): + w(u'
          ' % values) + fields = self._render_hidden_fields(w, form) + w(u'
          ') + w(u'
          ' % self.fieldset_css_class) + if fields: + self._render_fields(fields, w, form) + self.render_child_forms(w, form, values) + w(u'
          ') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/forms.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/forms.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,483 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +Base form classes +----------------- + +.. Note: + + Form is the glue that bind a context to a set of fields, and is rendered + using a form renderer. No display is actually done here, though you'll find + some attributes of form that are used to control the rendering process. + +Besides the automagic form we'll see later, there are roughly two main +form classes in |cubicweb|: + +.. autoclass:: cubicweb.web.views.forms.FieldsForm +.. autoclass:: cubicweb.web.views.forms.EntityFieldsForm + +As you have probably guessed, choosing between them is easy. Simply ask you the +question 'I am editing an entity or not?'. If the answer is yes, use +:class:`EntityFieldsForm`, else use :class:`FieldsForm`. + +Actually there exists a third form class: + +.. autoclass:: cubicweb.web.views.forms.CompositeForm + +but you'll use this one rarely. +""" + +__docformat__ = "restructuredtext en" + + +import time +import inspect + +from six import text_type + +from logilab.common import dictattr, tempattr +from logilab.common.decorators import iclassmethod, cached +from logilab.common.textutils import splitstrip + +from cubicweb import ValidationError, neg_role +from cubicweb.predicates import non_final_entity, match_kwargs, one_line_rset +from cubicweb.web import RequestError, ProcessFormError +from cubicweb.web import form +from cubicweb.web.views import uicfg +from cubicweb.web.formfields import guess_field + + +class FieldsForm(form.Form): + """This is the base class for fields based forms. 
+ + **Attributes** + + The following attributes may be either set on subclasses or given on + form selection to customize the generated form: + + :attr:`needs_js` + sequence of javascript files that should be added to handle this form + (through :meth:`~cubicweb.web.request.Request.add_js`) + + :attr:`needs_css` + sequence of css files that should be added to handle this form (through + :meth:`~cubicweb.web.request.Request.add_css`) + + :attr:`domid` + value for the "id" attribute of the tag + + :attr:`action` + value for the "action" attribute of the tag + + :attr:`onsubmit` + value for the "onsubmit" attribute of the tag + + :attr:`cssclass` + value for the "class" attribute of the tag + + :attr:`cssstyle` + value for the "style" attribute of the tag + + :attr:`cwtarget` + value for the "target" attribute of the tag + + :attr:`redirect_path` + relative to redirect to after submitting the form + + :attr:`copy_nav_params` + flag telling if navigation parameters should be copied back in hidden + inputs + + :attr:`form_buttons` + sequence of form control (:class:`~cubicweb.web.formwidgets.Button` + widgets instances) + + :attr:`form_renderer_id` + identifier of the form renderer to use to render the form + + :attr:`fieldsets_in_order` + sequence of fieldset names , to control order + + :attr:`autocomplete` + set to False to add 'autocomplete=off' in the form open tag + + **Generic methods** + + .. automethod:: cubicweb.web.form.Form.field_by_name(name, role=None) + .. automethod:: cubicweb.web.form.Form.fields_by_name(name, role=None) + + **Form construction methods** + + .. automethod:: cubicweb.web.form.Form.remove_field(field) + .. automethod:: cubicweb.web.form.Form.append_field(field) + .. automethod:: cubicweb.web.form.Form.insert_field_before(field, name, role=None) + .. automethod:: cubicweb.web.form.Form.insert_field_after(field, name, role=None) + .. automethod:: cubicweb.web.form.Form.add_hidden(name, value=None, **kwargs) + + **Form rendering methods** + + .. automethod:: cubicweb.web.views.forms.FieldsForm.render + + **Form posting methods** + + Once a form is posted, you can retrieve the form on the controller side and + use the following methods to ease processing. For "simple" forms, this + should looks like : + + .. sourcecode :: python + + form = self._cw.vreg['forms'].select('myformid', self._cw) + posted = form.process_posted() + # do something with the returned dictionary + + Notice that form related to entity edition should usually use the + `edit` controller which will handle all the logic for you. + + .. automethod:: cubicweb.web.views.forms.FieldsForm.process_posted + .. 
automethod:: cubicweb.web.views.forms.FieldsForm.iter_modified_fields + """ + __regid__ = 'base' + + + # attributes overrideable by subclasses or through __init__ + needs_js = ('cubicweb.ajax.js', 'cubicweb.edition.js',) + needs_css = ('cubicweb.form.css',) + action = None + cssclass = None + cssstyle = None + cwtarget = None + redirect_path = None + form_buttons = None + form_renderer_id = 'default' + fieldsets_in_order = None + autocomplete = True + + @property + def needs_multipart(self): + """true if the form needs enctype=multipart/form-data""" + return any(field.needs_multipart for field in self.fields) + + def _get_onsubmit(self): + try: + return self._onsubmit + except AttributeError: + return "return freezeFormButtons('%(domid)s');" % dictattr(self) + def _set_onsubmit(self, value): + self._onsubmit = value + onsubmit = property(_get_onsubmit, _set_onsubmit) + + def add_media(self): + """adds media (CSS & JS) required by this widget""" + if self.needs_js: + self._cw.add_js(self.needs_js) + if self.needs_css: + self._cw.add_css(self.needs_css) + + def render(self, formvalues=None, renderer=None, **kwargs): + """Render this form, using the `renderer` given as argument or the + default according to :attr:`form_renderer_id`. The rendered form is + returned as a unicode string. + + `formvalues` is an optional dictionary containing values that will be + considered as field's value. + + Extra keyword arguments will be given to renderer's :meth:`render` method. + """ + w = kwargs.pop('w', None) + self.build_context(formvalues) + if renderer is None: + renderer = self.default_renderer() + renderer.render(w, self, kwargs) + + def default_renderer(self): + return self._cw.vreg['formrenderers'].select( + self.form_renderer_id, self._cw, + rset=self.cw_rset, row=self.cw_row, col=self.cw_col or 0) + + formvalues = None + def build_context(self, formvalues=None): + """build form context values (the .context attribute which is a + dictionary with field instance as key associated to a dictionary + containing field 'name' (qualified), 'id', 'value' (for display, always + a string). + """ + if self.formvalues is not None: + return # already built + self.formvalues = formvalues or {} + # use a copy in case fields are modified while context is built (eg + # __linkto handling for instance) + for field in self.fields[:]: + for field in field.actual_fields(self): + field.form_init(self) + # store used field in an hidden input for later usage by a controller + fields = set() + eidfields = set() + for field in self.fields: + if field.eidparam: + eidfields.add(field.role_name()) + elif field.name not in self.control_fields: + fields.add(field.role_name()) + if fields: + self.add_hidden('_cw_fields', u','.join(fields)) + if eidfields: + self.add_hidden('_cw_entity_fields', u','.join(eidfields), + eidparam=True) + + _default_form_action_path = 'edit' + def form_action(self): + action = self.action + if action is None: + return self._cw.build_url(self._default_form_action_path) + return action + + # controller form processing methods ####################################### + + def iter_modified_fields(self, editedfields=None, entity=None): + """return a generator on field that has been modified by the posted + form. 
+ """ + if editedfields is None: + try: + editedfields = self._cw.form['_cw_fields'] + except KeyError: + raise RequestError(self._cw._('no edited fields specified')) + entityform = entity and len(inspect.getargspec(self.field_by_name)) == 4 # XXX + for editedfield in splitstrip(editedfields): + try: + name, role = editedfield.split('-') + except Exception: + name = editedfield + role = None + if entityform: + field = self.field_by_name(name, role, eschema=entity.e_schema) + else: + field = self.field_by_name(name, role) + if field.has_been_modified(self): + yield field + + def process_posted(self): + """use this method to process the content posted by a simple form. it + will return a dictionary with field names as key and typed value as + associated value. + """ + with tempattr(self, 'formvalues', {}): # init fields value cache + errors = [] + processed = {} + for field in self.iter_modified_fields(): + try: + for field, value in field.process_posted(self): + processed[field.role_name()] = value + except ProcessFormError as exc: + errors.append((field, exc)) + if errors: + errors = dict((f.role_name(), text_type(ex)) for f, ex in errors) + raise ValidationError(None, errors) + return processed + + +class EntityFieldsForm(FieldsForm): + """This class is designed for forms used to edit some entities. It should + handle for you all the underlying stuff necessary to properly work with the + generic :class:`~cubicweb.web.views.editcontroller.EditController`. + """ + + __regid__ = 'base' + __select__ = (match_kwargs('entity') + | (one_line_rset() & non_final_entity())) + domid = 'entityForm' + uicfg_aff = uicfg.autoform_field + uicfg_affk = uicfg.autoform_field_kwargs + + @iclassmethod + def field_by_name(cls_or_self, name, role=None, eschema=None): + """return field with the given name and role. If field is not explicitly + defined for the form but `eclass` is specified, guess_field will be + called. + """ + try: + return super(EntityFieldsForm, cls_or_self).field_by_name(name, role) + except form.FieldNotFound: + if eschema is None or role is None or not name in eschema.schema: + raise + rschema = eschema.schema.rschema(name) + # XXX use a sample target type. Document this. 
+ tschemas = rschema.targets(eschema, role) + fieldcls = cls_or_self.uicfg_aff.etype_get( + eschema, rschema, role, tschemas[0]) + kwargs = cls_or_self.uicfg_affk.etype_get( + eschema, rschema, role, tschemas[0]) + if kwargs is None: + kwargs = {} + if fieldcls: + if not isinstance(fieldcls, type): + return fieldcls # already and instance + return fieldcls(name=name, role=role, eidparam=True, **kwargs) + if isinstance(cls_or_self, type): + req = None + else: + req = cls_or_self._cw + field = guess_field(eschema, rschema, role, req=req, eidparam=True, **kwargs) + if field is None: + raise + return field + + def __init__(self, _cw, rset=None, row=None, col=None, **kwargs): + try: + self.edited_entity = kwargs.pop('entity') + except KeyError: + self.edited_entity = rset.complete_entity(row or 0, col or 0) + msg = kwargs.pop('submitmsg', None) + super(EntityFieldsForm, self).__init__(_cw, rset, row, col, **kwargs) + self.uicfg_aff = self._cw.vreg['uicfg'].select( + 'autoform_field', self._cw, entity=self.edited_entity) + self.uicfg_affk = self._cw.vreg['uicfg'].select( + 'autoform_field_kwargs', self._cw, entity=self.edited_entity) + self.add_hidden('__type', self.edited_entity.cw_etype, eidparam=True) + + self.add_hidden('eid', self.edited_entity.eid) + self.add_generation_time() + # mainform default to true in parent, hence default to True + if kwargs.get('mainform', True) or kwargs.get('mainentity', False): + self.add_hidden(u'__maineid', self.edited_entity.eid) + # If we need to directly attach the new object to another one + if '__linkto' in self._cw.form: + if msg: + msg = '%s %s' % (msg, self._cw._('and linked')) + else: + msg = self._cw._('entity linked') + if msg: + msgid = self._cw.set_redirect_message(msg) + self.add_hidden('_cwmsgid', msgid) + + def add_generation_time(self): + # use %f to prevent (unlikely) display in exponential format + self.add_hidden('__form_generation_time', '%.6f' % time.time(), + eidparam=True) + + def add_linkto_hidden(self): + """add the __linkto hidden field used to directly attach the new object + to an existing other one when the relation between those two is not + already present in the form. + + Warning: this method must be called only when all form fields are setup + """ + for (rtype, role), eids in self.linked_to.items(): + # if the relation is already setup by a form field, do not add it + # in a __linkto hidden to avoid setting it twice in the controller + try: + self.field_by_name(rtype, role) + except form.FieldNotFound: + for eid in eids: + self.add_hidden('__linkto', '%s:%s:%s' % (rtype, eid, role)) + + def render(self, *args, **kwargs): + self.add_linkto_hidden() + return super(EntityFieldsForm, self).render(*args, **kwargs) + + @property + @cached + def linked_to(self): + linked_to = {} + # case where this is an embeded creation form + try: + eid = int(self.cw_extra_kwargs['peid']) + except (KeyError, ValueError): + # When parent is being created, its eid is not numeric (e.g. 'A') + # hence ValueError. 
+ pass + else: + ltrtype = self.cw_extra_kwargs['rtype'] + ltrole = neg_role(self.cw_extra_kwargs['role']) + linked_to[(ltrtype, ltrole)] = [eid] + # now consider __linkto if the current form is the main form + try: + self.field_by_name('__maineid') + except form.FieldNotFound: + return linked_to + for linkto in self._cw.list_form_param('__linkto'): + ltrtype, eid, ltrole = linkto.split(':') + linked_to.setdefault((ltrtype, ltrole), []).append(int(eid)) + return linked_to + + def session_key(self): + """return the key that may be used to store / retreive data about a + previous post which failed because of a validation error + """ + if self.force_session_key is not None: + return self.force_session_key + # XXX if this is a json request, suppose we should redirect to the + # entity primary view + if self._cw.ajax_request and self.edited_entity.has_eid(): + return '%s#%s' % (self.edited_entity.absolute_url(), self.domid) + # XXX we should not consider some url parameters that may lead to + # different url after a validation error + return '%s#%s' % (self._cw.url(), self.domid) + + def default_renderer(self): + return self._cw.vreg['formrenderers'].select( + self.form_renderer_id, self._cw, rset=self.cw_rset, row=self.cw_row, + col=self.cw_col, entity=self.edited_entity) + + def should_display_add_new_relation_link(self, rschema, existant, card): + return False + + # controller side method (eg POST reception handling) + + def actual_eid(self, eid): + # should be either an int (existant entity) or a variable (to be + # created entity) + assert eid or eid == 0, repr(eid) # 0 is a valid eid + try: + return int(eid) + except ValueError: + try: + return self._cw.data['eidmap'][eid] + except KeyError: + self._cw.data['eidmap'][eid] = None + return None + + def editable_relations(self): + return () + + +class CompositeFormMixIn(object): + __regid__ = 'composite' + form_renderer_id = __regid__ + + def __init__(self, *args, **kwargs): + super(CompositeFormMixIn, self).__init__(*args, **kwargs) + self.forms = [] + + def add_subform(self, subform): + """mark given form as a subform and append it""" + subform.parent_form = self + self.forms.append(subform) + + def build_context(self, formvalues=None): + super(CompositeFormMixIn, self).build_context(formvalues) + for form in self.forms: + form.build_context(formvalues) + + +class CompositeForm(CompositeFormMixIn, FieldsForm): + """Form composed of sub-forms. Typical usage is edition of multiple entities + at once. + """ + +class CompositeEntityForm(CompositeFormMixIn, EntityFieldsForm): + pass # XXX why is this class necessary? diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/ibreadcrumbs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/ibreadcrumbs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,203 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""breadcrumbs components definition for CubicWeb web client""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from six import text_type + +from logilab.mtconverter import xml_escape + +from cubicweb import tags, uilib +from cubicweb.entity import Entity +from cubicweb.predicates import (is_instance, one_line_rset, adaptable, + one_etype_rset, multi_lines_rset, any_rset, + match_form_params) +from cubicweb.view import EntityView, EntityAdapter +from cubicweb.web.views import basecomponents +# don't use AnyEntity since this may cause bug with isinstance() due to reloading + + + +class IBreadCrumbsAdapter(EntityAdapter): + """adapters for entities which can be"located" on some path to display in + the web ui + """ + __regid__ = 'IBreadCrumbs' + __select__ = is_instance('Any', accept_none=False) + + def parent_entity(self): + itree = self.entity.cw_adapt_to('ITree') + if itree is not None: + return itree.parent() + return None + + def breadcrumbs(self, view=None, recurs=None): + """return a list containing some: + + * tuple (url, label) + * entity + * simple label string + + defining path from a root to the current view + + the main view is given as argument so breadcrumbs may vary according to + displayed view (may be None). When recursing on a parent entity, the + `recurs` argument should be a set of already traversed nodes (infinite + loop safety belt). + """ + parent = self.parent_entity() + if parent is not None: + if recurs: + _recurs = recurs + else: + _recurs = set() + if _recurs and parent.eid in _recurs: + self.error('cycle in breadcrumbs for entity %s' % self.entity) + return [] + _recurs.add(parent.eid) + adapter = parent.cw_adapt_to('IBreadCrumbs') + path = adapter.breadcrumbs(view, _recurs) + [self.entity] + else: + path = [self.entity] + if not recurs: + if view is None: + if 'vtitle' in self._cw.form: + # embeding for instance + path.append( self._cw.form['vtitle'] ) + elif view.__regid__ != 'primary' and hasattr(view, 'title'): + path.append( self._cw._(view.title) ) + return path + + +class BreadCrumbEntityVComponent(basecomponents.HeaderComponent): + __regid__ = 'breadcrumbs' + __select__ = (basecomponents.HeaderComponent.__select__ + & one_line_rset() & adaptable('IBreadCrumbs')) + order = basecomponents.ApplicationName.order + 1 + context = basecomponents.ApplicationName.context + separator = u' > ' + link_template = u'%s' + first_separator = True + + # XXX support kwargs for compat with other components which gets the view as + # argument + def render(self, w, **kwargs): + try: + entity = self.cw_extra_kwargs['entity'] + except KeyError: + entity = self.cw_rset.get_entity(0, 0) + adapter = entity.cw_adapt_to('IBreadCrumbs') + view = self.cw_extra_kwargs.get('view') + path = adapter.breadcrumbs(view) + if path: + self.open_breadcrumbs(w) + self.render_breadcrumbs(w, entity, path) + self.close_breadcrumbs(w) + + def open_breadcrumbs(self, w): + w(u'') + if self.first_separator: + w(self.separator) + + def close_breadcrumbs(self, w): + w(u'') + + def render_breadcrumbs(self, w, contextentity, path): + root = path.pop(0) + if isinstance(root, Entity): + w(self.link_template % (self._cw.build_url(root.__regid__), + root.dc_type('plural'))) + w(self.separator) + self.wpath_part(w, root, contextentity, not path) + for i, parent in enumerate(path): + w(self.separator) + w(u"\n") + self.wpath_part(w, parent, 
contextentity, i == len(path) - 1) + + def wpath_part(self, w, part, contextentity, last=False): # XXX deprecates last argument? + if isinstance(part, Entity): + w(part.view('breadcrumbs')) + elif isinstance(part, tuple): + url, title = part + textsize = self._cw.property_value('navigation.short-line-size') + w(self.link_template % ( + xml_escape(url), xml_escape(uilib.cut(title, textsize)))) + else: + textsize = self._cw.property_value('navigation.short-line-size') + w(xml_escape(uilib.cut(text_type(part), textsize))) + + +class BreadCrumbETypeVComponent(BreadCrumbEntityVComponent): + __select__ = (basecomponents.HeaderComponent.__select__ + & multi_lines_rset() & one_etype_rset() + & adaptable('IBreadCrumbs')) + + def render_breadcrumbs(self, w, contextentity, path): + # XXX hack: only display etype name or first non entity path part + root = path.pop(0) + if isinstance(root, Entity): + w(u'%s' % (self._cw.build_url(root.__regid__), + root.dc_type('plural'))) + else: + self.wpath_part(w, root, contextentity, not path) + + +class BreadCrumbAnyRSetVComponent(BreadCrumbEntityVComponent): + __select__ = basecomponents.HeaderComponent.__select__ & any_rset() + + # XXX support kwargs for compat with other components which gets the view as + # argument + def render(self, w, **kwargs): + self.open_breadcrumbs(w) + w(self._cw._('search')) + self.close_breadcrumbs(w) + + +class BreadCrumbLinkToVComponent(BreadCrumbEntityVComponent): + __select__ = basecomponents.HeaderComponent.__select__ & match_form_params('__linkto') + + def render(self, w, **kwargs): + eid = self._cw.list_form_param('__linkto')[0].split(':')[1] + entity = self._cw.entity_from_eid(eid) + ecmp = self._cw.vreg[self.__registry__].select( + self.__regid__, self._cw, entity=entity, **kwargs) + ecmp.render(w, **kwargs) + + +class BreadCrumbView(EntityView): + __regid__ = 'breadcrumbs' + + def cell_call(self, row, col, **kwargs): + entity = self.cw_rset.get_entity(row, col) + desc = uilib.cut(entity.dc_description(), 50) + # NOTE remember camember: tags.a autoescapes + self.w(tags.a(entity.view('breadcrumbtext'), + href=entity.absolute_url(), title=desc)) + + +class BreadCrumbTextView(EntityView): + __regid__ = 'breadcrumbtext' + + def cell_call(self, row, col, **kwargs): + entity = self.cw_rset.get_entity(row, col) + textsize = self._cw.property_value('navigation.short-line-size') + self.w(uilib.cut(entity.dc_title(), textsize)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/idownloadable.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/idownloadable.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,203 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
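The ``IBreadCrumbs`` adapter defined above is the extension point for the breadcrumbs bar: a cube only has to register a more specific adapter to change what gets displayed. A minimal sketch, assuming a hypothetical ``Comment`` entity type whose parent is reached through a ``reply_of`` relation (both names are illustrative, not part of this changeset):

.. sourcecode:: python

    from cubicweb.predicates import is_instance
    from cubicweb.web.views.ibreadcrumbs import IBreadCrumbsAdapter

    class CommentIBreadCrumbsAdapter(IBreadCrumbsAdapter):
        """make a comment's breadcrumbs go through the commented entity"""
        __select__ = is_instance('Comment')

        def parent_entity(self):
            # follow the application relation instead of relying on ITree
            parents = self.entity.reply_of
            return parents[0] if parents else None

Since ``breadcrumbs()`` recurses on ``parent_entity()``, overriding that single method is usually enough to obtain a sensible path.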
+""" +Specific views for entities adapting to IDownloadable +===================================================== +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves import range + +from logilab.mtconverter import BINARY_ENCODINGS, TransformError, xml_escape +from logilab.common.deprecation import class_renamed, deprecated + +from cubicweb import tags +from cubicweb.view import EntityView +from cubicweb.predicates import (one_line_rset, is_instance, match_context_prop, + adaptable, has_mimetype) +from cubicweb.mttransforms import ENGINE +from cubicweb.web import component, httpcache +from cubicweb.web.views import primary, baseviews + + +class DownloadBox(component.EntityCtxComponent): + """add download box""" + __regid__ = 'download_box' # no download box for images + __select__ = (component.EntityCtxComponent.__select__ & + adaptable('IDownloadable') & ~has_mimetype('image/')) + + order = 10 + title = _('download') + + def init_rendering(self): + self.items = [self.entity] + + def render_body(self, w): + for item in self.items: + idownloadable = item.cw_adapt_to('IDownloadable') + w(u'%s %s' + % (xml_escape(idownloadable.download_url()), + self._cw.uiprops['DOWNLOAD_ICON'], + self._cw._('download icon'), + xml_escape(idownloadable.download_file_name()))) + + +class DownloadView(EntityView): + """download view + + this view is replacing the deprecated 'download' controller and allow + downloading of entities providing the necessary interface + """ + __regid__ = 'download' + __select__ = one_line_rset() & adaptable('IDownloadable') + + templatable = False + content_type = 'application/octet-stream' + binary = True + http_cache_manager = httpcache.EntityHTTPCacheManager + add_to_breadcrumbs = False + + def set_request_content_type(self): + """overriden to set the correct filetype and filename""" + entity = self.cw_rset.complete_entity(self.cw_row or 0, self.cw_col or 0) + adapter = entity.cw_adapt_to('IDownloadable') + encoding = adapter.download_encoding() + if encoding in BINARY_ENCODINGS: + contenttype = 'application/%s' % encoding + encoding = None + else: + contenttype = adapter.download_content_type() + self._cw.set_content_type(contenttype or self.content_type, + filename=adapter.download_file_name(), + encoding=encoding, + disposition='attachment') + + def call(self): + entity = self.cw_rset.complete_entity(self.cw_row or 0, self.cw_col or 0) + adapter = entity.cw_adapt_to('IDownloadable') + self.w(adapter.download_data()) + + def last_modified(self): + return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).modification_date + + +class DownloadLinkView(EntityView): + """view displaying a link to download the file""" + __regid__ = 'downloadlink' + __select__ = adaptable('IDownloadable') + title = None # should not be listed in possible views + + + def cell_call(self, row, col, title=None, **kwargs): + entity = self.cw_rset.get_entity(row, col) + url = xml_escape(entity.cw_adapt_to('IDownloadable').download_url()) + self.w(u'%s' % (url, xml_escape(title or entity.dc_title()))) + + +class IDownloadablePrimaryView(primary.PrimaryView): + __select__ = adaptable('IDownloadable') + + def render_entity_attributes(self, entity): + self.w(u'
          ') + adapter = entity.cw_adapt_to('IDownloadable') + contenttype = adapter.download_content_type() + if contenttype.startswith('image/'): + self._cw.add_js('cubicweb.image.js') + self.wview('image', entity.cw_rset, row=entity.cw_row, col=entity.cw_col, + link=True, klass='contentimage') + super(IDownloadablePrimaryView, self).render_entity_attributes(entity) + elif contenttype.endswith('html'): + self.wview('downloadlink', entity.cw_rset, title=self._cw._('download'), row=entity.cw_row) + self.wview('ehtml', entity.cw_rset, row=entity.cw_row, col=entity.cw_col, + height='600px', width='100%') + else: + super(IDownloadablePrimaryView, self).render_entity_attributes(entity) + self.wview('downloadlink', entity.cw_rset, title=self._cw._('download'), row=entity.cw_row) + self.render_data(entity, contenttype, 'text/html') + self.w(u'
          ') + + def render_data(self, entity, sourcemt, targetmt): + adapter = entity.cw_adapt_to('IDownloadable') + if ENGINE.find_path(sourcemt, targetmt): + try: + self.w(entity._cw_mtc_transform(adapter.download_data(), sourcemt, + targetmt, adapter.download_encoding())) + except Exception as ex: + self.exception('while rendering data for %s', entity) + msg = self._cw._("can't display data, unexpected error: %s") \ + % xml_escape(unicode(ex)) + self.w('
          %s
          ' % msg) + return True + return False + + +class IDownloadableOneLineView(baseviews.OneLineView): + __select__ = adaptable('IDownloadable') + + def cell_call(self, row, col, title=None, **kwargs): + """the oneline view is a link to download the file""" + entity = self.cw_rset.get_entity(row, col) + url = xml_escape(entity.absolute_url()) + adapter = entity.cw_adapt_to('IDownloadable') + name = xml_escape(title or entity.dc_title()) + durl = xml_escape(adapter.download_url()) + self.w(u'%s [%s]' % + (url, name, durl, self._cw._('download'))) + + +class AbstractEmbeddedView(EntityView): + __abstract__ = True + + _embedding_tag = None + + def call(self, **kwargs): + rset = self.cw_rset + for i in range(len(rset)): + self.w(u'
          ') + self.wview(self.__regid__, rset, row=i, col=0, **kwargs) + self.w(u'
          ') + + def cell_call(self, row, col, link=False, **kwargs): + entity = self.cw_rset.get_entity(row, col) + adapter = entity.cw_adapt_to('IDownloadable') + tag = self._embedding_tag(src=adapter.download_url(), # pylint: disable=E1102 + alt=(self._cw._('download %s') % adapter.download_file_name()), + **kwargs) + if link: + self.w(u'%s' % (adapter.download_url(), tag)) + else: + self.w(tag) + + +class ImageView(AbstractEmbeddedView): + """image embedded view""" + __regid__ = 'image' + __select__ = has_mimetype('image/') + + title = _('image') + _embedding_tag = tags.img + + +class EHTMLView(AbstractEmbeddedView): + """html embedded view""" + __regid__ = 'ehtml' + __select__ = has_mimetype('text/html') + + title = _('embedded html') + _embedding_tag = tags.iframe diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/igeocodable.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/igeocodable.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,37 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Specific views for entities implementing IGeocodable""" + +try: + from cubes.geocoding.views import (IGeocodableAdapter, + GeocodingJsonView, + GoogleMapBubbleView, + GoogleMapsView, + GoogeMapsLegend) + + from logilab.common.deprecation import class_moved + + msg = '[3.17] cubicweb.web.views.igeocodable moved to cubes.geocoding.views' + IGeocodableAdapter = class_moved(IGeocodableAdapter, message=msg) + GeocodingJsonView = class_moved(GeocodingJsonView, message=msg) + GoogleMapBubbleView = class_moved(GoogleMapBubbleView, message=msg) + GoogleMapsView = class_moved(GoogleMapsView, message=msg) + GoogeMapsLegend = class_moved(GoogeMapsLegend, message=msg) +except ImportError: + from cubicweb.web import LOGGER + LOGGER.warning('[3.17] igeocoding extracted to cube geocoding that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/isioc.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/isioc.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,35 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Specific views for SIOC (Semantically-Interlinked Online Communities) + +http://sioc-project.org +""" + +from logilab.common.deprecation import class_moved + +try: + from cubes.sioc.views import * + + ISIOCItemAdapter = class_moved(ISIOCItemAdapter, message='[3.17] ISIOCItemAdapter moved to cubes.isioc.views') + ISIOCContainerAdapter = class_moved(ISIOCContainerAdapter, message='[3.17] ISIOCContainerAdapter moved to cubes.isioc.views') + SIOCView = class_moved(SIOCView, message='[3.17] SIOCView moved to cubes.is.view') + SIOCContainerView = class_moved(SIOCContainerView, message='[3.17] SIOCContainerView moved to cubes.is.view') + SIOCItemView = class_moved(SIOCItemView, message='[3.17] SIOCItemView moved to cubes.is.view') +except ImportError: + from cubicweb.web import LOGGER + LOGGER.warning('[3.17] isioc extracted to cube sioc that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/json.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/json.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,150 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""json export views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from cubicweb.uilib import rest_traceback + +from cubicweb.utils import json_dumps +from cubicweb.predicates import ExpectedValuePredicate, any_rset, empty_rset +from cubicweb.view import EntityView, AnyRsetView +from cubicweb.web.application import anonymized_request +from cubicweb.web.views import basecontrollers, management + + +class JsonpController(basecontrollers.ViewController): + """The jsonp controller is the same as a ViewController but : + + - anonymize request (avoid CSRF attacks) + - if ``vid`` parameter is passed, make sure it's sensible (i.e. either + "jsonexport" or "ejsonexport") + - if ``callback`` request parameter is passed, it's used as json padding + + + Response's content-type will either be ``application/javascript`` or + ``application/json`` depending on ``callback`` parameter presence or not. 
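For instance, assuming an instance served at ``http://localhost:8080`` (the base URL, the RQL query and the callback name below are illustrative), a client could fetch a padded JSON export along these lines:

.. sourcecode:: python

    from six.moves.urllib.parse import urlencode
    from six.moves.urllib.request import urlopen

    query = urlencode({'rql': 'Any L WHERE U is CWUser, U login L',
                       'vid': 'jsonexport',
                       'callback': 'cb'})
    resp = urlopen('http://localhost:8080/jsonp?%s' % query)
    print(resp.headers.get('Content-Type'))  # application/javascript
    print(resp.read())                        # e.g. b'cb([["admin"], ["anon"]])'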
+ """ + __regid__ = 'jsonp' + + def publish(self, rset=None): + if 'vid' in self._cw.form: + vid = self._cw.form['vid'] + if vid not in ('jsonexport', 'ejsonexport'): + self.warning("vid %s can't be used with jsonp controller, " + "falling back to jsonexport", vid) + self._cw.form['vid'] = 'jsonexport' + else: # if no vid is specified, use jsonexport + self._cw.form['vid'] = 'jsonexport' + if self._cw.vreg.config['anonymize-jsonp-queries']: + with anonymized_request(self._cw): + return self._get_json_data(rset) + else: + return self._get_json_data(rset) + + def _get_json_data(self, rset): + json_data = super(JsonpController, self).publish(rset) + if 'callback' in self._cw.form: # jsonp + json_padding = self._cw.form['callback'].encode('ascii') + # use ``application/javascript`` if ``callback`` parameter is + # provided, keep ``application/json`` otherwise + self._cw.set_content_type('application/javascript') + json_data = json_padding + b'(' + json_data + b')' + return json_data + + +class JsonMixIn(object): + """mixin class for json views + + Handles the following optional request parameters: + + - ``_indent`` : must be an integer. If found, it is used to pretty print + json output + """ + templatable = False + content_type = 'application/json' + binary = True + + def wdata(self, data): + if '_indent' in self._cw.form: + indent = int(self._cw.form['_indent']) + else: + indent = None + # python's json.dumps escapes non-ascii characters + self.w(json_dumps(data, indent=indent).encode('ascii')) + + +class JsonRsetView(JsonMixIn, AnyRsetView): + """dumps raw result set in JSON format""" + __regid__ = 'jsonexport' + __select__ = any_rset() # means rset might be empty or have any shape + title = _('json-export-view') + + def call(self): + # XXX mimic w3c recommandations to serialize SPARQL results in json? + # http://www.w3.org/TR/rdf-sparql-json-res/ + self.wdata(self.cw_rset.rows) + + +class JsonEntityView(JsonMixIn, EntityView): + """dumps rset entities in JSON + + The following additional metadata is added to each row : + + - ``cw_etype`` : entity type + - ``cw_source`` : source url + """ + __regid__ = 'ejsonexport' + __select__ = EntityView.__select__ | empty_rset() + title = _('json-entities-export-view') + + def call(self): + entities = [] + for entity in self.cw_rset.entities(): + serializer = entity.cw_adapt_to('ISerializable') + entities.append(serializer.serialize()) + self.wdata(entities) + + +class _requested_vid(ExpectedValuePredicate): + """predicate that checks vid parameter value + + It differs from ``match_view`` in that it doesn't expect a ``view`` + parameter to be given to ``select`` but will rather check + ``req.form['vid']`` to match expected vid. + """ + def __call__(self, cls, req, rset=None, **kwargs): + return req.form.get('vid') in self.expected + + +class JsonErrorView(JsonMixIn, management.ErrorView): + """custom error view selected when client asks for a json view + + The returned json object will contain err / traceback informations. + """ + __select__ = (management.ErrorView.__select__ & + _requested_vid('jsonexport', 'ejsonexport')) + + def call(self): + errmsg, exclass, excinfo = self._excinfo() + self.wdata({ + 'errmsg': errmsg, + 'exclass': exclass, + 'traceback': rest_traceback(excinfo, errmsg), + }) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/magicsearch.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/magicsearch.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,660 @@ +# copyright 2003-2012 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""a query processor to handle quick search shortcuts for cubicweb +""" + +__docformat__ = "restructuredtext en" + +import re +from logging import getLogger + +from six import text_type + +from yams.interfaces import IVocabularyConstraint + +from rql import RQLSyntaxError, BadRQLQuery, parse +from rql.utils import rqlvar_maker +from rql.nodes import Relation + +from cubicweb import Unauthorized +from cubicweb.view import Component +from cubicweb.web.views.ajaxcontroller import ajaxfunc + +LOGGER = getLogger('cubicweb.magicsearch') + +def _get_approriate_translation(translations_found, eschema): + """return the first (should be the only one) possible translation according + to the given entity type + """ + # get the list of all attributes / relations for this kind of entity + existing_relations = set(eschema.subject_relations()) + consistent_translations = translations_found & existing_relations + if len(consistent_translations) == 0: + return None + return consistent_translations.pop() + + +def translate_rql_tree(rqlst, translations, schema): + """Try to translate each relation in the RQL syntax tree + + :type rqlst: `rql.stmts.Statement` + :param rqlst: the RQL syntax tree + + :type translations: dict + :param translations: the reverted l10n dict + + :type schema: `cubicweb.schema.Schema` + :param schema: the instance's schema + """ + # var_types is used as a map : var_name / var_type + vartypes = {} + # ambiguous_nodes is used as a map : relation_node / (var_name, available_translations) + ambiguous_nodes = {} + # For each relation node, check if it's a localized relation name + # If it's a localized name, then use the original relation name, else + # keep the existing relation name + for relation in rqlst.get_nodes(Relation): + rtype = relation.r_type + lhs, rhs = relation.get_variable_parts() + if rtype == 'is': + try: + etype = translations[rhs.value] + rhs.value = etype + except KeyError: + # If no translation found, leave the entity type as is + etype = rhs.value + # Memorize variable's type + vartypes[lhs.name] = etype + else: + try: + translation_set = translations[rtype] + except KeyError: + pass # If no translation found, leave the relation type as is + else: + # Only one possible translation, no ambiguity + if len(translation_set) == 1: + relation.r_type = next(iter(translations[rtype])) + # More than 1 possible translation => resolve it later + else: + ambiguous_nodes[relation] = (lhs.name, translation_set) + if ambiguous_nodes: + resolve_ambiguities(vartypes, ambiguous_nodes, schema) + + +def resolve_ambiguities(var_types, ambiguous_nodes, schema): + """Tries to resolve remaining ambiguities for translation + /!\ An ambiguity is when two different string can be localized with + the same string + A simple example: + - 'name' 
in a company context will be localized as 'nom' in French + - but ... 'surname' will also be localized as 'nom' + + :type var_types: dict + :param var_types: a map : var_name / var_type + + :type ambiguous_nodes: dict + :param ambiguous_nodes: a map : relation_node / (var_name, available_translations) + + :type schema: `cubicweb.schema.Schema` + :param schema: the instance's schema + """ + # Now, try to resolve ambiguous translations + for relation, (var_name, translations_found) in ambiguous_nodes.items(): + try: + vartype = var_types[var_name] + except KeyError: + continue + # Get schema for this entity type + eschema = schema.eschema(vartype) + rtype = _get_approriate_translation(translations_found, eschema) + if rtype is None: + continue + relation.r_type = rtype + + + +QUOTED_SRE = re.compile(r'(.*?)(["\'])(.+?)\2') + +TRANSLATION_MAPS = {} +def trmap(config, schema, lang): + try: + return TRANSLATION_MAPS[lang] + except KeyError: + assert lang in config.translations, '%s %s' % (lang, config.translations) + tr, ctxtr = config.translations[lang] + langmap = {} + for etype in schema.entities(): + etype = str(etype) + langmap[tr(etype).capitalize()] = etype + langmap[etype.capitalize()] = etype + for rtype in schema.relations(): + rtype = str(rtype) + langmap.setdefault(tr(rtype).lower(), set()).add(rtype) + langmap.setdefault(rtype, set()).add(rtype) + TRANSLATION_MAPS[lang] = langmap + return langmap + + +class BaseQueryProcessor(Component): + __abstract__ = True + __regid__ = 'magicsearch_processor' + # set something if you want explicit component search facility for the + # component + name = None + + def process_query(self, uquery): + args = self.preprocess_query(uquery) + try: + return self._cw.execute(*args) + finally: + # rollback necessary to avoid leaving the connection in a bad state + self._cw.cnx.rollback() + + def preprocess_query(self, uquery): + raise NotImplementedError() + + + + +class DoNotPreprocess(BaseQueryProcessor): + """this one returns the raw query and should be placed in first position + of the chain + """ + name = 'rql' + priority = 0 + def preprocess_query(self, uquery): + return uquery, + + +class QueryTranslator(BaseQueryProcessor): + """ parses through rql and translates into schema language entity names + and attributes + """ + priority = 2 + def preprocess_query(self, uquery): + rqlst = parse(uquery, print_errors=False) + schema = self._cw.vreg.schema + # rql syntax tree will be modified in place if necessary + translate_rql_tree(rqlst, trmap(self._cw.vreg.config, schema, self._cw.lang), + schema) + return rqlst.as_string(), + + +class QSPreProcessor(BaseQueryProcessor): + """Quick search preprocessor + + preprocessing query in shortcut form to their RQL form + """ + priority = 4 + + def preprocess_query(self, uquery): + """try to get rql from a unicode query string""" + args = None + try: + # Process as if there was a quoted part + args = self._quoted_words_query(uquery) + ## No quoted part + except BadRQLQuery: + words = uquery.split() + if len(words) == 1: + args = self._one_word_query(*words) + elif len(words) == 2: + args = self._two_words_query(*words) + elif len(words) == 3: + args = self._three_words_query(*words) + else: + raise + return args + + def _get_entity_type(self, word): + """check if the given word is matching an entity type, return it if + it's the case or raise BadRQLQuery if not + """ + etype = word.capitalize() + try: + return trmap(self._cw.vreg.config, self._cw.vreg.schema, self._cw.lang)[etype] + except KeyError: + raise 
BadRQLQuery('%s is not a valid entity name' % etype) + + def _get_attribute_name(self, word, eschema): + """check if the given word is matching an attribute of the given entity type, + return it normalized if found or return it untransformed else + """ + """Returns the attributes's name as stored in the DB""" + # Need to convert from unicode to string (could be whatever) + rtype = word.lower() + # Find the entity name as stored in the DB + translations = trmap(self._cw.vreg.config, self._cw.vreg.schema, self._cw.lang) + try: + translations = translations[rtype] + except KeyError: + raise BadRQLQuery('%s is not a valid attribute for %s entity type' + % (word, eschema)) + rtype = _get_approriate_translation(translations, eschema) + if rtype is None: + raise BadRQLQuery('%s is not a valid attribute for %s entity type' + % (word, eschema)) + return rtype + + def _one_word_query(self, word): + """Specific process for one word query (case (1) of preprocess_rql) + """ + # if this is an integer, then directly go to eid + try: + eid = int(word) + return 'Any X WHERE X eid %(x)s', {'x': eid}, 'x' + except ValueError: + etype = self._get_entity_type(word) + return '%s %s' % (etype, etype[0]), + + def _complete_rql(self, searchstr, etype, rtype=None, var=None, searchattr=None): + searchop = '' + if '%' in searchstr: + if rtype: + possible_etypes = self._cw.vreg.schema.rschema(rtype).objects(etype) + else: + possible_etypes = [self._cw.vreg.schema.eschema(etype)] + if searchattr or len(possible_etypes) == 1: + searchattr = searchattr or possible_etypes[0].main_attribute() + searchop = 'LIKE ' + searchattr = searchattr or 'has_text' + if var is None: + var = etype[0] + return '%s %s %s%%(text)s' % (var, searchattr, searchop) + + def _two_words_query(self, word1, word2): + """Specific process for two words query (case (2) of preprocess_rql) + """ + etype = self._get_entity_type(word1) + # this is a valid RQL query : ("Person X", or "Person TMP1") + if len(word2) == 1 and word2.isupper(): + return '%s %s' % (etype, word2), + # else, suppose it's a shortcut like : Person Smith + restriction = self._complete_rql(word2, etype) + if ' has_text ' in restriction: + rql = '%s %s ORDERBY FTIRANK(%s) DESC WHERE %s' % ( + etype, etype[0], etype[0], restriction) + else: + rql = '%s %s WHERE %s' % ( + etype, etype[0], restriction) + return rql, {'text': word2} + + def _three_words_query(self, word1, word2, word3): + """Specific process for three words query (case (3) of preprocess_rql) + """ + etype = self._get_entity_type(word1) + eschema = self._cw.vreg.schema.eschema(etype) + rtype = self._get_attribute_name(word2, eschema) + # expand shortcut if rtype is a non final relation + if not self._cw.vreg.schema.rschema(rtype).final: + return self._expand_shortcut(etype, rtype, word3) + if '%' in word3: + searchop = 'LIKE ' + else: + searchop = '' + rql = '%s %s WHERE %s' % (etype, etype[0], + self._complete_rql(word3, etype, searchattr=rtype)) + return rql, {'text': word3} + + def _expand_shortcut(self, etype, rtype, searchstr): + """Expands shortcut queries on a non final relation to use has_text or + the main attribute (according to possible entity type) if '%' is used in the + search word + + Transforms : 'person worksat IBM' into + 'Personne P WHERE P worksAt C, C has_text "IBM"' + """ + # check out all possilbe entity types for the relation represented + # by 'rtype' + mainvar = etype[0] + searchvar = mainvar + '1' + restriction = self._complete_rql(searchstr, etype, rtype=rtype, + var=searchvar) + if ' has_text ' 
in restriction: + rql = ('%s %s ORDERBY FTIRANK(%s) DESC ' + 'WHERE %s %s %s, %s' % (etype, mainvar, searchvar, + mainvar, rtype, searchvar, # P worksAt C + restriction)) + else: + rql = ('%s %s WHERE %s %s %s, %s' % (etype, mainvar, + mainvar, rtype, searchvar, # P worksAt C + restriction)) + return rql, {'text': searchstr} + + + def _quoted_words_query(self, ori_rql): + """Specific process when there's a "quoted" part + """ + m = QUOTED_SRE.match(ori_rql) + # if there's no quoted part, then no special pre-processing to do + if m is None: + raise BadRQLQuery("unable to handle request %r" % ori_rql) + left_words = m.group(1).split() + quoted_part = m.group(3) + # Case (1) : Company "My own company" + if len(left_words) == 1: + try: + word1 = left_words[0] + return self._two_words_query(word1, quoted_part) + except BadRQLQuery as error: + raise BadRQLQuery("unable to handle request %r" % ori_rql) + # Case (2) : Company name "My own company"; + elif len(left_words) == 2: + word1, word2 = left_words + return self._three_words_query(word1, word2, quoted_part) + # return ori_rql + raise BadRQLQuery("unable to handle request %r" % ori_rql) + + + +class FullTextTranslator(BaseQueryProcessor): + priority = 10 + name = 'text' + + def preprocess_query(self, uquery): + """suppose it's a plain text query""" + return 'Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s', {'text': uquery} + + + +class MagicSearchComponent(Component): + __regid__ = 'magicsearch' + def __init__(self, req, rset=None): + super(MagicSearchComponent, self).__init__(req, rset=rset) + processors = [] + self.by_name = {} + for processorcls in self._cw.vreg['components']['magicsearch_processor']: + # instantiation needed + processor = processorcls(self._cw) + processors.append(processor) + if processor.name is not None: + assert not processor.name in self.by_name + self.by_name[processor.name.lower()] = processor + self.processors = sorted(processors, key=lambda x: x.priority) + + def process_query(self, uquery): + assert isinstance(uquery, text_type) + try: + procname, query = uquery.split(':', 1) + proc = self.by_name[procname.strip().lower()] + uquery = query.strip() + except Exception: + # use processor chain + unauthorized = None + for proc in self.processors: + try: + return proc.process_query(uquery) + # FIXME : we don't want to catch any exception type here ! + except (RQLSyntaxError, BadRQLQuery): + pass + except Unauthorized as ex: + unauthorized = ex + continue + except Exception as ex: + LOGGER.debug('%s: %s', ex.__class__.__name__, ex) + continue + if unauthorized: + raise unauthorized + else: + # explicitly specified processor: don't try to catch the exception + return proc.process_query(uquery) + raise BadRQLQuery(self._cw._('sorry, the server is unable to handle this query')) + + + +## RQL suggestions builder #################################################### +class RQLSuggestionsBuilder(Component): + """main entry point is `build_suggestions()` which takes + an incomplete RQL query and returns a list of suggestions to complete + the query. + + This component is enabled by default and is used to provide autocompletion + in the RQL search bar. If you don't want this feature in your application, + just unregister it or make it unselectable. + + .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.build_suggestions + .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.etypes_suggestion_set + .. 
automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.possible_etypes + .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.possible_relations + .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.vocabulary + """ + __regid__ = 'rql.suggestions' + + #: maximum number of results to fetch when suggesting attribute values + attr_value_limit = 20 + + def build_suggestions(self, user_rql): + """return a list of suggestions to complete `user_rql` + + :param user_rql: an incomplete RQL query + """ + req = self._cw + try: + if 'WHERE' not in user_rql: # don't try to complete if there's no restriction + return [] + variables, restrictions = [part.strip() for part in user_rql.split('WHERE', 1)] + if ',' in restrictions: + restrictions, incomplete_part = restrictions.rsplit(',', 1) + user_rql = '%s WHERE %s' % (variables, restrictions) + else: + restrictions, incomplete_part = '', restrictions + user_rql = variables + select = parse(user_rql, print_errors=False).children[0] + req.vreg.rqlhelper.annotate(select) + req.vreg.solutions(req, select, {}) + if restrictions: + return ['%s, %s' % (user_rql, suggestion) + for suggestion in self.rql_build_suggestions(select, incomplete_part)] + else: + return ['%s WHERE %s' % (user_rql, suggestion) + for suggestion in self.rql_build_suggestions(select, incomplete_part)] + except Exception as exc: # we never want to crash + self.debug('failed to build suggestions: %s', exc) + return [] + + ## actual completion entry points ######################################### + def rql_build_suggestions(self, select, incomplete_part): + """ + :param select: the annotated select node (rql syntax tree) + :param incomplete_part: the part of the rql query that needs + to be completed, (e.g. ``X is Pr``, ``X re``) + """ + chunks = incomplete_part.split(None, 2) + if not chunks: # nothing to complete + return [] + if len(chunks) == 1: # `incomplete` looks like "MYVAR" + return self._complete_rqlvar(select, *chunks) + elif len(chunks) == 2: # `incomplete` looks like "MYVAR some_rel" + return self._complete_rqlvar_and_rtype(select, *chunks) + elif len(chunks) == 3: # `incomplete` looks like "MYVAR some_rel something" + return self._complete_relation_object(select, *chunks) + else: # would be anything else, hard to decide what to do here + return [] + + # _complete_* methods are considered private, at least while the API + # isn't stabilized. + def _complete_rqlvar(self, select, rql_var): + """return suggestions for "variable only" incomplete_part + + as in : + + - Any X WHERE X + - Any X WHERE X is Project, Y + - etc. + """ + return ['%s %s %s' % (rql_var, rtype, dest_var) + for rtype, dest_var in self.possible_relations(select, rql_var)] + + def _complete_rqlvar_and_rtype(self, select, rql_var, user_rtype): + """return suggestions for "variable + rtype" incomplete_part + + as in : + + - Any X WHERE X is + - Any X WHERE X is Person, X firstn + - etc. + """ + # special case `user_type` == 'is', return every possible type. + if user_rtype == 'is': + return self._complete_is_relation(select, rql_var) + else: + return ['%s %s %s' % (rql_var, rtype, dest_var) + for rtype, dest_var in self.possible_relations(select, rql_var) + if rtype.startswith(user_rtype)] + + def _complete_relation_object(self, select, rql_var, user_rtype, user_value): + """return suggestions for "variable + rtype + some_incomplete_value" + + as in : + + - Any X WHERE X is Per + - Any X WHERE X is Person, X firstname " + - Any X WHERE X is Person, X firstname "Pa + - etc. 
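Putting it together, the component is meant to be looked up from the registry and fed the incomplete query string; a small sketch, assuming a web request ``req`` (the completions shown are only indicative, they depend on the instance schema):

.. sourcecode:: python

    builder = req.vreg['components'].select_or_none('rql.suggestions', req)
    if builder is not None:
        # complete an entity type
        print(builder.build_suggestions(u'Any X WHERE X is Pro'))
        # e.g. [u'Any X WHERE X is Project']

        # complete an attribute / relation name on the last restriction
        print(builder.build_suggestions(u'Any X WHERE X is Project, X na'))
        # e.g. [u'Any X WHERE X is Project, X name A']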
+ """ + # special case `user_type` == 'is', return every possible type. + if user_rtype == 'is': + return self._complete_is_relation(select, rql_var, user_value) + elif user_value: + if user_value[0] in ('"', "'"): + # if finished string, don't suggest anything + if len(user_value) > 1 and user_value[-1] == user_value[0]: + return [] + user_value = user_value[1:] + return ['%s %s "%s"' % (rql_var, user_rtype, value) + for value in self.vocabulary(select, rql_var, + user_rtype, user_value)] + return [] + + def _complete_is_relation(self, select, rql_var, prefix=''): + """return every possible types for rql_var + + :param prefix: if specified, will only return entity types starting + with the specified value. + """ + return ['%s is %s' % (rql_var, etype) + for etype in self.possible_etypes(select, rql_var, prefix)] + + def etypes_suggestion_set(self): + """returns the list of possible entity types to suggest + + The default is to return any non-final entity type available + in the schema. + + Can be overridden for instance if an application decides + to restrict this list to a meaningful set of business etypes. + """ + schema = self._cw.vreg.schema + return set(eschema.type for eschema in schema.entities() if not eschema.final) + + def possible_etypes(self, select, rql_var, prefix=''): + """return all possible etypes for `rql_var` + + The returned list will always be a subset of meth:`etypes_suggestion_set` + + :param select: the annotated select node (rql syntax tree) + :param rql_var: the variable name for which we want to know possible types + :param prefix: if specified, will only return etypes starting with it + """ + available_etypes = self.etypes_suggestion_set() + possible_etypes = set() + for sol in select.solutions: + if rql_var in sol and sol[rql_var] in available_etypes: + possible_etypes.add(sol[rql_var]) + if not possible_etypes: + # `Any X WHERE X is Person, Y is` + # -> won't have a solution, need to give all etypes + possible_etypes = available_etypes + return sorted(etype for etype in possible_etypes if etype.startswith(prefix)) + + def possible_relations(self, select, rql_var, include_meta=False): + """returns a list of couple (rtype, dest_var) for each possible + relations with `rql_var` as subject. + + ``dest_var`` will be picked among availabel variables if types match, + otherwise a new one will be created. + """ + schema = self._cw.vreg.schema + relations = set() + untyped_dest_var = next(rqlvar_maker(defined=select.defined_vars)) + # for each solution + # 1. find each possible relation + # 2. for each relation: + # 2.1. if the relation is meta, skip it + # 2.2. 
for each possible destination type, pick up possible + # variables for this type or use a new one + for sol in select.solutions: + etype = sol[rql_var] + sol_by_types = {} + for varname, var_etype in sol.items(): + # don't push subject var to avoid "X relation X" suggestion + if varname != rql_var: + sol_by_types.setdefault(var_etype, []).append(varname) + for rschema in schema[etype].subject_relations(): + if include_meta or not rschema.meta: + for dest in rschema.objects(etype): + for varname in sol_by_types.get(dest.type, (untyped_dest_var,)): + suggestion = (rschema.type, varname) + if suggestion not in relations: + relations.add(suggestion) + return sorted(relations) + + def vocabulary(self, select, rql_var, user_rtype, rtype_incomplete_value): + """return acceptable vocabulary for `rql_var` + `user_rtype` in `select` + + Vocabulary is either found from schema (Yams) definition or + directly from database. + """ + schema = self._cw.vreg.schema + vocab = [] + for sol in select.solutions: + # for each solution : + # - If a vocabulary constraint exists on `rql_var+user_rtype`, use it + # to define possible values + # - Otherwise, query the database to fetch available values from + # database (limiting results to `self.attr_value_limit`) + try: + eschema = schema.eschema(sol[rql_var]) + rdef = eschema.rdef(user_rtype) + except KeyError: # unknown relation + continue + cstr = rdef.constraint_by_interface(IVocabularyConstraint) + if cstr is not None: + # a vocabulary is found, use it + vocab += [value for value in cstr.vocabulary() + if value.startswith(rtype_incomplete_value)] + elif rdef.final: + # no vocab, query database to find possible value + vocab_rql = 'DISTINCT Any V LIMIT %s WHERE X is %s, X %s V' % ( + self.attr_value_limit, eschema.type, user_rtype) + vocab_kwargs = {} + if rtype_incomplete_value: + vocab_rql += ', X %s LIKE %%(value)s' % user_rtype + vocab_kwargs['value'] = u'%s%%' % rtype_incomplete_value + vocab += [value for value, in + self._cw.execute(vocab_rql, vocab_kwargs)] + return sorted(set(vocab)) + + + +@ajaxfunc(output_type='json') +def rql_suggest(self): + rql_builder = self._cw.vreg['components'].select_or_none('rql.suggestions', self._cw) + if rql_builder: + return rql_builder.build_suggestions(self._cw.form['term']) + return [] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/management.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/management.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,200 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
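To recap the processor chain defined in magicsearch above: the ``magicsearch`` component tries each processor in priority order until one of them produces a result set. A sketch, assuming a web request ``req`` (the RQL spelled out in the comments follows ``QSPreProcessor``'s rules; actual entity types depend on the schema):

.. sourcecode:: python

    magic = req.vreg['components'].select('magicsearch', req)

    # explicit processor prefix: handed to the raw RQL processor as-is
    rset = magic.process_query(u'rql: Any X WHERE X is CWUser')

    # two-word shortcut, rewritten to something like
    #   CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s
    rset = magic.process_query(u'CWUser admin')

    # anything that does not parse falls back to plain full-text search:
    #   Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s
    rset = magic.process_query(u'some random words')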
+"""security management and error screens""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + + +from logilab.mtconverter import xml_escape +from logilab.common.registry import yes + +from cubicweb.predicates import none_rset, match_user_groups, authenticated_user +from cubicweb.view import AnyRsetView, StartupView, EntityView, View +from cubicweb.uilib import html_traceback, rest_traceback, exc_message +from cubicweb.web import formwidgets as wdgs +from cubicweb.web.formfields import guess_field +from cubicweb.web.views.schema import SecurityViewMixIn + +from yams.buildobjs import EntityType + +SUBMIT_MSGID = _('Submit bug report') +MAIL_SUBMIT_MSGID = _('Submit bug report by mail') + +class SecurityManagementView(SecurityViewMixIn, EntityView): + """display security information for a given entity""" + __regid__ = 'security' + __select__ = EntityView.__select__ & authenticated_user() + + title = _('security') + + def call(self): + self.w(u'
          %s
          ' % self._cw._('validating...')) + super(SecurityManagementView, self).call() + + def entity_call(self, entity): + self._cw.add_js('cubicweb.edition.js') + self._cw.add_css('cubicweb.acl.css') + w = self.w + _ = self._cw._ + w(u'

          %s %s

          ' + % (entity.dc_type().capitalize(), + xml_escape(entity.absolute_url()), + xml_escape(entity.dc_title()))) + # first show permissions defined by the schema + self.w('

          %s

          ' % _('Schema\'s permissions definitions')) + self.permissions_table(entity.e_schema) + self.w('

          %s

          ' % _('Manage security')) + # ownership information + if self._cw.vreg.schema.rschema('owned_by').has_perm(self._cw, 'add', + fromeid=entity.eid): + self.owned_by_edit_form(entity) + else: + self.owned_by_information(entity) + + def owned_by_edit_form(self, entity): + self.w('

          %s

          ' % self._cw._('Ownership')) + msg = self._cw._('ownerships have been changed') + form = self._cw.vreg['forms'].select('base', self._cw, entity=entity, + form_renderer_id='onerowtable', submitmsg=msg, + form_buttons=[wdgs.SubmitButton()], + domid='ownership%s' % entity.eid, + __redirectvid='security', + __redirectpath=entity.rest_path()) + field = guess_field(entity.e_schema, + self._cw.vreg.schema['owned_by'], + req=self._cw) + form.append_field(field) + form.render(w=self.w, display_progress_div=False) + + def owned_by_information(self, entity): + ownersrset = entity.related('owned_by') + if ownersrset: + self.w('

          %s

          ' % self._cw._('Ownership')) + self.w(u'
          ') + self.w(self._cw._('this entity is currently owned by') + ' ') + self.wview('csv', entity.related('owned_by'), 'null') + self.w(u'
')
+        # else we don't know if this is because entity has no owner or because
+        # user has no access to owner users entities
+
+class ErrorView(AnyRsetView):
+    """default view when no result has been found"""
+    __select__ = yes()
+    __regid__ = 'error'
+
+    def page_title(self):
+        """returns a title according to the result set - used for the
+        title in the HTML header
+        """
+        return self._cw._('an error occurred')
+
+    def _excinfo(self):
+        req = self._cw
+        ex = req.data.get('ex')
+        excinfo = req.data.get('excinfo')
+        if 'errmsg' in req.data:
+            errmsg = req.data['errmsg']
+            exclass = None
+        else:
+            errmsg = exc_message(ex, req.encoding)
+            exclass = ex.__class__.__name__
+        return errmsg, exclass, excinfo
+
+    def call(self):
+        req = self._cw.reset_headers()
+        w = self.w
+        title = self._cw._('an error occurred')
+        w(u'

          %s

          ' % title) + ex, exclass, excinfo = self._excinfo() + if excinfo is not None and self._cw.vreg.config['print-traceback']: + if exclass is None: + w(u'
          %s
          ' + % xml_escape(ex).replace("\n","
          ")) + else: + w(u'
          %s: %s
          ' + % (exclass, xml_escape(ex).replace("\n","
          "))) + w(u'
          ') + w(u'
          %s
          ' % html_traceback(excinfo, ex, '')) + else: + w(u'
          %s
          ' % (xml_escape(ex).replace("\n","
          "))) + # if excinfo is not None, it's probably not a bug + if excinfo is None: + return + vcconf = self._cw.cnx.repo.get_versions() + w(u"
          ") + eversion = vcconf.get('cubicweb', self._cw._('no version information')) + # NOTE: tuple wrapping needed since eversion is itself a tuple + w(u"CubicWeb version: %s
          \n" % (eversion,)) + cversions = [] + for cube in self._cw.vreg.config.cubes(): + cubeversion = vcconf.get(cube, self._cw._('no version information')) + w(u"Cube %s version: %s
          \n" % (cube, cubeversion)) + cversions.append((cube, cubeversion)) + w(u"
          ") + # creates a bug submission link if submit-mail is set + if self._cw.vreg.config['submit-mail']: + form = self._cw.vreg['forms'].select('base', self._cw, rset=None, + mainform=False) + binfo = text_error_description(ex, excinfo, req, eversion, cversions) + form.add_hidden('description', binfo, + # we must use a text area to keep line breaks + widget=wdgs.TextArea({'class': 'hidden'})) + # add a signature so one can't send arbitrary text + form.add_hidden('__signature', req.vreg.config.sign_text(binfo)) + form.add_hidden('__bugreporting', '1') + form.form_buttons = [wdgs.SubmitButton(MAIL_SUBMIT_MSGID)] + form.action = req.build_url('reportbug') + form.render(w=w) + + +def text_error_description(ex, excinfo, req, eversion, cubes): + binfo = rest_traceback(excinfo, xml_escape(ex)) + binfo += u'\n\n:URL: %s\n' % req.url() + if not '__bugreporting' in req.form: + binfo += u'\n:form params:\n' + binfo += u'\n'.join(u' * %s = %s' % (k, v) for k, v in req.form.items()) + binfo += u'\n\n:CubicWeb version: %s\n' % (eversion,) + for pkg, pkgversion in cubes: + binfo += u":Cube %s version: %s\n" % (pkg, pkgversion) + binfo += '\n' + return binfo + + +class CwStats(View): + """A textual stats output for monitoring tools such as munin """ + + __regid__ = 'processinfo' + content_type = 'text/plain' + templatable = False + __select__ = none_rset() & match_user_groups('users', 'managers') + + def call(self): + stats = self._cw.call_service('repo_stats') + stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks']) + stats['threads'] = ', '.join(sorted(stats['threads'])) + for k in stats: + if k in ('extid_cache_size', 'type_source_cache_size'): + continue + if k.endswith('_cache_size'): + stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize']) + results = [] + for element in stats: + results.append(u'%s %s' % (element, stats[element])) + self.w(u'\n'.join(results)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/massmailing.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/massmailing.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,40 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""Mass mailing handling: send mail to entities adaptable to IEmailable""" + +try: + from cubes.massmailing.views import (SendEmailAction, + recipient_vocabulary, + MassMailingForm, + MassMailingFormRenderer, + MassMailingFormView, + SendMailController) + + + from logilab.common.deprecation import class_moved, moved + + msg = '[3.17] cubicweb.web.views.massmailing moved to cubes.massmailing.views' + SendEmailAction = class_moved(SendEmailAction, message=msg) + recipient_vocabulary = moved('cubes.massmailing.views', 'recipient_vocabulary') + MassMailingForm = class_moved(MassMailingForm, message=msg) + MassMailingFormRenderer = class_moved(MassMailingFormRenderer, message=msg) + MassMailingFormView = class_moved(MassMailingFormView, message=msg) + SendMailController = class_moved(SendMailController, message=msg) +except ImportError: + from cubicweb.web import LOGGER + LOGGER.warning('[3.17] massmailing extracted to cube massmailing that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/navigation.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/navigation.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,415 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""This module provides some generic components to navigate in the web +application. + +Pagination +---------- + +Several implementations for large result set pagination are provided: + +.. autoclass:: PageNavigation +.. autoclass:: PageNavigationSelect +.. autoclass:: SortedNavigation + +Pagination will appear when needed according to the `page-size` ui property. + +This module monkey-patch the :func:`paginate` function to the base :class:`View` +class, so that you can ask pagination explicitly on every result-set based views. + +.. autofunction:: paginate + + +Previous / next navigation +-------------------------- + +An adapter and its related component for the somewhat usal "previous / next" +navigation are provided. + + .. autoclass:: IPrevNextAdapter + .. autoclass:: NextPrevNavigationComponent +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from datetime import datetime + +from six import text_type + +from rql.nodes import VariableRef, Constant + +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import deprecated + +from cubicweb.predicates import paginated_rset, sorted_rset, adaptable +from cubicweb.uilib import cut +from cubicweb.view import EntityAdapter +from cubicweb.web.component import EmptyComponent, EntityCtxComponent, NavigationComponent + + +class PageNavigation(NavigationComponent): + """The default pagination component: display link to pages where each pages + is identified by the item number of its first and last elements. 
+ """ + def call(self): + """displays a resultset by page""" + params = dict(self._cw.form) + self.clean_params(params) + basepath = self._cw.relative_path(includeparams=False) + self.w(u'') + + def index_display(self, start, stop): + return u'%s - %s' % (start+1, stop+1) + + def iter_page_links(self, basepath, params): + rset = self.cw_rset + page_size = self.page_size + start = 0 + while start < rset.rowcount: + stop = min(start + page_size - 1, rset.rowcount - 1) + yield self.page_link(basepath, params, start, stop, + self.index_display(start, stop)) + start = stop + 1 + + +class PageNavigationSelect(PageNavigation): + """This pagination component displays a result-set by page as + :class:`PageNavigation` but in a ') + for option in self.iter_page_links(basepath, params): + w(option) + w(u'') + w(u'  %s' % self.next_link(basepath, params)) + w(u'
          ') + + +class SortedNavigation(NavigationComponent): + """This pagination component will be selected by default if there are less + than 4 pages and if the result set is sorted. + + Displayed links to navigate accross pages of a result set are done according + to the first variable on which the sort is done, and looks like: + + [ana - cro] | [cro - ghe] | ... | [tim - zou] + + You may want to override this component to customize display in some cases. + + .. automethod:: sort_on + .. automethod:: display_func + .. automethod:: format_link_content + .. automethod:: write_links + + Below an example from the tracker cube: + + .. sourcecode:: python + + class TicketsNavigation(navigation.SortedNavigation): + __select__ = (navigation.SortedNavigation.__select__ + & ~paginated_rset(4) & is_instance('Ticket')) + def sort_on(self): + col, attrname = super(TicketsNavigation, self).sort_on() + if col == 6: + # sort on state, we don't want that + return None, None + return col, attrname + + The idea is that in trackers'ticket tables, result set is first ordered on + ticket's state while this doesn't make any sense in the navigation. So we + override :meth:`sort_on` so that if we detect such sorting, we disable the + feature to go back to item number in the pagination. + + Also notice the `~paginated_rset(4)` in the selector so that if there are + more than 4 pages to display, :class:`PageNavigationSelect` will still be + selected. + """ + __select__ = paginated_rset() & sorted_rset() + + # number of considered chars to build page links + nb_chars = 5 + + def call(self): + # attrname = the name of attribute according to which the sort + # is done if any + col, attrname = self.sort_on() + index_display = self.display_func(self.cw_rset, col, attrname) + basepath = self._cw.relative_path(includeparams=False) + params = dict(self._cw.form) + self.clean_params(params) + blocklist = [] + start = 0 + total = self.cw_rset.rowcount + while start < total: + stop = min(start + self.page_size - 1, total - 1) + cell = self.format_link_content(index_display(start), index_display(stop)) + blocklist.append(self.page_link(basepath, params, start, stop, cell)) + start = stop + 1 + self.write_links(basepath, params, blocklist) + + def display_func(self, rset, col, attrname): + """Return a function that will be called with a row number as argument + and should return a string to use as link for it. + """ + if attrname is not None: + def index_display(row): + if not rset[row][col]: # outer join + return u'' + entity = rset.get_entity(row, col) + return entity.printable_value(attrname, format='text/plain') + elif col is None: # smart links disabled. + def index_display(row): + return text_type(row) + elif self._cw.vreg.schema.eschema(rset.description[0][col]).final: + def index_display(row): + return text_type(rset[row][col]) + else: + def index_display(row): + return rset.get_entity(row, col).view('text') + return index_display + + def sort_on(self): + """Return entity column number / attr name to use for nice display by + inspecting the rset'syntax tree. 
+ """ + rschema = self._cw.vreg.schema.rschema + for sorterm in self.cw_rset.syntax_tree().children[0].orderby: + if isinstance(sorterm.term, Constant): + col = sorterm.term.value - 1 + return col, None + var = sorterm.term.get_nodes(VariableRef)[0].variable + col = None + for ref in var.references(): + rel = ref.relation() + if rel is None: + continue + attrname = rel.r_type + if attrname in ('is', 'has_text'): + continue + if not rschema(attrname).final: + col = var.selected_index() + attrname = None + if col is None: + # final relation or not selected non final relation + if var is rel.children[0]: + relvar = rel.children[1].children[0].get_nodes(VariableRef)[0] + else: + relvar = rel.children[0].variable + col = relvar.selected_index() + if col is not None: + break + else: + # no relation but maybe usable anyway if selected + col = var.selected_index() + attrname = None + if col is not None: + # if column type is date[time], set proper 'nb_chars' + if var.stinfo['possibletypes'] & frozenset(('TZDatetime', 'Datetime', + 'Date')): + self.nb_chars = len(self._cw.format_date(datetime.today())) + return col, attrname + # nothing usable found, use the first column + return 0, None + + def format_link_content(self, startstr, stopstr): + """Return text for a page link, where `startstr` and `stopstr` are the + text for the lower/upper boundaries of the page. + + By default text are stripped down to :attr:`nb_chars` characters. + """ + text = u'%s - %s' % (startstr.lower()[:self.nb_chars], + stopstr.lower()[:self.nb_chars]) + return xml_escape(text) + + def write_links(self, basepath, params, blocklist): + """Return HTML for the whole navigation: `blocklist` is a list of HTML + snippets for each page, `basepath` and `params` will be necessary to + build previous/next links. 
+ """ + self.w(u'') + + +def do_paginate(view, rset=None, w=None, show_all_option=True, page_size=None): + """write pages index in w stream (default to view.w) and then limit the + result set (default to view.rset) to the currently displayed page if we're + not explicitly told to display everything (by setting __force_display in + req.form) + """ + req = view._cw + if rset is None: + rset = view.cw_rset + if w is None: + w = view.w + nav = req.vreg['components'].select_or_none( + 'navigation', req, rset=rset, page_size=page_size, view=view) + if nav: + if w is None: + w = view.w + if req.form.get('__force_display'): + # allow to come back to the paginated view + params = dict(req.form) + basepath = req.relative_path(includeparams=False) + del params['__force_display'] + url = nav.page_url(basepath, params) + w(u'\n' + % (xml_escape(url), req._('back to pagination (%s results)') + % nav.page_size)) + else: + # get boundaries before component rendering + start, stop = nav.page_boundaries() + nav.render(w=w) + params = dict(req.form) + nav.clean_params(params) + # make a link to see them all + if show_all_option: + basepath = req.relative_path(includeparams=False) + params['__force_display'] = 1 + params['__fromnavigation'] = 1 + url = nav.page_url(basepath, params) + w(u'\n' + % (xml_escape(url), req._('show %s results') % len(rset))) + rset.limit(offset=start, limit=stop-start, inplace=True) + + +def paginate(view, show_all_option=True, w=None, page_size=None, rset=None): + """paginate results if the view is paginable + """ + if view.paginable: + do_paginate(view, rset, w, show_all_option, page_size) + +# monkey patch base View class to add a .paginate([...]) +# method to be called to write pages index in the view and then limit the result +# set to the current page +from cubicweb.view import View +View.do_paginate = do_paginate +View.paginate = paginate +View.handle_pagination = False + + + +class IPrevNextAdapter(EntityAdapter): + """Interface for entities which can be linked to a previous and/or next + entity + + .. automethod:: next_entity + .. automethod:: previous_entity + """ + __needs_bw_compat__ = True + __regid__ = 'IPrevNext' + __abstract__ = True + + def next_entity(self): + """return the 'next' entity""" + raise NotImplementedError + + def previous_entity(self): + """return the 'previous' entity""" + raise NotImplementedError + + +class NextPrevNavigationComponent(EntityCtxComponent): + """Entities adaptable to the 'IPrevNext' should have this component + automatically displayed. You may want to override this component to have a + different look and feel. + """ + + __regid__ = 'prevnext' + # register msg not generated since no entity implements IPrevNext in cubicweb + # itself + help = _('ctxcomponents_prevnext_description') + __select__ = EntityCtxComponent.__select__ & adaptable('IPrevNext') + context = 'navbottom' + order = 10 + + @property + def prev_icon(self): + return '%s' % ( + xml_escape(self._cw.data_url('go_prev.png')), self._cw._('previous page')) + + @property + def next_icon(self): + return '%s' % ( + xml_escape(self._cw.data_url('go_next.png')), self._cw._('next page')) + + def init_rendering(self): + adapter = self.entity.cw_adapt_to('IPrevNext') + self.previous = adapter.previous_entity() + self.next = adapter.next_entity() + if not (self.previous or self.next): + raise EmptyComponent() + + def render_body(self, w): + w(u'
          ') + self.prevnext(w) + w(u'
          ') + w(u'
          ') + + def prevnext(self, w): + if self.previous: + self.prevnext_entity(w, self.previous, 'prev') + if self.next: + self.prevnext_entity(w, self.next, 'next') + + def prevnext_entity(self, w, entity, type): + textsize = self._cw.property_value('navigation.short-line-size') + content = xml_escape(cut(entity.dc_title(), textsize)) + if type == 'prev': + title = self._cw._('i18nprevnext_previous') + icon = self.prev_icon + cssclass = u'previousEntity left' + content = icon + '  ' + content + else: + title = self._cw._('i18nprevnext_next') + icon = self.next_icon + cssclass = u'nextEntity right' + content = content + '  ' + icon + self.prevnext_div(w, type, cssclass, entity.absolute_url(), + title, content) + + def prevnext_div(self, w, type, cssclass, url, title, content): + w(u'
          ' % cssclass) + w(u'%s' % (xml_escape(url), + xml_escape(title), + content)) + w(u'
          ') + self._cw.html_headers.add_raw('' % ( + type, xml_escape(url))) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/owl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/owl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,233 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""produces some Ontology Web Language schema and views + +""" +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves import range + +from logilab.mtconverter import TransformError, xml_escape + +from cubicweb.view import StartupView, EntityView +from cubicweb.predicates import none_rset, match_view +from cubicweb.web.action import Action +from cubicweb.web.views import schema + +OWL_CARD_MAP = {'1': '', + '?': '1', + '+': '1', + '*': '' + } + +OWL_TYPE_MAP = {'String': 'xsd:string', + 'Bytes': 'xsd:byte', + 'Password': 'xsd:byte', + + 'Boolean': 'xsd:boolean', + 'Int': 'xsd:int', + 'BigInt': 'xsd:int', + 'Float': 'xsd:float', + 'Decimal' : 'xsd:decimal', + + 'Date':'xsd:date', + 'Datetime': 'xsd:dateTime', + 'TZDatetime': 'xsd:dateTime', + 'Time': 'xsd:time', + 'TZTime': 'xsd:time', + 'Interval': 'xsd:duration' + } + +OWL_OPENING_ROOT = u''' + + +]> + + + + + %(appid)s Cubicweb OWL Ontology + + ''' + +OWL_CLOSING_ROOT = u'' + + +class OWLView(StartupView): + """This view export in owl format schema database. 
It is the TBOX""" + __regid__ = 'owl' + title = _('owl') + templatable = False + content_type = 'application/xml' # 'text/xml' + + def call(self, writeprefix=True): + skipmeta = int(self._cw.form.get('skipmeta', True)) + if writeprefix: + self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name}) + self.visit_schema(skiptypes=skipmeta and schema.SKIP_TYPES or ()) + if writeprefix: + self.w(OWL_CLOSING_ROOT) + + def should_display_rschema(self, eschema, rschema, role): + return not rschema in self.skiptypes and ( + rschema.may_have_permission('read', self._cw, eschema, role)) + + def visit_schema(self, skiptypes): + """get a layout for a whole schema""" + self.skiptypes = skiptypes + entities = sorted(eschema for eschema in self._cw.vreg.schema.entities() + if not eschema.final or eschema in skiptypes) + self.w(u'') + for eschema in entities: + self.visit_entityschema(eschema) + self.w(u'') + self.visit_property_schema(eschema) + self.w(u'') + self.visit_property_object_schema(eschema) + + def visit_entityschema(self, eschema): + """get a layout for an entity OWL schema""" + self.w(u''% eschema) + self.w(u'') + for rschema, targetschemas, role in eschema.relation_definitions(): + if not self.should_display_rschema(eschema, rschema, role): + continue + for oeschema in targetschemas: + card = rschema.role_rdef(eschema, oeschema, role).role_cardinality(role) + cardtag = OWL_CARD_MAP[card] + if cardtag: + self.w(u''' + + + %s + +''' % (rschema, cardtag)) + + self.w(u'') + for rschema, aschema in eschema.attribute_definitions(): + if not self.should_display_rschema(eschema, rschema, 'subject'): + continue + self.w(u''' + + + + +''' % rschema) + self.w(u'') + + def visit_property_schema(self, eschema): + """get a layout for property entity OWL schema""" + for rschema, targetschemas, role in eschema.relation_definitions(): + if not self.should_display_rschema(eschema, rschema, role): + continue + for oeschema in targetschemas: + self.w(u''' + + +''' % (rschema, eschema, oeschema.type)) + + def visit_property_object_schema(self, eschema): + for rschema, aschema in eschema.attribute_definitions(): + if not self.should_display_rschema(eschema, rschema, 'subject'): + continue + self.w(u''' + + +''' % (rschema, eschema, OWL_TYPE_MAP[aschema.type])) + + +class OWLABOXView(EntityView): + '''This view represents a part of the ABOX for a given entity.''' + __regid__ = 'owlabox' + title = _('owlabox') + templatable = False + content_type = 'application/xml' # 'text/xml' + + def call(self): + self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name}) + for i in range(self.cw_rset.rowcount): + self.cell_call(i, 0) + self.w(OWL_CLOSING_ROOT) + + def cell_call(self, row, col): + self.wview('owlaboxitem', self.cw_rset, row=row, col=col) + + +class OWLABOXItemView(EntityView): + '''This view represents a part of the ABOX for a given entity.''' + __regid__ = 'owlaboxitem' + templatable = False + content_type = 'application/xml' # 'text/xml' + + def cell_call(self, row, col): + entity = self.cw_rset.complete_entity(row, col) + eschema = entity.e_schema + self.w(u'<%s rdf:ID="%s">' % (eschema, entity.eid)) + self.w(u'') + for rschema, aschema in eschema.attribute_definitions(): + if rschema.meta: + continue + rdef = rschema.rdef(eschema, aschema) + if not rdef.may_have_permission('read', self._cw): + continue + aname = rschema.type + if aname == 'eid': + continue + try: + attr = entity.printable_value(aname, format='text/plain') + if attr: + self.w(u'<%s>%s' % (aname, xml_escape(attr), aname)) + except 
TransformError: + pass + self.w(u'') + for rschema, targetschemas, role in eschema.relation_definitions(): + if rschema.meta: + continue + for tschema in targetschemas: + rdef = rschema.role_rdef(eschema, tschema, role) + if rdef.may_have_permission('read', self._cw): + break + else: + # no read perms to any relation of this type. Skip. + continue + if role == 'object': + attr = 'reverse_%s' % rschema.type + else: + attr = rschema.type + for x in getattr(entity, attr): + self.w(u'<%s>%s %s' % (attr, x.__regid__, x.eid, attr)) + self.w(u''% eschema) + + +class DownloadOWLSchemaAction(Action): + __regid__ = 'download_as_owl' + __select__ = none_rset() & match_view('schema') + + category = 'mainactions' + title = _('download schema as owl') + + def url(self): + return self._cw.build_url('view', vid='owl') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/plots.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/plots.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,228 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
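`DownloadOWLSchemaAction` above links to the TBOX export with `build_url('view', vid='owl')`. The same document can be produced from server-side code through the view registry; a short sketch where the helper name is hypothetical and only the `'owl'` regid and the `vreg['views'].select(...)` pattern come from this patch:

.. sourcecode:: python

    def schema_as_owl(req):
        # select the OWLView defined above (no result set is needed since it
        # is a startup view) and return its rendering as a unicode XML string
        view = req.vreg['views'].select('owl', req, rset=None)
        return view.render()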
+"""basic plot views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six import add_metaclass +from six.moves import range + +from logilab.common.date import datetime2ticks +from logilab.common.deprecation import class_deprecated +from logilab.common.registry import objectify_predicate +from logilab.mtconverter import xml_escape + +from cubicweb.utils import UStringIO, json_dumps +from cubicweb.predicates import multi_columns_rset +from cubicweb.web.views import baseviews + +@objectify_predicate +def all_columns_are_numbers(cls, req, rset=None, *args, **kwargs): + """accept result set with at least one line and two columns of result + all columns after second must be of numerical types""" + for etype in rset.description[0]: + if etype not in ('Int', 'BigInt', 'Float'): + return 0 + return 1 + +@objectify_predicate +def second_column_is_number(cls, req, rset=None, *args, **kwargs): + etype = rset.description[0][1] + if etype not in ('Int', 'BigInt', 'Float'): + return 0 + return 1 + +@objectify_predicate +def columns_are_date_then_numbers(cls, req, rset=None, *args, **kwargs): + etypes = rset.description[0] + if etypes[0] not in ('Date', 'Datetime', 'TZDatetime'): + return 0 + for etype in etypes[1:]: + if etype not in ('Int', 'BigInt', 'Float'): + return 0 + return 1 + + +def filterout_nulls(abscissa, plot): + filtered = [] + for x, y in zip(abscissa, plot): + if x is None or y is None: + continue + filtered.append( (x, y) ) + return sorted(filtered) + +class PlotWidget(object): + # XXX refactor with cubicweb.web.views.htmlwidgets.HtmlWidget + def _initialize_stream(self, w=None): + if w: + self.w = w + else: + self._stream = UStringIO() + self.w = self._stream.write + + def render(self, *args, **kwargs): + w = kwargs.pop('w', None) + self._initialize_stream(w) + self._render(*args, **kwargs) + if w is None: + return self._stream.getvalue() + + def _render(self, *args, **kwargs): + raise NotImplementedError + + +@add_metaclass(class_deprecated) +class FlotPlotWidget(PlotWidget): + """PlotRenderer widget using Flot""" + __deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead' + onload = u""" +var fig = jQuery('#%(figid)s'); +if (fig.attr('cubicweb:type') != 'prepared-plot') { + %(plotdefs)s + jQuery.plot(jQuery('#%(figid)s'), [%(plotdata)s], + {points: {show: true}, + lines: {show: true}, + grid: {hoverable: true}, + /*yaxis : {tickFormatter : suffixFormatter},*/ + xaxis: {mode: %(mode)s}}); + jQuery('#%(figid)s').data({mode: %(mode)s, dateformat: %(dateformat)s}); + jQuery('#%(figid)s').bind('plothover', onPlotHover); + fig.attr('cubicweb:type','prepared-plot'); +} +""" + + def __init__(self, labels, plots, timemode=False): + self.labels = labels + self.plots = plots # list of list of couples + self.timemode = timemode + + def dump_plot(self, plot): + if self.timemode: + plot = [(datetime2ticks(x), y) for x, y in plot] + return json_dumps(plot) + + def _render(self, req, width=500, height=400): + if req.ie_browser(): + req.add_js('excanvas.js') + req.add_js(('jquery.flot.js', 'cubicweb.flot.js')) + figid = u'figure%s' % next(req.varmaker) + plotdefs = [] + plotdata = [] + self.w(u'
          ' % + (figid, width, height)) + for idx, (label, plot) in enumerate(zip(self.labels, self.plots)): + plotid = '%s_%s' % (figid, idx) + plotdefs.append('var %s = %s;' % (plotid, self.dump_plot(plot))) + # XXX ugly but required in order to not crash my demo + plotdata.append("{label: '%s', data: %s}" % (label.replace(u'&', u''), plotid)) + fmt = req.property_value('ui.date-format') # XXX datetime-format + # XXX TODO make plot options customizable + req.html_headers.add_onload(self.onload % + {'plotdefs': '\n'.join(plotdefs), + 'figid': figid, + 'plotdata': ','.join(plotdata), + 'mode': self.timemode and "'time'" or 'null', + 'dateformat': '"%s"' % fmt}) + + +@add_metaclass(class_deprecated) +class PlotView(baseviews.AnyRsetView): + __deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead' + __regid__ = 'plot' + title = _('generic plot') + __select__ = multi_columns_rset() & all_columns_are_numbers() + timemode = False + paginable = False + + def call(self, width=500, height=400): + # prepare data + rqlst = self.cw_rset.syntax_tree() + # XXX try to make it work with unions + varnames = [var.name for var in rqlst.children[0].get_selected_variables()][1:] + abscissa = [row[0] for row in self.cw_rset] + plots = [] + nbcols = len(self.cw_rset.rows[0]) + for col in range(1, nbcols): + data = [row[col] for row in self.cw_rset] + plots.append(filterout_nulls(abscissa, data)) + plotwidget = FlotPlotWidget(varnames, plots, timemode=self.timemode) + plotwidget.render(self._cw, width, height, w=self.w) + + +class TimeSeriePlotView(PlotView): + __select__ = multi_columns_rset() & columns_are_date_then_numbers() + timemode = True + + +try: + from GChartWrapper import Pie, Pie3D +except ImportError: + pass +else: + + class PieChartWidget(PlotWidget): + def __init__(self, labels, values, pieclass=Pie, title=None): + self.labels = labels + self.values = values + self.pieclass = pieclass + self.title = title + + def _render(self, width=None, height=None): + piechart = self.pieclass(self.values) + piechart.label(*self.labels) + if width is not None: + height = height or width + piechart.size(width, height) + if self.title: + piechart.title(self.title) + self.w(u'' % xml_escape(piechart.url)) + + class PieChartView(baseviews.AnyRsetView): + __regid__ = 'piechart' + pieclass = Pie + paginable = False + + __select__ = multi_columns_rset() & second_column_is_number() + + def _guess_vid(self, row): + etype = self.cw_rset.description[row][0] + if self._cw.vreg.schema.eschema(etype).final: + return 'final' + return 'textincontext' + + def call(self, title=None, width=None, height=None): + labels = [] + values = [] + for rowidx, (_, value) in enumerate(self.cw_rset): + if value is not None: + vid = self._guess_vid(rowidx) + label = '%s: %s' % (self._cw.view(vid, self.cw_rset, row=rowidx, col=0), + value) + labels.append(label.encode(self._cw.encoding)) + values.append(value) + pie = PieChartWidget(labels, values, pieclass=self.pieclass, + title=title) + if width is not None: + height = height or width + pie.render(width, height, w=self.w) + + + class PieChart3DView(PieChartView): + __regid__ = 'piechart3D' + pieclass = Pie3D diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/primary.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/primary.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,444 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +Public API of the PrimaryView class +```````````````````````````````````` +.. autoclass:: cubicweb.web.views.primary.PrimaryView + +Views that may be used to display an entity's attribute or relation +``````````````````````````````````````````````````````````````````` + +Yoy may easily the display of an attribute or relation by simply configuring the +view using one of `primaryview_display_ctrl` or `reledit_ctrl` to use one of the +views describled below. For instance: + +.. sourcecode:: python + + primaryview_display_ctrl.tag_attribute(('Foo', 'bar'), {'vid': 'attribute'}) + + +.. autoclass:: AttributeView +.. autoclass:: URLAttributeView +.. autoclass:: VerbatimAttributeView +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from logilab.common.deprecation import deprecated +from logilab.mtconverter import xml_escape + +from cubicweb import Unauthorized, NoSelectableObject +from cubicweb.utils import support_args +from cubicweb.predicates import match_kwargs, match_context +from cubicweb.view import EntityView +from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name +from cubicweb.web import component +from cubicweb.web.views import uicfg + + +class PrimaryView(EntityView): + """ + The basic layout of a primary view is as in the :ref:`primary_view_layout` + section. This layout is actually drawn by the `render_entity` method. + + The methods you may want to modify while customizing a ``PrimaryView`` + are: + + .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_title + .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_attributes + .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_relations + .. automethod:: cubicweb.web.views.primary.PrimaryView.render_side_boxes + + The placement of relations in the relations section or in side boxes + can be controlled through the :ref:`primary_view_configuration` mechanism. + + .. automethod:: cubicweb.web.views.primary.PrimaryView.content_navigation_components + + Also, please note that by setting the following attributes in your + subclass, you can already customize some of the rendering: + + :attr:`show_attr_label` + Renders the attribute label next to the attribute value if set to `True`. + Otherwise, does only display the attribute value. + + :attr:`show_rel_label` + Renders the relation label next to the relation value if set to `True`. + Otherwise, does only display the relation value. + + :attr:`main_related_section` + Renders the relations of the entity if set to `True`. + + A good practice is for you to identify the content of your entity type for + which the default rendering does not answer your need so that you can focus + on the specific method (from the list above) that needs to be modified. 
We + do not advise you to overwrite ``render_entity`` unless you want a + completely different layout. + """ + + __regid__ = 'primary' + title = _('primary') + show_attr_label = True + show_rel_label = True + rsection = None + display_ctrl = None + main_related_section = True + + def html_headers(self): + """return a list of html headers (eg something to be inserted between + and of the returned page + + by default primary views are indexed + """ + return [] + + def entity_call(self, entity, **kwargs): + entity.complete() + uicfg_reg = self._cw.vreg['uicfg'] + if self.rsection is None: + self.rsection = uicfg_reg.select('primaryview_section', + self._cw, entity=entity) + if self.display_ctrl is None: + self.display_ctrl = uicfg_reg.select('primaryview_display_ctrl', + self._cw, entity=entity) + self.render_entity(entity) + + def render_entity(self, entity): + self.render_entity_toolbox(entity) + self.render_entity_title(entity) + # entity's attributes and relations, excluding meta data + # if the entity isn't meta itself + if self.is_primary(): + boxes = self._prepare_side_boxes(entity) + else: + boxes = None + if boxes or hasattr(self, 'render_side_related'): + self.w(u'
          ') + + self.w(u'
          ') + self.content_navigation_components('navcontenttop') + self.render_entity_attributes(entity) + if self.main_related_section: + self.render_entity_relations(entity) + self.content_navigation_components('navcontentbottom') + self.w(u'
          ') + # side boxes + if boxes or hasattr(self, 'render_side_related'): + self.w(u'
          ') + self.w(u'
          ') + self.render_side_boxes(boxes) + self.w(u'
          ') + self.w(u'
          ') + + def content_navigation_components(self, context): + """This method is applicable only for entity type implementing the + interface `IPrevNext`. This interface is for entities which can be + linked to a previous and/or next entity. This method will render the + navigation links between entities of this type, either at the top or at + the bottom of the page given the context (navcontent{top|bottom}). + """ + self.w(u'
          ' % context) + for comp in self._cw.vreg['ctxcomponents'].poss_visible_objects( + self._cw, rset=self.cw_rset, view=self, context=context): + # XXX bw compat code + try: + comp.render(w=self.w, row=self.cw_row, view=self) + except TypeError: + comp.render(w=self.w) + self.w(u'
          ') + + def render_entity_title(self, entity): + """Renders the entity title, by default using entity's + :meth:`dc_title()` method. + """ + title = xml_escape(entity.dc_title()) + if title: + if self.is_primary(): + self.w(u'

<h1>%s</h1>' % title)
+            else:
+                atitle = self._cw._('follow this link for more information on this %s') % entity.dc_type()
+                self.w(u'<h4><a href="%s" title="%s">%s</a></h4>
          ' + % (entity.absolute_url(), atitle, title)) + + def render_entity_toolbox(self, entity): + self.content_navigation_components('ctxtoolbar') + + def render_entity_attributes(self, entity): + """Renders all attributes and relations in the 'attributes' section. + """ + display_attributes = [] + for rschema, _, role, dispctrl in self._section_def(entity, 'attributes'): + vid = dispctrl.get('vid', 'reledit') + if rschema.final or vid == 'reledit' or dispctrl.get('rtypevid'): + value = entity.view(vid, rtype=rschema.type, role=role, + initargs={'dispctrl': dispctrl}) + else: + rset = self._relation_rset(entity, rschema, role, dispctrl) + if rset: + value = self._cw.view(vid, rset) + else: + value = None + if value is not None and value != '': + display_attributes.append( (rschema, role, dispctrl, value) ) + if display_attributes: + self.w(u'') + for rschema, role, dispctrl, value in display_attributes: + label = self._rel_label(entity, rschema, role, dispctrl) + self.render_attribute(label, value, table=True) + self.w(u'
          ') + + def render_attribute(self, label, value, table=False): + self.field(label, value, tr=False, table=table) + + def render_entity_relations(self, entity): + """Renders all relations in the 'relations' section.""" + defaultlimit = self._cw.property_value('navigation.related-limit') + for rschema, tschemas, role, dispctrl in self._section_def(entity, 'relations'): + if rschema.final or dispctrl.get('rtypevid'): + vid = dispctrl.get('vid', 'reledit') + try: + rview = self._cw.vreg['views'].select( + vid, self._cw, rset=entity.cw_rset, row=entity.cw_row, + col=entity.cw_col, dispctrl=dispctrl, + rtype=rschema, role=role) + except NoSelectableObject: + continue + value = rview.render(row=entity.cw_row, col=entity.cw_col, + rtype=rschema.type, role=role) + else: + vid = dispctrl.get('vid', 'autolimited') + limit = dispctrl.get('limit', defaultlimit) if vid == 'autolimited' else None + if limit is not None: + limit += 1 # need one more so the view can check if there is more than the limit + rset = self._relation_rset(entity, rschema, role, dispctrl, limit=limit) + if not rset: + continue + try: + rview = self._cw.vreg['views'].select( + vid, self._cw, rset=rset, dispctrl=dispctrl) + except NoSelectableObject: + continue + value = rview.render() + label = self._rel_label(entity, rschema, role, dispctrl) + self.render_relation(label, value) + + def render_relation(self, label, value): + self.w(u'
<div class="section">')
+        if label:
+            self.w(u'<h4>%s</h4>' % label)
+        self.w(value)
+        self.w(u'</div>
          ') + + def render_side_boxes(self, boxes): + """Renders side boxes on the right side of the content. This will + generate a box for each relation in the 'sidebox' section, as well as + explicit box appobjects selectable in this context. + """ + for box in boxes: + try: + box.render(w=self.w, row=self.cw_row) + except TypeError: + box.render(w=self.w) + + def _prepare_side_boxes(self, entity): + sideboxes = [] + boxesreg = self._cw.vreg['ctxcomponents'] + defaultlimit = self._cw.property_value('navigation.related-limit') + for rschema, tschemas, role, dispctrl in self._section_def(entity, 'sideboxes'): + vid = dispctrl.get('vid', 'autolimited') + limit = defaultlimit if vid == 'autolimited' else None + rset = self._relation_rset(entity, rschema, role, dispctrl, limit=limit) + if not rset: + continue + label = self._rel_label(entity, rschema, role, dispctrl) + box = boxesreg.select('rsetbox', self._cw, rset=rset, + vid=vid, title=label, dispctrl=dispctrl, + context='incontext') + sideboxes.append(box) + sideboxes += boxesreg.poss_visible_objects( + self._cw, rset=self.cw_rset, view=self, + context='incontext') + # XXX since we've two sorted list, it may be worth using bisect + def get_order(x): + if 'order' in x.cw_property_defs: + return x.cw_propval('order') + # default to 9999 so view boxes occurs after component boxes + return x.cw_extra_kwargs.get('dispctrl', {}).get('order', 9999) + return sorted(sideboxes, key=get_order) + + def _section_def(self, entity, where): + rdefs = [] + eschema = entity.e_schema + for rschema, tschemas, role in eschema.relation_definitions(True): + if rschema in VIRTUAL_RTYPES: + continue + matchtschemas = [] + for tschema in tschemas: + section = self.rsection.etype_get(eschema, rschema, role, + tschema) + if section == where: + matchtschemas.append(tschema) + if matchtschemas: + dispctrl = self.display_ctrl.etype_get(eschema, rschema, role, '*') + rdefs.append( (rschema, matchtschemas, role, dispctrl) ) + return sorted(rdefs, key=lambda x: x[-1]['order']) + + def _relation_rset(self, entity, rschema, role, dispctrl, limit=None): + try: + rset = entity.related(rschema.type, role, limit=limit) + except Unauthorized: + return + if 'filter' in dispctrl: + rset = dispctrl['filter'](rset) + return rset + + def _rel_label(self, entity, rschema, role, dispctrl): + if rschema.final: + showlabel = dispctrl.get('showlabel', self.show_attr_label) + else: + showlabel = dispctrl.get('showlabel', self.show_rel_label) + if showlabel: + if dispctrl.get('label'): + label = self._cw._(dispctrl['label']) + else: + label = display_name(self._cw, rschema.type, role, + context=entity.cw_etype) + return label + return u'' + + +class RelatedView(EntityView): + """Display a rset, usually containing entities linked to another entity + being displayed. + + It will try to display nicely according to the number of items in the result + set. 
+ + XXX include me in the doc + """ + __regid__ = 'autolimited' + + def call(self, **kwargs): + if 'dispctrl' in self.cw_extra_kwargs: + if 'limit' in self.cw_extra_kwargs['dispctrl']: + limit = self.cw_extra_kwargs['dispctrl']['limit'] + else: + limit = self._cw.property_value('navigation.related-limit') + list_limit = self.cw_extra_kwargs['dispctrl'].get('use_list_limit', 5) + subvid = self.cw_extra_kwargs['dispctrl'].get('subvid', 'incontext') + else: + limit = list_limit = None + subvid = 'incontext' + if limit is None or self.cw_rset.rowcount <= limit: + if self.cw_rset.rowcount == 1: + self.wview(subvid, self.cw_rset, row=0) + elif list_limit is None or 1 < self.cw_rset.rowcount <= list_limit: + self.wview('csv', self.cw_rset, subvid=subvid) + else: + self.w(u'
<div>')
+                self.wview('simplelist', self.cw_rset, subvid=subvid)
+                self.w(u'</div>')
+        # else show links to display related entities
+        else:
+            rql = self.cw_rset.printable_rql()
+            rset = self.cw_rset.limit(limit) # remove extra entity
+            if list_limit is None:
+                self.wview('csv', rset, subvid=subvid)
+                self.w(u'[<a href="%s">%s</a>]' % (
+                    xml_escape(self._cw.build_url(rql=rql, vid=subvid)),
+                    self._cw._('see them all')))
+            else:
+                self.w(u'<div>')
+                self.wview('simplelist', rset, subvid=subvid)
+                self.w(u'[<a href="%s">%s</a>]' % (
+                    xml_escape(self._cw.build_url(rql=rql, vid=subvid)),
+                    self._cw._('see them all')))
+                self.w(u'</div>
          ') + + +class AttributeView(EntityView): + """:__regid__: *attribute* + + This view is generally used to disable the *reledit* feature. It works on + both relations and attributes. + """ + __regid__ = 'attribute' + __select__ = EntityView.__select__ & match_kwargs('rtype') + + def entity_call(self, entity, rtype, role='subject', **kwargs): + if self._cw.vreg.schema.rschema(rtype).final: + self.w(entity.printable_value(rtype)) + else: + dispctrl = uicfg.primaryview_display_ctrl.etype_get( + entity.e_schema, rtype, role, '*') + rset = entity.related(rtype, role) + if rset: + self.wview('autolimited', rset, initargs={'dispctrl': dispctrl}) + + +class URLAttributeView(EntityView): + """:__regid__: *urlattr* + + This view will wrap an attribute value (hence expect a string) into an '' + HTML tag to display a clickable link. + """ + __regid__ = 'urlattr' + __select__ = EntityView.__select__ & match_kwargs('rtype') + + def entity_call(self, entity, rtype, **kwargs): + url = entity.printable_value(rtype) + if url: + self.w(u'%s' % (url, url)) + + +class VerbatimAttributeView(EntityView): + """:__regid__: *verbatimattr* + + This view will wrap an attribute value into an '
<pre>' HTML tag to display
+    arbitrary text where EOL will be respected. It usually makes sense for
+    attributes whose value is a multi-line string where new lines matter.
          +    """
          +    __regid__ = 'verbatimattr'
          +    __select__ = EntityView.__select__ & match_kwargs('rtype')
          +
          +    def entity_call(self, entity, rtype, **kwargs):
          +        value = entity.printable_value(rtype)
          +        if value:
+            self.w(u'<pre>%s</pre>
          ' % value) + + + + + +class ToolbarLayout(component.Layout): + # XXX include me in the doc + __select__ = match_context('ctxtoolbar') + + def render(self, w): + if self.init_rendering(): + self.cw_extra_kwargs['view'].render_body(w) + + +## default primary ui configuration ########################################### + +_pvs = uicfg.primaryview_section +for rtype in META_RTYPES: + _pvs.tag_subject_of(('*', rtype, '*'), 'hidden') + _pvs.tag_object_of(('*', rtype, '*'), 'hidden') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/pyviews.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/pyviews.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,127 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Basic views for python values (eg without any result set) +""" +__docformat__ = "restructuredtext en" + +from six import text_type +from six.moves import range + +from cubicweb.view import View +from cubicweb.predicates import match_kwargs +from cubicweb.web.views import tableview + + +class PyValTableColRenderer(tableview.AbstractColumnRenderer): + """Default column renderer for :class:`PyValTableView`.""" + def bind(self, view, colid): + super(PyValTableColRenderer, self).bind(view, colid) + self.header = view.headers[colid] if view.headers else None + self.data = view.pyvalue + + def render_header(self, w): + if self.header: + w(self._cw._(self.header)) + else: + w(self.empty_cell_content) + + def render_cell(self, w, rownum): + w(text_type(self.data[rownum][self.colid])) + + +class PyValTableView(tableview.TableMixIn, View): + """This table view is designed to be used a list of list of unicode values + given as a mandatory `pyvalue` argument. Take care, content is NOT + xml-escaped. + + It's configured through the following selection arguments. + + If `headers` is specified, it is expected to be a list of headers to be + inserted as first row (in ). + + `header_column_idx` may be used to specify a column index or a set of column + indiced where values should be inserted inside tag instead of . + + `cssclass` is the CSS class used on the tag, and default to + 'listing' (so that the table will look similar to those generated by the + table view). 
+ """ + __regid__ = 'pyvaltable' + __select__ = match_kwargs('pyvalue') + default_column_renderer_class = PyValTableColRenderer + paginable = False # not supported + headers = None + cssclass = None + domid = None + + def __init__(self, req, pyvalue, headers=None, cssclass=None, + header_column_idx=None, **kwargs): + super(PyValTableView, self).__init__(req, **kwargs) + self.pyvalue = pyvalue + if headers is not None: + self.headers = headers + elif self.headers: # headers set on a class attribute, translate + self.headers = [self._cw._(header) for header in self.headers] + if cssclass is not None: + self.cssclass = cssclass + self.header_column_idx = header_column_idx + + @property + def layout_args(self): + args = {} + if self.cssclass: + args['cssclass'] = self.cssclass + if self.header_column_idx is not None: + args['header_column_idx'] = self.header_column_idx + return args + + # layout callbacks ######################################################### + + @property + def table_size(self): + """return the number of rows (header excluded) to be displayed""" + return len(self.pyvalue) + + @property + def has_headers(self): + return self.headers + + def build_column_renderers(self): + return [self.column_renderer(colid) + for colid in range(len(self.pyvalue[0]))] + + def facets_form(self, mainvar=None): + return None # not supported + + def table_actions(self): + return [] # not supported + + +class PyValListView(View): + """display a list of values into an html list. + + Take care, content is NOT xml-escaped. + """ + __regid__ = 'pyvallist' + __select__ = match_kwargs('pyvalue') + + def call(self, pyvalue): + self.w(u'
<ul>\n')
+        for line in pyvalue:
+            self.w(u'<li>%s</li>\n' % line)
+        self.w(u'</ul>
          \n') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/rdf.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/rdf.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,111 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""base xml and rss views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves import range + +from yams import xy + +from cubicweb.schema import VIRTUAL_RTYPES +from cubicweb.view import EntityView +from cubicweb.web.views.xmlrss import SERIALIZERS + +try: + import rdflib +except ImportError: + rdflib = None + +if rdflib is not None: + RDF = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#') + CW = rdflib.Namespace('http://ns.cubicweb.org/cubicweb/0.0/') + from rdflib import Literal, URIRef, Namespace + + def urijoin(item): + base, ext = item + return URIRef(Namespace(base)[ext]) + + SKIP_RTYPES = VIRTUAL_RTYPES | set(['cwuri', 'is', 'is_instance_of']) + + class RDFView(EntityView): + """rdf view for entities""" + __regid__ = 'rdf' + title = _('rdf export') + templatable = False + binary = True + format = 'xml' + content_type = 'text/xml' # +rdf + + def call(self): + graph = rdflib.Graph() + graph.bind('cw', CW) + for prefix, xmlns in xy.XY.prefixes.items(): + graph.bind(prefix, rdflib.Namespace(xmlns)) + for i in range(self.cw_rset.rowcount): + entity = self.cw_rset.complete_entity(i, 0) + self.entity2graph(graph, entity) + self.w(graph.serialize(format=self.format)) + + def entity_call(self, entity): + self.call() + + def entity2graph(self, graph, entity): + cwuri = URIRef(entity.cwuri) + add = graph.add + add( (cwuri, RDF.type, CW[entity.e_schema.type]) ) + try: + for item in xy.xeq(entity.e_schema.type): + add( (cwuri, RDF.type, urijoin(item)) ) + except xy.UnsupportedVocabulary: + pass + for rschema, eschemas, role in entity.e_schema.relation_definitions('relation'): + rtype = rschema.type + if rtype in SKIP_RTYPES or rtype.endswith('_permission'): + continue + for eschema in eschemas: + if eschema.final: + try: + value = entity.cw_attr_cache[rtype] + except KeyError: + continue # assuming rtype is Bytes + if value is not None: + add( (cwuri, CW[rtype], Literal(value)) ) + try: + for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)): + add( (cwuri, urijoin(item[1]), Literal(value)) ) + except xy.UnsupportedVocabulary: + pass + else: + for related in entity.related(rtype, role, entities=True, safe=True): + if role == 'subject': + add( (cwuri, CW[rtype], URIRef(related.cwuri)) ) + try: + for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)): + add( (cwuri, urijoin(item[1]), URIRef(related.cwuri)) ) + except xy.UnsupportedVocabulary: + pass + else: + add( (URIRef(related.cwuri), CW[rtype], cwuri) ) + + + class RDFN3View(RDFView): + __regid__ 
= 'n3rdf' + format = 'n3' + content_type = 'text/n3' diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/reledit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/reledit.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,404 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""edit entity attributes/relations from any view, without going to the entity +form +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import copy +from warnings import warn + +from logilab.mtconverter import xml_escape +from logilab.common.deprecation import deprecated, class_renamed +from logilab.common.decorators import cached + +from cubicweb import neg_role +from cubicweb.schema import display_name +from cubicweb.utils import json, json_dumps +from cubicweb.predicates import non_final_entity, match_kwargs +from cubicweb.view import EntityView +from cubicweb.web import stdmsgs +from cubicweb.web.views import uicfg +from cubicweb.web.form import FieldNotFound +from cubicweb.web.formwidgets import Button, SubmitButton +from cubicweb.web.views.ajaxcontroller import ajaxfunc + +class _DummyForm(object): + __slots__ = ('event_args',) + def form_render(self, **_args): + return u'' + def render(self, *_args, **_kwargs): + return u'' + def append_field(self, *args): + pass + def add_hidden(self, *args): + pass + +class AutoClickAndEditFormView(EntityView): + __regid__ = 'reledit' + __select__ = non_final_entity() & match_kwargs('rtype') + + # ui side continuations + _onclick = (u"cw.reledit.loadInlineEditionForm('%(formid)s', %(eid)s, '%(rtype)s', '%(role)s', " + "'%(divid)s', %(reload)s, '%(vid)s', '%(action)s');") + _cancelclick = "cw.reledit.cleanupAfterCancel('%s')" + + # ui side actions/buttons + _addzone = u'%(msg)s' + _addmsg = _('click to add a value') + _addlogo = 'plus.png' + _deletezone = u'%(msg)s' + _deletemsg = _('click to delete this value') + _deletelogo = 'cancel.png' + _editzone = u'%(msg)s' + _editzonemsg = _('click to edit this field') + _editlogo = 'pen_icon.png' + + # renderer + _form_renderer_id = 'base' + + def entity_call(self, entity, rtype=None, role='subject', + reload=False, # controls reloading the whole page after change + # boolean, eid (to redirect), or + # function taking the subject entity & returning a boolean or an eid + rvid=None, # vid to be applied to other side of rtype (non final relations only) + default_value=None, + formid='base', + action=None + ): + """display field to edit entity's `rtype` relation on click""" + assert rtype + self._cw.add_css('cubicweb.form.css') + self._cw.add_js(('cubicweb.reledit.js', 'cubicweb.edition.js', 'cubicweb.ajax.js')) + self.entity = entity + rschema = self._cw.vreg.schema[rtype] + rctrl = self._cw.vreg['uicfg'].select('reledit', self._cw, entity=entity) + 
self._rules = rctrl.etype_get(self.entity.e_schema.type, rschema.type, role, '*') + reload = self._compute_reload(rschema, role, reload) + divid = self._build_divid(rtype, role, self.entity.eid) + if rschema.final: + self._handle_attribute(rschema, role, divid, reload, action) + else: + if self._is_composite(): + self._handle_composite(rschema, role, divid, reload, formid, action) + else: + self._handle_relation(rschema, role, divid, reload, formid, action) + + def _handle_attribute(self, rschema, role, divid, reload, action): + rvid = self._rules.get('rvid', None) + if rvid is not None: + value = self._cw.view(rvid, entity=self.entity, + rtype=rschema.type, role=role) + else: + value = self.entity.printable_value(rschema.type) + if not self._should_edit_attribute(rschema): + self.w(value) + return + form, renderer = self._build_form(self.entity, rschema, role, divid, + 'base', reload, action) + value = value or self._compute_default_value(rschema, role) + self.view_form(divid, value, form, renderer) + + def _compute_formid_value(self, rschema, role, rvid, formid): + related_rset = self.entity.related(rschema.type, role) + if related_rset: + value = self._cw.view(rvid, related_rset) + else: + value = self._compute_default_value(rschema, role) + if not self._should_edit_relation(rschema, role): + return None, value + return formid, value + + def _handle_relation(self, rschema, role, divid, reload, formid, action): + rvid = self._rules.get('rvid', 'autolimited') + formid, value = self._compute_formid_value(rschema, role, rvid, formid) + if formid is None: + return self.w(value) + form, renderer = self._build_form(self.entity, rschema, role, divid, formid, + reload, action, dict(vid=rvid)) + self.view_form(divid, value, form, renderer) + + def _handle_composite(self, rschema, role, divid, reload, formid, action): + # this is for attribute-like composites (1 target type, 1 related entity at most, for now) + entity = self.entity + related_rset = entity.related(rschema.type, role) + add_related = self._may_add_related(related_rset, rschema, role) + edit_related = self._may_edit_related_entity(related_rset, rschema, role) + delete_related = edit_related and self._may_delete_related(related_rset, rschema, role) + rvid = self._rules.get('rvid', 'autolimited') + formid, value = self._compute_formid_value(rschema, role, rvid, formid) + if formid is None or not (edit_related or add_related): + # till we learn to handle cases where not (edit_related or add_related) + self.w(value) + return + form, renderer = self._build_form(entity, rschema, role, divid, formid, + reload, action, dict(vid=rvid)) + self.view_form(divid, value, form, renderer, + edit_related, add_related, delete_related) + + @cached + def _compute_ttypes(self, rschema, role): + dual_role = neg_role(role) + return getattr(rschema, '%ss' % dual_role)() + + def _compute_reload(self, rschema, role, reload): + ctrl_reload = self._rules.get('reload', reload) + if callable(ctrl_reload): + ctrl_reload = ctrl_reload(self.entity) + if isinstance(ctrl_reload, int) and ctrl_reload > 1: # not True/False + ctrl_reload = self._cw.build_url(ctrl_reload) + return ctrl_reload + + def _compute_default_value(self, rschema, role): + default = self._rules.get('novalue_label') + if default is None: + if self._rules.get('novalue_include_rtype'): + default = self._cw._('<%s not specified>') % display_name( + self._cw, rschema.type, role) + else: + default = self._cw._('') + else: + default = self._cw._(default) + return xml_escape(default) + + def 
_is_composite(self): + return self._rules.get('edit_target') == 'related' + + def _may_add_related(self, related_rset, rschema, role): + """ ok for attribute-like composite entities """ + ttypes = self._compute_ttypes(rschema, role) + if len(ttypes) > 1: # many etypes: learn how to do it + return False + rdef = rschema.role_rdef(self.entity.e_schema, ttypes[0], role) + card = rdef.role_cardinality(role) + if related_rset or card not in '?1': + return False + if role == 'subject': + kwargs = {'fromeid': self.entity.eid} + else: + kwargs = {'toeid': self.entity.eid} + return rdef.has_perm(self._cw, 'add', **kwargs) + + def _may_edit_related_entity(self, related_rset, rschema, role): + """ controls the edition of the related entity """ + ttypes = self._compute_ttypes(rschema, role) + if len(ttypes) > 1 or len(related_rset.rows) != 1: + return False + if self.entity.e_schema.rdef(rschema, role).role_cardinality(role) not in '?1': + return False + return related_rset.get_entity(0, 0).cw_has_perm('update') + + def _may_delete_related(self, related_rset, rschema, role): + # we assume may_edit_related, only 1 related entity + if not related_rset: + return False + rentity = related_rset.get_entity(0, 0) + entity = self.entity + if role == 'subject': + kwargs = {'fromeid': entity.eid, 'toeid': rentity.eid} + cardinality = rschema.rdefs[(entity.cw_etype, rentity.cw_etype)].cardinality[0] + else: + kwargs = {'fromeid': rentity.eid, 'toeid': entity.eid} + cardinality = rschema.rdefs[(rentity.cw_etype, entity.cw_etype)].cardinality[1] + if cardinality in '1+': + return False + # NOTE: should be sufficient given a well built schema/security + return rschema.has_perm(self._cw, 'delete', **kwargs) + + def _build_zone(self, zonedef, msg, logo): + return zonedef % {'msg': xml_escape(self._cw._(msg)), + 'logo': xml_escape(self._cw.data_url(logo))} + + def _build_edit_zone(self): + return self._build_zone(self._editzone, self._editzonemsg, self._editlogo) + + def _build_delete_zone(self): + return self._build_zone(self._deletezone, self._deletemsg, self._deletelogo) + + def _build_add_zone(self): + return self._build_zone(self._addzone, self._addmsg, self._addlogo) + + def _build_divid(self, rtype, role, entity_eid): + """ builds an id for the root div of a reledit widget """ + return '%s-%s-%s' % (rtype, role, entity_eid) + + def _build_args(self, entity, rtype, role, formid, reload, action, + extradata=None): + divid = self._build_divid(rtype, role, entity.eid) + event_args = {'divid' : divid, 'eid' : entity.eid, 'rtype' : rtype, 'formid': formid, + 'reload' : json_dumps(reload), 'action': action, + 'role' : role, 'vid' : u''} + if extradata: + event_args.update(extradata) + return event_args + + def _prepare_form(self, entity, rschema, role, action): + assert action in ('edit_rtype', 'edit_related', 'add', 'delete'), action + if action == 'edit_rtype': + return False, entity + label = True + if action in ('edit_related', 'delete'): + edit_entity = entity.related(rschema, role).get_entity(0, 0) + elif action == 'add': + add_etype = self._compute_ttypes(rschema, role)[0] + _new_entity = self._cw.vreg['etypes'].etype_class(add_etype)(self._cw) + _new_entity.eid = next(self._cw.varmaker) + edit_entity = _new_entity + # XXX see forms.py ~ 276 and entities.linked_to method + # is there another way? 
+ self._cw.form['__linkto'] = '%s:%s:%s' % (rschema, entity.eid, neg_role(role)) + assert edit_entity + return label, edit_entity + + def _build_renderer(self, related_entity, display_label): + return self._cw.vreg['formrenderers'].select( + self._form_renderer_id, self._cw, entity=related_entity, + display_label=display_label, + table_class='attributeForm' if display_label else '', + display_help=False, button_bar_class='buttonbar', + display_progress_div=False) + + def _build_form(self, entity, rschema, role, divid, formid, reload, action, + extradata=None, **formargs): + rtype = rschema.type + event_args = self._build_args(entity, rtype, role, formid, reload, action, extradata) + if not action: + form = _DummyForm() + form.event_args = event_args + return form, None + label, edit_entity = self._prepare_form(entity, rschema, role, action) + cancelclick = self._cancelclick % divid + form = self._cw.vreg['forms'].select( + formid, self._cw, rset=edit_entity.as_rset(), entity=edit_entity, + domid='%s-form' % divid, formtype='inlined', + action=self._cw.build_url('validateform', __onsuccess='window.parent.cw.reledit.onSuccess'), + cwtarget='eformframe', cssclass='releditForm', + **formargs) + # pass reledit arguments + for pname, pvalue in event_args.items(): + form.add_hidden('__reledit|' + pname, pvalue) + # handle buttons + if form.form_buttons: # edition, delete + form_buttons = [] + for button in form.form_buttons: + if not button.label.endswith('apply'): + if button.label.endswith('cancel'): + button = copy.deepcopy(button) + button.cwaction = None + button.onclick = cancelclick + form_buttons.append(button) + form.form_buttons = form_buttons + else: # base + form.form_buttons = [SubmitButton(), + Button(stdmsgs.BUTTON_CANCEL, onclick=cancelclick)] + form.event_args = event_args + if formid == 'base': + field = form.field_by_name(rtype, role, entity.e_schema) + form.append_field(field) + return form, self._build_renderer(edit_entity, label) + + def _should_edit_attribute(self, rschema): + entity = self.entity + rdef = entity.e_schema.rdef(rschema) + # check permissions + if not entity.cw_has_perm('update'): + return False + rdef = entity.e_schema.rdef(rschema) + return rdef.has_perm(self._cw, 'update', eid=entity.eid) + + def _should_edit_relation(self, rschema, role): + eeid = self.entity.eid + perm_args = {'fromeid': eeid} if role == 'subject' else {'toeid': eeid} + return rschema.has_perm(self._cw, 'add', **perm_args) + + def _open_form_wrapper(self, divid, value, form, renderer, + _edit_related, _add_related, _delete_related): + w = self.w + w(u'
          ' % + {'id': divid, 'css': 'releditField', + 'out': "jQuery('#%s').addClass('invisible')" % divid, + 'over': "jQuery('#%s').removeClass('invisible')" % divid}) + w(u'
          ' % divid) + w(value) + w(u'
          ') + form.render(w=w, renderer=renderer) + w(u'') + self.w(u'
          ') + + def view_form(self, divid, value, form=None, renderer=None, + edit_related=False, add_related=False, delete_related=False): + self._open_form_wrapper(divid, value, form, renderer, + edit_related, add_related, delete_related) + args = form.event_args.copy() + self._edit_action(divid, args, edit_related, add_related, delete_related) + self._add_action(divid, args, edit_related, add_related, delete_related) + self._del_action(divid, args, edit_related, add_related, delete_related) + self._close_form_wrapper() + + +ClickAndEditFormView = class_renamed('ClickAndEditFormView', AutoClickAndEditFormView) + + +@ajaxfunc(output_type='xhtml') +def reledit_form(self): + req = self._cw + args = dict((x, req.form[x]) + for x in ('formid', 'rtype', 'role', 'reload', 'action')) + rset = req.eid_rset(int(self._cw.form['eid'])) + try: + args['reload'] = json.loads(args['reload']) + except ValueError: # not true/false, an absolute url + assert args['reload'].startswith('http') + view = req.vreg['views'].select('reledit', req, rset=rset, rtype=args['rtype']) + return self._call_view(view, **args) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/schema.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,712 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
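The reledit handlers above are driven entirely by the per-relation rules fetched into `self._rules` (the `rvid`, `novalue_label`, `novalue_include_rtype`, `reload` and `edit_target` keys), which normally come from the `uicfg.reledit_ctrl` relation tag. A minimal configuration sketch, assuming a hypothetical `Ticket` entity type with a `description` attribute and a `concerns` relation (neither is part of this changeset):

    from cubicweb.web.views import uicfg

    _rctrl = uicfg.reledit_ctrl
    # placeholder displayed while the attribute has no value yet
    _rctrl.tag_attribute(('Ticket', 'description'),
                         {'novalue_label': 'no description yet'})
    # edit the related entity itself instead of the relation, and reload
    # the page once the inlined form has been validated
    _rctrl.tag_subject_of(('Ticket', 'concerns', '*'),
                          {'edit_target': 'related', 'reload': True})

With `edit_target` set to 'related', `_is_composite()` returns True and the widget goes through `_handle_composite()` rather than `_handle_relation()`.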
+"""Specific views for schema related entities""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from itertools import cycle + +import tempfile +import os, os.path as osp +import codecs + +from six import text_type + +from logilab.common.graph import GraphGenerator, DotBackend +from logilab.common.ureports import Section, Table +from logilab.common.registry import yes +from logilab.mtconverter import xml_escape +from yams import BASE_TYPES, schema2dot as s2d +from yams.buildobjs import DEFAULT_ATTRPERMS + +from cubicweb.predicates import (is_instance, match_user_groups, match_kwargs, + has_related_entities, authenticated_user) +from cubicweb.schema import (META_RTYPES, SCHEMA_TYPES, SYSTEM_RTYPES, + WORKFLOW_TYPES, INTERNAL_TYPES) +from cubicweb.utils import make_uid +from cubicweb.view import EntityView, StartupView +from cubicweb import tags, uilib +from cubicweb.web import action, facet, schemaviewer +from cubicweb.web.views import uicfg, primary, baseviews, tabs, tableview, ibreadcrumbs + +ALWAYS_SKIP_TYPES = BASE_TYPES | SCHEMA_TYPES +SKIP_TYPES = (ALWAYS_SKIP_TYPES | META_RTYPES | SYSTEM_RTYPES | WORKFLOW_TYPES + | INTERNAL_TYPES) +SKIP_TYPES.update(set(('CWUser', 'CWGroup', 'EmailAddress', 'Bookmark'))) + +def skip_types(req): + if int(req.form.get('skipmeta', True)): + return SKIP_TYPES + return ALWAYS_SKIP_TYPES + +_pvs = uicfg.primaryview_section +_pvdc = uicfg.primaryview_display_ctrl + +for _action in ('read', 'add', 'update', 'delete'): + _pvs.tag_subject_of(('*', '%s_permission' % _action, '*'), 'hidden') + _pvs.tag_object_of(('*', '%s_permission' % _action, '*'), 'hidden') + +for _etype in ('CWEType', 'CWRType', 'CWAttribute', 'CWRelation'): + _pvdc.tag_attribute((_etype, 'description'), {'showlabel': False}) + +_pvs.tag_attribute(('CWEType', 'name'), 'hidden') +_pvs.tag_attribute(('CWEType', 'final'), 'hidden') +_pvs.tag_object_of(('*', 'workflow_of', 'CWEType'), 'hidden') +_pvs.tag_subject_of(('CWEType', 'default_workflow', '*'), 'hidden') +_pvs.tag_object_of(('*', 'specializes', 'CWEType'), 'hidden') +_pvs.tag_subject_of(('CWEType', 'specializes', '*'), 'hidden') +_pvs.tag_object_of(('*', 'from_entity', 'CWEType'), 'hidden') +_pvs.tag_object_of(('*', 'to_entity', 'CWEType'), 'hidden') + +_pvs.tag_attribute(('CWRType', 'name'), 'hidden') +_pvs.tag_attribute(('CWRType', 'final'), 'hidden') +_pvs.tag_object_of(('*', 'relation_type', 'CWRType'), 'hidden') + +_pvs.tag_subject_of(('CWAttribute', 'constrained_by', '*'), 'hidden') +_pvs.tag_subject_of(('CWRelation', 'constrained_by', '*'), 'hidden') + + +class SecurityViewMixIn(object): + """mixin providing methods to display security information for a entity, + relation or relation definition schema + """ + cssclass = "listing schemaInfo" + + def permissions_table(self, erschema, permissions=None): + self._cw.add_css('cubicweb.acl.css') + w = self.w + _ = self._cw._ + w(u'
          ' % self.cssclass) + w(u'' % ( + _("permission"), _('granted to groups'), _('rql expressions'))) + for action in erschema.ACTIONS: + w(u'\n') + w(u'
          %s%s%s
%s' % _(action)) + if permissions is None: + groups = erschema.get_groups(action) + rqlexprs = sorted(e.expression for e in erschema.get_rqlexprs(action)) + else: + groups = permissions[action][0] + rqlexprs = permissions[action][1] + # XXX get group entity and call its incontext view + groups = [u'%s' % ( + group, self._cw.build_url('cwgroup/%s' % group), label) + for label, group in sorted((_(g), g) for g in groups)] + w(u'
          '.join(groups)) + w(u'
          ') + w(u'
          '.join(rqlexprs)) + w(u'
') + + def grouped_permissions_table(self, rschema): + # group relation definitions with identical permissions + perms = {} + for rdef in rschema.rdefs.values(): + rdef_perms = [] + for action in rdef.ACTIONS: + groups = sorted(rdef.get_groups(action)) + exprs = sorted(e.expression for e in rdef.get_rqlexprs(action)) + rdef_perms.append( (action, (tuple(groups), tuple(exprs))) ) + rdef_perms = tuple(rdef_perms) + if rdef_perms in perms: + perms[rdef_perms].append( (rdef.subject, rdef.object) ) + else: + perms[rdef_perms] = [(rdef.subject, rdef.object)] + # lay out permissions in a table for each group of relation + # definitions + w = self.w + _ = self._cw._ + w(u'
          ') + tmpl = u'%s %s %s' + for perm, rdefs in perms.items(): + w(u'
          %s
          ' % u', '.join( + tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs)) + # accessing rdef from previous loop by design: only used to get + # ACTIONS + self.permissions_table(rdef, dict(perm)) + w(u'
          ') + + +# global schema view ########################################################### + +class SchemaView(tabs.TabsMixin, StartupView): + """display schema information (graphically, listing tables...) in tabs""" + __regid__ = 'schema' + title = _('data model schema') + tabs = [_('schema-diagram'), _('schema-entity-types'), + _('schema-relation-types')] + default_tab = 'schema-diagram' + + def call(self): + self.w(u'

          %s

          ' % self._cw._(self.title)) + self.render_tabs(self.tabs, self.default_tab) + + +class SchemaImageTab(StartupView): + __regid__ = 'schema-diagram' + + def call(self): + _ = self._cw._ + self.w(self._cw._( + u'
          This schema of the data model excludes the ' + 'meta-data, but you can also display a complete ' + 'schema with meta-data.
          ') + % xml_escape(self._cw.build_url('view', vid='schemagraph', skipmeta=0))) + self.w(u'' % + (self._cw.build_url('view', vid='owl'), + self._cw._(u'Download schema as OWL'))) + self.wview('schemagraph') + +class SchemaETypeTab(StartupView): + __regid__ = 'schema-entity-types' + + def call(self): + self.wview('table', self._cw.execute( + 'Any X ORDERBY N WHERE X is CWEType, X name N, X final FALSE')) + + +class SchemaRTypeTab(StartupView): + __regid__ = 'schema-relation-types' + + def call(self): + self.wview('table', self._cw.execute( + 'Any X ORDERBY N WHERE X is CWRType, X name N, X final FALSE')) + +# CWEType ###################################################################### + +# register msgid generated in entity relations tables +_('i18ncard_1'), _('i18ncard_?'), _('i18ncard_+'), _('i18ncard_*') + +class CWETypePrimaryView(tabs.TabbedPrimaryView): + __select__ = is_instance('CWEType') + tabs = [_('cwetype-description'), _('cwetype-box'), _('cwetype-workflow'), + _('cwetype-views'), _('cwetype-permissions')] + default_tab = 'cwetype-description' + + +class CWETypeDescriptionTab(tabs.PrimaryTab): + __regid__ = 'cwetype-description' + __select__ = tabs.PrimaryTab.__select__ & is_instance('CWEType') + + def render_entity_attributes(self, entity): + super(CWETypeDescriptionTab, self).render_entity_attributes(entity) + _ = self._cw._ + # inheritance + if entity.specializes: + self.w(u'
          %s' % _('Parent class:')) + self.wview('csv', entity.related('specializes', 'subject')) + self.w(u'
          ') + if entity.reverse_specializes: + self.w(u'
          %s' % _('Sub-classes:')) + self.wview('csv', entity.related('specializes', 'object')) + self.w(u'
          ') + # entity schema image + self.wview('schemagraph', etype=entity.name) + # entity schema attributes + self.w(u'

          %s

          ' % _('CWAttribute_plural')) + rset = self._cw.execute( + 'Any A,ON,D,C,A,DE,A, IDX,FTI,I18N,R,O,RN,S ORDERBY AA ' + 'WHERE A is CWAttribute, A from_entity S, S eid %(x)s, ' + 'A ordernum AA, A defaultval D, A description DE, A cardinality C, ' + 'A fulltextindexed FTI, A internationalizable I18N, A indexed IDX, ' + 'A relation_type R, R name RN, A to_entity O, O name ON', + {'x': entity.eid}) + self.wview('table', rset, 'null', + cellvids={0: 'rdef-name-cell', + 2: 'etype-attr-defaultval-cell', + 3: 'etype-attr-cardinality-cell', + 4: 'rdef-constraints-cell', + 6: 'rdef-options-cell'}, + headers=(_(u'name'), _(u'type'), + _(u'default value'), _(u'required'), + _(u'constraints'), _(u'description'), _('options'))) + # entity schema relations + self.w(u'

          %s

          ' % _('CWRelation_plural')) + cellvids = {0: 'rdef-name-cell', + 2: 'etype-rel-cardinality-cell', + 3: 'rdef-constraints-cell', + 4: 'rdef-options-cell'} + headers= [_(u'name'), _(u'object type'), _(u'cardinality'), + _(u'constraints'), _(u'options')] + rset = self._cw.execute( + 'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN ' + 'WHERE A is CWRelation, A from_entity S, S eid %(x)s, ' + 'A composite K, A cardinality C, ' + 'A relation_type R, R name RN, A to_entity TT, TT name TTN', + {'x': entity.eid}) + if rset: + self.w(u'
          %s %s
          ' % (entity.name, _('is subject of:'))) + self.wview('table', rset, cellvids=cellvids, headers=headers) + rset = self._cw.execute( + 'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN ' + 'WHERE A is CWRelation, A to_entity O, O eid %(x)s, ' + 'A composite K, A cardinality C, ' + 'A relation_type R, R name RN, A from_entity TT, TT name TTN', + {'x': entity.eid}) + if rset: + cellvids[0] = 'rdef-object-name-cell' + headers[1] = _(u'subject type') + self.w(u'
          %s %s
          ' % (entity.name, _('is object of:'))) + self.wview('table', rset, cellvids=cellvids, headers=headers) + + +class CWETypeAttributeCardinalityCell(baseviews.FinalView): + __regid__ = 'etype-attr-cardinality-cell' + + def cell_call(self, row, col): + if self.cw_rset.rows[row][col][0] == '1': + self.w(self._cw._(u'yes')) + else: + self.w(self._cw._(u'no')) + + +class CWETypeAttributeDefaultValCell(baseviews.FinalView): + __regid__ = 'etype-attr-defaultval-cell' + + def cell_call(self, row, col): + defaultval = self.cw_rset.rows[row][col] + if defaultval is not None: + self.w(text_type(self.cw_rset.rows[row][col].unzpickle())) + +class CWETypeRelationCardinalityCell(baseviews.FinalView): + __regid__ = 'etype-rel-cardinality-cell' + + def cell_call(self, row, col): + self.w(self._cw._(self.cw_rset.rows[row][col])) + + +class CWETypeBoxTab(EntityView): + __regid__ = 'cwetype-box' + __select__ = is_instance('CWEType') + + def cell_call(self, row, col): + viewer = schemaviewer.SchemaViewer(self._cw) + entity = self.cw_rset.get_entity(row, col) + eschema = self._cw.vreg.schema.eschema(entity.name) + layout = viewer.visit_entityschema(eschema) + self.w(uilib.ureport_as_html(layout)) + self.w(u'
          ') + + +class CWETypePermTab(SecurityViewMixIn, EntityView): + __regid__ = 'cwetype-permissions' + __select__ = is_instance('CWEType') & authenticated_user() + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + eschema = self._cw.vreg.schema.eschema(entity.name) + self.w(u'

          %s

          ' % self._cw._('This entity type permissions:')) + self.permissions_table(eschema) + self.w(u'
          ') + self.w(u'

          %s

          ' % self._cw._('Attributes permissions:')) + for attr, etype in eschema.attribute_definitions(): + if attr not in META_RTYPES: + rdef = eschema.rdef(attr) + attrtype = str(rdef.rtype) + self.w(u'

          %s (%s)

          ' + % (attrtype, self._cw._(attrtype))) + self.permissions_table(rdef) + self.w(u'
          ') + + +class CWETypeWorkflowTab(EntityView): + __regid__ = 'cwetype-workflow' + __select__ = (is_instance('CWEType') + & has_related_entities('workflow_of', 'object')) + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + if entity.default_workflow: + wf = entity.default_workflow[0] + if len(entity.reverse_workflow_of) > 1: + self.w(u'

          %s (%s)

          ' + % (wf.name, self._cw._('default_workflow'))) + self.display_workflow(wf) + defaultwfeid = wf.eid + else: + self.w(u'
          %s
          ' + % self._cw._('There is no default workflow')) + defaultwfeid = None + for altwf in entity.reverse_workflow_of: + if altwf.eid == defaultwfeid: + continue + self.w(u'

          %s

          ' % altwf.name) + self.display_workflow(altwf) + + def display_workflow(self, wf): + self.w(wf.view('wfgraph')) + self.w('%s' % ( + wf.absolute_url(), self._cw._('more info about this workflow'))) + + +class CWETypeViewsTab(EntityView): + """possible views for this entity type""" + __regid__ = 'cwetype-views' + __select__ = EntityView.__select__ & is_instance('CWEType') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + _ = self._cw._ + self.w('
          %s
          ' % _('Non exhaustive list of views that may ' + 'apply to entities of this type')) + views = [(view.content_type, view.__regid__, _(view.title)) + for view in self.possible_views(entity.name)] + self.wview('pyvaltable', pyvalue=sorted(views), + headers=(_(u'content type'), _(u'view identifier'), + _(u'view title'))) + + def possible_views(self, etype): + rset = self._cw.etype_rset(etype) + return [v for v in self._cw.vreg['views'].possible_views(self._cw, rset) + if v.category != 'startupview'] + + +class CWETypeOneLineView(baseviews.OneLineView): + __select__ = is_instance('CWEType') + + def cell_call(self, row, col, **kwargs): + entity = self.cw_rset.get_entity(row, col) + if entity.final: + self.w(u'') + super(CWETypeOneLineView, self).cell_call(row, col, **kwargs) + if entity.final: + self.w(u'') + + +# CWRType ###################################################################### + +class CWRTypePrimaryView(tabs.TabbedPrimaryView): + __select__ = is_instance('CWRType') + tabs = [_('cwrtype-description'), _('cwrtype-permissions')] + default_tab = 'cwrtype-description' + + +class CWRTypeDescriptionTab(tabs.PrimaryTab): + __regid__ = 'cwrtype-description' + __select__ = is_instance('CWRType') + + def render_entity_attributes(self, entity): + super(CWRTypeDescriptionTab, self).render_entity_attributes(entity) + _ = self._cw._ + if not entity.final: + self.wview('schemagraph', rtype=entity.name) + rset = self._cw.execute('Any R,C,R,R, RT WHERE ' + 'R relation_type RT, RT eid %(x)s, ' + 'R cardinality C', {'x': entity.eid}) + self.wview('table', rset, 'null', + headers=(_(u'relation'), _(u'cardinality'), _(u'constraints'), + _(u'options')), + cellvids={2: 'rdef-constraints-cell', + 3: 'rdef-options-cell'}) + + +class CWRTypePermTab(SecurityViewMixIn, EntityView): + __regid__ = 'cwrtype-permissions' + __select__ = is_instance('CWRType') & authenticated_user() + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rschema = self._cw.vreg.schema.rschema(entity.name) + self.grouped_permissions_table(rschema) + + +# CWAttribute / CWRelation ##################################################### + +class RDEFPrimaryView(tabs.TabbedPrimaryView): + __select__ = is_instance('CWRelation', 'CWAttribute') + tabs = [_('rdef-description'), _('rdef-permissions')] + default_tab = 'rdef-description' + + +class RDEFDescriptionTab(tabs.PrimaryTab): + __regid__ = 'rdef-description' + __select__ = is_instance('CWRelation', 'CWAttribute') + + def render_entity_attributes(self, entity): + super(RDEFDescriptionTab, self).render_entity_attributes(entity) + rdef = entity.yams_schema() + if rdef.constraints: + self.w(u'

          %s

          ' % self._cw._('constrained_by')) + self.w(entity.view('rdef-constraints-cell')) + + +class RDEFPermTab(SecurityViewMixIn, EntityView): + __regid__ = 'rdef-permissions' + __select__ = is_instance('CWRelation', 'CWAttribute') & authenticated_user() + + def cell_call(self, row, col): + self.permissions_table(self.cw_rset.get_entity(row, col).yams_schema()) + + +class RDEFNameView(tableview.CellView): + """display relation name and its translation only in a cell view, link to + relation definition's primary view (for use in entity type relations table + for instance) + """ + __regid__ = 'rdef-name-cell' + __select__ = is_instance('CWRelation', 'CWAttribute') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rtype = entity.relation_type[0].name + # XXX use context entity + pgettext + self.w(u'%s (%s)' % ( + entity.absolute_url(), rtype, self._cw._(rtype))) + +class RDEFObjectNameView(tableview.CellView): + """same as RDEFNameView but when the context is the object entity + """ + __regid__ = 'rdef-object-name-cell' + __select__ = is_instance('CWRelation', 'CWAttribute') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rtype = entity.relation_type[0].name + # XXX use context entity + pgettext + self.w(u'%s (%s)' % ( + entity.absolute_url(), rtype, self._cw.__(rtype + '_object'))) + +class RDEFConstraintsCell(EntityView): + __regid__ = 'rdef-constraints-cell' + __select__ = is_instance('CWAttribute', 'CWRelation') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rschema = self._cw.vreg.schema.rschema(entity.rtype.name) + rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)] + constraints = [xml_escape(text_type(c)) for c in getattr(rdef, 'constraints')] + self.w(u'
          '.join(constraints)) + +class CWAttributeOptionsCell(EntityView): + __regid__ = 'rdef-options-cell' + __select__ = is_instance('CWAttribute') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + options = [] + if entity.indexed: + options.append(self._cw._('indexed')) + if entity.fulltextindexed: + options.append(self._cw._('fulltextindexed')) + if entity.internationalizable: + options.append(self._cw._('internationalizable')) + self.w(u','.join(options)) + +class CWRelationOptionsCell(EntityView): + __regid__ = 'rdef-options-cell' + __select__ = is_instance('CWRelation',) + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rtype = entity.rtype + options = [] + if rtype.symmetric: + options.append(self._cw._('symmetric')) + if rtype.inlined: + options.append(self._cw._('inlined')) + if rtype.fulltext_container: + options.append('%s=%s' % (self._cw._('fulltext_container'), + self._cw._(rtype.fulltext_container))) + if entity.composite: + options.append('%s=%s' % (self._cw._('composite'), + self._cw._(entity.composite))) + self.w(u','.join(options)) + + +# schema images ############################################################### + +class RestrictedSchemaVisitorMixIn(object): + def __init__(self, req, *args, **kwargs): + self._cw = req + super(RestrictedSchemaVisitorMixIn, self).__init__(*args, **kwargs) + + def should_display_schema(self, rschema): + return (super(RestrictedSchemaVisitorMixIn, self).should_display_schema(rschema) + and rschema.may_have_permission('read', self._cw)) + + def should_display_attr(self, eschema, rschema): + return (super(RestrictedSchemaVisitorMixIn, self).should_display_attr(eschema, rschema) + and eschema.rdef(rschema).may_have_permission('read', self._cw)) + + +class FullSchemaVisitor(RestrictedSchemaVisitorMixIn, s2d.FullSchemaVisitor): + pass + +class OneHopESchemaVisitor(RestrictedSchemaVisitorMixIn, + s2d.OneHopESchemaVisitor): + pass + +class OneHopRSchemaVisitor(RestrictedSchemaVisitorMixIn, + s2d.OneHopRSchemaVisitor): + pass + +class CWSchemaDotPropsHandler(s2d.SchemaDotPropsHandler): + def __init__(self, visitor, cw): + self.visitor = visitor + self.cw = cw + self._cycle = iter(cycle(('#ff7700', '#000000', '#ebbc69', '#888888'))) + self.nextcolor = lambda: next(self._cycle) + + self.colors = {} + + def node_properties(self, eschema): + """return DOT drawing options for an entity schema include href""" + label = ['{',eschema.type,'|'] + label.append(r'\l'.join('%s (%s)' % (rel.type, eschema.rdef(rel.type).object) + for rel in eschema.ordered_relations() + if rel.final and self.visitor.should_display_attr(eschema, rel))) + label.append(r'\l}') # trailing \l ensure alignement of the last one + return {'label' : ''.join(label), 'shape' : "record", + 'fontname' : "Courier", 'style' : "filled", + 'href': self.cw.build_url('cwetype/%s' % eschema.type), + 'fontsize': '10px' + } + + def edge_properties(self, rschema, subjnode, objnode): + """return default DOT drawing options for a relation schema""" + # Inheritance relation (i.e 'specializes'). 
+ if rschema is None: + kwargs = {'label': 'Parent class', + 'color' : 'grey', 'style' : 'filled', + 'arrowhead': 'empty', + 'fontsize': '10px'} + # symmetric rels are handled differently, let yams decide what's best + elif rschema.symmetric: + kwargs = {'label': rschema.type, + 'color': '#887788', 'style': 'dashed', + 'dir': 'both', 'arrowhead': 'normal', 'arrowtail': 'normal', + 'fontsize': '10px', + 'href': self.cw.build_url('cwrtype/%s' % rschema.type)} + else: + kwargs = {'label': rschema.type, + 'color' : 'black', 'style' : 'filled', 'fontsize': '10px', + 'href': self.cw.build_url('cwrtype/%s' % rschema.type)} + rdef = rschema.rdef(subjnode, objnode) + composite = rdef.composite + if rdef.composite == 'subject': + kwargs['arrowhead'] = 'none' + kwargs['arrowtail'] = 'diamond' + elif rdef.composite == 'object': + kwargs['arrowhead'] = 'diamond' + kwargs['arrowtail'] = 'none' + else: + kwargs['arrowhead'] = 'open' + kwargs['arrowtail'] = 'none' + # UML like cardinalities notation, omitting 1..1 + if rdef.cardinality[1] != '1': + kwargs['taillabel'] = s2d.CARD_MAP[rdef.cardinality[1]] + if rdef.cardinality[0] != '1': + kwargs['headlabel'] = s2d.CARD_MAP[rdef.cardinality[0]] + try: + kwargs['color'] = self.colors[rschema] + except KeyError: + kwargs['color'] = self.nextcolor() + self.colors[rschema] = kwargs['color'] + kwargs['fontcolor'] = kwargs['color'] + # dot label decoration is just awful (1 line underlining the label + # + 1 line going to the closest edge spline point) + kwargs['decorate'] = 'false' + #kwargs['labelfloat'] = 'true' + return kwargs + + +class SchemaGraphView(StartupView): + __regid__ = 'schemagraph' + + def call(self, etype=None, rtype=None, alt=''): + if 'MSIE 8' in self._cw.useragent(): + return + schema = self._cw.vreg.schema + if etype: + assert rtype is None + visitor = OneHopESchemaVisitor(self._cw, schema.eschema(etype), + skiptypes=skip_types(self._cw)) + alt = self._cw._('graphical representation of the %(etype)s ' + 'entity type from %(appid)s data model') + elif rtype: + visitor = OneHopRSchemaVisitor(self._cw, schema.rschema(rtype), + skiptypes=skip_types(self._cw)) + alt = self._cw._('graphical representation of the %(rtype)s ' + 'relation type from %(appid)s data model') + else: + visitor = FullSchemaVisitor(self._cw, schema, + skiptypes=skip_types(self._cw)) + alt = self._cw._('graphical representation of %(appid)s data model') + alt %= {'rtype': rtype, 'etype': etype, + 'appid': self._cw.vreg.config.appid} + prophdlr = CWSchemaDotPropsHandler(visitor, self._cw) + generator = GraphGenerator(DotBackend('schema', 'BT', + ratio='compress',size=None, + renderer='dot', + additionnal_param={ + 'overlap':'false', + 'splines':'true', + 'sep':'0.2', + })) + # svg image file + fd, tmpfile = tempfile.mkstemp('.svg') + try: + os.close(fd) + generator.generate(visitor, prophdlr, tmpfile) + with codecs.open(tmpfile, 'rb', encoding='utf-8') as svgfile: + self.w(svgfile.read()) + finally: + os.unlink(tmpfile) + +# breadcrumbs ################################################################## + +class CWRelationIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('CWRelation') + def parent_entity(self): + return self.entity.rtype + +class CWAttributeIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('CWAttribute') + def parent_entity(self): + return self.entity.stype + +class CWConstraintIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('CWConstraint') + def 
parent_entity(self): + if self.entity.reverse_constrained_by: + return self.entity.reverse_constrained_by[0] + +class RQLExpressionIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('RQLExpression') + def parent_entity(self): + return self.entity.expression_of + + +# misc: facets, actions ######################################################## + +class CWFinalFacet(facet.AttributeFacet): + __regid__ = 'cwfinal-facet' + __select__ = facet.AttributeFacet.__select__ & is_instance('CWEType', 'CWRType') + rtype = 'final' + + +class ViewSchemaAction(action.Action): + __regid__ = 'schema' + __select__ = yes() + + title = _('data model schema') + order = 30 + category = 'manage' + + def url(self): + return self._cw.build_url(self.__regid__) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/searchrestriction.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/searchrestriction.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,27 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""contains utility functions and some visual component to restrict results of +a search + +""" +__docformat__ = "restructuredtext en" + +from logilab.common.deprecation import moved + +insert_attr_select_relation = moved('cubicweb.web.facet', + 'insert_attr_select_relation') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/sessions.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/sessions.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,180 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""web session: by default the session is actually the db connection """ +__docformat__ = "restructuredtext en" + +from time import time +from logging import getLogger + +from logilab.common.registry import RegistrableObject, yes + +from cubicweb import RepositoryError, Unauthorized, set_log_methods +from cubicweb.web import InvalidSession + +from cubicweb.web.views import authentication + + +class AbstractSessionManager(RegistrableObject): + """manage session data associated to a session identifier""" + __abstract__ = True + __select__ = yes() + __registry__ = 'sessions' + __regid__ = 'sessionmanager' + + def __init__(self, repo): + vreg = repo.vreg + self.session_time = vreg.config['http-session-time'] or None + self.authmanager = authentication.RepositoryAuthenticationManager(repo) + interval = (self.session_time or 0) / 2. + if vreg.config.anonymous_user()[0] is not None: + self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60 + assert self.cleanup_anon_session_time > 0 + if self.session_time is not None: + self.cleanup_anon_session_time = min(self.session_time, + self.cleanup_anon_session_time) + interval = self.cleanup_anon_session_time / 2. + # we don't want to check session more than once every 5 minutes + self.clean_sessions_interval = max(5 * 60, interval) + + def clean_sessions(self): + """cleanup sessions which has not been unused since a given amount of + time. Return the number of sessions which have been closed. + """ + self.debug('cleaning http sessions') + session_time = self.session_time + closed, total = 0, 0 + for session in self.current_sessions(): + total += 1 + last_usage_time = session.mtime + no_use_time = (time() - last_usage_time) + if session.anonymous_session: + if no_use_time >= self.cleanup_anon_session_time: + self.close_session(session) + closed += 1 + elif session_time is not None and no_use_time >= session_time: + self.close_session(session) + closed += 1 + return closed, total - closed + + def current_sessions(self): + """return currently open sessions""" + raise NotImplementedError() + + def get_session(self, req, sessionid): + """return existing session for the given session identifier""" + raise NotImplementedError() + + def open_session(self, req): + """open and return a new session for the given request. + + raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) + """ + raise NotImplementedError() + + def close_session(self, session): + """close session on logout or on invalid session detected (expired out, + corrupted...) 
+ """ + raise NotImplementedError() + + +set_log_methods(AbstractSessionManager, getLogger('cubicweb.sessionmanager')) + + +class InMemoryRepositorySessionManager(AbstractSessionManager): + """manage session data associated to a session identifier""" + + def __init__(self, *args, **kwargs): + super(InMemoryRepositorySessionManager, self).__init__(*args, **kwargs) + # XXX require a RepositoryAuthenticationManager which violates + # authenticate interface by returning a session instead of a user + #assert isinstance(self.authmanager, RepositoryAuthenticationManager) + self._sessions = {} + + # dump_data / restore_data to avoid loosing open sessions on registry + # reloading + def dump_data(self): + return self._sessions + def restore_data(self, data): + self._sessions = data + + def current_sessions(self): + return self._sessions.values() + + def get_session(self, req, sessionid): + """return existing session for the given session identifier""" + if sessionid not in self._sessions: + raise InvalidSession() + session = self._sessions[sessionid] + try: + user = self.authmanager.validate_session(req, session) + except InvalidSession: + self.close_session(session) + raise + if session.closed: + self.close_session(session) + raise InvalidSession() + return session + + def open_session(self, req): + """open and return a new session for the given request. The session is + also bound to the request. + + raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) + """ + session, login = self.authmanager.authenticate(req) + self._sessions[session.sessionid] = session + session.mtime = time() + return session + + def postlogin(self, req, session): + """postlogin: the user have been related to a session + + Both req and session are passed to this function because actually + linking the request to the session is not yet done and not the + responsability of this object. + """ + # Update last connection date + # XXX: this should be in a post login hook in the repository, but there + # we can't differentiate actual login of automatic session + # reopening. Is it actually a problem? + if 'last_login_time' in req.vreg.schema: + self._update_last_login_time(session) + req.set_message(req._('welcome %s!') % session.user.login) + + def _update_last_login_time(self, session): + # XXX should properly detect missing permission / non writeable source + # and avoid "except (RepositoryError, Unauthorized)" below + try: + with session.new_cnx() as cnx: + cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', + {'x' : session.user.eid}) + cnx.commit() + except (RepositoryError, Unauthorized): + pass + + def close_session(self, session): + """close session on logout or on invalid session detected (expired out, + corrupted...) + """ + self.info('closing http session %s' % session.sessionid) + self._sessions.pop(session.sessionid, None) + if not session.closed: + session.repo.close(session.sessionid) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/sparql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/sparql.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,145 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""SPARQL integration""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves import range + +from yams import xy +from rql import TypeResolverException + +from lxml import etree +from lxml.builder import E + +from cubicweb.view import StartupView, AnyRsetView +from cubicweb.web import Redirect, form, formfields, formwidgets as fwdgs +from cubicweb.web.views import forms +try: + from cubicweb.spa2rql import Sparql2rqlTranslator, UnsupportedQuery +except ImportError: + # fyzz not available (only a recommends) + Sparql2rqlTranslator = None + +class SparqlForm(forms.FieldsForm): + __regid__ = 'sparql' + sparql = formfields.StringField(help=_('type here a sparql query')) + resultvid = formfields.StringField(choices=((_('table'), 'table'), + (_('sparql xml'), 'sparqlxml')), + widget=fwdgs.Radio, + value='table') + form_buttons = [fwdgs.SubmitButton()] + @property + def action(self): + return self._cw.url() + + +class SparqlFormView(form.FormViewMixIn, StartupView): + __regid__ = 'sparql' + def call(self): + form = self._cw.vreg['forms'].select('sparql', self._cw) + form.render(w=self.w) + sparql = self._cw.form.get('sparql') + vid = self._cw.form.get('resultvid', 'table') + if sparql: + try: + qinfo = Sparql2rqlTranslator(self._cw.vreg.schema).translate(sparql) + except TypeResolverException as exc: + self.w(self._cw._('can not resolve entity types:') + u' ' + unicode(exc)) + except UnsupportedQuery: + self.w(self._cw._('we are not yet ready to handle this query')) + except xy.UnsupportedVocabulary as exc: + self.w(self._cw._('unknown vocabulary:') + u' ' + unicode(exc)) + else: + rql, args = qinfo.finalize() + if vid == 'sparqlxml': + url = self._cw.build_url('view', rql=rql % args, vid=vid) + raise Redirect(url) + rset = self._cw.execute(rql, args) + self.wview(vid, rset, 'null') + + +## sparql resultset views ##################################################### + +YAMS_XMLSCHEMA_MAPPING = { + 'String': 'string', + + 'Boolean': 'boolean', + 'Int': 'integer', + 'BigInt': 'integer', + 'Float': 'float', + + 'Datetime': 'dateTime', + 'TZDatetime': 'dateTime', + 'Date': 'date', + 'Time': 'time', + 'TZTime': 'time', + + # XXX the following types don't have direct mapping + 'Decimal': 'string', + 'Interval': 'duration', + 'Bytes': 'base64Binary', + 'Password': 'string', + } + +def xmlschema(yamstype): + return 'http://www.w3.org/2001/XMLSchema#%s' % YAMS_XMLSCHEMA_MAPPING[yamstype] + +class SparqlResultXmlView(AnyRsetView): + """The spec can be found here: http://www.w3.org/TR/rdf-sparql-XMLres/ + """ + __regid__ = 'sparqlxml' + content_type = 'application/sparql-results+xml' + templatable = False + + def call(self): + # XXX handle UNION + rqlst = self.cw_rset.syntax_tree().children[0] + varnames = [var.name for var in rqlst.selection] + results = E.results() + for rowidx in range(len(self.cw_rset)): + result = E.result() + for colidx, varname in 
enumerate(varnames): + result.append(self.cell_binding(rowidx, colidx, varname)) + results.append(result) + sparql = E.sparql(E.head(*(E.variable(name=name) for name in varnames)), + results) + self.w(u'\n') + self.w(etree.tostring(sparql, encoding=unicode, pretty_print=True)) + + def cell_binding(self, row, col, varname): + celltype = self.cw_rset.description[row][col] + if self._cw.vreg.schema.eschema(celltype).final: + cellcontent = self._cw.view('cell', self.cw_rset, row=row, col=col) + return E.binding(E.literal(cellcontent, + datatype=xmlschema(celltype)), + name=varname) + else: + entity = self.cw_rset.get_entity(row, col) + return E.binding(E.uri(entity.absolute_url()), name=varname) + + def set_request_content_type(self): + """overriden to set the correct filetype and filename""" + self._cw.set_content_type(self.content_type, + filename='sparql.xml', + encoding=self._cw.encoding) + +def registration_callback(vreg): + if Sparql2rqlTranslator is not None: + vreg.register_all(globals().values(), __name__) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/startup.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/startup.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,174 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""This module contains the default index page and management view. + +.. autoclass:: IndexView +.. autoclass:: ManageView +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from logilab.common.textutils import unormalize +from logilab.common.deprecation import deprecated +from logilab.mtconverter import xml_escape + +from cubicweb.view import StartupView +from cubicweb.predicates import match_user_groups, is_instance +from cubicweb.schema import display_name +from cubicweb.web import httpcache +from cubicweb.web.views import uicfg + +class ManageView(StartupView): + """:__regid__: *manage* + + The manage view, display some information about what's contained by your + site and provides access to administration stuff such as user and groups + management. + + Regarding the section displaying link to entity type, notice by default it + won't display entity types which are related to another one using a + mandatory (cardinality == 1) composite relation. + + You can still configure that behaviour manually using the + `indexview_etype_section` as explained in :mod:`cubicweb.web.uicfg`. 
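The `indexview_etype_section` tag mentioned above is exactly what `entities()` below tests: only entity types mapped to 'application' end up in the "Browse by entity type" section. A minimal sketch of a manual override, assuming a hypothetical `BlogEntry` entity type:

    from cubicweb.web.views import uicfg

    # list BlogEntry on the manage/index pages regardless of the automatic
    # categorization based on mandatory composite relations
    uicfg.indexview_etype_section['BlogEntry'] = 'application'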
+ """ + __regid__ = 'manage' + title = _('manage') + http_cache_manager = httpcache.EtagHTTPCacheManager + add_etype_links = () + skip_startup_views = set( ('index', 'manage', 'schema', 'owl', + 'systempropertiesform', 'propertiesform', + 'loggedout', 'login', + 'cw.users-and-groups-management', 'cw.groups-management', + 'cw.users-management', 'cw.sources-management', + 'siteinfo', 'info', 'registry', 'gc', + 'tree') ) + + def call(self, **kwargs): + """The default view representing the instance's management""" + self._cw.add_css('cubicweb.manageview.css') + self.w(u'

          %s

          ' % self._cw.property_value('ui.site-title')) + self.entities() + self.manage_actions() + self.startup_views() + + def manage_actions(self): + allactions = self._cw.vreg['actions'].possible_actions(self._cw) + if allactions.get('manage'): + self.w(u'
           
          ') + self.w(u'

          %s

          \n' % self._cw._('Manage')) + self.w(u'
            ') + for action in allactions['manage']: + self.w(u'
          • %s
          • ' % ( + action.url(), self._cw._(action.title))) + self.w(u'
          ') + + def startup_views(self): + views = [v for v in self._cw.vreg['views'].possible_views(self._cw, None) + if v.category == 'startupview' + and v.__regid__ not in self.skip_startup_views] + if not views: + return + self.w(u'
           
          ') + self.w(u'

          %s

          \n' % self._cw._('Startup views')) + self.w(u'
            ') + for v in sorted(views, key=lambda x: self._cw._(x.title)): + self.w('
          • %s
          • ' % ( + xml_escape(v.url()), xml_escape(self._cw._(v.title).capitalize()))) + self.w(u'
          ') + + def entities(self): + schema = self._cw.vreg.schema + eschemas = [eschema for eschema in schema.entities() + if uicfg.indexview_etype_section.get(eschema) == 'application'] + if eschemas: + self.w(u'
           
          ') + self.w(u'

          %s

          \n' % self._cw._('Browse by entity type')) + self.w(u'') + self.entity_types_table(eschemas) + self.w(u'
          ') + + def entity_types_table(self, eschemas): + infos = sorted(self.entity_types(eschemas), + key=lambda t: unormalize(t[0])) + q, r = divmod(len(infos), 2) + if r: + infos.append( (None, ' ', ' ') ) + infos = zip(infos[:q+r], infos[q+r:]) + for (_, etypelink, addlink), (_, etypelink2, addlink2) in infos: + self.w(u'\n') + self.w(u'%s%s\n' % (addlink, etypelink)) + self.w(u'%s%s\n' % (addlink2, etypelink2)) + self.w(u'\n') + + def entity_types(self, eschemas): + """return an iterator on formatted links to get a list of entities of + each entity types + """ + req = self._cw + for eschema in eschemas: + if eschema.final or not eschema.may_have_permission('read', req): + continue + etype = eschema.type + nb = req.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] + if nb > 1: + label = display_name(req, etype, 'plural') + else: + label = display_name(req, etype) + nb = req.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] + url = self._cw.build_url(etype) + etypelink = u' %s (%d)' % ( + xml_escape(url), label, nb) + if eschema.has_perm(req, 'add'): + yield (label, etypelink, self.add_entity_link(etype)) + else: + yield (label, etypelink, u'') + + def create_links(self): + self.w(u'') + + def add_entity_link(self, etype): + """creates a [+] link for adding an entity""" + url = self._cw.vreg["etypes"].etype_class(etype).cw_create_url(self._cw) + return u'[+]' % ( + xml_escape(url), self._cw.__('New %s' % etype)) + + + +class IndexView(ManageView): + """:__regid__: *index* + + The default index view, that you'll get when accessing your site's root url. + It's by default indentical to the + :class:`~cubicweb.web.views.startup.ManageView`, but you'll usually want to + customize this one. + """ + __regid__ = 'index' + title = _('view_index') + + @deprecated('[3.11] display_folders method is deprecated, backport it if needed') + def display_folders(self): + return 'Folder' in self._cw.vreg.schema and self._cw.execute('Any COUNT(X) WHERE X is Folder')[0][0] diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/staticcontrollers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/staticcontrollers.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,272 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Set of static resources controllers for : + +- /data/... +- /static/... +- /fckeditor/... 
+""" + +import os +import os.path as osp +import hashlib +import mimetypes +import threading +import tempfile +from time import mktime +from datetime import datetime, timedelta +from logging import getLogger + +from cubicweb import Forbidden +from cubicweb.web import NotFound, Redirect +from cubicweb.web.http_headers import generateDateTime +from cubicweb.web.controller import Controller +from cubicweb.web.views.urlrewrite import URLRewriter + + + +class StaticFileController(Controller): + """an abtract class to serve static file + + Make sure to add your subclass to the STATIC_CONTROLLERS list""" + __abstract__ = True + directory_listing_allowed = False + + def max_age(self, path): + """max cache TTL""" + return 60*60*24*7 + + def static_file(self, path): + """Return full content of a static file. + + XXX iterable content would be better + """ + debugmode = self._cw.vreg.config.debugmode + if osp.isdir(path): + if self.directory_listing_allowed: + return u'' + raise Forbidden(path) + if not osp.isfile(path): + raise NotFound() + if not debugmode: + # XXX: Don't provide additional resource information to error responses + # + # the HTTP RFC recommends not going further than 1 year ahead + expires = datetime.now() + timedelta(seconds=self.max_age(path)) + self._cw.set_header('Expires', generateDateTime(mktime(expires.timetuple()))) + self._cw.set_header('Cache-Control', 'max-age=%s' % self.max_age(path)) + + # XXX system call to os.stats could be cached once and for all in + # production mode (where static files are not expected to change) + # + # Note that: we do a osp.isdir + osp.isfile before and a potential + # os.read after. Improving this specific call will not help + # + # Real production environment should use dedicated static file serving. + self._cw.set_header('last-modified', generateDateTime(os.stat(path).st_mtime)) + if self._cw.is_client_cache_valid(): + return '' + # XXX elif uri.startswith('/https/'): uri = uri[6:] + mimetype, encoding = mimetypes.guess_type(path) + if mimetype is None: + mimetype = 'application/octet-stream' + self._cw.set_content_type(mimetype, osp.basename(path), encoding) + with open(path, 'rb') as resource: + return resource.read() + + @property + def relpath(self): + """path of a requested file relative to the controller""" + path = self._cw.form.get('static_relative_path') + if path is None: + path = self._cw.relative_path(includeparams=True) + return path + + +class ConcatFilesHandler(object): + """Emulating the behavior of modconcat + + this serve multiple file as a single one. + """ + + def __init__(self, config): + self._resources = {} + self.config = config + self.logger = getLogger('cubicweb.web') + self.lock = threading.Lock() + + def _resource(self, path): + """get the resouce""" + try: + return self._resources[path] + except KeyError: + self._resources[path] = self.config.locate_resource(path) + return self._resources[path] + + def _up_to_date(self, filepath, paths): + """ + The concat-file is considered up-to-date if it exists. 
+ In debug mode, an additional check is performed to make sure that + concat-file is more recent than all concatenated files + """ + if not osp.isfile(filepath): + return False + if self.config.debugmode: + concat_lastmod = os.stat(filepath).st_mtime + for path in paths: + dirpath, rid = self._resource(path) + if rid is None: + raise NotFound(path) + path = osp.join(dirpath, rid) + if os.stat(path).st_mtime > concat_lastmod: + return False + return True + + def build_filepath(self, paths): + """return the filepath that will be used to cache concatenation of `paths` + """ + _, ext = osp.splitext(paths[0]) + fname = 'cache_concat_' + hashlib.md5((';'.join(paths)).encode('ascii')).hexdigest() + ext + return osp.join(self.config.appdatahome, 'uicache', fname) + + def concat_cached_filepath(self, paths): + filepath = self.build_filepath(paths) + if not self._up_to_date(filepath, paths): + with self.lock: + if self._up_to_date(filepath, paths): + # first check could have raced with some other thread + # updating the file + return filepath + fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(filepath)) + try: + f = os.fdopen(fd, 'wb') + for path in paths: + dirpath, rid = self._resource(path) + if rid is None: + # In production mode log an error, do not return a 404 + # XXX the erroneous content is cached anyway + self.logger.error('concatenated data url error: %r file ' + 'does not exist', path) + if self.config.debugmode: + raise NotFound(path) + else: + with open(osp.join(dirpath, rid), 'rb') as source: + for line in source: + f.write(line) + f.write(b'\n') + f.close() + except: + os.remove(tmpfile) + raise + else: + os.rename(tmpfile, filepath) + return filepath + + +class DataController(StaticFileController): + """Controller in charge of serving static files in /data/ + + Handles mod_concat-like URLs. + """ + + __regid__ = 'data' + + def __init__(self, *args, **kwargs): + super(DataController, self).__init__(*args, **kwargs) + config = self._cw.vreg.config + self.base_datapath = config.data_relpath() + self.data_modconcat_basepath = '%s??' % self.base_datapath + self.concat_files_registry = ConcatFilesHandler(config) + + def publish(self, rset=None): + config = self._cw.vreg.config + # includeparams=True for modconcat-like urls + relpath = self.relpath + if relpath.startswith(self.data_modconcat_basepath): + paths = relpath[len(self.data_modconcat_basepath):].split(',') + filepath = self.concat_files_registry.concat_cached_filepath(paths) + else: + if not relpath.startswith(self.base_datapath): + # /data/foo, redirect to /data/{hash}/foo + prefix = 'data/' + relpath = relpath[len(prefix):] + raise Redirect(self._cw.data_url(relpath), 302) + # skip leading '/data/{hash}/' and url params + prefix = self.base_datapath + relpath = relpath[len(prefix):] + relpath = relpath.split('?', 1)[0] + dirpath, rid = config.locate_resource(relpath) + if dirpath is None: + raise NotFound() + filepath = osp.join(dirpath, rid) + return self.static_file(filepath) + + +class FCKEditorController(StaticFileController): + """Controller in charge of serving FCKEditor related file + + The motivational for a dedicated controller have been lost. 
+ """ + + __regid__ = 'fckeditor' + + def publish(self, rset=None): + config = self._cw.vreg.config + if self._cw.https: + uiprops = config.https_uiprops + else: + uiprops = config.uiprops + relpath = self.relpath + if relpath.startswith('fckeditor/'): + relpath = relpath[len('fckeditor/'):] + relpath = relpath.split('?', 1)[0] + return self.static_file(osp.join(uiprops['FCKEDITOR_PATH'], relpath)) + + +class StaticDirectoryController(StaticFileController): + """Controller in charge of serving static file in /static/ + """ + __regid__ = 'static' + + def publish(self, rset=None): + staticdir = self._cw.vreg.config.static_directory + relpath = self.relpath[len(self.__regid__) + 1:] + return self.static_file(osp.join(staticdir, relpath)) + +STATIC_CONTROLLERS = [DataController, FCKEditorController, + StaticDirectoryController] + +class StaticControlerRewriter(URLRewriter): + """a quick and dirty rewritter in charge of server static file. + + This is a work around the flatness of url handling in cubicweb.""" + + __regid__ = 'static' + + priority = 10 + + def rewrite(self, req, uri): + for ctrl in STATIC_CONTROLLERS: + if uri.startswith('/%s/' % ctrl.__regid__): + break + else: + self.debug("not a static file uri: %s", uri) + raise KeyError(uri) + relpath = self._cw.relative_path(includeparams=False) + self._cw.form['static_relative_path'] = self._cw.relative_path(includeparams=True) + return ctrl.__regid__, None diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/tableview.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/tableview.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1338 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""This module contains table views, with the following features that may be +provided (depending on the used implementation): + +* facets filtering +* pagination +* actions menu +* properly sortable content +* odd/row/hover line styles + +The three main implementation are described below. Each implementation is +suitable for a particular case, but they each attempt to display tables that +looks similar. + +.. autoclass:: cubicweb.web.views.tableview.RsetTableView + :members: + +.. autoclass:: cubicweb.web.views.tableview.EntityTableView + :members: + +.. autoclass:: cubicweb.web.views.pyviews.PyValTableView + :members: + +All those classes are rendered using a *layout*: + +.. autoclass:: cubicweb.web.views.tableview.TableLayout + :members: + +There is by default only one table layout, using the 'table_layout' identifier, +that is referenced by table views +:attr:`cubicweb.web.views.tableview.TableMixIn.layout_id`. 
If you want to +customize the look and feel of your table, you can either replace the default +one by yours, having multiple variants with proper selectors, or change the +`layout_id` identifier of your table to use your table specific implementation. + +Notice you can gives options to the layout using a `layout_args` dictionary on +your class. + +If you still can't find a view that suit your needs, you should take a look at the +class below that is the common abstract base class for the three views defined +above and implement your own class. + +.. autoclass:: cubicweb.web.views.tableview.TableMixIn + :members: +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn +from copy import copy +from types import MethodType + +from six import string_types, add_metaclass, create_bound_method +from six.moves import range + +from logilab.mtconverter import xml_escape +from logilab.common.decorators import cachedproperty +from logilab.common.deprecation import class_deprecated +from logilab.common.registry import yes + +from cubicweb import NoSelectableObject, tags +from cubicweb.predicates import nonempty_rset, match_kwargs, objectify_predicate +from cubicweb.schema import display_name +from cubicweb.utils import make_uid, js_dumps, JSString, UStringIO +from cubicweb.uilib import toggle_action, limitsize, htmlescape, sgml_attributes, domid +from cubicweb.view import EntityView, AnyRsetView +from cubicweb.web import jsonize, component +from cubicweb.web.htmlwidgets import (TableWidget, TableColumn, MenuWidget, + PopupBoxMenu) + + +@objectify_predicate +def unreloadable_table(cls, req, rset=None, + displaycols=None, headers=None, cellvids=None, + paginate=False, displayactions=False, displayfilter=False, + **kwargs): + # one may wish to specify one of headers/displaycols/cellvids as long as he + # doesn't want pagination nor actions nor facets + if not kwargs and (displaycols or headers or cellvids) and not ( + displayfilter or displayactions or paginate): + return 1 + return 0 + + +class TableLayout(component.Component): + """The default layout for table. When `render` is called, this will use + the API described on :class:`TableMixIn` to feed the generated table. + + This layout behaviour may be customized using the following attributes / + selection arguments: + + * `cssclass`, a string that should be used as HTML class attribute. Default + to "listing". + + * `needs_css`, the CSS files that should be used together with this + table. Default to ('cubicweb.tablesorter.css', 'cubicweb.tableview.css'). + + * `needs_js`, the Javascript files that should be used together with this + table. Default to ('jquery.tablesorter.js',) + + * `display_filter`, tells if the facets filter should be displayed when + possible. Allowed values are: + - `None`, don't display it + - 'top', display it above the table + - 'bottom', display it below the table + + * `display_actions`, tells if a menu for available actions should be + displayed when possible (see two following options). Allowed values are: + - `None`, don't display it + - 'top', display it above the table + - 'bottom', display it below the table + + * `hide_filter`, when true (the default), facets filter will be hidden by + default, with an action in the actions menu allowing to show / hide it. + + * `show_all_option`, when true, a *show all results* link will be displayed + below the navigation component. 
+ + * `add_view_actions`, when true, actions returned by view.table_actions() + will be included in the actions menu. + + * `header_column_idx`, if not `None`, should be a colum index or a set of + column index where tags should be generated instead of + """ #'# make emacs happier + __regid__ = 'table_layout' + cssclass = "listing" + needs_css = ('cubicweb.tableview.css',) + needs_js = () + display_filter = None # None / 'top' / 'bottom' + display_actions = 'top' # None / 'top' / 'bottom' + hide_filter = True + show_all_option = True # make navcomp generate a 'show all' results link + add_view_actions = False + header_column_idx = None + enable_sorting = True + sortvalue_limit = 10 + tablesorter_settings = { + 'textExtraction': JSString('cw.sortValueExtraction'), + 'selectorHeaders': "thead tr:first th[class='sortable']", # only plug on the first row + } + + def _setup_tablesorter(self, divid): + self._cw.add_css('cubicweb.tablesorter.css') + self._cw.add_js('jquery.tablesorter.js') + self._cw.add_onload('''$(document).ready(function() { + $("#%s table").tablesorter(%s); +});''' % (divid, js_dumps(self.tablesorter_settings))) + + def __init__(self, req, view, **kwargs): + super(TableLayout, self).__init__(req, **kwargs) + for key, val in list(self.cw_extra_kwargs.items()): + if hasattr(self.__class__, key) and not key[0] == '_': + setattr(self, key, val) + self.cw_extra_kwargs.pop(key) + self.view = view + if self.header_column_idx is None: + self.header_column_idx = frozenset() + elif isinstance(self.header_column_idx, int): + self.header_column_idx = frozenset( (self.header_column_idx,) ) + + @cachedproperty + def initial_load(self): + """We detect a bit heuristically if we are built for the first time or + from subsequent calls by the form filter or by the pagination hooks. 
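To make the layout knobs listed above concrete, a cube could register its own variant along these lines (an illustrative sketch only: the 'mycube' identifiers are made up, and only attributes documented above are overridden):

    from cubicweb.web.views.tableview import TableLayout

    class BottomActionsTableLayout(TableLayout):
        """layout without the facets filter and with the actions menu below the table"""
        __regid__ = 'mycube.table_layout'
        cssclass = 'listing mycube-listing'
        display_filter = None
        display_actions = 'bottom'
        add_view_actions = True

    # a table view opts in by setting ``layout_id = 'mycube.table_layout'`` on its class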
+ """ + form = self._cw.form + return 'fromformfilter' not in form and '__fromnavigation' not in form + + def render(self, w, **kwargs): + assert self.display_filter in (None, 'top', 'bottom'), self.display_filter + if self.needs_css: + self._cw.add_css(self.needs_css) + if self.needs_js: + self._cw.add_js(self.needs_js) + if self.enable_sorting: + self._setup_tablesorter(self.view.domid) + # Notice facets form must be rendered **outside** the main div as it + # shouldn't be rendered on ajax call subsequent to facet restriction + # (hence the 'fromformfilter' parameter added by the form + generate_form = self.initial_load + if self.display_filter and generate_form: + facetsform = self.view.facets_form() + else: + facetsform = None + if facetsform and self.display_filter == 'top': + cssclass = u'hidden' if self.hide_filter else u'' + facetsform.render(w, vid=self.view.__regid__, cssclass=cssclass, + divid=self.view.domid) + actions = [] + if self.display_actions: + if self.add_view_actions: + actions = self.view.table_actions() + if self.display_filter and self.hide_filter and (facetsform or not generate_form): + actions += self.show_hide_filter_actions(not generate_form) + self.render_table(w, actions, self.view.paginable) + if facetsform and self.display_filter == 'bottom': + cssclass = u'hidden' if self.hide_filter else u'' + facetsform.render(w, vid=self.view.__regid__, cssclass=cssclass, + divid=self.view.domid) + + def render_table_headers(self, w, colrenderers): + w(u'') + for colrenderer in colrenderers: + if colrenderer.sortable: + w(u'') + else: + w(u'') + colrenderer.render_header(w) + w(u'') + w(u'\n') + + def render_table_body(self, w, colrenderers): + w(u'') + for rownum in range(self.view.table_size): + self.render_row(w, rownum, colrenderers) + w(u'') + + def render_table(self, w, actions, paginate): + view = self.view + divid = view.domid + if divid is not None: + w(u'
<div id="%s">' % divid)
+        else:
+            assert not (actions or paginate)
+        nav_html = UStringIO()
+        if paginate:
+            view.paginate(w=nav_html.write, show_all_option=self.show_all_option)
+        w(nav_html.getvalue())
+        if actions and self.display_actions == 'top':
+            self.render_actions(w, actions)
+        colrenderers = view.build_column_renderers()
+        attrs = self.table_attributes()
+        w(u'<table %s>' % sgml_attributes(attrs))
+        if self.view.has_headers:
+            self.render_table_headers(w, colrenderers)
+        self.render_table_body(w, colrenderers)
+        w(u'</table>')
+        if actions and self.display_actions == 'bottom':
+            self.render_actions(w, actions)
+        w(nav_html.getvalue())
+        if divid is not None:
+            w(u'</div>
          ') + + def table_attributes(self): + return {'class': self.cssclass} + + def render_row(self, w, rownum, renderers): + attrs = self.row_attributes(rownum) + w(u'' % sgml_attributes(attrs)) + for colnum, renderer in enumerate(renderers): + self.render_cell(w, rownum, colnum, renderer) + w(u'\n') + + def row_attributes(self, rownum): + return {'class': 'odd' if (rownum%2==1) else 'even', + 'onmouseover': '$(this).addClass("highlighted");', + 'onmouseout': '$(this).removeClass("highlighted")'} + + def render_cell(self, w, rownum, colnum, renderer): + attrs = self.cell_attributes(rownum, colnum, renderer) + if colnum in self.header_column_idx: + tag = u'th' + else: + tag = u'td' + w(u'<%s %s>' % (tag, sgml_attributes(attrs))) + renderer.render_cell(w, rownum) + w(u'' % tag) + + def cell_attributes(self, rownum, _colnum, renderer): + attrs = renderer.attributes.copy() + if renderer.sortable: + sortvalue = renderer.sortvalue(rownum) + if isinstance(sortvalue, string_types): + sortvalue = sortvalue[:self.sortvalue_limit] + if sortvalue is not None: + attrs[u'cubicweb:sortvalue'] = js_dumps(sortvalue) + return attrs + + def render_actions(self, w, actions): + box = MenuWidget('', '', _class='tableActionsBox', islist=False) + label = tags.span(self._cw._('action menu')) + menu = PopupBoxMenu(label, isitem=False, link_class='actionsBox', + ident='%sActions' % self.view.domid) + box.append(menu) + for action in actions: + menu.append(action) + box.render(w=w) + w(u'
          ') + + def show_hide_filter_actions(self, currentlydisplayed=False): + divid = self.view.domid + showhide = u';'.join(toggle_action('%s%s' % (divid, what))[11:] + for what in ('Form', 'Show', 'Hide', 'Actions')) + showhide = 'javascript:' + showhide + self._cw.add_onload(u'''\ +$(document).ready(function() { + if ($('#%(id)sForm[class=\"hidden\"]').length) { + $('#%(id)sHide').attr('class', 'hidden'); + } else { + $('#%(id)sShow').attr('class', 'hidden'); + } +});''' % {'id': divid}) + showlabel = self._cw._('show filter form') + hidelabel = self._cw._('hide filter form') + return [component.Link(showhide, showlabel, id='%sShow' % divid), + component.Link(showhide, hidelabel, id='%sHide' % divid)] + + +class AbstractColumnRenderer(object): + """Abstract base class for column renderer. Interface of a column renderer follows: + + .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.bind + .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.render_header + .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.render_cell + .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.sortvalue + + Attributes on this base class are: + + :attr: `header`, the column header. If None, default to `_(colid)` + :attr: `addcount`, if True, add the table size in parenthezis beside the header + :attr: `trheader`, should the header be translated + :attr: `escapeheader`, should the header be xml_escaped + :attr: `sortable`, tell if the column is sortable + :attr: `view`, the table view + :attr: `_cw`, the request object + :attr: `colid`, the column identifier + :attr: `attributes`, dictionary of attributes to put on the HTML tag when + the cell is rendered + """ #'# make emacs + attributes = {} + empty_cell_content = u' ' + + def __init__(self, header=None, addcount=False, trheader=True, + escapeheader=True, sortable=True): + self.header = header + self.trheader = trheader + self.escapeheader = escapeheader + self.addcount = addcount + self.sortable = sortable + self.view = None + self._cw = None + self.colid = None + + def __str__(self): + return '<%s.%s (column %s) at 0x%x>' % (self.view.__class__.__name__, + self.__class__.__name__, + self.colid, id(self)) + + def bind(self, view, colid): + """Bind the column renderer to its view. This is where `_cw`, `view`, + `colid` are set and the method to override if you want to add more + view/request depending attributes on your column render. + """ + self.view = view + self._cw = view._cw + self.colid = colid + + def copy(self): + assert self.view is None + return copy(self) + + def default_header(self): + """Return header for this column if one has not been specified.""" + return self._cw._(self.colid) + + def render_header(self, w): + """Write label for the specified column by calling w().""" + header = self.header + if header is None: + header = self.default_header() + elif self.trheader and header: + header = self._cw._(header) + if self.addcount: + header = '%s (%s)' % (header, self.view.table_size) + if header: + if self.escapeheader: + header = xml_escape(header) + else: + header = self.empty_cell_content + if self.sortable: + header = tags.span( + header, escapecontent=False, + title=self._cw._('Click to sort on this column')) + w(header) + + def render_cell(self, w, rownum): + """Write value for the specified cell by calling w(). 
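As an illustration of this interface, a hand-written renderer for an rset-based table (where `colid` is a column index) might look like the sketch below; the severity ranking is invented for the example:

    from logilab.mtconverter import xml_escape
    from cubicweb.web.views.tableview import AbstractColumnRenderer

    class SeverityColRenderer(AbstractColumnRenderer):
        """display a textual severity and sort it by rank rather than alphabetically"""
        severity_rank = {'minor': 0, 'normal': 1, 'important': 2}

        def render_cell(self, w, rownum):
            value = self.view.cw_rset[rownum][self.colid]
            w(xml_escape(value) if value else self.empty_cell_content)

        def sortvalue(self, rownum):
            return self.severity_rank.get(self.view.cw_rset[rownum][self.colid], -1)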
+ + :param `rownum`: the row number in the table + """ + raise NotImplementedError() + + def sortvalue(self, _rownum): + """Return typed value to be used for sorting on the specified column. + + :param `rownum`: the row number in the table + """ + return None + + +class TableMixIn(component.LayoutableMixIn): + """Abstract mix-in class for layout based tables. + + This default implementation's call method simply delegate to + meth:`layout_render` that will select the renderer whose identifier is given + by the :attr:`layout_id` attribute. + + Then it provides some default implementation for various parts of the API + used by that layout. + + Abstract method you will have to override is: + + .. automethod:: build_column_renderers + + You may also want to overridde: + + .. autoattribute:: cubicweb.web.views.tableview.TableMixIn.table_size + + The :attr:`has_headers` boolean attribute tells if the table has some + headers to be displayed. Default to `True`. + """ + __abstract__ = True + # table layout to use + layout_id = 'table_layout' + # true if the table has some headers + has_headers = True + # dictionary {colid : column renderer} + column_renderers = {} + # default renderer class to use when no renderer specified for the column + default_column_renderer_class = None + # default layout handles inner pagination + handle_pagination = True + + def call(self, **kwargs): + self._cw.add_js('cubicweb.ajax.js') # for pagination + self.layout_render(self.w) + + def column_renderer(self, colid, *args, **kwargs): + """Return a column renderer for column of the given id.""" + try: + crenderer = self.column_renderers[colid].copy() + except KeyError: + crenderer = self.default_column_renderer_class(*args, **kwargs) + crenderer.bind(self, colid) + return crenderer + + # layout callbacks ######################################################### + + def facets_form(self, **kwargs):# XXX extracted from jqplot cube + return self._cw.vreg['views'].select_or_none( + 'facet.filtertable', self._cw, rset=self.cw_rset, view=self, + **kwargs) + + @cachedproperty + def domid(self): + return self._cw.form.get('divid') or domid('%s-%s' % (self.__regid__, make_uid())) + + @property + def table_size(self): + """Return the number of rows (header excluded) to be displayed. + + By default return the number of rows in the view's result set. If your + table isn't reult set based, override this method. + """ + return self.cw_rset.rowcount + + def build_column_renderers(self): + """Return a list of column renderers, one for each column to be + rendered. Prototype of a column renderer is described below: + + .. autoclass:: cubicweb.web.views.tableview.AbstractColumnRenderer + """ + raise NotImplementedError() + + def table_actions(self): + """Return a list of actions (:class:`~cubicweb.web.component.Link`) that + match the view's result set, and return those in the 'mainactions' + category. 
+ """ + req = self._cw + actions = [] + actionsbycat = req.vreg['actions'].possible_actions(req, self.cw_rset) + for action in actionsbycat.get('mainactions', ()): + for action in action.actual_actions(): + actions.append(component.Link(action.url(), req._(action.title), + klass=action.html_class()) ) + return actions + + # interaction with navigation component #################################### + + def page_navigation_url(self, navcomp, _path, params): + params['divid'] = self.domid + params['vid'] = self.__regid__ + return navcomp.ajax_page_url(**params) + + +class RsetTableColRenderer(AbstractColumnRenderer): + """Default renderer for :class:`RsetTableView`.""" + + def __init__(self, cellvid, **kwargs): + super(RsetTableColRenderer, self).__init__(**kwargs) + self.cellvid = cellvid + + def bind(self, view, colid): + super(RsetTableColRenderer, self).bind(view, colid) + self.cw_rset = view.cw_rset + def render_cell(self, w, rownum): + self._cw.view(self.cellvid, self.cw_rset, 'empty-cell', + row=rownum, col=self.colid, w=w) + + # limit value's length as much as possible (e.g. by returning the 10 first + # characters of a string) + def sortvalue(self, rownum): + colid = self.colid + val = self.cw_rset[rownum][colid] + if val is None: + return u'' + etype = self.cw_rset.description[rownum][colid] + if etype is None: + return u'' + if self._cw.vreg.schema.eschema(etype).final: + entity, rtype = self.cw_rset.related_entity(rownum, colid) + if entity is None: + return val # remove_html_tags() ? + return entity.sortvalue(rtype) + entity = self.cw_rset.get_entity(rownum, colid) + return entity.sortvalue() + + +class RsetTableView(TableMixIn, AnyRsetView): + """This table view accepts any non-empty rset. It uses introspection on the + result set to compute column names and the proper way to display the cells. + + It is highly configurable and accepts a wealth of options, but take care to + check what you're trying to achieve wouldn't be a job for the + :class:`EntityTableView`. Basically the question is: does this view should + be tied to the result set query's shape or no? If yes, than you're fine. If + no, you should take a look at the other table implementation. + + The following class attributes may be used to control the table: + + * `finalvid`, a view identifier that should be called on final entities + (e.g. attribute values). Default to 'final'. + + * `nonfinalvid`, a view identifier that should be called on + entities. Default to 'incontext'. + + * `displaycols`, if not `None`, should be a list of rset's columns to be + displayed. + + * `headers`, if not `None`, should be a list of headers for the table's + columns. `None` values in the list will be replaced by computed column + names. + + * `cellvids`, if not `None`, should be a dictionary with table column index + as key and a view identifier as value, telling the view that should be + used in the given column. + + Notice `displaycols`, `headers` and `cellvids` may be specified at selection + time but then the table won't have pagination and shouldn't be configured to + display the facets filter nor actions (as they wouldn't behave as expected). + + This table class use the :class:`RsetTableColRenderer` as default column + renderer. + + .. 
autoclass:: RsetTableColRenderer + """ #'# make emacs happier + __regid__ = 'table' + # selector trick for bw compath with the former :class:TableView + __select__ = AnyRsetView.__select__ & (~match_kwargs( + 'title', 'subvid', 'displayfilter', 'headers', 'displaycols', + 'displayactions', 'actions', 'divid', 'cellvids', 'cellattrs', + 'mainindex', 'paginate', 'page_size', mode='any') + | unreloadable_table()) + title = _('table') + # additional configuration parameters + finalvid = 'final' + nonfinalvid = 'incontext' + displaycols = None + headers = None + cellvids = None + default_column_renderer_class = RsetTableColRenderer + + def linkable(self): + # specific subclasses of this view usually don't want to be linkable + # since they depends on a particular shape (being linkable meaning view + # may be listed in possible views + return self.__regid__ == 'table' + + def call(self, headers=None, displaycols=None, cellvids=None, + paginate=None, **kwargs): + if self.headers: + self.headers = [h and self._cw._(h) for h in self.headers] + if (headers or displaycols or cellvids or paginate): + if headers is not None: + self.headers = headers + if displaycols is not None: + self.displaycols = displaycols + if cellvids is not None: + self.cellvids = cellvids + if paginate is not None: + self.paginable = paginate + if kwargs: + # old table view arguments that we can safely ignore thanks to + # selectors + if len(kwargs) > 1: + msg = '[3.14] %s arguments are deprecated' % ', '.join(kwargs) + else: + msg = '[3.14] %s argument is deprecated' % ', '.join(kwargs) + warn(msg, DeprecationWarning, stacklevel=2) + super(RsetTableView, self).call(**kwargs) + + def main_var_index(self): + """returns the index of the first non-attribute variable among the RQL + selected variables + """ + eschema = self._cw.vreg.schema.eschema + for i, etype in enumerate(self.cw_rset.description[0]): + if not eschema(etype).final: + return i + return None + + # layout callbacks ######################################################### + + @property + def table_size(self): + """return the number of rows (header excluded) to be displayed""" + return self.cw_rset.rowcount + + def build_column_renderers(self): + headers = self.headers + # compute displayed columns + if self.displaycols is None: + if headers is not None: + displaycols = list(range(len(headers))) + else: + rqlst = self.cw_rset.syntax_tree() + displaycols = list(range(len(rqlst.children[0].selection))) + else: + displaycols = self.displaycols + # compute table headers + main_var_index = self.main_var_index() + computed_titles = self.columns_labels(main_var_index) + # compute build renderers + cellvids = self.cellvids + renderers = [] + for colnum, colid in enumerate(displaycols): + addcount = False + # compute column header + title = None + if headers is not None: + title = headers[colnum] + if title is None: + title = computed_titles[colid] + if colid == main_var_index: + addcount = True + # compute cell vid for the column + if cellvids is not None and colnum in cellvids: + cellvid = cellvids[colnum] + else: + coltype = self.cw_rset.description[0][colid] + if coltype is not None and self._cw.vreg.schema.eschema(coltype).final: + cellvid = self.finalvid + else: + cellvid = self.nonfinalvid + # get renderer + renderer = self.column_renderer(colid, header=title, trheader=False, + addcount=addcount, cellvid=cellvid) + renderers.append(renderer) + return renderers + + +class EntityTableColRenderer(AbstractColumnRenderer): + """Default column renderer for 
:class:`EntityTableView`. + + You may use the :meth:`entity` method to retrieve the main entity for a + given row number. + + .. automethod:: cubicweb.web.views.tableview.EntityTableColRenderer.entity + .. automethod:: cubicweb.web.views.tableview.EntityTableColRenderer.render_entity + .. automethod:: cubicweb.web.views.tableview.EntityTableColRenderer.entity_sortvalue + """ + def __init__(self, renderfunc=None, sortfunc=None, sortable=None, **kwargs): + if renderfunc is None: + renderfunc = self.render_entity + # if renderfunc nor sortfunc nor sortable specified, column will be + # sortable using the default implementation. + if sortable is None: + sortable = True + # no sortfunc given but asked to be sortable: use the default sort + # method. Sub-class may set `entity_sortvalue` to None if they don't + # support sorting. + if sortfunc is None and sortable: + sortfunc = self.entity_sortvalue + # at this point `sortable` may still be unspecified while `sortfunc` is + # sure to be set to someting else than None if the column is sortable. + sortable = sortfunc is not None + super(EntityTableColRenderer, self).__init__(sortable=sortable, **kwargs) + self.renderfunc = renderfunc + self.sortfunc = sortfunc + + def copy(self): + assert self.view is None + # copy of attribute referencing a method doesn't work with python < 2.7 + renderfunc = self.__dict__.pop('renderfunc') + sortfunc = self.__dict__.pop('sortfunc') + try: + acopy = copy(self) + for aname, member in[('renderfunc', renderfunc), + ('sortfunc', sortfunc)]: + if isinstance(member, MethodType): + member = create_bound_method(member.__func__, acopy) + setattr(acopy, aname, member) + return acopy + finally: + self.renderfunc = renderfunc + self.sortfunc = sortfunc + + def render_cell(self, w, rownum): + entity = self.entity(rownum) + if entity is None: + w(self.empty_cell_content) + else: + self.renderfunc(w, entity) + + def sortvalue(self, rownum): + entity = self.entity(rownum) + if entity is None: + return None + else: + return self.sortfunc(entity) + + def entity(self, rownum): + """Convenience method returning the table's main entity.""" + return self.view.entity(rownum) + + def render_entity(self, w, entity): + """Sort value if `renderfunc` nor `sortfunc` specified at + initialization. + + This default implementation consider column id is an entity attribute + and print its value. + """ + w(entity.printable_value(self.colid)) + + def entity_sortvalue(self, entity): + """Cell rendering implementation if `renderfunc` nor `sortfunc` + specified at initialization. + + This default implementation consider column id is an entity attribute + and return its sort value by calling `entity.sortvalue(colid)`. + """ + return entity.sortvalue(self.colid) + + +class MainEntityColRenderer(EntityTableColRenderer): + """Renderer to be used for the column displaying the 'main entity' of a + :class:`EntityTableView`. + + By default display it using the 'incontext' view. You may specify another + view identifier using the `vid` argument. + + If header not specified, it would be built using entity types in the main + column. 
+ """ + def __init__(self, vid='incontext', addcount=True, **kwargs): + super(MainEntityColRenderer, self).__init__(addcount=addcount, **kwargs) + self.vid = vid + + def default_header(self): + view = self.view + if len(view.cw_rset) > 1: + suffix = '_plural' + else: + suffix = '' + return u', '.join(self._cw.__(et + suffix) + for et in view.cw_rset.column_types(view.cw_col or 0)) + + def render_entity(self, w, entity): + entity.view(self.vid, w=w) + + def entity_sortvalue(self, entity): + return entity.sortvalue() + + +class RelatedEntityColRenderer(MainEntityColRenderer): + """Renderer to be used for column displaying an entity related the 'main + entity' of a :class:`EntityTableView`. + + By default display it using the 'incontext' view. You may specify another + view identifier using the `vid` argument. + + If header not specified, it would be built by translating the column id. + """ + def __init__(self, getrelated, addcount=False, **kwargs): + super(RelatedEntityColRenderer, self).__init__(addcount=addcount, **kwargs) + self.getrelated = getrelated + + def entity(self, rownum): + entity = super(RelatedEntityColRenderer, self).entity(rownum) + return self.getrelated(entity) + + def default_header(self): + return self._cw._(self.colid) + + +class RelationColRenderer(EntityTableColRenderer): + """Renderer to be used for column displaying a list of entities related the + 'main entity' of a :class:`EntityTableView`. By default, the main entity is + considered as the subject of the relation but you may specify otherwise + using the `role` argument. + + By default display the related rset using the 'csv' view, using + 'outofcontext' sub-view for each entity. You may specify another view + identifier using respectivly the `vid` and `subvid` arguments. + + If you specify a 'rtype view', such as 'reledit', you should add a + is_rtype_view=True parameter. + + If header not specified, it would be built by translating the column id, + properly considering role. + """ + def __init__(self, role='subject', vid='csv', subvid=None, + fallbackvid='empty-cell', is_rtype_view=False, **kwargs): + super(RelationColRenderer, self).__init__(**kwargs) + self.role = role + self.vid = vid + if subvid is None and vid in ('csv', 'list'): + subvid = 'outofcontext' + self.subvid = subvid + self.fallbackvid = fallbackvid + self.is_rtype_view = is_rtype_view + + def render_entity(self, w, entity): + kwargs = {'w': w} + if self.is_rtype_view: + rset = None + kwargs['entity'] = entity + kwargs['rtype'] = self.colid + kwargs['role'] = self.role + else: + rset = entity.related(self.colid, self.role) + if self.subvid is not None: + kwargs['subvid'] = self.subvid + self._cw.view(self.vid, rset, self.fallbackvid, **kwargs) + + def default_header(self): + return display_name(self._cw, self.colid, self.role) + + entity_sortvalue = None # column not sortable by default + + +class EntityTableView(TableMixIn, EntityView): + """This abstract table view is designed to be used with an + :class:`is_instance()` or :class:`adaptable` predicate, hence doesn't depend + the result set shape as the :class:`RsetTableView` does. + + It will display columns that should be defined using the `columns` class + attribute containing a list of column ids. By default, each column is + renderered by :class:`EntityTableColRenderer` which consider that the column + id is an attribute of the table's main entity (ie the one for which the view + is selected). 
+ + You may wish to specify :class:`MainEntityColRenderer` or + :class:`RelatedEntityColRenderer` renderer for a column in the + :attr:`column_renderers` dictionary. + + .. autoclass:: cubicweb.web.views.tableview.EntityTableColRenderer + .. autoclass:: cubicweb.web.views.tableview.MainEntityColRenderer + .. autoclass:: cubicweb.web.views.tableview.RelatedEntityColRenderer + .. autoclass:: cubicweb.web.views.tableview.RelationColRenderer + """ + __abstract__ = True + default_column_renderer_class = EntityTableColRenderer + columns = None # to be defined in concret class + + def call(self, columns=None, **kwargs): + if columns is not None: + self.columns = columns + self.layout_render(self.w) + + @property + def table_size(self): + return self.cw_rset.rowcount + + def build_column_renderers(self): + return [self.column_renderer(colid) for colid in self.columns] + + def entity(self, rownum): + """Return the table's main entity""" + return self.cw_rset.get_entity(rownum, self.cw_col or 0) + + +class EmptyCellView(AnyRsetView): + __regid__ = 'empty-cell' + __select__ = yes() + def call(self, **kwargs): + self.w(u' ') + cell_call = call + + +################################################################################ +# DEPRECATED tables ############################################################ +################################################################################ + + +@add_metaclass(class_deprecated) +class TableView(AnyRsetView): + """The table view accepts any non-empty rset. It uses introspection on the + result set to compute column names and the proper way to display the cells. + + It is however highly configurable and accepts a wealth of options. + """ + __deprecation_warning__ = '[3.14] %(cls)s is deprecated' + __regid__ = 'table' + title = _('table') + finalview = 'final' + + table_widget_class = TableWidget + table_column_class = TableColumn + + tablesorter_settings = { + 'textExtraction': JSString('cw.sortValueExtraction'), + 'selectorHeaders': 'thead tr:first th', # only plug on the first row + } + handle_pagination = True + + def form_filter(self, divid, displaycols, displayactions, displayfilter, + paginate, hidden=True): + try: + filterform = self._cw.vreg['views'].select( + 'facet.filtertable', self._cw, rset=self.cw_rset) + except NoSelectableObject: + return () + vidargs = {'paginate': paginate, + 'displaycols': displaycols, + 'displayactions': displayactions, + 'displayfilter': displayfilter} + cssclass = hidden and 'hidden' or '' + filterform.render(self.w, vid=self.__regid__, divid=divid, + vidargs=vidargs, cssclass=cssclass) + return self.show_hide_actions(divid, not hidden) + + def main_var_index(self): + """Returns the index of the first non final variable of the rset. + + Used to select the main etype to help generate accurate column headers. + XXX explain the concept + + May return None if none is found. + """ + eschema = self._cw.vreg.schema.eschema + for i, etype in enumerate(self.cw_rset.description[0]): + try: + if not eschema(etype).final: + return i + except KeyError: # XXX possible? 
+ continue + return None + + def displaycols(self, displaycols, headers): + if displaycols is None: + if 'displaycols' in self._cw.form: + displaycols = [int(idx) for idx in self._cw.form['displaycols']] + elif headers is not None: + displaycols = list(range(len(headers))) + else: + displaycols = list(range(len(self.cw_rset.syntax_tree().children[0].selection))) + return displaycols + + def _setup_tablesorter(self, divid): + req = self._cw + req.add_js('jquery.tablesorter.js') + req.add_onload('''$(document).ready(function() { + $("#%s table.listing").tablesorter(%s); +});''' % (divid, js_dumps(self.tablesorter_settings))) + req.add_css(('cubicweb.tablesorter.css', 'cubicweb.tableview.css')) + + @cachedproperty + def initial_load(self): + """We detect a bit heuristically if we are built for the first time or + from subsequent calls by the form filter or by the pagination + hooks. + + """ + form = self._cw.form + return 'fromformfilter' not in form and '__start' not in form + + def call(self, title=None, subvid=None, displayfilter=None, headers=None, + displaycols=None, displayactions=None, actions=(), divid=None, + cellvids=None, cellattrs=None, mainindex=None, + paginate=False, page_size=None): + """Produces a table displaying a composite query + + :param title: title added before table + :param subvid: cell view + :param displayfilter: filter that selects rows to display + :param headers: columns' titles + :param displaycols: indexes of columns to display (first column is 0) + :param displayactions: if True, display action menu + """ + req = self._cw + divid = divid or req.form.get('divid') or 'rs%s' % make_uid(id(self.cw_rset)) + self._setup_tablesorter(divid) + # compute label first since the filter form may remove some necessary + # information from the rql syntax tree + if mainindex is None: + mainindex = self.main_var_index() + computed_labels = self.columns_labels(mainindex) + if not subvid and 'subvid' in req.form: + subvid = req.form.pop('subvid') + actions = list(actions) + if mainindex is None: + displayfilter, displayactions = False, False + else: + if displayfilter is None and req.form.get('displayfilter'): + displayfilter = True + if displayactions is None and req.form.get('displayactions'): + displayactions = True + displaycols = self.displaycols(displaycols, headers) + if self.initial_load: + self.w(u'
<div class="section">')
+            if not title and 'title' in req.form:
+                title = req.form['title']
+            if title:
+                self.w(u'

          %s

\n' % title)
+            if displayfilter:
+                actions += self.form_filter(divid, displaycols, displayfilter,
+                                            displayactions, paginate)
+        elif displayfilter:
+            actions += self.show_hide_actions(divid, True)
+        self.w(u'
          ' % divid) + if displayactions: + actionsbycat = self._cw.vreg['actions'].possible_actions(req, self.cw_rset) + for action in actionsbycat.get('mainactions', ()): + for action in action.actual_actions(): + actions.append( (action.url(), req._(action.title), + action.html_class(), None) ) + # render actions menu + if actions: + self.render_actions(divid, actions) + # render table + if paginate: + self.divid = divid # XXX iirk (see usage in page_navigation_url) + self.paginate(page_size=page_size, show_all_option=False) + table = self.table_widget_class(self) + for column in self.get_columns(computed_labels, displaycols, headers, + subvid, cellvids, cellattrs, mainindex): + table.append_column(column) + table.render(self.w) + self.w(u'
</div>\n')
+        if self.initial_load:
+            self.w(u'</div>
          \n') + + def page_navigation_url(self, navcomp, path, params): + """Build a URL to the current view using the attributes + + :param navcomp: a NavigationComponent to call a URL method on. + :param path: expected to be json here? + :param params: params to give to build_url method + + this is called by :class:`cubiweb.web.component.NavigationComponent` + """ + if hasattr(self, 'divid'): + # XXX this assert a single call + params['divid'] = self.divid + params['vid'] = self.__regid__ + return navcomp.ajax_page_url(**params) + + def show_hide_actions(self, divid, currentlydisplayed=False): + showhide = u';'.join(toggle_action('%s%s' % (divid, what))[11:] + for what in ('Form', 'Show', 'Hide', 'Actions')) + showhide = 'javascript:' + showhide + showlabel = self._cw._('show filter form') + hidelabel = self._cw._('hide filter form') + if currentlydisplayed: + return [(showhide, showlabel, 'hidden', '%sShow' % divid), + (showhide, hidelabel, None, '%sHide' % divid)] + return [(showhide, showlabel, None, '%sShow' % divid), + (showhide, hidelabel, 'hidden', '%sHide' % divid)] + + def render_actions(self, divid, actions): + box = MenuWidget('', 'tableActionsBox', _class='', islist=False) + label = tags.img(src=self._cw.uiprops['PUCE_DOWN'], + alt=xml_escape(self._cw._('action(s) on this selection'))) + menu = PopupBoxMenu(label, isitem=False, link_class='actionsBox', + ident='%sActions' % divid) + box.append(menu) + for url, label, klass, ident in actions: + menu.append(component.Link(url, label, klass=klass, id=ident)) + box.render(w=self.w) + self.w(u'
          ') + + def get_columns(self, computed_labels, displaycols, headers, subvid, + cellvids, cellattrs, mainindex): + """build columns description from various parameters + + : computed_labels: columns headers computed from rset to be used if there is no headers entry + : displaycols: see :meth:`call` + : headers: explicitly define columns headers + : subvid: see :meth:`call` + : cellvids: see :meth:`call` + : cellattrs: see :meth:`call` + : mainindex: see :meth:`call` + + return a list of columns description to be used by + :class:`~cubicweb.web.htmlwidgets.TableWidget` + """ + columns = [] + eschema = self._cw.vreg.schema.eschema + for colindex, label in enumerate(computed_labels): + if colindex not in displaycols: + continue + # compute column header + if headers is not None: + _label = headers[displaycols.index(colindex)] + if _label is not None: + label = _label + if colindex == mainindex and label is not None: + label += ' (%s)' % self.cw_rset.rowcount + column = self.table_column_class(label, colindex) + coltype = self.cw_rset.description[0][colindex] + # compute column cell view (if coltype is None, it's a left outer + # join, use the default non final subvid) + if cellvids and colindex in cellvids: + column.append_renderer(cellvids[colindex], colindex) + elif coltype is not None and eschema(coltype).final: + column.append_renderer(self.finalview, colindex) + else: + column.append_renderer(subvid or 'incontext', colindex) + if cellattrs and colindex in cellattrs: + for name, value in cellattrs[colindex].items(): + column.add_attr(name, value) + # add column + columns.append(column) + return columns + + + def render_cell(self, cellvid, row, col, w): + self._cw.view('cell', self.cw_rset, row=row, col=col, cellvid=cellvid, w=w) + + def get_rows(self): + return self.cw_rset + + @htmlescape + @jsonize + @limitsize(10) + def sortvalue(self, row, col): + # XXX it might be interesting to try to limit value's + # length as much as possible (e.g. by returning the 10 + # first characters of a string) + val = self.cw_rset[row][col] + if val is None: + return u'' + etype = self.cw_rset.description[row][col] + if etype is None: + return u'' + if self._cw.vreg.schema.eschema(etype).final: + entity, rtype = self.cw_rset.related_entity(row, col) + if entity is None: + return val # remove_html_tags() ? + return entity.sortvalue(rtype) + entity = self.cw_rset.get_entity(row, col) + return entity.sortvalue() + + +class EditableTableView(TableView): + __regid__ = 'editable-table' + finalview = 'editable-final' + title = _('editable-table') + + +@add_metaclass(class_deprecated) +class CellView(EntityView): + __deprecation_warning__ = '[3.14] %(cls)s is deprecated' + __regid__ = 'cell' + __select__ = nonempty_rset() + + def cell_call(self, row, col, cellvid=None): + """ + :param row, col: indexes locating the cell value in view's result set + :param cellvid: cell view (defaults to 'outofcontext') + """ + etype, val = self.cw_rset.description[row][col], self.cw_rset[row][col] + if etype is None or not self._cw.vreg.schema.eschema(etype).final: + if val is None: + # This is usually caused by a left outer join and in that case, + # regular views will most certainly fail if they don't have + # a real eid + # XXX if cellvid is e.g. reledit, we may wanna call it anyway + self.w(u' ') + else: + self.wview(cellvid or 'outofcontext', self.cw_rset, row=row, col=col) + else: + # XXX why do we need a fallback view here? 
+ self.wview(cellvid or 'final', self.cw_rset, 'null', row=row, col=col) + + +class InitialTableView(TableView): + """same display as table view but consider two rql queries : + + * the default query (ie `rql` form parameter), which is only used to select + this view and to build the filter form. This query should have the same + structure as the actual without actual restriction (but link to + restriction variables) and usually with a limit for efficiency (limit set + to 2 is advised) + + * the actual query (`actualrql` form parameter) whose results will be + displayed with default restrictions set + """ + __regid__ = 'initialtable' + __select__ = nonempty_rset() + # should not be displayed in possible view since it expects some specific + # parameters + title = None + + def call(self, title=None, subvid=None, headers=None, divid=None, + paginate=False, displaycols=None, displayactions=None, + mainindex=None): + """Dumps a table displaying a composite query""" + try: + actrql = self._cw.form['actualrql'] + except KeyError: + actrql = self.cw_rset.printable_rql() + else: + self._cw.ensure_ro_rql(actrql) + displaycols = self.displaycols(displaycols, headers) + if displayactions is None and 'displayactions' in self._cw.form: + displayactions = True + if divid is None and 'divid' in self._cw.form: + divid = self._cw.form['divid'] + self.w(u'
          ') + if not title and 'title' in self._cw.form: + # pop title so it's not displayed by the table view as well + title = self._cw.form.pop('title') + if title: + self.w(u'

          %s

          \n' % title) + if mainindex is None: + mainindex = self.main_var_index() + if mainindex is not None: + actions = self.form_filter(divid, displaycols, displayactions, + displayfilter=True, paginate=paginate, + hidden=True) + else: + actions = () + if not subvid and 'subvid' in self._cw.form: + subvid = self._cw.form.pop('subvid') + self._cw.view('table', self._cw.execute(actrql), + 'noresult', w=self.w, displayfilter=False, subvid=subvid, + displayactions=displayactions, displaycols=displaycols, + actions=actions, headers=headers, divid=divid) + self.w(u'
          \n') + + +class EditableInitialTableTableView(InitialTableView): + __regid__ = 'editable-initialtable' + finalview = 'editable-final' + + +@add_metaclass(class_deprecated) +class EntityAttributesTableView(EntityView): + """This table displays entity attributes in a table and allow to set a + specific method to help building cell content for each attribute as well as + column header. + + Table will render entity cell by using the appropriate build_COLNAME_cell + methods if defined otherwise cell content will be entity.COLNAME. + + Table will render column header using the method header_for_COLNAME if + defined otherwise COLNAME will be used. + """ + __deprecation_warning__ = '[3.14] %(cls)s is deprecated' + __abstract__ = True + columns = () + table_css = "listing" + css_files = () + + def call(self, columns=None): + if self.css_files: + self._cw.add_css(self.css_files) + _ = self._cw._ + self.columns = columns or self.columns + sample = self.cw_rset.get_entity(0, 0) + self.w(u'' % self.table_css) + self.table_header(sample) + self.w(u'') + for row in range(self.cw_rset.rowcount): + self.cell_call(row=row, col=0) + self.w(u'') + self.w(u'
          ') + + def cell_call(self, row, col): + _ = self._cw._ + entity = self.cw_rset.get_entity(row, col) + entity.complete() + infos = {} + for col in self.columns: + meth = getattr(self, 'build_%s_cell' % col, None) + # find the build method or try to find matching attribute + if meth: + content = meth(entity) + else: + content = entity.printable_value(col) + infos[col] = content + self.w(u"""""") + line = u''.join(u'%%(%s)s' % col for col in self.columns) + self.w(line % infos) + self.w(u'\n') + + def table_header(self, sample): + """builds the table's header""" + self.w(u'') + for column in self.columns: + meth = getattr(self, 'header_for_%s' % column, None) + if meth: + colname = meth(sample) + else: + colname = self._cw._(column) + self.w(u'%s' % xml_escape(colname)) + self.w(u'\n') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/tabs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/tabs.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,249 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""base classes to handle tabbed views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six import string_types + +from logilab.common.deprecation import class_renamed +from logilab.mtconverter import xml_escape + +from cubicweb import NoSelectableObject, role +from cubicweb import tags, uilib, utils +from cubicweb.predicates import partial_has_related_entities +from cubicweb.view import EntityView +from cubicweb.web.views import primary + +class LazyViewMixin(object): + """provides two convenience methods for the tab machinery. + + Can also be used to lazy-load arbitrary views. + """ + + def _prepare_bindings(self, vid, reloadable): + self._cw.add_onload(u""" + jQuery('#lazy-%(vid)s').bind('%(event)s', function(event) { + loadNow('#lazy-%(vid)s', '#%(vid)s-hole', %(reloadable)s); + });""" % {'event': 'load_%s' % vid, 'vid': vid, + 'reloadable' : str(reloadable).lower()}) + + def lazyview(self, vid, rql=None, eid=None, rset=None, tabid=None, + reloadable=False, show_spinbox=True, w=None): + """a lazy version of wview""" + w = w or self.w + self._cw.add_js('cubicweb.ajax.js') + # the form is copied into urlparams to please the inner views + # that might want to take params from it + # beware of already present rql or eid elements + # to be safe of collision a proper argument passing protocol + # (with namespaces) should be used instead of the current + # ad-hockery + urlparams = self._cw.form.copy() + urlparams.pop('rql', None) + urlparams.pop('eid', None) + urlparams.update({'vid' : vid, 'fname' : 'view'}) + if rql: + urlparams['rql'] = rql + elif eid: + urlparams['eid'] = eid + elif rset: + urlparams['rql'] = rset.printable_rql() + if tabid is None: + tabid = uilib.domid(vid) + w(u'
          ' % ( + tabid, xml_escape(self._cw.build_url('ajax', **urlparams)))) + if show_spinbox: + # Don't use ``alt`` since image is a *visual* helper for ajax + w(u'' + % (xml_escape(self._cw.data_url('loading.gif')), tabid)) + else: + w(u'
          ' % tabid) + w(u'' + % (xml_escape(self._cw._('Link:')), + tabid, + xml_escape(self._cw.build_url(**urlparams)), + xml_escape(self._cw._(tabid)))) + w(u'
          ') + self._prepare_bindings(tabid, reloadable) + + def forceview(self, vid): + """trigger an event that will force immediate loading of the view on dom + readyness + """ + self._cw.add_onload(uilib.js.triggerLoad(vid)) + + +class TabsMixin(LazyViewMixin): + """a tab mixin to easily get jQuery based, lazy, ajax tabs""" + lazy = True + + @property + def cookie_name(self): + return str('%s_active_tab' % self._cw.vreg.config.appid) + + def active_tab(self, default): + if 'tab' in self._cw.form: + return self._cw.form['tab'] + cookies = self._cw.get_cookie() + cookiename = self.cookie_name + activetab = cookies.get(cookiename) + if activetab is None: + domid = uilib.domid(default) + self._cw.set_cookie(cookiename, domid) + return domid + return activetab.value + + def prune_tabs(self, tabs, default_tab): + selected_tabs = [] + may_be_active_tab = self.active_tab(default_tab) + active_tab = uilib.domid(default_tab) + viewsvreg = self._cw.vreg['views'] + for tab in tabs: + if isinstance(tab, string_types): + tabid, tabkwargs = tab, {} + else: + tabid, tabkwargs = tab + tabkwargs = tabkwargs.copy() + tabkwargs.setdefault('rset', self.cw_rset) + vid = tabkwargs.get('vid', tabid) + domid = uilib.domid(tabid) + try: + viewsvreg.select(vid, self._cw, tabid=domid, **tabkwargs) + except NoSelectableObject: + continue + selected_tabs.append((tabid, domid, tabkwargs)) + if domid == may_be_active_tab: + active_tab = domid + return selected_tabs, active_tab + + def render_tabs(self, tabs, default, entity=None): + # delegate to the default tab if there is more than one entity + # in the result set (tabs are pretty useless there) + if entity and len(self.cw_rset) > 1: + entity.view(default, w=self.w) + return + self._cw.add_css('jquery.ui.css') + self._cw.add_js(('jquery.ui.js', 'cubicweb.ajax.js', 'jquery.cookie.js')) + # prune tabs : not all are to be shown + tabs, active_tab = self.prune_tabs(tabs, default) + # build the html structure + w = self.w + uid = entity and entity.eid or utils.make_uid('tab') + w(u'
          ' % uid) + w(u'') + for tabid, domid, tabkwargs in tabs: + w(u'
<div id="%s">' % domid)
+            if self.lazy:
+                tabkwargs.setdefault('tabid', domid)
+                tabkwargs.setdefault('vid', tabid)
+                self.lazyview(**tabkwargs)
+            else:
+                self._cw.view(tabid, w=self.w, **tabkwargs)
+            w(u'</div>
          ') + w(u'
          ') + # call the setTab() JS function *after* each tab is generated + # because the callback binding needs to be done before + # XXX make work history: true + if self.lazy: + self._cw.add_onload(u""" + jQuery('#entity-tabs-%(uid)s').tabs( + { active: %(tabindex)s, + activate: function(event, ui) { + setTab(ui.newPanel.attr('id'), '%(cookiename)s'); + } + }); + setTab('%(domid)s', '%(cookiename)s'); +""" % {'tabindex' : active_tab_idx, + 'domid' : active_tab, + 'uid' : uid, + 'cookiename' : self.cookie_name}) + else: + self._cw.add_onload( + u"jQuery('#entity-tabs-%(uid)s').tabs({active: %(tabindex)s});" + % {'tabindex': active_tab_idx, 'uid': uid}) + + +class EntityRelationView(EntityView): + """view displaying entity related stuff. + Such a view _must_ provide the rtype, target and vid attributes : + + Example : + + class ProjectScreenshotsView(EntityRelationView): + '''display project's screenshots''' + __regid__ = title = _('projectscreenshots') + __select__ = EntityRelationView.__select__ & is_instance('Project') + rtype = 'screenshot' + role = 'subject' + vid = 'gallery' + + in this example, entities related to project entity by the 'screenshot' + relation (where the project is subject of the relation) will be displayed + using the 'gallery' view. + """ + __select__ = EntityView.__select__ & partial_has_related_entities() + vid = 'list' + # to be defined in concrete classes + rtype = title = None + + def cell_call(self, row, col): + rset = self.cw_rset.get_entity(row, col).related(self.rtype, role(self)) + self.w(u'
          ') + if self.title: + self.w(tags.h1(self._cw._(self.title))) + self.wview(self.vid, rset, 'noresult') + self.w(u'
</div>')
+
+class TabbedPrimaryView(TabsMixin, primary.PrimaryView):
+    __abstract__ = True # don't register
+
+    tabs = [_('main_tab')]
+    default_tab = 'main_tab'
+
+    def render_entity(self, entity):
+        self.render_entity_toolbox(entity)
+        self.w(u'
          ') + self.render_entity_title(entity) + self.render_tabs(self.tabs, self.default_tab, entity) + +TabedPrimaryView = class_renamed('TabedPrimaryView', TabbedPrimaryView) + +class PrimaryTab(primary.PrimaryView): + __regid__ = 'main_tab' + title = None # should not appear in possible views + + def is_primary(self): + return True + + def render_entity_title(self, entity): + pass + def render_entity_toolbox(self, entity): + pass diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/timeline.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/timeline.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,34 @@ +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +try: + from cubes.timeline.views import ( + TimelineJsonView, + TimelineViewMixIn, + TimelineView, + StaticTimelineView) + +except ImportError: + pass +else: + from logilab.common.deprecation import class_moved + + TimelineJsonView = class_moved(TimelineJsonView, 'TimelineJsonView') + TimelineViewMixIn = class_moved(TimelineViewMixIn, 'TimelineViewMixIn') + TimelineView = class_moved(TimelineView, 'TimelineView') + StaticTimelineView = class_moved(StaticTimelineView, 'StaticTimelineView') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/timetable.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/timetable.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,221 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
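Tying the tabs machinery above together, a cube would typically combine TabbedPrimaryView, the default 'main_tab' PrimaryTab and an extra tab view roughly as follows (a sketch only: the `Project` entity type, its `concerns` relation and the 'mycube' identifiers are invented):

    from cubicweb import _
    from cubicweb.predicates import is_instance
    from cubicweb.view import EntityView
    from cubicweb.web.views.tabs import TabbedPrimaryView

    class ProjectPrimaryView(TabbedPrimaryView):
        __select__ = is_instance('Project')
        tabs = ['main_tab', _('mycube.project.tickets_tab')]

    class ProjectTicketsTab(EntityView):
        """content of the second tab: tickets concerning the project"""
        __regid__ = 'mycube.project.tickets_tab'
        __select__ = is_instance('Project')

        def entity_call(self, entity):
            self._cw.view('list', entity.related('concerns', 'object'),
                          'noresult', w=self.w)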
+"""html timetable views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves import range + +from logilab.mtconverter import xml_escape +from logilab.common.date import ONEDAY, date_range, todatetime + +from cubicweb.predicates import adaptable +from cubicweb.view import EntityView + + +class _TaskEntry(object): + def __init__(self, task, color, column): + self.task = task + self.color = color + self.column = column + self.lines = 1 + +MIN_COLS = 3 # minimum number of task columns for a single user +ALL_USERS = object() + +class TimeTableView(EntityView): + __regid__ = 'timetable' + title = _('timetable') + __select__ = adaptable('ICalendarable') + paginable = False + + def call(self, title=None): + """Dumps a timetable from a resultset composed of a note (anything + with start/stop) and a user (anything)""" + self._cw.add_css('cubicweb.timetable.css') + dates = {} + users = [] + users_max = {} + # XXX: try refactoring with calendar.py:OneMonthCal + for row in range(self.cw_rset.rowcount): + task = self.cw_rset.get_entity(row, 0) + icalendarable = task.cw_adapt_to('ICalendarable') + if len(self.cw_rset[row]) > 1 and self.cw_rset.description[row][1] == 'CWUser': + user = self.cw_rset.get_entity(row, 1) + else: + user = ALL_USERS + the_dates = [] + if icalendarable.start and icalendarable.stop: + if icalendarable.start.toordinal() == icalendarable.stop.toordinal(): + the_dates.append(icalendarable.start) + else: + the_dates += date_range(icalendarable.start, + icalendarable.stop + ONEDAY) + elif icalendarable.start: + the_dates.append(icalendarable.start) + elif icalendarable.stop: + the_dates.append(icalendarable.stop) + for d in the_dates: + d = todatetime(d) + d_users = dates.setdefault(d, {}) + u_tasks = d_users.setdefault(user, set()) + u_tasks.add( task ) + task_max = users_max.setdefault(user, 0) + if len(u_tasks)>task_max: + users_max[user] = len(u_tasks) + if user not in users: + # keep original ordering + users.append(user) + if not dates: + return + date_min = min(dates) + date_max = max(dates) + #users = list(sorted(users, key=lambda u:u.login)) + + rows = [] + # colors here are class names defined in cubicweb.css + colors = ["col%x" % i for i in range(12)] + next_color_index = 0 + + visited_tasks = {} # holds a description of a task for a user + task_colors = {} # remember a color assigned to a task + for date in date_range(date_min, date_max + ONEDAY): + columns = [date] + d_users = dates.get(date, {}) + for user in users: + # every user has its column "splitted" in at least MIN_COLS + # sub-columns (for overlapping tasks) + user_columns = [None] * max(MIN_COLS, users_max[user]) + # every task that is "visited" for the first time + # require a special treatment, so we put them in + # 'postpone' + postpone = [] + for task in d_users.get(user, []): + key = (task, user) + if key in visited_tasks: + task_descr = visited_tasks[ key ] + user_columns[task_descr.column] = task_descr, False + task_descr.lines += 1 + else: + postpone.append(key) + for key in postpone: + # to every 'new' task we must affect a color + # (which must be the same for every user concerned + # by the task) + task, user = key + for i, t in enumerate(user_columns): + if t is None: + if task in task_colors: + color = task_colors[task] + else: + color = colors[next_color_index] + next_color_index = (next_color_index+1)%len(colors) + task_colors[task] = color + task_descr = _TaskEntry(task, color, i) + user_columns[i] = task_descr, True + visited_tasks[key] = task_descr + break + 
else: + raise RuntimeError("is it possible we got it wrong?") + + columns.append( user_columns ) + rows.append( columns ) + + widths = [ len(col) for col in rows[0][1:] ] + self.w(u'
          ') + if title: + self.w(u'

          %s

          \n' % title) + self.w(u'') + self.render_col_headers(users, widths) + self.render_rows(rows) + self.w(u'
          ') + self.w(u'
          \n') + + def render_col_headers(self, users, widths): + """ render column headers """ + self.w(u'\n') + + self.w(u' \n') + columns = [] + for user, width in zip(users, widths): + self.w(u'' % max(MIN_COLS, width)) + if user is ALL_USERS: + self.w(u'*') + else: + user.view('oneline', w=self.w) + self.w(u'') + self.w(u'\n') + return columns + + def render_rows(self, rows): + """ render table content (row headers and central content) """ + odd = False + previous_is_empty = False + for row in rows: + date = row[0] + empty_line = True + for group in row[1:]: + for value in group: + if value: + empty_line = False + break + else: + continue + break + if empty_line and previous_is_empty: + continue + previous_is_empty = False + + klass = "even" + if date.weekday() in (5, 6) and not empty_line: + klass = "odd" + self.w(u'' % klass) + odd = not odd + + if not empty_line: + self.w(u'%s' % self._cw.format_date(date) ) + else: + self.w(u'...' ) + previous_is_empty = True + + empty_klasses = [ "ttle", "ttme", "ttre" ] + filled_klasses = [ "ttlf", "ttmf", "ttrf" ] + kj = 0 # 0: left, 1: mid, 2: right + for uid, group in enumerate(row[1:]): + for i, value in enumerate(group): + if i == 0: + kj = 0 + elif i == len(group): + kj = 2 + else: + kj = 1 + if value: + task_descr, first_row = value + if first_row: + url = xml_escape(task_descr.task.absolute_url(vid="edition")) + self.w(u' 
          ' % ( + task_descr.lines, task_descr.color, filled_klasses[kj], url)) + task_descr.task.view('tooltip', w=self.w) + self.w(u'
          ') + else: + if empty_line: + self.w(u' ') + else: + self.w(u' ' % empty_klasses[kj] ) + self.w(u'\n') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/treeview.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/treeview.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,315 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Set of tree views / tree-building widgets, some based on jQuery treeview +plugin. +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from warnings import warn + +from logilab.mtconverter import xml_escape + +from cubicweb.utils import make_uid, json +from cubicweb.predicates import adaptable +from cubicweb.view import EntityView +from cubicweb.web.views import baseviews +from cubicweb.web.views.ajaxcontroller import ajaxfunc + +def treecookiename(treeid): + return str('%s-treestate' % treeid) + +def _done_init(done, view, row, col): + """handle an infinite recursion safety belt""" + if done is None: + done = set() + entity = view.cw_rset.get_entity(row, col) + if entity.eid in done: + msg = entity._cw._('loop in %(rel)s relation (%(eid)s)') % { + 'rel': entity.cw_adapt_to('ITree').tree_relation, + 'eid': entity.eid + } + return None, msg + done.add(entity.eid) + return done, entity + + +class BaseTreeView(baseviews.ListView): + """base tree view""" + __regid__ = 'tree' + __select__ = adaptable('ITree') + item_vid = 'treeitem' + + def call(self, done=None, **kwargs): + if done is None: + done = set() + super(BaseTreeView, self).call(done=done, **kwargs) + + def cell_call(self, row, col=0, vid=None, done=None, maxlevel=None, klass=None, **kwargs): + assert maxlevel is None or maxlevel > 0 + done, entity = _done_init(done, self, row, col) + if done is None: + # entity is actually an error message + self.w(u'
        1. %s
        2. ' % entity) + return + self.open_item(entity) + entity.view(vid or self.item_vid, w=self.w, **kwargs) + if maxlevel is not None: + maxlevel -= 1 + if maxlevel == 0: + self.close_item(entity) + return + relatedrset = entity.cw_adapt_to('ITree').children(entities=False) + self.wview(self.__regid__, relatedrset, 'null', done=done, + maxlevel=maxlevel, klass=klass, **kwargs) + self.close_item(entity) + + def open_item(self, entity): + self.w(u'
        3. \n' % entity.cw_etype.lower()) + def close_item(self, entity): + self.w(u'
        4. \n') + + +class TreePathView(EntityView): + """a recursive path view""" + __regid__ = 'path' + __select__ = adaptable('ITree') + item_vid = 'oneline' + separator = u' > ' + + def call(self, **kwargs): + self.w(u'
          ') + super(TreePathView, self).call(**kwargs) + self.w(u'
          ') + + def cell_call(self, row, col=0, vid=None, done=None, **kwargs): + done, entity = _done_init(done, self, row, col) + if done is None: + # entity is actually an error message + self.w(u'%s' % entity) + return + parent = entity.cw_adapt_to('ITree').parent() + if parent: + parent.view(self.__regid__, w=self.w, done=done) + self.w(self.separator) + entity.view(vid or self.item_vid, w=self.w) + + +class TreeComboBoxView(TreePathView): + """display folder in edition's combobox""" + __regid__ = 'combobox' + item_vid = 'text' + separator = u' > ' + +# XXX rename regid to ajaxtree/foldabletree or something like that (same for +# treeitemview) +class TreeView(EntityView): + """ajax tree view, click to expand folder""" + + __regid__ = 'treeview' + itemvid = 'treeitemview' + subvid = 'oneline' + cssclass = 'treeview widget' + title = _('tree view') + + def _init_params(self, subvid, treeid, initial_load, initial_thru_ajax, morekwargs): + form = self._cw.form + if subvid is None: + subvid = form.pop('treesubvid', self.subvid) # consume it + if treeid is None: + treeid = form.pop('treeid', None) + if treeid is None: + treeid = 'throw_away' + make_uid('uid') + if 'morekwargs' in self._cw.form: + ajaxargs = json.loads(form.pop('morekwargs')) + # got unicode & python keywords must be strings + morekwargs.update(dict((str(k), v) + for k, v in ajaxargs.items())) + toplevel_thru_ajax = form.pop('treeview_top', False) or initial_thru_ajax + toplevel = toplevel_thru_ajax or (initial_load and not form.get('fname')) + return subvid, treeid, toplevel_thru_ajax, toplevel + + def _init_headers(self, treeid): + self._cw.add_css(('jquery-treeview/jquery.treeview.css', 'cubicweb.treeview.css')) + self._cw.add_js(('cubicweb.ajax.js', 'cubicweb.widgets.js', 'jquery-treeview/jquery.treeview.js')) + self._cw.html_headers.add_onload(u""" +jQuery("#tree-%s").treeview({toggle: toggleTree, prerendered: true});""" % treeid) + + def call(self, subvid=None, treeid=None, + initial_load=True, initial_thru_ajax=False, **morekwargs): + subvid, treeid, toplevel_thru_ajax, toplevel = self._init_params( + subvid, treeid, initial_load, initial_thru_ajax, morekwargs) + ulid = ' ' + if toplevel: + self._init_headers(treeid) + ulid = ' id="tree-%s"' % treeid + self.w(u'' % (ulid, self.cssclass)) + # XXX force sorting on x.sortvalue() (which return dc_title by default) + # we need proper ITree & co specification to avoid this. + # (pb when type ambiguity at the other side of the tree relation, + # unability to provide generic implementation on eg Folder...) + for i, entity in enumerate(sorted(self.cw_rset.entities(), + key=lambda x: x.sortvalue())): + if i+1 < len(self.cw_rset): + morekwargs['is_last'] = False + else: + morekwargs['is_last'] = True + entity.view(self.itemvid, vid=subvid, parentvid=self.__regid__, + treeid=treeid, w=self.w, **morekwargs) + self.w(u'
') + + def cell_call(self, *args, **allargs): + """ does not makes much sense until you have to invoke + somentity.view('treeview') """ + allargs.pop('row') + allargs.pop('col') + self.call(*args, **allargs) + + +class FileTreeView(TreeView): + """specific version of the treeview to display file trees + """ + __regid__ = 'filetree' + cssclass = 'treeview widget filetree' + title = _('file tree view') + + def call(self, subvid=None, treeid=None, initial_load=True, **kwargs): + super(FileTreeView, self).call(treeid=treeid, subvid='filetree-oneline', + initial_load=initial_load, **kwargs) + +class FileItemInnerView(EntityView): + """inner view used by the TreeItemView instead of oneline view + + This view adds an enclosing with some specific CSS classes + around the oneline view. This is needed by the jquery treeview plugin. + """ + __regid__ = 'filetree-oneline' + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + itree = entity.cw_adapt_to('ITree') + if itree and not itree.is_leaf(): + self.w(u'
%s
\n' % entity.view('oneline')) + else: + # XXX define specific CSS classes according to mime types + self.w(u'
%s
\n' % entity.view('oneline')) + + +class DefaultTreeViewItemView(EntityView): + """default treeitem view for entities which don't adapt to ITree""" + __regid__ = 'treeitemview' + + def cell_call(self, row, col, vid='oneline', treeid=None, **morekwargs): + assert treeid is not None + itemview = self._cw.view(vid, self.cw_rset, row=row, col=col) + last_class = morekwargs['is_last'] and ' class="last"' or '' + self.w(u'%s' % (last_class, itemview)) + + +class TreeViewItemView(EntityView): + """specific treeitem view for entities which adapt to ITree + + (each item should be expandable if it's not a tree leaf) + """ + __regid__ = 'treeitemview' + __select__ = adaptable('ITree') + default_branch_state_is_open = False + + def open_state(self, eeid, treeid): + cookies = self._cw.get_cookie() + treestate = cookies.get(treecookiename(treeid)) + if treestate: + return str(eeid) in treestate.value.split(':') + return self.default_branch_state_is_open + + def cell_call(self, row, col, treeid, vid='oneline', parentvid='treeview', + is_last=False, **morekwargs): + w = self.w + entity = self.cw_rset.get_entity(row, col) + itree = entity.cw_adapt_to('ITree') + liclasses = [] + if self._cw.url(includeparams=False) == entity.absolute_url(): + liclasses.append(u'selected') + is_open = self.open_state(entity.eid, treeid) + is_leaf = itree is None or itree.is_leaf() + if is_leaf: + if is_last: + liclasses.append('last') + w(u'
  • ' % u' '.join(liclasses)) + else: + rql = itree.children_rql() % {'x': entity.eid} + url = xml_escape(self._cw.build_url('ajax', rql=rql, vid=parentvid, + pageid=self._cw.pageid, + treeid=treeid, + fname='view', + treesubvid=vid, + morekwargs=json.dumps(morekwargs))) + divclasses = ['hitarea'] + if is_open: + liclasses.append('collapsable') + divclasses.append('collapsable-hitarea') + else: + liclasses.append('expandable') + divclasses.append('expandable-hitarea') + if is_last: + if is_open: + liclasses.append('lastCollapsable') + divclasses.append('lastCollapsable-hitarea') + else: + liclasses.append('lastExpandable') + divclasses.append('lastExpandable-hitarea') + if is_open: + w(u'
  • ' % u' '.join(liclasses)) + else: + w(u'
  • ' % (url, u' '.join(liclasses))) + if treeid.startswith('throw_away'): + divtail = '' + else: + divtail = """ onclick="asyncRemoteExec('node_clicked', '%s', '%s')" """ % ( + treeid, entity.eid) + w(u'
    ' % (u' '.join(divclasses), divtail)) + + # add empty
      because jquery's treeview plugin checks for + # sublists presence + if not is_open: + w(u'
      • place holder
      ') + # the local node info + self.wview(vid, self.cw_rset, row=row, col=col, **morekwargs) + if is_open and not is_leaf: # => rql is defined + self.wview(parentvid, itree.children(entities=False), subvid=vid, + treeid=treeid, initial_load=False, **morekwargs) + w(u'') + + + +@ajaxfunc +def node_clicked(self, treeid, nodeeid): + """add/remove eid in treestate cookie""" + cookies = self._cw.get_cookie() + statename = treecookiename(treeid) + treestate = cookies.get(statename) + if treestate is None: + self._cw.set_cookie(statename, nodeeid) + else: + marked = set(filter(None, treestate.value.split(':'))) + if nodeeid in marked: + marked.remove(nodeeid) + else: + marked.add(nodeeid) + self._cw.set_cookie(statename, ':'.join(marked)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/uicfg.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/uicfg.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,693 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""This module (``cubicweb.web.views.uicfg``) regroups a set of structures that may be +used to configure various options of the generated web interface. + +To configure the interface generation, we use ``RelationTag`` objects. + +Index view configuration +```````````````````````` +:indexview_etype_section: + entity type category in the index/manage page. May be one of: + + * ``application`` + * ``system`` + * ``schema`` + * ``subobject`` (not displayed by default) + + By default only entities on the ``application`` category are shown. + +.. sourcecode:: python + + from cubicweb.web.views import uicfg + # force hiding + uicfg.indexview_etype_section['HideMe'] = 'subobject' + # force display + uicfg.indexview_etype_section['ShowMe'] = 'application' + + +Actions box configuration +````````````````````````` +:actionbox_appearsin_addmenu: + simple boolean relation tags used to control the "add entity" submenu. + Relations whose rtag is True will appears, other won't. + +.. 
sourcecode:: python + + # Adds all subjects of the entry_of relation in the add menu of the ``Blog`` + # primary view + uicfg.actionbox_appearsin_addmenu.tag_object_of(('*', 'entry_of', 'Blog'), True) +""" +__docformat__ = "restructuredtext en" + +from warnings import warn + +from six import string_types + +from cubicweb import neg_role +from cubicweb.rtags import (RelationTags, RelationTagsBool, RelationTagsSet, + RelationTagsDict, NoTargetRelationTagsDict, + _ensure_str_key) +from cubicweb.schema import META_RTYPES, INTERNAL_TYPES, WORKFLOW_TYPES + + +# primary view configuration ################################################## + +class PrimaryViewSectionRelationTags(RelationTags): + """primary view section configuration""" + __regid__ = 'primaryview_section' + + _allowed_values = frozenset(('attributes', 'relations', + 'sideboxes', 'hidden')) + + def _init(self, sschema, rschema, oschema, role): + if self.get(sschema, rschema, oschema, role) is None: + rdef = rschema.rdef(sschema, oschema) + if rschema.final: + if rschema.meta or sschema.is_metadata(rschema) \ + or oschema.type in ('Password', 'Bytes'): + section = 'hidden' + else: + section = 'attributes' + else: + if rdef.role_cardinality(role) in '1+': + section = 'attributes' + elif rdef.composite == neg_role(role): + section = 'relations' + else: + section = 'sideboxes' + self.tag_relation((sschema, rschema, oschema, role), section) + +primaryview_section = PrimaryViewSectionRelationTags() + + +class DisplayCtrlRelationTags(NoTargetRelationTagsDict): + """primary view display controller configuration""" + __regid__ = 'primaryview_display_ctrl' + + def __init__(self, *args, **kwargs): + super(DisplayCtrlRelationTags, self).__init__(*args, **kwargs) + self.counter = 0 + + def _init(self, sschema, rschema, oschema, role): + if role == 'subject': + oschema = '*' + else: + sschema = '*' + self.counter += 1 + self.setdefault((sschema, rschema, oschema, role), + 'order', + self.counter) + + def set_fields_order(self, etype, relations): + """specify the field order in `etype` primary view. + + :param etype: the entity type as a string + :param attrs: the ordered list of attribute names (or relations) + + `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_rel) + + Unspecified fields will be displayed after specified ones, their + order being consistent with the schema definition. + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import primaryview_display_ctrl as pvdc + pvdc.set_fields_order('CWUser', ('firstname', ('in_group', 'subject'), + 'surname', 'login')) + + """ + for index, relation in enumerate(relations): + if not isinstance(relation, tuple): + relation = (relation, 'subject') + rtype, role = relation + if role == 'subject': + self.tag_subject_of((etype, rtype, '*'), {'order': index}) + else: + self.tag_object_of((etype, rtype, '*'), {'order': index}) + + +primaryview_display_ctrl = DisplayCtrlRelationTags() + + +# index view configuration #################################################### +# entity type section in the index/manage page. May be one of +# * 'application' +# * 'system' +# * 'schema' +# * 'hidden' +# * 'subobject' (not displayed by default) + +class InitializableDict(dict): # XXX not a rtag. Turn into an appobject? 
+ def __init__(self, *args, **kwargs): + super(InitializableDict, self).__init__(*args, **kwargs) + self.__defaults = dict(self) + + def init(self, schema, check=True): + self.update(self.__defaults) + for eschema in schema.entities(): + if eschema.final: + continue + if eschema.schema_entity(): + self.setdefault(eschema, 'schema') + elif eschema in INTERNAL_TYPES or eschema in WORKFLOW_TYPES: + self.setdefault(eschema, 'system') + elif eschema.is_subobject(strict=True): + self.setdefault(eschema, 'subobject') + else: + self.setdefault(eschema, 'application') + +indexview_etype_section = InitializableDict( + EmailAddress='subobject', + Bookmark='system', + # entity types in the 'system' table by default (managers only) + CWUser='system', CWGroup='system', + ) + + +# autoform.AutomaticEntityForm configuration ################################## + +def _formsections_as_dict(formsections): + result = {} + for formsection in formsections: + formtype, section = formsection.split('_', 1) + result[formtype] = section + return result + +def _card_and_comp(sschema, rschema, oschema, role): + rdef = rschema.rdef(sschema, oschema) + if role == 'subject': + card = rdef.cardinality[0] + composed = not rschema.final and rdef.composite == 'object' + else: + card = rdef.cardinality[1] + composed = not rschema.final and rdef.composite == 'subject' + return card, composed + +class AutoformSectionRelationTags(RelationTagsSet): + """autoform relations'section""" + __regid__ = 'autoform_section' + + _allowed_form_types = ('main', 'inlined', 'muledit') + _allowed_values = {'main': ('attributes', 'inlined', 'relations', + 'metadata', 'hidden'), + 'inlined': ('attributes', 'inlined', 'hidden'), + 'muledit': ('attributes', 'hidden'), + } + + def init(self, schema, check=True): + super(AutoformSectionRelationTags, self).init(schema, check) + self.apply(schema, self._initfunc_step2) + + def _init(self, sschema, rschema, oschema, role): + formsections = self.init_get(sschema, rschema, oschema, role) + if formsections is None: + formsections = self.tag_container_cls() + if not any(tag.startswith('inlined') for tag in formsections): + if not rschema.final: + negsects = self.init_get(sschema, rschema, oschema, neg_role(role)) + if 'main_inlined' in negsects: + formsections.add('inlined_hidden') + key = _ensure_str_key( (sschema, rschema, oschema, role) ) + self._tagdefs[key] = formsections + + def _initfunc_step2(self, sschema, rschema, oschema, role): + formsections = self.get(sschema, rschema, oschema, role) + sectdict = _formsections_as_dict(formsections) + if rschema in META_RTYPES: + sectdict.setdefault('main', 'hidden') + sectdict.setdefault('muledit', 'hidden') + sectdict.setdefault('inlined', 'hidden') + elif role == 'subject' and rschema in sschema.meta_attributes(): + # meta attribute, usually embeded by the described attribute's field + # (eg RichTextField, FileField...) 
+ sectdict.setdefault('main', 'hidden') + sectdict.setdefault('muledit', 'hidden') + sectdict.setdefault('inlined', 'hidden') + # ensure we have a tag for each form type + if not 'main' in sectdict: + if not rschema.final and ( + sectdict.get('inlined') == 'attributes' or + 'inlined_attributes' in self.init_get(sschema, rschema, oschema, + neg_role(role))): + sectdict['main'] = 'hidden' + elif sschema.is_metadata(rschema): + sectdict['main'] = 'metadata' + else: + card, composed = _card_and_comp(sschema, rschema, oschema, role) + if card in '1+': + sectdict['main'] = 'attributes' + if not 'muledit' in sectdict: + sectdict['muledit'] = 'attributes' + elif rschema.final: + sectdict['main'] = 'attributes' + else: + sectdict['main'] = 'relations' + if not 'muledit' in sectdict: + sectdict['muledit'] = 'hidden' + if sectdict['main'] == 'attributes': + card, composed = _card_and_comp(sschema, rschema, oschema, role) + if card in '1+' and not composed: + sectdict['muledit'] = 'attributes' + if not 'inlined' in sectdict: + sectdict['inlined'] = sectdict['main'] + # recompute formsections and set it to avoid recomputing + for formtype, section in sectdict.items(): + formsections.add('%s_%s' % (formtype, section)) + + def tag_relation(self, key, formtype, section): + if isinstance(formtype, tuple): + for ftype in formtype: + self.tag_relation(key, ftype, section) + return + assert formtype in self._allowed_form_types, \ + 'formtype should be in (%s), not %s' % ( + ','.join(self._allowed_form_types), formtype) + assert section in self._allowed_values[formtype], \ + 'section for %s should be in (%s), not %s' % ( + formtype, ','.join(self._allowed_values[formtype]), section) + rtags = self._tagdefs.setdefault(_ensure_str_key(key), + self.tag_container_cls()) + # remove previous section for this form type if any + if rtags: + for tag in rtags.copy(): + if tag.startswith(formtype): + rtags.remove(tag) + rtags.add('%s_%s' % (formtype, section)) + return rtags + + def init_get(self, stype, rtype, otype, tagged): + key = (stype, rtype, otype, tagged) + rtags = {} + for key in self._get_keys(stype, rtype, otype, tagged): + tags = self._tagdefs.get(key, ()) + for tag in tags: + assert '_' in tag, (tag, tags) + section, value = tag.split('_', 1) + rtags[section] = value + cls = self.tag_container_cls + rtags = cls('_'.join([section,value]) + for section,value in rtags.items()) + return rtags + + def get(self, *key): + # overriden to avoid recomputing done in parent classes + return self._tagdefs.get(key, ()) + + def relations_by_section(self, entity, formtype, section, permission, + strict=False): + """return a list of (relation schema, target schemas, role) for the + given entity matching categories and permission. 
+ + `strict`: + bool telling if having local role is enough (strict = False) or not + """ + tag = '%s_%s' % (formtype, section) + eschema = entity.e_schema + cw = entity._cw + permsoverrides = cw.vreg['uicfg'].select('autoform_permissions_overrides', cw, entity=entity) + if entity.has_eid(): + eid = entity.eid + else: + eid = None + strict = False + if permission == 'update': + assert section in ('attributes', 'metadata', 'hidden') + relpermission = 'add' + else: + assert section not in ('metadata', 'hidden') + relpermission = permission + for rschema, targetschemas, role in eschema.relation_definitions(True): + _targetschemas = [] + for tschema in targetschemas: + # check section's tag first, potentially lower cost than + # checking permission which may imply rql queries + if not tag in self.etype_get(eschema, rschema, role, tschema): + continue + rdef = rschema.role_rdef(eschema, tschema, role) + if rschema.final: + if not rdef.has_perm(cw, permission, eid=eid, + creating=eid is None): + continue + elif strict or not rdef.has_local_role(relpermission): + if role == 'subject': + if not rdef.has_perm(cw, relpermission, fromeid=eid): + continue + elif role == 'object': + if not rdef.has_perm(cw, relpermission, toeid=eid): + continue + _targetschemas.append(tschema) + if not _targetschemas: + continue + targetschemas = _targetschemas + rdef = eschema.rdef(rschema, role=role, targettype=targetschemas[0]) + # XXX tag allowing to hijack the permission machinery when + # permission is not verifiable until the entity is actually + # created... + if eid is None and '%s_on_new' % permission in permsoverrides.etype_get(eschema, rschema, role): + yield (rschema, targetschemas, role) + continue + if not rschema.final and role == 'subject': + # on relation with cardinality 1 or ?, we need delete perm as well + # if the relation is already set + if (relpermission == 'add' + and rdef.role_cardinality(role) in '1?' + and eid and entity.related(rschema.type, role) + and not rdef.has_perm(cw, 'delete', fromeid=eid, + toeid=entity.related(rschema.type, role)[0][0])): + continue + elif role == 'object': + # on relation with cardinality 1 or ?, we need delete perm as well + # if the relation is already set + if (relpermission == 'add' + and rdef.role_cardinality(role) in '1?' + and eid and entity.related(rschema.type, role) + and not rdef.has_perm(cw, 'delete', toeid=eid, + fromeid=entity.related(rschema.type, role)[0][0])): + continue + yield (rschema, targetschemas, role) + + def hide_field(self, etype, attr, desttype='*', formtype='main'): + """hide `attr` in `etype` forms. + + :param etype: the entity type as a string + :param attr: the name of the attribute or relation to hide + :param formtype: which form will be affected ('main', 'inlined', etc.), + *main* by default. + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_rel) + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_section as afs + afs.hide_field('CWUser', 'login') + afs.hide_field('*', 'name') + afs.hide_field('CWUser', 'use_email', formtype='inlined') + + """ + self._tag_etype_attr(etype, attr, desttype, + formtype=formtype, section='hidden') + + def hide_fields(self, etype, attrs, formtype='main'): + """simple for-loop wrapper around :func:`hide_field`. + + :param etype: the entity type as a string + :param attrs: the ordered list of attribute names (or relations) + :param formtype: which form will be affected ('main', 'inlined', etc.), + *main* by default. 
+ + `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_rel) + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_section as afs + afs.hide_fields('CWUser', ('login', ('use_email', 'subject')), + formtype='inlined') + """ + for attr in attrs: + self.hide_field(etype, attr, formtype=formtype) + + def edit_inline(self, etype, attr, desttype='*', formtype=('main', 'inlined')): + """edit `attr` with and inlined form. + + :param etype: the entity type as a string + :param attr: the name of the attribute or relation + :param desttype: the destination type(s) concerned, default is everything + :param formtype: which form will be affected ('main', 'inlined', etc.), + *main* and *inlined* by default. + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_section as afs + + afs.edit_inline('*', 'use_email') + """ + self._tag_etype_attr(etype, attr, desttype, formtype=formtype, + section='inlined') + + def edit_as_attr(self, etype, attr, desttype='*', formtype=('main', 'muledit')): + """make `attr` appear in the *attributes* section of `etype` form. + + :param etype: the entity type as a string + :param attr: the name of the attribute or relation + :param desttype: the destination type(s) concerned, default is everything + :param formtype: which form will be affected ('main', 'inlined', etc.), + *main* and *muledit* by default. + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_section as afs + + afs.edit_as_attr('CWUser', 'in_group') + """ + self._tag_etype_attr(etype, attr, desttype, + formtype=formtype, section='attributes') + + def set_muledit_editable(self, etype, attrs): + """make `attrs` appear in muledit form of `etype`. + + :param etype: the entity type as a string + :param attrs: the ordered list of attribute names (or relations) + + `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_relation) + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_section as afs + + afs.set_muledit_editable('CWUser', ('firstname', 'surname', 'in_group')) + """ + for attr in attrs: + self.edit_as_attr(self, etype, attr, formtype='muledit') + +autoform_section = AutoformSectionRelationTags() + + +# relations'field class + +class AutoformFieldTags(RelationTags): + __regid__ = 'autoform_field' + + def set_field(self, etype, attr, field): + """sets the `attr` field of `etype`. + + :param etype: the entity type as a string + :param attr: the name of the attribute or relation + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) + + """ + self._tag_etype_attr(etype, attr, '*', field) + +autoform_field = AutoformFieldTags() + + +# relations'field explicit kwargs (given to field's __init__) + +class AutoformFieldKwargsTags(RelationTagsDict): + __regid__ = 'autoform_field_kwargs' + + def set_fields_order(self, etype, attrs): + """specify the field order in `etype` main edition form. + + :param etype: the entity type as a string + :param attrs: the ordered list of attribute names (or relations) + + `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_rel) + + Unspecified fields will be displayed after specified ones, their + order being consistent with the schema definition. + + Examples: + + .. 
sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_field_kwargs as affk + affk.set_fields_order('CWUser', ('firstname', 'surname', 'login')) + affk.set_fields_order('CWUser', ('firstname', ('in_group', 'subject'), + 'surname', 'login')) + + """ + for index, attr in enumerate(attrs): + self._tag_etype_attr(etype, attr, '*', {'order': index}) + + def set_field_kwargs(self, etype, attr, **kwargs): + """tag `attr` field of `etype` with additional named paremeters. + + :param etype: the entity type as a string + :param attr: the name of the attribute or relation + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) + + Examples: + + .. sourcecode:: python + + from cubicweb.web.views.uicfg import autoform_field_kwargs as affk + affk.set_field_kwargs('Person', 'works_for', widget=fwdgs.AutoCompletionWidget()) + affk.set_field_kwargs('CWUser', 'login', label=_('login or email address'), + widget=fwdgs.TextInput(attrs={'size': 30})) + """ + self._tag_etype_attr(etype, attr, '*', kwargs) + + +autoform_field_kwargs = AutoformFieldKwargsTags() + + +# set of tags of the form _on_new on relations. is a +# schema action (add/update/delete/read), and when such a tag is found +# permissions checking is by-passed and supposed to be ok +class AutoFormPermissionsOverrides(RelationTagsSet): + __regid__ = 'autoform_permissions_overrides' + +autoform_permissions_overrides = AutoFormPermissionsOverrides() + + +class ReleditTags(NoTargetRelationTagsDict): + """Associate to relation a dictionary to control `reledit` (e.g. edition of + attributes / relations from within views). + + Possible keys and associated values are: + + * `novalue_label`, alternative default value (shown when there is no value). + + * `novalue_include_rtype`, when `novalue_label` is not specified, this boolean + flag control wether the generated default value should contains the + relation label or not. Will be the opposite of the `showlabel` value found + in the `primaryview_display_ctrl` rtag by default. + + * `reload`, boolean, eid (to reload to) or function taking subject and + returning bool/eid. This is useful when editing a relation (or attribute) + that impacts the url or another parts of the current displayed + page. Defaults to False. + + * `rvid`, alternative view id (as str) for relation or composite edition. + Default is 'autolimited'. + + * `edit_target`, may be either 'rtype' (to edit the relation) or 'related' + (to edit the related entity). This controls whether to edit the relation + or the target entity of the relation. Currently only one-to-one relations + support target entity edition. By default, the 'related' option is taken + whenever the relation is composite. 
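Example, in the spirit of the other rtags in this module; the ``Ticket`` entity type and ``concerns`` relation below are hypothetical and only illustrate the accepted keys:

.. sourcecode:: python

    from cubicweb.web.views.uicfg import reledit_ctrl
    # hypothetical composite relation: Ticket concerns Project
    reledit_ctrl.tag_object_of(('Ticket', 'concerns', 'Project'),
                               {'novalue_label': u'no ticket yet',
                                'rvid': 'autolimited',
                                'reload': True})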
+ """ + __regid__ = 'reledit' + _keys = frozenset('novalue_label novalue_include_rtype reload rvid edit_target'.split()) + + def tag_relation(self, key, tag): + for tagkey in tag: + assert tagkey in self._keys, 'tag %r not in accepted tags: %r' % (tag, self._keys) + return super(ReleditTags, self).tag_relation(key, tag) + + def _init(self, sschema, rschema, oschema, role): + values = self.get(sschema, rschema, oschema, role) + if not rschema.final: + composite = rschema.rdef(sschema, oschema).composite == role + if role == 'subject': + oschema = '*' + else: + sschema = '*' + edittarget = values.get('edit_target') + if edittarget not in (None, 'rtype', 'related'): + self.warning('reledit: wrong value for edit_target on relation %s: %s', + rschema, edittarget) + edittarget = None + if not edittarget: + edittarget = 'related' if composite else 'rtype' + self.tag_relation((sschema, rschema, oschema, role), + {'edit_target': edittarget}) + if not 'novalue_include_rtype' in values: + showlabel = primaryview_display_ctrl.get( + sschema, rschema, oschema, role).get('showlabel', True) + self.tag_relation((sschema, rschema, oschema, role), + {'novalue_include_rtype': not showlabel}) + +reledit_ctrl = ReleditTags() + + +# boxes.EditBox configuration ################################################# + +# 'link' / 'create' relation tags, used to control the "add entity" submenu + +class ActionBoxUicfg(RelationTagsBool): + __regid__ = 'actionbox_appearsin_addmenu' + + def _init(self, sschema, rschema, oschema, role): + if self.get(sschema, rschema, oschema, role) is None: + if rschema in META_RTYPES: + self.tag_relation((sschema, rschema, oschema, role), False) + return + rdef = rschema.rdef(sschema, oschema) + if not rdef.role_cardinality(role) in '?1' and rdef.composite == role: + self.tag_relation((sschema, rschema, oschema, role), True) + + def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs): + if isinstance(attr, string_types): + attr, role = attr, 'subject' + else: + attr, role = attr + if role == 'subject': + self.tag_subject_of((etype, attr, desttype), *args, **kwargs) + else: + self.tag_object_of((desttype, attr, etype), *args, **kwargs) + + def append_to_addmenu(self, etype, attr, createdtype='*'): + """adds `attr` in the actions box *addrelated* submenu of `etype`. + + :param etype: the entity type as a string + :param attr: the name of the attribute or relation to hide + :param createdtype: the target type of the relation (optional, defaults to '*' (all possible types)) + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) + + """ + self._tag_etype_attr(etype, attr, createdtype, True) + + def remove_from_addmenu(self, etype, attr, createdtype='*'): + """removes `attr` from the actions box *addrelated* submenu of `etype`. 
+ + :param etype: the entity type as a string + :param attr: the name of the attribute or relation to hide + :param createdtype: the target type of the relation (optional, defaults to '*' (all possible types)) + + `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) + """ + self._tag_etype_attr(etype, attr, createdtype, False) + +actionbox_appearsin_addmenu = ActionBoxUicfg() + + + +def registration_callback(vreg): + vreg.register_all(globals().values(), __name__) + indexview_etype_section.init(vreg.schema) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/undohistory.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/undohistory.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,224 @@ +# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +__docformat__ = "restructuredtext en" +from cubicweb import _ + + +from logilab.common.registry import Predicate + +from cubicweb import UnknownEid, tags, transaction as tx +from cubicweb.view import View, StartupView +from cubicweb.predicates import match_kwargs, ExpectedValuePredicate +from cubicweb.schema import display_name + + +class undoable_action(Predicate): + """Select only undoable actions depending on filters provided. Undo Action + is expected to be specified by the `tx_action` argument. + + Currently the only implemented filter is: + + :param action_type: chars among CUDAR (standing for Create, Update, Delete, + Add, Remove) + """ + + # XXX FIXME : this selector should be completed to allow selection on the + # entity or relation types and public / private. + def __init__(self, action_type='CUDAR'): + assert not set(action_type) - set('CUDAR') + self.action_type = action_type + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, ', '.join( + "%s=%v" % (str(k), str(v)) for k, v in kwargs.items() )) + + def __call__(self, cls, req, tx_action=None, **kwargs): + # tx_action is expected to be a transaction.AbstractAction + if not isinstance(tx_action, tx.AbstractAction): + return 0 + # Filter according to action type + return int(tx_action.action in self.action_type) + + +class UndoHistoryView(StartupView): + __regid__ = 'undohistory' + title = _('Undoing') + item_vid = 'undoable-transaction-view' + cache_max_age = 0 + + redirect_path = 'view' #TODO + redirect_params = dict(vid='undohistory') #TODO + public_actions_only = True + + # TODO Allow to choose if if want all actions or only the public ones + # (default) + + def call(self, **kwargs): + txs = self._cw.cnx.undoable_transactions() + if txs : + self.w(u"
        ") + for tx in txs: + self.cell_call(tx) + self.w(u"
      ") + + def cell_call(self, tx): + self.w(u'
    • ') + self.wview(self.item_vid, None, txuuid=tx.uuid, + public=self.public_actions_only, + redirect_path=self.redirect_path, + redirect_params=self.redirect_params) + self.w(u'
    • \n') + + +class UndoableTransactionView(View): + __regid__ = 'undoable-transaction-view' + __select__ = View.__select__ & match_kwargs('txuuid') + + item_vid = 'undoable-action-list-view' + cache_max_age = 0 + + def build_undo_link(self, txuuid, + redirect_path=None, redirect_params=None): + """ the kwargs are passed to build_url""" + _ = self._cw._ + redirect = {} + if redirect_path: + redirect['__redirectpath'] = redirect_path + if redirect_params: + if isinstance(redirect_params, dict): + redirect['__redirectparams'] = self._cw.build_url_params(**redirect_params) + else: + redirect['__redirectparams'] = redirect_params + link_url = self._cw.build_url('undo', txuuid=txuuid, **redirect) + msg = u"%s" % tags.a( _('undo'), href=link_url) + return msg + + def call(self, txuuid, public=True, + redirect_path=None, redirect_params=None): + _ = self._cw._ + txinfo = self._cw.cnx.transaction_info(txuuid) + try: + #XXX Under some unknown circumstances txinfo.user_eid=-1 + user = self._cw.entity_from_eid(txinfo.user_eid) + except UnknownEid: + user = None + undo_url = self.build_undo_link(txuuid, + redirect_path=redirect_path, + redirect_params=redirect_params) + txinfo_dict = dict( dt = self._cw.format_date(txinfo.datetime, time=True), + user_eid = txinfo.user_eid, + user = user and user.view('outofcontext') or _("undefined user"), + txuuid = txuuid, + undo_link = undo_url) + self.w( _("By %(user)s on %(dt)s [%(undo_link)s]") % txinfo_dict) + + tx_actions = txinfo.actions_list(public=public) + if tx_actions : + self.wview(self.item_vid, None, tx_actions=tx_actions) + + +class UndoableActionListView(View): + __regid__ = 'undoable-action-list-view' + __select__ = View.__select__ & match_kwargs('tx_actions') + title = _('Undoable actions') + item_vid = 'undoable-action-view' + cache_max_age = 0 + + def call(self, tx_actions): + if tx_actions : + self.w(u"
        ") + for action in tx_actions: + self.cell_call(action) + self.w(u"
      ") + + def cell_call(self, action): + self.w(u'
    • ') + self.wview(self.item_vid, None, tx_action=action) + self.w(u'
    • \n') + + +class UndoableActionBaseView(View): + __regid__ = 'undoable-action-view' + __abstract__ = True + + def call(self, tx_action): + raise NotImplementedError(self) + + def _build_entity_link(self, eid): + try: + entity = self._cw.entity_from_eid(eid) + return entity.view('outofcontext') + except UnknownEid: + return _("(suppressed) entity #%d") % eid + + def _build_relation_info(self, rtype, eid_from, eid_to): + return dict( rtype=display_name(self._cw, rtype), + entity_from=self._build_entity_link(eid_from), + entity_to=self._build_entity_link(eid_to) ) + + def _build_entity_info(self, etype, eid, changes): + return dict( etype=display_name(self._cw, etype), + entity=self._build_entity_link(eid), + eid=eid, + changes=changes) + + +class UndoableAddActionView(UndoableActionBaseView): + __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='A') + + def call(self, tx_action): + _ = self._cw._ + self.w(_("Added relation : %(entity_from)s %(rtype)s %(entity_to)s") % + self._build_relation_info(tx_action.rtype, tx_action.eid_from, tx_action.eid_to)) + + +class UndoableRemoveActionView(UndoableActionBaseView): + __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='R') + + def call(self, tx_action): + _ = self._cw._ + self.w(_("Delete relation : %(entity_from)s %(rtype)s %(entity_to)s") % + self._build_relation_info(tx_action.rtype, tx_action.eid_from, tx_action.eid_to)) + + +class UndoableCreateActionView(UndoableActionBaseView): + __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='C') + + def call(self, tx_action): + _ = self._cw._ + self.w(_("Created %(etype)s : %(entity)s") % # : %(changes)s + self._build_entity_info( tx_action.etype, tx_action.eid, tx_action.changes) ) + + +class UndoableDeleteActionView(UndoableActionBaseView): + __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='D') + + def call(self, tx_action): + _ = self._cw._ + self.w(_("Deleted %(etype)s : %(entity)s") % + self._build_entity_info( tx_action.etype, tx_action.eid, tx_action.changes)) + + +class UndoableUpdateActionView(UndoableActionBaseView): + __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='U') + + def call(self, tx_action): + _ = self._cw._ + self.w(_("Updated %(etype)s : %(entity)s") % + self._build_entity_info( tx_action.etype, tx_action.eid, tx_action.changes)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/urlpublishing.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/urlpublishing.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,289 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Associate url's path to view identifier / rql queries. 
+ +CubicWeb finds all registered URLPathEvaluators, orders them according +to their ``priority`` attribute and calls their ``evaluate_path()`` +method. The first that returns something and doesn't raise a +``PathDontMatch`` exception wins. + +Here is the default evaluator chain: + +1. :class:`cubicweb.web.views.urlpublishing.RawPathEvaluator` handles + unique url segments that match exactly one of the registered + controller's *__regid__*. Urls such as */view?*, */edit?*, */json?* + fall in that category; + +2. :class:`cubicweb.web.views.urlpublishing.EidPathEvaluator` handles + unique url segments that are eids (e.g. */1234*); + +3. :class:`cubicweb.web.views.urlpublishing.URLRewriteEvaluator` + selects all urlrewriter components, sorts them according to their + priority, call their ``rewrite()`` method, the first one that + doesn't raise a ``KeyError`` wins. This is where the + :mod:`cubicweb.web.views.urlrewrite` and + :class:`cubicweb.web.views.urlrewrite.SimpleReqRewriter` comes into + play; + +4. :class:`cubicweb.web.views.urlpublishing.RestPathEvaluator` handles + urls based on entity types and attributes : ((/])?/)? This is why ``cwuser/carlos`` works; + +5. :class:`cubicweb.web.views.urlpublishing.ActionPathEvaluator` + handles any of the previous paths with an additional trailing + "/" segment, being one of the registered actions' + __regid__. + + +.. note:: + + Actionpath executes a query whose results is lost + because of redirecting instead of direct traversal. +""" +__docformat__ = "restructuredtext en" + +from rql import TypeResolverException + +from cubicweb import RegistryException +from cubicweb.web import NotFound, Redirect, component, views + + +class PathDontMatch(Exception): + """exception used by url evaluators to notify they can't evaluate + a path + """ + +class URLPublisherComponent(component.Component): + """Associate url path to view identifier / rql queries, by + applying a chain of urlpathevaluator components. + + An evaluator is a URLPathEvaluator subclass with an .evaluate_path + method taking the request object and the path to publish as + argument. It will either return a publishing method identifier + and an rql query on success or raise a `PathDontMatch` exception + on failure. URL evaluators are called according to their + `priority` attribute, with 0 as the greatest priority and greater + values as lower priority. The first evaluator returning a result + or raising something else than `PathDontMatch` will stop the + handlers chain. + """ + __regid__ = 'urlpublisher' + vreg = None # XXX necessary until property for deprecation warning is on appobject + + def __init__(self, vreg, default_method='view'): + super(URLPublisherComponent, self).__init__() + self.vreg = vreg + self.default_method = default_method + evaluators = [] + for evaluatorcls in vreg['components']['urlpathevaluator']: + # instantiation needed + evaluator = evaluatorcls(self) + evaluators.append(evaluator) + self.evaluators = sorted(evaluators, key=lambda x: x.priority) + + def process(self, req, path): + """Given a URL (essentially characterized by a path on the + server, but additional information may be found in the request + object), return a publishing method identifier + (e.g. controller) and an optional result set. + + :type req: `cubicweb.web.request.CubicWebRequestBase` + :param req: the request object + + :type path: str + :param path: the path of the resource to publish. If empty, None or "/" + "view" is used as the default path. 
+ + :rtype: tuple(str, `cubicweb.rset.ResultSet` or None) + :return: the publishing method identifier and an optional result set + + :raise NotFound: if no handler is able to decode the given path + """ + parts = [part for part in path.split('/') + if part != ''] or (self.default_method,) + if req.form.get('rql'): + if parts[0] in self.vreg['controllers']: + return parts[0], None + return 'view', None + for evaluator in self.evaluators: + try: + pmid, rset = evaluator.evaluate_path(req, parts[:]) + break + except PathDontMatch: + continue + else: + raise NotFound(path) + if pmid is None: + pmid = self.default_method + return pmid, rset + + +class URLPathEvaluator(component.Component): + __abstract__ = True + __regid__ = 'urlpathevaluator' + vreg = None # XXX necessary until property for deprecation warning is on appobject + + def __init__(self, urlpublisher): + self.urlpublisher = urlpublisher + self.vreg = urlpublisher.vreg + + +class RawPathEvaluator(URLPathEvaluator): + """handle path of the form:: + + ?parameters... + """ + priority = 0 + def evaluate_path(self, req, parts): + if len(parts) == 1 and parts[0] in self.vreg['controllers']: + return parts[0], None + raise PathDontMatch() + + +class EidPathEvaluator(URLPathEvaluator): + """handle path with the form:: + + + """ + priority = 1 + def evaluate_path(self, req, parts): + if len(parts) != 1: + raise PathDontMatch() + try: + rset = req.execute('Any X WHERE X eid %(x)s', {'x': int(parts[0])}) + except ValueError: + raise PathDontMatch() + if rset.rowcount == 0: + raise NotFound() + return None, rset + + +class RestPathEvaluator(URLPathEvaluator): + """handle path with the form:: + + [[/]/]* + """ + priority = 3 + + def evaluate_path(self, req, parts): + if not (0 < len(parts) < 4): + raise PathDontMatch() + try: + etype = self.vreg.case_insensitive_etypes[parts.pop(0).lower()] + except KeyError: + raise PathDontMatch() + cls = self.vreg['etypes'].etype_class(etype) + if parts: + if len(parts) == 2: + attrname = parts.pop(0).lower() + try: + cls.e_schema.subjrels[attrname] + except KeyError: + raise PathDontMatch() + else: + attrname = cls.cw_rest_attr_info()[0] + value = req.url_unquote(parts.pop(0)) + return self.handle_etype_attr(req, cls, attrname, value) + return self.handle_etype(req, cls) + + def set_vid_for_rset(self, req, cls, rset): # cls is there to ease overriding + if rset.rowcount == 0: + raise NotFound() + if 'vid' not in req.form: + # check_table=False tells vid_from_rset not to try to use a table view if fetch_rql + # include some non final relation + req.form['vid'] = views.vid_from_rset(req, rset, req.vreg.schema, + check_table=False) + + def handle_etype(self, req, cls): + rset = req.execute(cls.fetch_rql(req.user)) + self.set_vid_for_rset(req, cls, rset) + return None, rset + + def handle_etype_attr(self, req, cls, attrname, value): + st = cls.fetch_rqlst(req.user, ordermethod=None) + st.add_constant_restriction(st.get_variable('X'), attrname, + 'x', 'Substitute') + if attrname == 'eid': + try: + rset = req.execute(st.as_string(), {'x': int(value)}) + except (ValueError, TypeResolverException): + # conflicting eid/type + raise PathDontMatch() + else: + rset = req.execute(st.as_string(), {'x': value}) + self.set_vid_for_rset(req, cls, rset) + return None, rset + + +class URLRewriteEvaluator(URLPathEvaluator): + """tries to find a rewrite rule to apply + + URL rewrite rule definitions are stored in URLRewriter objects + """ + priority = 2 + + def evaluate_path(self, req, parts): + # uri <=> req._twreq.path or 
req._twreq.uri + uri = req.url_unquote('/' + '/'.join(parts)) + evaluators = sorted(self.vreg['urlrewriting'].all_objects(), + key=lambda x: x.priority, reverse=True) + for rewritercls in evaluators: + rewriter = rewritercls(req) + try: + # XXX we might want to chain url rewrites + return rewriter.rewrite(req, uri) + except KeyError: + continue + raise PathDontMatch() + + +class ActionPathEvaluator(URLPathEvaluator): + """handle path with the form:: + + / + """ + priority = 4 + + def evaluate_path(self, req, parts): + if len(parts) < 2: + raise PathDontMatch() + # remove last part and see if this is something like an actions + # if so, call + # XXX bad smell: refactor to simpler code + try: + actionsreg = self.vreg['actions'] + requested = parts.pop(-1) + actions = actionsreg[requested] + except RegistryException: + raise PathDontMatch() + for evaluator in self.urlpublisher.evaluators: + if evaluator is self or evaluator.priority == 0: + continue + try: + pmid, rset = evaluator.evaluate_path(req, parts[:]) + except PathDontMatch: + continue + else: + try: + action = actionsreg._select_best(actions, req, rset=rset) + if action is not None: + raise Redirect(action.url()) + except RegistryException: + pass # continue searching + raise PathDontMatch() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/urlrewrite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/urlrewrite.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,234 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Rules based url rewriter component, to get configurable RESTful urls""" + +import re + +from six import string_types, add_metaclass + +from cubicweb.uilib import domid +from cubicweb.appobject import AppObject + + +def rgx(pattern, flags=0): + """this is just a convenient shortcut to add the $ sign""" + return re.compile(pattern+'$', flags) + +class metarewriter(type): + """auto-extend rules dictionary""" + def __new__(mcs, name, bases, classdict): + # collect baseclass' rules + rules = [] + ignore_baseclass_rules = classdict.get('ignore_baseclass_rules', False) + if not ignore_baseclass_rules: + for base in bases: + rules[0:0] = getattr(base, 'rules', []) + rules[0:0] = classdict.get('rules', []) + inputs = set() + for data in rules[:]: + try: + input, output, groups = data + except ValueError: + input, output = data + if input in inputs: + rules.remove( (input, output) ) + else: + inputs.add(input) + classdict['rules'] = rules + return super(metarewriter, mcs).__new__(mcs, name, bases, classdict) + + +@add_metaclass(metarewriter) +class URLRewriter(AppObject): + """Base class for URL rewriters. + + Url rewriters should have a `rules` dict that maps an input URI + to something that should be used for rewriting. 
+ + The actual logic that defines how the rules dict is used is implemented + in the `rewrite` method. + + A `priority` attribute might be used to indicate which rewriter + should be tried first. The higher the priority is, the earlier the + rewriter will be tried. + """ + __registry__ = 'urlrewriting' + __abstract__ = True + priority = 1 + + def rewrite(self, req, uri): + raise NotImplementedError + + +class SimpleReqRewriter(URLRewriter): + """The SimpleReqRewriters uses a `rules` dict that maps input URI + (regexp or plain string) to a dictionary to update the request's + form. + + If the input uri is a regexp, group substitution is allowed. + """ + __regid__ = 'simple' + + rules = [ + ('/_', dict(vid='manage')), + ('/_registry', dict(vid='registry')), +# (rgx('/_([^/]+?)/?'), dict(vid=r'\1')), + ('/schema', dict(vid='schema')), + ('/index', dict(vid='index')), + ('/myprefs', dict(vid='propertiesform')), + ('/siteconfig', dict(vid='systempropertiesform')), + ('/siteinfo', dict(vid='siteinfo')), + ('/manage', dict(vid='manage')), + ('/notfound', dict(vid='404')), + ('/error', dict(vid='error')), + ('/sparql', dict(vid='sparql')), + ('/processinfo', dict(vid='processinfo')), + (rgx('/cwuser', re.I), dict(vid='cw.users-and-groups-management', + tab=domid('cw.users-management'))), + (rgx('/cwgroup', re.I), dict(vid='cw.users-and-groups-management', + tab=domid('cw.groups-management'))), + (rgx('/cwsource', re.I), dict(vid='cw.sources-management')), + # XXX should be case insensitive as 'create', but I would like to find another way than + # relying on the etype_selector + (rgx('/schema/([^/]+?)/?'), dict(vid='primary', rql=r'Any X WHERE X is CWEType, X name "\1"')), + (rgx('/add/([^/]+?)/?'), dict(vid='creation', etype=r'\1')), + (rgx('/doc/images/(.+?)/?'), dict(vid='wdocimages', fid=r'\1')), + (rgx('/doc/?'), dict(vid='wdoc', fid=r'main')), + (rgx('/doc/(.+?)/?'), dict(vid='wdoc', fid=r'\1')), + ] + + def rewrite(self, req, uri): + """for each `input`, `output `in rules, if `uri` matches `input`, + req's form is updated with `output` + """ + for data in self.rules: + try: + inputurl, infos, required_groups = data + except ValueError: + inputurl, infos = data + required_groups = None + if required_groups and not req.user.matching_groups(required_groups): + continue + if isinstance(inputurl, string_types): + if inputurl == uri: + req.form.update(infos) + break + elif inputurl.match(uri): # it's a regexp + # XXX what about i18n? 
(vtitle for instance) + for param, value in infos.items(): + if isinstance(value, string_types): + req.form[param] = inputurl.sub(value, uri) + else: + req.form[param] = value + break + else: + self.debug("no simple rewrite rule found for %s", uri) + raise KeyError(uri) + return None, None + + +def build_rset(rql, rgxgroups=None, setuser=False, + vid=None, vtitle=None, form={}, **kwargs): + + def do_build_rset(inputurl, uri, req, schema, kwargs=kwargs): + kwargs = kwargs.copy() + if rgxgroups: + match = inputurl.match(uri) + for arg, group in rgxgroups: + kwargs[arg] = match.group(group) + req.form.update(form) + if setuser: + kwargs['u'] = req.user.eid + if vid: + req.form['vid'] = vid + if vtitle: + req.form['vtitle'] = req._(vtitle) % kwargs + return None, req.execute(rql, kwargs) + return do_build_rset + +def update_form(**kwargs): + def do_build_rset(inputurl, uri, req, schema): + match = inputurl.match(uri) + kwargs.update(match.groupdict()) + req.form.update(kwargs) + return None, None + return do_build_rset + +def rgx_action(rql=None, args=None, argsgroups=(), setuser=False, + form=None, formgroups=(), transforms={}, rqlformparams=(), controller=None): + def do_build_rset(inputurl, uri, req, schema, + ): + if rql: + kwargs = args and args.copy() or {} + if argsgroups: + match = inputurl.match(uri) + for key in argsgroups: + value = match.group(key) + try: + kwargs[key] = transforms[key](value) + except KeyError: + kwargs[key] = value + if setuser: + kwargs['u'] = req.user.eid + for param in rqlformparams: + kwargs.setdefault(param, req.form.get(param)) + rset = req.execute(rql, kwargs) + else: + rset = None + form2 = form and form.copy() or {} + if formgroups: + match = inputurl.match(uri) + for key in formgroups: + form2[key] = match.group(key) + if "vtitle" in form2: + form2['vtitle'] = req.__(form2['vtitle']) + if form2: + req.form.update(form2) + return controller, rset + return do_build_rset + + +class SchemaBasedRewriter(URLRewriter): + """Here, the rules dict maps regexps or plain strings to callbacks + that will be called with inputurl, uri, req, schema as parameters. + """ + __regid__ = 'schemabased' + rules = [ + # rgxp : callback + (rgx('/search/(.+)'), build_rset(rql=r'Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s', + rgxgroups=[('text', 1)])), + ] + + def rewrite(self, req, uri): + # XXX this could be refacted with SimpleReqRewriter + for data in self.rules: + try: + inputurl, callback, required_groups = data + except ValueError: + inputurl, callback = data + required_groups = None + if required_groups and not req.user.matching_groups(required_groups): + continue + if isinstance(inputurl, string_types): + if inputurl == uri: + return callback(inputurl, uri, req, self._cw.vreg.schema) + elif inputurl.match(uri): # it's a regexp + return callback(inputurl, uri, req, self._cw.vreg.schema) + else: + self.debug("no schemabased rewrite rule found for %s", uri) + raise KeyError(uri) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/vcard.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/vcard.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,66 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""vcard import / export + +""" +__docformat__ = "restructuredtext en" + +from cubicweb.predicates import is_instance +from cubicweb.view import EntityView + +from cubicweb import _ + +VCARD_PHONE_TYPES = {'home': 'HOME', 'office': 'WORK', 'mobile': 'CELL', 'fax': 'FAX'} + +class VCardCWUserView(EntityView): + """export a person information as a vcard""" + __regid__ = 'vcard' + title = _('vcard') + templatable = False + content_type = 'text/x-vcard' + __select__ = is_instance('CWUser') + + def set_request_content_type(self): + """overriden to set a .vcf filename""" + self._cw.set_content_type(self.content_type, filename='vcard.vcf') + + def cell_call(self, row, col): + self.vcard_header() + self.vcard_content(self.cw_rset.complete_entity(row, col)) + self.vcard_footer() + + def vcard_header(self): + self.w(u'BEGIN:vcard\n') + self.w(u'VERSION:3.0\n') + + def vcard_footer(self): + self.w(u'NOTE:this card has been generated by CubicWeb\n') + self.w(u'END:vcard\n') + + def vcard_content(self, entity): + who = u'%s %s' % (entity.surname or '', + entity.firstname or '') + w = self.w + w(u'FN:%s\n' % who) + w(u'N:%s;;;;\n' % entity.login) + w(u'TITLE:%s\n' % who) + for email in entity.use_email: + w(u'EMAIL;TYPE=INTERNET:%s\n' % email.address) + +from logilab.common.deprecation import class_renamed +VCardEuserView = VCardEUserView = class_renamed('VCardEuserView', VCardCWUserView) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/wdoc.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/wdoc.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,231 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""inline help system, rendering ReST files in the `wdoc` subdirectory of +CubicWeb and cubes + +""" +__docformat__ = "restructuredtext en" + +from itertools import chain +from os.path import join +from bisect import bisect_right +from datetime import date + +from logilab.common.changelog import ChangeLog +from logilab.common.date import strptime, todate +from logilab.common.registry import yes +from logilab.mtconverter import CHARSET_DECL_RGX + +from cubicweb.predicates import match_form_params +from cubicweb.view import StartupView +from cubicweb.uilib import rest_publish +from cubicweb.web import NotFound, action +from cubicweb import _ + +# table of content management ################################################# + +try: + from xml.etree.ElementTree import parse +except ImportError: + from elementtree.ElementTree import parse + +def build_toc_index(node, index): + try: + nodeidx = node.attrib['resource'] + assert not nodeidx in index, nodeidx + index[nodeidx] = node + except KeyError: + pass + for child in node: + build_toc_index(child, index) + child.parent = node + +def get_insertion_point(section, index): + if section.attrib.get('insertafter'): + snode = index[section.attrib['insertafter']] + node = snode.parent + idx = node.getchildren().index(snode) + 1 + elif section.attrib.get('insertbefore'): + snode = index[section.attrib['insertbefore']] + node = snode.parent + idx = node.getchildren().index(snode) + elif 'appendto' in section.attrib: + node = index[section.attrib['appendto']] + idx = None + else: + node, idx = None, None + return node, idx + +def build_toc(config): + alltocfiles = reversed(tuple(config.locate_all_files('toc.xml'))) + maintoc = parse(next(alltocfiles)).getroot() + maintoc.parent = None + index = {} + build_toc_index(maintoc, index) + # insert component documentation into the tree according to their toc.xml + # file + for fpath in alltocfiles: + toc = parse(fpath).getroot() + for section in toc: + node, idx = get_insertion_point(section, index) + if node is None: + continue + if idx is None: + node.append(section) + else: + node.insert(idx, section) + section.parent = node + build_toc_index(section, index) + return index + +def title_for_lang(node, lang): + fallback_title = None + for title in node.findall('title'): + title_lang = title.attrib['{http://www.w3.org/XML/1998/namespace}lang'] + if title_lang == lang: + return unicode(title.text) + if title_lang == 'en': + fallback_title = unicode(title.text) + return fallback_title + +def subsections(node): + return [child for child in node if child.tag == 'section'] + +# help views ################################################################## + +class InlineHelpView(StartupView): + __select__ = match_form_params('fid') + __regid__ = 'wdoc' + title = _('site documentation') + + def call(self): + fid = self._cw.form['fid'] + vreg = self._cw.vreg + for lang in chain((self._cw.lang, vreg.property_value('ui.language')), + vreg.config.available_languages()): + rid = '%s_%s.rst' % (fid, lang) + resourcedir = vreg.config.locate_doc_file(rid) + if resourcedir: + break + else: + raise NotFound + self.tocindex = build_toc(vreg.config) + try: + node = self.tocindex[fid] + except KeyError: + node = None + else: + self.navigation_links(node) + self.w(u'
      ') + self.w(u'

      %s

      ' % (title_for_lang(node, self._cw.lang))) + data = open(join(resourcedir, rid)).read() + self.w(rest_publish(self, data)) + if node is not None: + self.subsections_links(node) + self.w(u'
      ') + self.navigation_links(node) + + def navigation_links(self, node): + req = self._cw + parent = node.parent + if parent is None: + return + brothers = subsections(parent) + self.w(u'
      \n') + previousidx = brothers.index(node) - 1 + if previousidx >= 0: + self.navsection(brothers[previousidx], 'prev') + self.navsection(parent, 'up') + nextidx = brothers.index(node) + 1 + if nextidx < len(brothers): + self.navsection(brothers[nextidx], 'next') + self.w(u'
      \n') + + navinfo = {'prev': ('', 'data/previous.png', _('i18nprevnext_previous')), + 'next': ('', 'data/next.png', _('i18nprevnext_next')), + 'up': ('', 'data/up.png', _('i18nprevnext_up'))} + + def navsection(self, node, navtype): + htmlclass, imgpath, msgid = self.navinfo[navtype] + self.w(u'' % htmlclass) + self.w(u'%s : ' % self._cw._(msgid)) + self.w(u'%s' % ( + self._cw.build_url('doc/'+node.attrib['resource']), + title_for_lang(node, self._cw.lang))) + self.w(u'\n') + + def subsections_links(self, node, first=True): + sub = subsections(node) + if not sub: + return + if first: + self.w(u'
      ') + self.w(u'
        ') + for child in sub: + self.w(u'
      • %s' % ( + self._cw.build_url('doc/'+child.attrib['resource']), + title_for_lang(child, self._cw.lang))) + self.subsections_links(child, False) + self.w(u'
      • ') + self.w(u'
      \n') + + + +class InlineHelpImageView(StartupView): + __regid__ = 'wdocimages' + __select__ = match_form_params('fid') + binary = True + templatable = False + content_type = 'image/png' + + def call(self): + fid = self._cw.form['fid'] + for lang in chain((self._cw.lang, self._cw.vreg.property_value('ui.language')), + self._cw.vreg.config.available_languages()): + rid = join('images', '%s_%s.png' % (fid, lang)) + resourcedir = self._cw.vreg.config.locate_doc_file(rid) + if resourcedir: + break + else: + raise NotFound + self.w(open(join(resourcedir, rid)).read()) + + + +class HelpAction(action.Action): + __regid__ = 'help' + __select__ = yes() + + category = 'footer' + order = 0 + title = _('Help') + + def url(self): + return self._cw.build_url('doc/main') + + +class AboutAction(action.Action): + __regid__ = 'about' + __select__ = yes() + + category = 'footer' + order = 2 + title = _('About this site') + + def url(self): + return self._cw.build_url('doc/about') diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/workflow.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/workflow.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,452 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""workflow views: + +* IWorkflowable views and forms +* workflow entities views (State, Transition, TrInfo) +""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import os +from warnings import warn + +from six import add_metaclass + +from logilab.mtconverter import xml_escape +from logilab.common.graph import escape +from logilab.common.deprecation import class_deprecated + +from cubicweb import Unauthorized +from cubicweb.predicates import (has_related_entities, one_line_rset, + relation_possible, match_form_params, + score_entity, is_instance, adaptable) +from cubicweb.view import EntityView +from cubicweb.schema import display_name +from cubicweb.web import stdmsgs, action, component, form, action +from cubicweb.web import formfields as ff, formwidgets as fwdgs +from cubicweb.web.views import TmpFileViewMixin +from cubicweb.web.views import uicfg, forms, primary, ibreadcrumbs +from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab +from cubicweb.web.views.dotgraphview import DotGraphView, DotPropsHandler + +_pvs = uicfg.primaryview_section +_pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden') +_pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden') +_pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden') +_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden') +for rtype in ('in_state', 'by_transition', 'from_state', 'to_state'): + _pvs.tag_subject_of(('*', rtype, '*'), 'hidden') + _pvs.tag_object_of(('*', rtype, '*'), 'hidden') +_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden') + +_abaa = uicfg.actionbox_appearsin_addmenu +_abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False) +_abaa.tag_subject_of(('State', 'allowed_transition', 'BaseTransition'), False) +_abaa.tag_object_of(('SubWorkflowExitPoint', 'destination_state', 'State'), + False) +_abaa.tag_subject_of(('*', 'wf_info_for', '*'), False) +_abaa.tag_object_of(('*', 'wf_info_for', '*'), False) + +_abaa.tag_object_of(('*', 'state_of', 'CWEType'), True) +_abaa.tag_object_of(('*', 'transition_of', 'CWEType'), True) +_abaa.tag_subject_of(('Transition', 'destination_state', '*'), True) +_abaa.tag_object_of(('*', 'allowed_transition', 'Transition'), True) +_abaa.tag_object_of(('*', 'destination_state', 'State'), True) +_abaa.tag_subject_of(('State', 'allowed_transition', '*'), True) +_abaa.tag_object_of(('State', 'state_of', 'Workflow'), True) +_abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True) +_abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True) + +_afs = uicfg.autoform_section +_affk = uicfg.autoform_field_kwargs + +# IWorkflowable views ######################################################### + +class ChangeStateForm(forms.CompositeEntityForm): + # set dom id to ensure there is no conflict with edition form (see + # session_key() implementation) + __regid__ = domid = 'changestate' + + form_renderer_id = 'base' # don't want EntityFormRenderer + form_buttons = [fwdgs.SubmitButton(), + fwdgs.Button(stdmsgs.BUTTON_CANCEL, cwaction='cancel')] + + +class ChangeStateFormView(form.FormViewMixIn, EntityView): + __regid__ = 'statuschange' + title = _('status change') + __select__ = (one_line_rset() + & match_form_params('treid') + & adaptable('IWorkflowable')) + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + transition = self._cw.entity_from_eid(self._cw.form['treid']) + form = self.get_form(entity, transition) + self.w(u'

      %s %s

      \n' % (self._cw._(transition.name), + entity.view('oneline'))) + msg = self._cw._('status will change from %(st1)s to %(st2)s') % { + 'st1': entity.cw_adapt_to('IWorkflowable').printable_state, + 'st2': self._cw._(transition.destination(entity).name)} + self.w(u'

      %s

      \n' % msg) + form.render(w=self.w) + + def redirectpath(self, entity): + return entity.rest_path() + + def get_form(self, entity, transition, **kwargs): + # XXX used to specify both rset/row/col and entity in case implements + # selector (and not is_instance) is used on custom form + form = self._cw.vreg['forms'].select( + 'changestate', self._cw, entity=entity, transition=transition, + redirect_path=self.redirectpath(entity), **kwargs) + trinfo = self._cw.vreg['etypes'].etype_class('TrInfo')(self._cw) + trinfo.eid = next(self._cw.varmaker) + subform = self._cw.vreg['forms'].select('edition', self._cw, entity=trinfo, + mainform=False) + subform.field_by_name('wf_info_for', 'subject').value = entity.eid + trfield = subform.field_by_name('by_transition', 'subject') + trfield.widget = fwdgs.HiddenInput() + trfield.value = transition.eid + form.add_subform(subform) + return form + + +class WFHistoryView(EntityView): + __regid__ = 'wfhistory' + __select__ = relation_possible('wf_info_for', role='object') & \ + score_entity(lambda x: x.cw_adapt_to('IWorkflowable').workflow_history) + + title = _('Workflow history') + + def cell_call(self, row, col, view=None, title=title): + _ = self._cw._ + eid = self.cw_rset[row][col] + sel = 'Any FS,TS,C,D' + rql = ' ORDERBY D DESC WHERE WF wf_info_for X,'\ + 'WF from_state FS, WF to_state TS, WF comment C,'\ + 'WF creation_date D' + if self._cw.vreg.schema.eschema('CWUser').has_perm(self._cw, 'read'): + sel += ',U,WF' + rql += ', WF owned_by U?' + headers = (_('from_state'), _('to_state'), _('comment'), _('date'), + _('CWUser')) + else: + sel += ',WF' + headers = (_('from_state'), _('to_state'), _('comment'), _('date')) + rql = '%s %s, X eid %%(x)s' % (sel, rql) + try: + rset = self._cw.execute(rql, {'x': eid}) + except Unauthorized: + return + if rset: + if title: + self.w(u'

      %s

      \n' % _(title)) + self.wview('table', rset, headers=headers, + cellvids={2: 'editable-final'}) + + +class WFHistoryVComponent(component.EntityCtxComponent): + """display the workflow history for entities supporting it""" + __regid__ = 'wfhistory' + __select__ = component.EntityCtxComponent.__select__ & WFHistoryView.__select__ + context = 'navcontentbottom' + title = _('Workflow history') + + def render_body(self, w): + self.entity.view('wfhistory', w=w, title=None) + + +class InContextWithStateView(EntityView): + """display incontext view for an entity as well as its current state""" + __regid__ = 'incontext-state' + __select__ = adaptable('IWorkflowable') + def entity_call(self, entity): + iwf = entity.cw_adapt_to('IWorkflowable') + self.w(u'%s [%s]' % (entity.view('incontext'), iwf.printable_state)) + + +# workflow actions ############################################################# + +class WorkflowActions(action.Action): + """fill 'workflow' sub-menu of the actions box""" + __regid__ = 'workflow' + __select__ = (action.Action.__select__ & one_line_rset() & + relation_possible('in_state')) + + submenu = _('workflow') + order = 10 + + def fill_menu(self, box, menu): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + menu.label = u'%s: %s' % (self._cw._('state'), + entity.cw_adapt_to('IWorkflowable').printable_state) + menu.append_anyway = True + super(WorkflowActions, self).fill_menu(box, menu) + + def actual_actions(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + hastr = False + for tr in iworkflowable.possible_transitions(): + url = entity.absolute_url(vid='statuschange', treid=tr.eid) + yield self.build_action(self._cw._(tr.name), url) + hastr = True + # don't propose to see wf if user can't pass any transition + if hastr: + wfurl = iworkflowable.current_workflow.absolute_url() + yield self.build_action(self._cw._('view workflow'), wfurl) + if iworkflowable.workflow_history: + wfurl = entity.absolute_url(vid='wfhistory') + yield self.build_action(self._cw._('view history'), wfurl) + + +# workflow entity types views ################################################## + +_pvs = uicfg.primaryview_section +_pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden') +_pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden') +_pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden') +_pvs.tag_object_of(('*', 'default_workflow', 'Workflow'), 'hidden') + +_abaa = uicfg.actionbox_appearsin_addmenu +_abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False) +_abaa.tag_subject_of(('State', 'allowed_transition', 'BaseTransition'), False) +_abaa.tag_object_of(('SubWorkflowExitPoint', 'destination_state', 'State'), + False) +_abaa.tag_object_of(('State', 'state_of', 'Workflow'), True) +_abaa.tag_object_of(('BaseTransition', 'transition_of', 'Workflow'), False) +_abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True) +_abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True) + +class WorkflowPrimaryView(TabbedPrimaryView): + __select__ = is_instance('Workflow') + tabs = [ _('wf_tab_info'), _('wfgraph'),] + default_tab = 'wf_tab_info' + + +class StateInContextView(EntityView): + """convenience trick, State's incontext view should not be clickable""" + __regid__ = 'incontext' + __select__ = is_instance('State') + + def cell_call(self, row, col): + self.w(xml_escape(self._cw.view('textincontext', self.cw_rset, + row=row, 
col=col))) + +class WorkflowTabTextView(PrimaryTab): + __regid__ = 'wf_tab_info' + __select__ = PrimaryTab.__select__ & one_line_rset() & is_instance('Workflow') + + def render_entity_attributes(self, entity): + _ = self._cw._ + self.w(u'
      %s
      ' % (entity.printable_value('description'))) + self.w(u'%s%s' % (_("workflow_of").capitalize(), _(" :"))) + html = [] + for e in entity.workflow_of: + view = e.view('outofcontext') + if entity.eid == e.default_workflow[0].eid: + view += u' [%s]' % _('default_workflow') + html.append(view) + self.w(', '.join(v for v in html)) + self.w(u'

      %s

      ' % _("Transition_plural")) + rset = self._cw.execute( + 'Any T,T,DS,T,TT ORDERBY TN WHERE T transition_of WF, WF eid %(x)s,' + 'T type TT, T name TN, T destination_state DS?', {'x': entity.eid}) + self.wview('table', rset, 'null', + cellvids={ 1: 'trfromstates', 2: 'outofcontext', 3:'trsecurity',}, + headers = (_('Transition'), _('from_state'), + _('to_state'), _('permissions'), _('type') ), + ) + + +class TransitionSecurityTextView(EntityView): + __regid__ = 'trsecurity' + __select__ = is_instance('Transition') + + def cell_call(self, row, col): + _ = self._cw._ + entity = self.cw_rset.get_entity(self.cw_row, self.cw_col) + if entity.require_group: + self.w(u'
      %s%s %s
      ' % + (_('groups'), _(" :"), + u', '.join((g.view('incontext') for g + in entity.require_group)))) + if entity.condition: + self.w(u'
      %s%s %s
      ' % + ( _('conditions'), _(" :"), + u'
      '.join((e.dc_title() for e + in entity.condition)))) + +class TransitionAllowedTextView(EntityView): + __regid__ = 'trfromstates' + __select__ = is_instance('Transition') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(self.cw_row, self.cw_col) + self.w(u', '.join((e.view('outofcontext') for e + in entity.reverse_allowed_transition))) + + +# workflow entity types edition ################################################ + +def _wf_items_for_relation(req, wfeid, wfrelation, field): + wf = req.entity_from_eid(wfeid) + rschema = req.vreg.schema[field.name] + param = 'toeid' if field.role == 'subject' else 'fromeid' + return sorted((e.view('combobox'), unicode(e.eid)) + for e in getattr(wf, 'reverse_%s' % wfrelation) + if rschema.has_perm(req, 'add', **{param: e.eid})) + +# TrInfo +_afs.tag_subject_of(('TrInfo', 'to_state', '*'), 'main', 'hidden') +_afs.tag_subject_of(('TrInfo', 'from_state', '*'), 'main', 'hidden') +_afs.tag_attribute(('TrInfo', 'tr_count'), 'main', 'hidden') + +# BaseTransition +# XXX * allowed_transition BaseTransition +# XXX BaseTransition destination_state * + +def transition_states_vocabulary(form, field): + entity = form.edited_entity + if entity.has_eid(): + wfeid = entity.transition_of[0].eid + else: + eids = form.linked_to.get(('transition_of', 'subject')) + if not eids: + return [] + wfeid = eids[0] + return _wf_items_for_relation(form._cw, wfeid, 'state_of', field) + +_afs.tag_subject_of(('*', 'destination_state', '*'), 'main', 'attributes') +_affk.tag_subject_of(('*', 'destination_state', '*'), + {'choices': transition_states_vocabulary}) +_afs.tag_object_of(('*', 'allowed_transition', '*'), 'main', 'attributes') +_affk.tag_object_of(('*', 'allowed_transition', '*'), + {'choices': transition_states_vocabulary}) + +# State + +def state_transitions_vocabulary(form, field): + entity = form.edited_entity + if entity.has_eid(): + wfeid = entity.state_of[0].eid + else : + eids = form.linked_to.get(('state_of', 'subject')) + if not eids: + return [] + wfeid = eids[0] + return _wf_items_for_relation(form._cw, wfeid, 'transition_of', field) + +_afs.tag_subject_of(('State', 'allowed_transition', '*'), 'main', 'attributes') +_affk.tag_subject_of(('State', 'allowed_transition', '*'), + {'choices': state_transitions_vocabulary}) + + +# adaptaters ################################################################### + +class WorkflowIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('Workflow') + # XXX what if workflow of multiple types? 
+ def parent_entity(self): + return self.entity.workflow_of and self.entity.workflow_of[0] or None + +class WorkflowItemIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('BaseTransition', 'State') + def parent_entity(self): + return self.entity.workflow + +class TransitionItemIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('SubWorkflowExitPoint') + def parent_entity(self): + return self.entity.reverse_subworkflow_exit[0] + +class TrInfoIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): + __select__ = is_instance('TrInfo') + def parent_entity(self): + return self.entity.for_entity + + +# workflow images ############################################################## + +class WorkflowDotPropsHandler(DotPropsHandler): + + def node_properties(self, stateortransition): + """return default DOT drawing options for a state or transition""" + props = super(WorkflowDotPropsHandler, self).node_properties(stateortransition) + if hasattr(stateortransition, 'state_of'): + props['shape'] = 'box' + props['style'] = 'filled' + if stateortransition.reverse_initial_state: + props['fillcolor'] = '#88CC88' + else: + props['shape'] = 'ellipse' + return props + + +class WorkflowVisitor(object): + def __init__(self, entity): + self.entity = entity + + def nodes(self): + for state in self.entity.reverse_state_of: + state.complete() + yield state.eid, state + for transition in self.entity.reverse_transition_of: + transition.complete() + yield transition.eid, transition + + def edges(self): + for transition in self.entity.reverse_transition_of: + for incomingstate in transition.reverse_allowed_transition: + yield incomingstate.eid, transition.eid, transition + for outgoingstate in transition.potential_destinations(): + yield transition.eid, outgoingstate.eid, transition + +class WorkflowGraphView(DotGraphView): + __regid__ = 'wfgraph' + __select__ = EntityView.__select__ & one_line_rset() & is_instance('Workflow') + + def build_visitor(self, entity): + return WorkflowVisitor(entity) + + def build_dotpropshandler(self): + return WorkflowDotPropsHandler(self._cw) + + +@add_metaclass(class_deprecated) +class TmpPngView(TmpFileViewMixin, EntityView): + __deprecation_warning__ = '[3.18] %(cls)s is deprecated' + __regid__ = 'tmppng' + __select__ = match_form_params('tmpfile') + content_type = 'image/png' + binary = True + + def cell_call(self, row=0, col=0): + key = self._cw.form['tmpfile'] + if key not in self._cw.session.data: + # the temp file is gone and there's nothing + # we can do about it + # we should probably write it to some well + # behaved place and serve it + return + tmpfile = self._cw.session.data.pop(key) + self.w(open(tmpfile, 'rb').read()) + os.unlink(tmpfile) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/xbel.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/xbel.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,69 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""xbel views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from six.moves import range + +from logilab.mtconverter import xml_escape + +from cubicweb.predicates import is_instance +from cubicweb.view import EntityView +from cubicweb.web.views.xmlrss import XMLView + + +class XbelView(XMLView): + __regid__ = 'xbel' + title = _('xbel export') + templatable = False + content_type = 'text/xml' #application/xbel+xml + + def cell_call(self, row, col): + self.wview('xbelitem', self.cw_rset, row=row, col=col) + + def call(self): + """display a list of entities by calling their view""" + self.w(u'\n' % self._cw.encoding) + self.w(u'') + self.w(u'') + self.w(u'%s' % self._cw._('bookmarks')) + for i in range(self.cw_rset.rowcount): + self.cell_call(i, 0) + self.w(u"") + + +class XbelItemView(EntityView): + __regid__ = 'xbelitem' + + def cell_call(self, row, col): + entity = self.cw_rset.complete_entity(row, col) + self.w(u'' % xml_escape(self.url(entity))) + self.w(u' %s' % xml_escape(entity.dc_title())) + self.w(u'') + + def url(self, entity): + return entity.absolute_url() + + +class XbelItemBookmarkView(XbelItemView): + __select__ = is_instance('Bookmark') + + def url(self, entity): + return entity.actual_url() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/views/xmlrss.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/views/xmlrss.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,298 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""base xml and rss views""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +from base64 import b64encode +from time import timezone + +from six.moves import range + +from logilab.mtconverter import xml_escape + +from cubicweb.predicates import (is_instance, non_final_entity, one_line_rset, + appobject_selectable, adaptable) +from cubicweb.view import EntityView, EntityAdapter, AnyRsetView, Component +from cubicweb.uilib import simple_sgml_tag +from cubicweb.web import httpcache, component + +def encode_bytes(value): + return '' % b64encode(value.getvalue()) + +# see cubicweb.sobjects.parser.DEFAULT_CONVERTERS +SERIALIZERS = { + 'String': xml_escape, + 'Bytes': encode_bytes, + 'Date': lambda x: x.strftime('%Y-%m-%d'), + 'Datetime': lambda x: x.strftime('%Y-%m-%d %H:%M:%S'), + 'Time': lambda x: x.strftime('%H:%M:%S'), + 'TZDatetime': lambda x: x.strftime('%Y-%m-%d %H:%M:%S'), # XXX TZ + 'TZTime': lambda x: x.strftime('%H:%M:%S'), + 'Interval': lambda x: x.days * 60*60*24 + x.seconds, + } + +# base xml views ############################################################## + +class XMLView(EntityView): + """xml view for entities""" + __regid__ = 'xml' + title = _('xml export (entities)') + templatable = False + content_type = 'text/xml' + xml_root = 'rset' + item_vid = 'xmlitem' + + def cell_call(self, row, col): + self.wview(self.item_vid, self.cw_rset, row=row, col=col) + + def call(self): + """display a list of entities by calling their view""" + self.w(u'\n' % self._cw.encoding) + self.w(u'<%s size="%s">\n' % (self.xml_root, len(self.cw_rset))) + for i in range(self.cw_rset.rowcount): + self.cell_call(i, 0) + self.w(u'\n' % self.xml_root) + + +class XMLItemView(EntityView): + __regid__ = 'xmlitem' + + def entity_call(self, entity): + """element as an item for an xml feed""" + entity.complete() + source = entity.cw_metainformation()['source']['uri'] + self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n' + % (entity.cw_etype, entity.eid, xml_escape(entity.cwuri), + xml_escape(source))) + for rschema, attrschema in sorted(entity.e_schema.attribute_definitions()): + attr = rschema.type + if attr in ('eid', 'cwuri'): + continue + else: + try: + value = entity.cw_attr_cache[attr] + except KeyError: + # Bytes + continue + if value is None: + self.w(u' <%s/>\n' % attr) + else: + try: + value = SERIALIZERS[attrschema](value) + except KeyError: + pass + self.w(u' <%s>%s\n' % (attr, value, attr)) + for relstr in self._cw.list_form_param('relation'): + try: + rtype, role = relstr.split('-') + except ValueError: + self.error('badly formated relation name %r', relstr) + continue + if role == 'subject': + getrschema = entity.e_schema.subjrels + elif role == 'object': + getrschema = entity.e_schema.objrels + else: + self.error('badly formated relation name %r', relstr) + continue + if not rtype in getrschema: + self.error('unexisting relation %r', relstr) + continue + self.w(u' <%s role="%s">\n' % (rtype, role)) + self.wview('xmlrelateditem', entity.related(rtype, role, safe=True), 'null') + self.w(u' \n' % rtype) + self.w(u'\n' % (entity.e_schema)) + + +class XMLRelatedItemView(EntityView): + __regid__ = 'xmlrelateditem' + add_div_section = False + + def entity_call(self, entity): + # XXX put unique attributes as xml attribute, they are much probably + # used to search existing entities in client data feed, and putting it + # here may avoid an extra request to get those attributes values + self.w(u' <%s eid="%s" cwuri="%s"/>\n' + % (entity.e_schema, entity.eid, 
xml_escape(entity.cwuri))) + + +class XMLRelatedItemStateView(XMLRelatedItemView): + __select__ = is_instance('State') + + def entity_call(self, entity): + self.w(u' <%s eid="%s" cwuri="%s" name="%s"/>\n' + % (entity.e_schema, entity.eid, xml_escape(entity.cwuri), + xml_escape(entity.name))) + + +class XMLRsetView(AnyRsetView): + """dumps raw rset as xml""" + __regid__ = 'rsetxml' + title = _('xml export') + templatable = False + content_type = 'text/xml' + xml_root = 'rset' + + def call(self): + w = self.w + rset, descr = self.cw_rset, self.cw_rset.description + eschema = self._cw.vreg.schema.eschema + labels = self.columns_labels(tr=False) + w(u'\n' % self._cw.encoding) + w(u'<%s query="%s">\n' % (self.xml_root, xml_escape(rset.printable_rql()))) + for rowindex, row in enumerate(self.cw_rset): + w(u' \n') + for colindex, val in enumerate(row): + etype = descr[rowindex][colindex] + tag = labels[colindex] + attrs = {} + if '(' in tag: + attrs['expr'] = tag + tag = 'funccall' + if val is not None and not eschema(etype).final: + attrs['eid'] = val + # csvrow.append(val) # val is eid in that case + val = self._cw.view('textincontext', rset, + row=rowindex, col=colindex) + else: + val = self._cw.view('final', rset, row=rowindex, + col=colindex, format='text/plain') + w(simple_sgml_tag(tag, val, **attrs)) + w(u' \n') + w(u'\n' % self.xml_root) + + +# RSS stuff ################################################################### + +class IFeedAdapter(EntityAdapter): + __needs_bw_compat__ = True + __regid__ = 'IFeed' + __select__ = is_instance('Any') + + def rss_feed_url(self): + """return a URL to the rss feed for this entity""" + return self.entity.absolute_url(vid='rss') + + +class RSSFeedURL(Component): + __regid__ = 'rss_feed_url' + __select__ = non_final_entity() + + def feed_url(self): + return self._cw.build_url(rql=self.cw_rset.limited_rql(), vid='rss') + + +class RSSEntityFeedURL(Component): + __regid__ = 'rss_feed_url' + __select__ = one_line_rset() & adaptable('IFeed') + + def feed_url(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return entity.cw_adapt_to('IFeed').rss_feed_url() + + +class RSSIconBox(component.CtxComponent): + """just display the RSS icon on uniform result set""" + __regid__ = 'rss' + __select__ = (component.CtxComponent.__select__ + & appobject_selectable('components', 'rss_feed_url')) + + visible = False + order = 999 + + def render(self, w, **kwargs): + try: + rss = self._cw.uiprops['RSS_LOGO'] + except KeyError: + self.error('missing RSS_LOGO external resource') + return + urlgetter = self._cw.vreg['components'].select('rss_feed_url', self._cw, + rset=self.cw_rset) + url = urlgetter.feed_url() + w(u'rss\n' % (xml_escape(url), rss)) + + +class RSSView(XMLView): + __regid__ = 'rss' + title = _('rss export') + templatable = False + content_type = 'text/xml' + http_cache_manager = httpcache.MaxAgeHTTPCacheManager + cache_max_age = 60*60*2 # stay in http cache for 2 hours by default + item_vid = 'rssitem' + + def _open(self): + req = self._cw + self.w(u'\n' % req.encoding) + self.w(u'\n') + self.w(u' \n') + self.w(u' %s RSS Feed\n' + % xml_escape(self.page_title())) + self.w(u' %s\n' + % xml_escape(req.form.get('vtitle', ''))) + params = req.form.copy() + params.pop('vid', None) + self.w(u' %s\n' % xml_escape(self._cw.build_url(**params))) + + def _close(self): + self.w(u' \n') + self.w(u'') + + def call(self): + """display a list of entities by calling their view""" + self._open() + for i in range(self.cw_rset.rowcount): + 
self.cell_call(i, 0) + self._close() + + def cell_call(self, row, col): + self.wview(self.item_vid, self.cw_rset, row=row, col=col) + + +class RSSItemView(EntityView): + __regid__ = 'rssitem' + date_format = '%%Y-%%m-%%dT%%H:%%M%+03i:00' % (timezone / 3600) + add_div_section = False + + def cell_call(self, row, col): + entity = self.cw_rset.complete_entity(row, col) + self.w(u'\n') + self.w(u'%s\n' + % xml_escape(entity.absolute_url())) + self.render_title_link(entity) + self.render_description(entity) + self._marker('dc:date', entity.dc_date(self.date_format)) + self.render_entity_creator(entity) + self.w(u'\n') + + def render_description(self, entity): + self._marker('description', entity.dc_description(format='text/html')) + + def render_title_link(self, entity): + self._marker('title', entity.dc_long_title()) + self._marker('link', entity.absolute_url()) + + def render_entity_creator(self, entity): + if entity.creator: + self._marker('dc:creator', entity.dc_creator()) + + def _marker(self, marker, value): + if value: + self.w(u' <%s>%s\n' % (marker, xml_escape(value), marker)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/about_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/about_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,25 @@ +About this site +=============== + +This web application is based on the CubicWeb knowledge management system +from Logilab_. CubicWeb is made of an object database and a web +framework. It allows to develop an application by defining a set of +entities and their relationships, plus a set of views that present the +data selected from the object database using a specific query language. + +This application has a precise schema_ and can be considered an +example of a semantic web database, as it can export the data to XML +and others formats and is not limited to publishing HTML. + +Supported formats: |microformats|_ - JSON_ - |rss|_ - |dublincore|_ + +.. |microformats| image:: /data/microformats-button.png +.. _microformats: http://microformats.org +.. _JSON: http://www.json.org/ +.. |rss| image:: /data/rss-button.png +.. _rss: http://www.rssboard.org +.. |dublincore| image:: /data/dublincore-button.png +.. _dublincore: http://dublincore.org + +.. _Logilab: http://www.logilab.fr/ +.. _schema: ../schema diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/about_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/about_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,28 @@ +.. -*- coding: utf-8 -*- + +A propos de ce site +=================== + +Cette application web est basée sur le système de gestion de connaissance CubicWeb de +Logilab_. CubicWeb est composé d'une base de données objet et d'un framework web. Il +permet de développer une application en définissant un ensemble d'entités et de +relations entre ces entités, ainsi que des vues présentant les données +sélectionnées depuis la base de données en utilisant un langage de requête +spécifique. + +Cette application possède un schéma_ précis et peut être considérée comme un +exemple de site web sémantique, car elle n'est pas limitée à publier du HTML +mais peut également exporter les données en XML et d'autres formats. + +Formats supportés: |microformats|_ - JSON_ - |rss|_ - |dublincore|_ + +.. |microformats| image:: /data/microformats-button.png +.. _microformats: http://microformats.org +.. _JSON: http://www.json.org/ +.. |rss| image:: /data/rss-button.png +.. _rss: http://www.rssboard.org +.. 
|dublincore| image:: /data/dublincore-button.png +.. _dublincore: http://dublincore.org + +.. _Logilab: http://www.logilab.fr/ +.. _schéma: ../schema diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/add_content_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/add_content_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +Add some entities +----------------- +As manager, you can access to entity creation forms by using the `site management`_ page. + +.. _`site management`: ../manage + +As regular user, the index page or the action box may propose some links to create entities according to the context. + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/add_content_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/add_content_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,10 @@ +Ajouter des entités +------------------- +Pour un administrateur, la création des objets est toujours possible directement dans la `page de gestion de site`_. + +.. _`page de gestion de site`: ../manage + +Pour les utilisateurs, la page principale ou la boîte d'action des entités vous permettra la création de nouveaux contenus. +L'intérêt de la dernière méthode est de faciliter l'édition de la relation entre les objets. + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/advanced_usage_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/advanced_usage_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +.. winclude:: advanced_usage_schema + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/advanced_usage_schema_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/advanced_usage_schema_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,9 @@ +Schema of the data +------------------ + +First take a look at the data schema_ then try to remember that you are browsing +through a heap of data by applying stylesheets to the results of queries. This +site is not a content management system with items placed in folders. It is an +interface to a database which applies a view to retreived data. + +.. _schema: ../schema diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/advanced_usage_schema_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/advanced_usage_schema_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,14 @@ +.. -*- coding: utf-8 -*- + +Schéma des données +------------------ + +Commencez d'abord par jeter un œil au schéma_ de l'application, puis essayez de vous rappeler que vous naviguez dans un ensemble de données en appliquant des vues aux résultats de requête. Ce site n'est pas un système de gestion de contenu avec des objets dans des répertoires. C'est une interface vers une base de données qui vous permet d'appliquer une vue aux données récupérées. + +.. _schéma: ../schema + + +Relation entre les objets +------------------------- + +Une relation est définie pour donner un sens précis entre les éléments. Les relations peuvent être facilement ajoutées ou détruites selon le schéma_ de votre application. diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/bookmarks_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/bookmarks_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +Any authenticated user can defines its own bookmarks in the application. 
They +are similar to bookmarks in a browser but are dedicated to link into the +application and they are stored in the database (and so you'll get them from any +browser you use to connect to the application. They are available through the +bookmarks box, on the left of the screen. + +To add a bookmark on the page you're seeing, simply click on the "bookmark this +page" link in the "manage bookmarks" dropdown-menu. You'll then be asked to give +it a title. Once created, it will appear in the bookmarks box. + +In this dropdown-menu, the "pick existing bookmarks" link will show you a list +of bookmarks already defined by the other users of the application. If you are +interested by one of them, simply click the `[+]` sign to the left of it to add it +to your bookmarks. + +The "edit bookmarks" link will show you a form to edit your current bookmarks, +for instance to change their title. If you are using share bookmarks (picked +from another user), those bookmarks won't appear in the form unless you're +allowed to modify them. + +To remove a bookmark, simply click the `[-]` sign to the left of it. diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/bookmarks_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/bookmarks_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,30 @@ +.. -*- coding: utf-8 -*- + +Chaque utilisateur authentifié peut définir ses propres signets dans +l'application. Ils fonctionnent comme des signets dans un navigateur, mais sont +restreints aux liens internes au site et sont conservés dans la base de données, +ce qui vous permet de les retrouver quelque soit le navigateur que vous utilisez +pour vous connecter au site. Ils sont disponibles dans la boîte des signets, à +gauche de l'écran. + +Pour ajouter un signet sur la page que vous être en train de visualiser, vous +devez cliquer sur le menu déroulant `gérer des signets`; puis sur `poser un +signet ici` de la boîte pour faire apparaître le champ d'ajout. Il est alors +demandé de lui donner un titre. Une fois créé, le signet apparaît dans la boîte +des signets. + +Le signe `[-]` en face du signet permet sa suppression immédiate. + +Le lien `récupérer un signet` du menu déroulant permet de voir une liste des +signets déja définis par les autres utilisateurs de l'application. Si l'un de +ces signets vous intéresse, vous pouvez l'ajouter à vos signet en cliquant sur +le signe `[+]` à gauche de celui-ci. + +Le lien `éditer les signets` du menu déroulant permet de modifier vos signets +(par exemple changer leur titre) et le chemin relatif du signet. Si vous +utilisez des signets partagés (que vous avez récupéré d'un autre utilisateur), +ceux-ci n'apparaîtront pas dans le formulaire de modification à moins que vous +ayez le droit de les modifier. + + +.. _`préférences utilisateurs`: ../myprefs diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/custom_view_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/custom_view_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,18 @@ +Once you have read the `RQL tutorial`_ and you know the application'`schema`_, +you're ready to define your own views. Just type the RQL query retreiving data +you're interested in in the search field. If the view used to display them +doesn't fit your need, choose another one in the possible views box (this box +isn't displayed by default, but you can activate it in your `user +preferences`_). Once you're satified, add a `bookmark`_ if you want to reuse +this view later. + +.. _`RQL tutorial`: tut_rql +.. 
_`schema`: ../schema +.. _`user preferences`: ../myprefs +.. _`bookmark`: bookmarks + + +Below are some example of what can be acheived... + +.. winclude:: custom_view_last_update +.. winclude:: custom_view_rss diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/custom_view_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/custom_view_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,21 @@ +.. -*- coding: utf-8 -*- + +Une fois que vous avez lu le `tutoriel RQL`_ et que vous connaissez le `schéma`_ +de l'application, vous êtes prêt à définir vos propres vues. Tapez simplement +dans le champ de recherche la requête qui vous permet de récupérer les données +qui vous intéressent. Si la vue appliquée par défaut ne correspond pas à vos +besoins, sélectionnez en une autre dans la boîte des vues possibles (celle-ci +n'est pas affichée par défaut, vous pouvez l'activer dans vos `préférences +utilisateur`_). Une fois que vous êtes satisfait, ajoutez un `signet`_ si vous +voulez réutiliser votre vue plus tard. + +.. _`tutoriel RQL`: tut_rql +.. _`schéma`: ../schema +.. _`préférences utilisateur`: ../myprefs +.. _`signet`: bookmarks + + +Ci-dessous quelques exemples de ce que l'on peut faire... + +.. winclude:: custom_view_last_update +.. winclude:: custom_view_rss diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/custom_view_last_update_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/custom_view_last_update_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,8 @@ +Latest changes +-------------- + +* table of `all latest changes`_ + +Links below is providing useful RQL query example. + +.. _all latest changes: ../view?rql=Any+M%2CX+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30 diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/custom_view_last_update_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/custom_view_last_update_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,10 @@ +.. -*- coding: utf-8 -*- + +Dernières modifications +----------------------- + +* la table des `derniers changements`_ fournit un exemple d'utilisation de RQL + pour récupérer les derniers changements ayant eu lieu sur ce site. + +.. _`derniers changements`: ../view?rql=Any+M%2CX+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30 + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/custom_view_rss_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/custom_view_rss_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,14 @@ + +.. role:: raw-html(raw) + :format: html + +RSS driven +---------- + +RSS is a pretty useful technology that can be widely used on this +site. Any set of data can be presented as RSS. You can then plug in +an RSS reader into that and follow the site activity. For example : + +:raw-html:`

      latest changes

      ` diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/custom_view_rss_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/custom_view_rss_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,18 @@ +.. -*- coding: utf-8 -*- + +.. role:: raw-html(raw) + :format: html + +Flux RSS +-------- + +RSS est une technologie très utile qui peut être utilisée de manière très +générique sur ce site. N'importe quel résultat de requête peut-être présenté +comme un flux RSS. Vous pouvez donc ensuite connecter ce flux à n'importe quel +lecteur RSS pour suivre l'activité de ce cite. Par exemple pour avoir les +derniers changements sous forme de flux RSS: + +:raw-html:`

      latest changes

      ` + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/glossary_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/glossary_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,14 @@ +action box + Area visible in the upper left column. You have a list of available actions on the entity. The most frequently used entry is `modify`. + +object + All element created in the application + Example: project, ticket, user, ... + +relation editing module + HTML widget that let you define new relations amongst objects. + +relation + It's a kind of 'smart' link between 2 objets of the application. It has a specific sense that determine dynamic behaviour and add a new logic of the content. + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/glossary_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/glossary_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,14 @@ +boîte des actions + boîte visible dans la colonne de gauche en haut à gauche de l'écran. Cette boîte vous permet d'accéder aux actions disponibles pour cette entité. L'entrée la plus utilisée est `modifier`. + +module d'édition des relations entre objets + module HTML qui permet l'édition des relations entre objects. + +objet + Tout élement qui peut être créé au sein de l'application + Exemple: projet, ticket, étiquette, utilisateur, ... + +relation + Une relation est un lien 'intelligent' et bi-directionnel entre 2 objets de l'application. Il est intelligent au sens où il porte un sens et permet de définir des comportements dynamiques à l'application et ajouter une logique métier au contenu. + + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/images/userprefs_en.png Binary file cubicweb/web/wdoc/images/userprefs_en.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/images/userprefs_fr.png Binary file cubicweb/web/wdoc/images/userprefs_fr.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/main_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/main_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +.. winclude:: about diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/search_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/search_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,13 @@ + +.. winclude:: search_sample_queries + +You can as well type complex queries using the RQL_ query language, +used every where to build dynamic pages of this site. + +You can use one of the following prefixes to specify which kind of search you +want: + +* `rql` : RQL query +* `text` : full text search + +.. _RQL: tut_rql diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/search_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/search_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,15 @@ +.. -*- coding: utf-8 -*- + +.. winclude:: search_sample_queries + +Vous pouvez également taper des requêtes complexes en utilisant le langage de +requête RQL_, base sur laquelle ce site est construit. + +Vous pouvez préfixer votre recherche des mots clés suivants pour indiquer le +type de recherche que vous désirez : + +* `rql` : requête RQL +* `text` : recherche plein texte + +.. _RQL: tut_rql + diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/search_sample_queries_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/search_sample_queries_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +Experiment with the search bar. 
Try queries like "card sometextualcontent" or +"card wikiid index" or "345". diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/search_sample_queries_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/search_sample_queries_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,2 @@ +Essayer le champ de recherche. Essayer des recherches comme "fiche +unmotachercher", ou encore "fiche wikiid index" ou "345". diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/standard_usage_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/standard_usage_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,10 @@ +First of all, you can use this site as any web site by clicking on the +different links. The Logo on the top left of this page will lead you +to a start page from which you will be able to navigate to all the +data hosted on this site. + +The bookmarks box on the left hand side provides some useful +shortcuts. + +Most text is indexed and you can search all the content by typing +words in the search box. diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/standard_usage_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/standard_usage_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,11 @@ +.. -*- coding: utf-8 -*- + +Plusieurs éléments par défaut sont proposés pour faciliter la navigation: + +- le logo en haut de la page vous ramène à la page d'accueil du site qui fournit un point de départ pour la navigation vers les données de ce site. + +- la boîte de signet à gauche fournit des raccourcis utiles. + +- la notion d'étiquette vous permet de marquer de manière subjective les pages à se souvenir + +- le contenu textuel des entités est indexé et vous pouvez donc rechercher des entités en tapant simplement les mots à rechercher dans la boîte de recherche. diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/toc.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/toc.xml Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,54 @@ + + Site documentation + Documentation du site + +
      + Standard use + Utilisation standard + +
      + Using bookmarks + Utilisation des signets +
      + +
      + Search + Recherche +
      + +
      + User preferences + Préférences utilisateur +
      + +
      + Add content to the site + Ajouter du contenu au site +
      +
      + +
      + Advanced use + Utilisation avancée + +
      + "Relation Query Language" tutorial + Tutoriel "Relation Query Language" +
      + +
      + Defining your own views + Définir ses propres vues +
      +
      + +
      + Glossary + Glossaire +
      + +
      + A propos de ce site + About this site +
      +
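The wdoc pages above tell the user that any RQL result set can be exposed as an RSS feed and that searches can be prefixed with `rql`. As a rough illustration of what such a feed URL looks like (this sketch is not part of the patch; the base url is a placeholder and the standard `rss` view id is assumed)::

    # Build an RSS feed URL for the latest changes, as described in
    # custom_view_rss_*.rst above.  BASE_URL is a hypothetical instance url.
    from six.moves.urllib.parse import quote

    BASE_URL = 'http://www.example.org/'
    rql = 'Any X ORDERBY D DESC LIMIT 20 WHERE X modification_date D'
    feed_url = '%sview?rql=%s&vid=rss' % (BASE_URL, quote(rql))
    print(feed_url)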
diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/tut_rql_en.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/web/wdoc/tut_rql_en.rst Sat Jan 16 13:48:51 2016 +0100
@@ -0,0 +1,258 @@
+.. -*- coding: utf-8 -*-
+
+Let's learn RQL by practice...
+
+.. contents::
+
+Introduction
+------------
+
+RQL has a syntax and principles inspired by the SQL query language, though
+it is higher level in order to be more intuitive and better suited to making
+advanced queries on a schema-based database.
+
+* the main idea in RQL is that we are following relations between entities
+* attributes are a special case of relations
+* RQL has been inspired by SQL but is at a higher level
+* knowledge of the application's schema is necessary to build RQL queries
+
+To use RQL, you'll have to know the basics of the language as well as have a good
+knowledge of the application's schema. You can always view it using the "schema"
+link in the user's dropdown menu (on the top-right of the screen) or by clicking here_.
+
+.. _here: ../schema
+
+
+Some bits of theory
+-------------------
+
+Variables and types
+~~~~~~~~~~~~~~~~~~~
+Entities and attribute values to follow and / or select are represented in the
+query by *variables*, which must be written upper-case.
+
+Possible types for each variable are deduced from the schema according to
+constraints in the query.
+
+You can explicitly constrain a variable's type using the special relation "is".
+
+Base types
+~~~~~~~~~~
+* `String` (literal: between double or single quotes)
+* `Int`, `Float` (using '.' as separator)
+* `Date`, `Datetime`, `Time` (literal: string YYYY/MM/DD[ hh:mm] or
+  `TODAY` and `NOW` keywords)
+* `Boolean` (keywords `TRUE` and `FALSE`)
+* keyword `NULL`
+
+Operators
+~~~~~~~~~
+* Logical operators: `AND`, `OR`, `,`
+* Mathematical operators: `+`, `-`, `*`, `/`
+* Comparison operators: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN`
+
+  * `=` is the default comparison operator
+
+  * `LIKE` / `~=` allows using the special character `%` in a string to state that
+    the string must begin or end with a given prefix or suffix (as the SQL LIKE operator) ::
+
+      Any X WHERE X name ~= 'Th%'
+      Any X WHERE X name LIKE '%lt'
+
+  * `IN` allows giving a list of possible values ::
+
+      Any X WHERE X name IN ('chauvat', 'fayolle', 'di mascio', 'thenault')
+
+Search query grammar
+~~~~~~~~~~~~~~~~~~~~
+::
+
+  [DISTINCT] <entity type> V1(, V2)*
+  [GROUPBY V1(, V2)*]  [ORDERBY <orderterms>]
+  [WHERE <restriction>]
+  [LIMIT <value>] [OFFSET <value>]
+
+:entity type:
+  Type of the selected variable(s). You'll usually use the `Any` type to avoid specifying
+  any type.
+:restriction:
+  List of relations to follow, in the form
+  `V1 relation V2|<constant value>`
+:orderterms:
+  Defines the selection order: a variable or column number, followed by the sort method
+  (`ASC`, `DESC`), ASC being the default when not specified
+
+Note about grouped queries (i.e. using a `GROUPBY` clause): every selected
+variable should be either grouped or used in an aggregate function.
+
+
+Example schema
+--------------
+
+In this document, we will suppose the application's schema is the one described
+here. Available entity types are:
+
+:Person:
+  ::
+
+    name (String, required)
+    birthday (Date)
+
+
+:Company:
+  ::
+
+    name (String)
+
+
+:Note:
+  ::
+
+    diem (Date)
+    type (String)
+
+
+And relations between those entities: ::
+
+  Person works_for Company
+  Person evaluated_by Note
+  Company evaluated_by Note
+
+
+Meta-data
+~~~~~~~~~
+Every entity type has the following meta-data:
+
+* `eid (Int)`, a unique identifier
+* `creation_date (Datetime)`, date on which the entity has been created
+* `modification_date (Datetime)`, latest date on which the entity has been modified
+
+* `created_by (CWUser)`, relation to the user who created this entity
+
+* `owned_by (CWUser)`, relation to the user(s) considered as owners of this
+  entity, the entity's creator by default
+
+* `is (Eetype)`, special relation to specify a variable type.
+
+A user entity has the following schema:
+
+:CWUser:
+  ::
+
+    login (String) not null
+    password (Password)
+    firstname (String)
+    surname (String)
+
+
+Basic queries
+-------------
+0. *All persons* ::
+
+     Person X
+
+   or ::
+
+     Any X WHERE X is Person
+
+
+1. *The company named Logilab* ::
+
+     Company S WHERE S name 'Logilab'
+
+
+2. *All entities with a "name" attribute whose value starts with 'Log'* ::
+
+     Any S WHERE S name LIKE 'Log%'
+
+   or ::
+
+     Any S WHERE S name ~= 'Log%'
+
+   This query may return Person or Company entities.
+
+
+3. *All persons working for the Logilab company* ::
+
+     Person P WHERE P works_for S, S name "Logilab"
+
+   or ::
+
+     Person P WHERE P works_for S AND S name "Logilab"
+
+
+4. *Companies named Caesium or Logilab* ::
+
+     Company S WHERE S name IN ('Logilab','Caesium')
+
+   or ::
+
+     Company S WHERE S name 'Logilab' OR S name 'Caesium'
+
+
+5. *All companies except the ones named Caesium or Logilab* ::
+
+     Company S WHERE NOT S name IN ('Logilab','Caesium')
+
+   or ::
+
+     Company S WHERE NOT S name 'Logilab' AND NOT S name 'Caesium'
+
+
+6. *Entities evaluated by the note with eid 43* ::
+
+     Any X WHERE X evaluated_by N, N eid 43
+
+
+7. *All persons ordered by birthday from the youngest to the oldest* ::
+
+     Person X ORDERBY D DESC WHERE X birthday D
+
+   Notice that you have to define a variable using the birthday relation in order
+   to use it in the sort term.
+
+
+8. *Number of persons working for each known company* ::
+
+     Any S, COUNT(X) GROUPBY S WHERE X works_for S
+
+   Notice that since you're writing a grouped query on S, X has to be
+   either grouped as well or used in an aggregate function (as in this example).
+
+
+
+Advanced
+--------
+0. *Persons with no name specified (i.e. NULL)* ::
+
+     Person P WHERE P name NULL
+
+
+1. *Persons who are not working for any company* ::
+
+     Person P WHERE NOT P works_for S
+
+
+2. *All companies where the person named toto isn't working* ::
+
+     Company S WHERE NOT P works_for S, P name 'toto'
+
+
+3. *All entities that have been modified between today and yesterday* ::
+
+     Any X WHERE X modification_date <= TODAY, X modification_date >= TODAY - 1
+
+
+4. *All notes without a type, to be done in the next 7 days, ordered by date* ::
+
+     Any N, D where N is Note, N type NULL, N diem D, N diem >= TODAY,
+     N diem < TODAY + 7 ORDERBY D
+
+
+5. *Persons with a homonym (without duplicates)* ::
+
+     DISTINCT Person X,Y where X name NX, Y name NX
+
+   or even better (e.g.
without both (Xeid, Yeid) and (Yeid, Xeid) in the results) :: + + Person X,Y where X name NX, Y name NX, X eid XE, Y eid > XE diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/tut_rql_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/tut_rql_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,268 @@ +.. -*- coding: utf-8 -*- + +Apprenons RQL par la pratique... + +.. contents:: + + +Introduction +------------ + +RQL est assez proche par sa syntaxe et ses principes du langage de requête des +bases de données relationnelles SQL. Il est cependant plus intuitif et mieux +adapté pour faire des recherches avancées sur des bases de données structurées +par un schéma de données. On retiendra les points suivants : + +* RQL est un langage mettant l'accent sur le parcours de relations. +* Les attributs sont considérés comme des cas particuliers de relations. +* RQL s'inspire de SQL mais se veut plus haut niveau. +* Une connaissance du schéma définissant l'application est nécessaire. + +Pour s'en servir, il convient de connaître les règles de base du langage RQL, +mais surtout d'avoir une bonne vision du schéma de données de l'application. Ce +schéma est toujours disponible dans l'application par le lien `schéma`, dans la +boîte affichée en cliquant sur le lien de l'utilisateur connectée (en haut à droite). +Vous pouvez également le voir en cliquant ici_. + +.. _ici: ../schema + + +Un peu de théorie +----------------- + +Variables et typage +~~~~~~~~~~~~~~~~~~~ +Les entités et valeurs à parcourir et / ou séléctionner sont représentées dans +la requête par des *variables* qui doivent être écrites en majuscule + +Les types possibles pour chaque variable sont déduits à partir du schéma en +fonction des contraintes présentes dans la requête. + +On peut contraindre les types possibles pour une variable à l'aide de la +relation spéciale `is`. + +Types de bases +~~~~~~~~~~~~~~ +* `String` (litéral: entre doubles ou simples quotes) +* `Int`, `Float` (le séparateur étant le '.') +* `Date`, `Datetime`, `Time` (litéral: chaîne YYYY/MM/DD[ hh:mm] ou mots-clés + `TODAY` et `NOW`) +* `Boolean` (mots-clés `TRUE` et `FALSE`) +* mot-clé `NULL` + +Opérateurs +~~~~~~~~~~ +* Opérateurs logiques : `AND`, `OR`, `,` +* Opérateurs mathématiques: `+`, `-`, `*`, `/` +* Operateur de comparaisons: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN` + + * L'opérateur `=` est l'opérateur par défaut + + * L'opérateur `LIKE` / `~=` permet d'utiliser le caractère `%` dans une chaine + de caractère pour indiquer que la chaîne doit commencer ou terminer par un + préfix/suffixe :: + + Any X WHERE X nom ~= 'Th%' + Any X WHERE X nom LIKE '%lt' + + * L'opérateur `IN` permet de donner une liste de valeurs possibles :: + + Any X WHERE X nom IN ('chauvat', 'fayolle', 'di mascio', 'thenault') + +Grammaire des requêtes de recherche +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +:: + + [DISTINCT] V1(, V2)* + [GROUPBY V1(, V2)*] [ORDERBY ] + [WHERE ] + [LIMIT ] [OFFSET ] + +:type d'entité: + Type de la ou des variables séléctionnées. + Le type spécial `Any`, revient à ne pas spécifier de type. +:restriction: + liste des relations à parcourir sous la forme + `V1 relation V2|` +:orderterms: + Définition de l'ordre de sélection : variable ou n° de colonne suivie de la + méthode de tri (`ASC`, `DESC`), ASC étant la valeur par défaut + +note pour les requêtes groupées (i.e. 
avec une clause `GROUPBY`) : +toutes les variables sélectionnées doivent être soit groupée soit +aggrégée + + +Schéma +------ + +Nous supposerons dans la suite de ce document que le schéma de l'application est +le suivant. Les différentes entités disponibles sont : + +:Personne: + :: + + nom (String, obligatoire) + datenaiss (Date) + + +:Societe: + :: + + nom (String) + + +:Note: + :: + + diem (Date) + type (String) + + +Et les relations entre elles : :: + + Person travaille_pour Societe + Person evaluee_par Note + Societe evaluee_par Note + + +Méta-données +~~~~~~~~~~~~ +Tous les types d'entités ont les métadonnées suivantes : + +* `eid (Int)`, permettant d'identifier chaque instance de manière unique +* `creation_date (Datetime)`, date de création de l'entité +* `modification_date (Datetime)`, date de dernière modification de l'entité + +* `created_by (CWUser)`, relation vers l'utilisateur ayant créé l'entité + +* `owned_by (CWUser)`, relation vers le où les utilisateurs considérés comme + propriétaire de l'entité, par défaut le créateur de l'entité + +* `is (Eetype)`, relation spéciale permettant de spécifier le + type d'une variable. + +Enfin, le schéma standard d'un utilisateur est le suivant : + +:CWUser: + :: + + login (String, obligatoire) + password (Password) + firstname (String) + surname (String) + + +L'essentiel +----------- +0. *Toutes les personnes* :: + + Personne X + + ou :: + + Any X WHERE X is Personne + + +1. *La societé nommé Logilab* :: + + Societe S WHERE S nom 'Logilab' + + +2. *Toutes les entités ayant un attribut nom commençant par 'Log'* :: + + Any S WHERE S nom LIKE 'Log%' + + ou :: + + Any S WHERE S nom ~= 'Log%' + + Cette requête peut renvoyer des entités de type personne et de type + société. + + +3. *Toutes les personnes travaillant pour la société nommé Logilab* :: + + Personne P WHERE P travaille_pour S, S nom "Logilab" + + ou :: + + Personne P WHERE P travaille_pour S AND S nom "Logilab" + + +4. *Les societés nommées Caesium ou Logilab* :: + + Societe S WHERE S nom IN ('Logilab','Caesium') + + ou :: + + Societe S WHERE S nom 'Logilab' OR S nom 'Caesium' + + +5. *Toutes les societés sauf celles nommées Caesium ou Logilab* :: + + Societe S WHERE NOT S nom IN ('Logilab','Caesium') + + ou :: + + Societe S WHERE NOT S nom 'Logilab' AND NOT S nom 'Caesium' + + +6. *Les entités évalués par la note d'identifiant 43* :: + + Any X WHERE X evaluee_par N, N eid 43 + + +7. *Toutes les personnes triés par date de naissance dans l'ordre antechronologique* :: + + Personne X ORDERBY D DESC WHERE X datenaiss D + + On note qu'il faut définir une variable et la séléctionner pour s'en + servir pour le tri. + + +8. *Nombre de personne travaillant pour chaque société* :: + + Any S, COUNT(X) GROUPBY S WHERE X travaille_pour S + + On note qu'il faut définir une variable pour s'en servir pour le + groupage. De plus les variables séléctionnée doivent être groupée + (mais les variables groupées ne doivent pas forcément être sélectionnées). + + + +Exemples avancés +---------------- +0. *Toutes les personnes dont le champ nom n'est pas spécifié (i.e NULL)* :: + + Personne P WHERE P nom NULL + + +1. *Toutes les personnes ne travaillant pour aucune société* :: + + Personne P WHERE NOT p travaille_pour S + + +2. *Toutes les sociétés où la personne nommée toto ne travaille pas* :: + + Societe S WHERE NOT P travaille_pour S , P nom 'toto' + + +3. 
*Toutes les entités ayant été modifiées entre aujourd'hui et hier* :: + + Any X WHERE X modification_date <= TODAY, X modification_date >= TODAY - 1 + + +4. *Toutes les notes n'ayant pas de type et à effectuer dans les 7 jours, triées par date* :: + + Any N, D where N is Note, N type NULL, N diem D, N diem >= TODAY, + N diem < today + 7 ORDERBY D + + +5. *Les personnes ayant un homonyme (sans doublons)* :: + + DISTINCT Personne X,Y where X nom NX, Y nom NX + + ou mieux (sans avoir (Xeid, Yeid) et (Yeid, Xeid) dans les résultats) :: + + Personne X,Y where X nom NX, Y nom NX, X eid XE, Y eid > XE diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/userprefs_en.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/userprefs_en.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,10 @@ +The personal information describing a User can be modified using the edit form +of the user. You can access it through the dropdown-menu under the link on the +top-right of the window, labeled by your login. In this menu, click the +"profile" link to go to this form. + +Each user can as well customize the site appearance using the "user's +preferences" link in this menu. This will show you a form to configure which +boxes are displayed, in which order, etc... + +.. image:: images/userprefs diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/wdoc/userprefs_fr.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/wdoc/userprefs_fr.rst Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,47 @@ +Les données concernant l'utilisateur sont paramétrables par la page +d'édition de l'utilisateur. Vous pouvez accéder à celle ci par le menu +déroulant apparaissant en cliquant sur le lien en haut à droite de la +fenêtre de l'application, dont l'intitulé est votre login. Dans ce +menu, cliquez sur "information personnelles" pour modifier vos données +personnelles (y compris le mot de passe d'accès à l'application). + +Chaque utilisateur peut également personaliser l'apparence du site via le lien +`préférences utilisateur`_. Ce formulaire permet notamment de configurer les +boîtes qui seront affichées, leur ordre, etc... + +L'administrateur possède quant à lui un menu "configuration du site" qui reprend l'ensemble des préférences utilisateurs mais les applique par défaut au site. + + +Les types de préférences +======================== + +- navigation: détermine des caractériques plus personnelles pour l'ergonomie liée à la taille de votre écran (taille des champs d'entrées, nombre d'éléments à afficher dans des listes, ...) +- propriétés génériques de l'interface: détermine essentiellement la localisation de l'application avec des formats d'affichages particulier (champ date et heure). +- boîtes: éléments dynamiques et optionnels installés par les composants disponibles au sein de l'application. +- composants: éléments permettant l'usage d'une navigation plus évoluée +- composants contextuels: possibilité d'agir sur les comportements par défaut de l'application. + +Changement de la langue +----------------------- +Dans l'onglet **ui -> ui.language**, choisissez la langue voulue + +Changement de l'outil d'édition en ligne +---------------------------------------- +Il est possible de choisir le format de balisage par défaut pour les notes. Par défaut, le format html est proposé pour les débutants avec la possibilité d'utiliser un éditeur en ligne. 
+ +Si vous êtes dans ce cas, vérifiez les deux entrées suivantes: + +- **ui -> ui.default-text-format** à HTML +- **ui -> ui.fckeditor** à 'yes' + +Usage avancé de RQL +------------------- +Il est possible d'afficher les requêtes RQL_ en jeu pour l'affichage d'une page en activant une barre d'entrée spécifique: + +- **components -> rql input box** à 'yes' + +Il est alors possible d'éditer et de relancer toute requête + +.. _`préférences utilisateur`: ../myprefs +.. _RQL: tut_rql +.. image:: images/userprefs diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/webconfig.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/webconfig.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,475 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""web ui configuration for cubicweb instances""" + +__docformat__ = "restructuredtext en" +from cubicweb import _ + +import os +import hmac +from uuid import uuid4 +from os.path import join, exists, split, isdir +from warnings import warn + +from six import text_type + +from logilab.common.decorators import cached, cachedproperty +from logilab.common.deprecation import deprecated +from logilab.common.configuration import merge_options + +from cubicweb import ConfigurationError +from cubicweb.toolsutils import read_config +from cubicweb.cwconfig import CubicWebConfiguration, register_persistent_options + + +register_persistent_options( ( + # site-wide only web ui configuration + ('site-title', + {'type' : 'string', 'default': 'unset title', + 'help': _('site title'), + 'sitewide': True, 'group': 'ui', + }), + ('main-template', + {'type' : 'string', 'default': 'main-template', + 'help': _('id of main template used to render pages'), + 'sitewide': True, 'group': 'ui', + }), + # user web ui configuration + ('fckeditor', + {'type' : 'yn', 'default': False, + 'help': _('should html fields being edited using fckeditor (a HTML ' + 'WYSIWYG editor). 
You should also select text/html as default ' + 'text format to actually get fckeditor.'), + 'group': 'ui', + }), + # navigation configuration + ('page-size', + {'type' : 'int', 'default': 40, + 'help': _('maximum number of objects displayed by page of results'), + 'group': 'navigation', + }), + ('related-limit', + {'type' : 'int', 'default': 8, + 'help': _('maximum number of related entities to display in the primary ' + 'view'), + 'group': 'navigation', + }), + ('combobox-limit', + {'type' : 'int', 'default': 20, + 'help': _('maximum number of entities to display in related combo box'), + 'group': 'navigation', + }), + + )) + + +class WebConfiguration(CubicWebConfiguration): + """the WebConfiguration is a singleton object handling instance's + configuration and preferences + """ + cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set([join('web', 'views')]) + cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['views']) + + options = merge_options(CubicWebConfiguration.options + ( + ('repository-uri', + {'type' : 'string', + 'default': 'inmemory://', + 'help': 'see `cubicweb.dbapi.connect` documentation for possible value', + 'group': 'web', 'level': 2, + }), + + ('anonymous-user', + {'type' : 'string', + 'default': None, + 'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)', + 'group': 'web', 'level': 1, + }), + ('anonymous-password', + {'type' : 'string', + 'default': None, + 'help': 'password of the CubicWeb user account to use for anonymous user, ' + 'if anonymous-user is set', + 'group': 'web', 'level': 1, + }), + ('query-log-file', + {'type' : 'string', + 'default': None, + 'help': 'web instance query log file', + 'group': 'web', 'level': 3, + }), + # web configuration + ('https-url', + {'type' : 'string', + 'default': None, + 'help': 'web server root url on https. By specifying this option your '\ + 'site can be available as an http and https site. Authenticated users '\ + 'will in this case be authenticated and once done navigate through the '\ + 'https site. IMPORTANTE NOTE: to do this work, you should have your '\ + 'apache redirection include "https" as base url path so cubicweb can '\ + 'differentiate between http vs https access. For instance: \n'\ + 'RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]\n'\ + 'where the cubicweb web server is listening on port 8080.', + 'group': 'main', 'level': 3, + }), + ('datadir-url', + {'type': 'string', 'default': None, + 'help': ('base url for static data, if different from "${base-url}/data/". ' + 'If served from a different domain, that domain should allow ' + 'cross-origin requests.'), + 'group': 'web', + }), + ('auth-mode', + {'type' : 'choice', + 'choices' : ('cookie', 'http'), + 'default': 'cookie', + 'help': 'authentication mode (cookie / http)', + 'group': 'web', 'level': 3, + }), + ('realm', + {'type' : 'string', + 'default': 'cubicweb', + 'help': 'realm to use on HTTP authentication mode', + 'group': 'web', 'level': 3, + }), + ('http-session-time', + {'type' : 'time', + 'default': 0, + 'help': "duration of the cookie used to store session identifier. " + "If 0, the cookie will expire when the user exist its browser. " + "Should be 0 or greater than repository\'s session-time.", + 'group': 'web', 'level': 2, + }), + ('cleanup-anonymous-session-time', + {'type' : 'time', + 'default': '5min', + 'help': 'Same as cleanup-session-time but specific to anonymous ' + 'sessions. 
You can have a much smaller timeout here since it will be ' + 'transparent to the user. Default to 5min.', + 'group': 'web', 'level': 3, + }), + ('embed-allowed', + {'type' : 'regexp', + 'default': None, + 'help': 'regular expression matching URLs that may be embeded. \ +leave it blank if you don\'t want the embedding feature, or set it to ".*" \ +if you want to allow everything', + 'group': 'web', 'level': 3, + }), + ('submit-mail', + {'type' : 'string', + 'default': None, + 'help': ('Mail used as recipient to report bug in this instance, ' + 'if you want this feature on'), + 'group': 'web', 'level': 2, + }), + + ('language-negociation', + {'type' : 'yn', + 'default': True, + 'help': 'use Accept-Language http header to try to set user '\ + 'interface\'s language according to browser defined preferences', + 'group': 'web', 'level': 2, + }), + + ('print-traceback', + {'type' : 'yn', + 'default': CubicWebConfiguration.mode != 'system', + 'help': 'print the traceback on the error page when an error occurred', + 'group': 'web', 'level': 2, + }), + + ('captcha-font-file', + {'type' : 'string', + 'default': join(CubicWebConfiguration.shared_dir(), 'data', 'porkys.ttf'), + 'help': 'True type font to use for captcha image generation (you \ +must have the python imaging library installed to use captcha)', + 'group': 'web', 'level': 3, + }), + ('captcha-font-size', + {'type' : 'int', + 'default': 25, + 'help': 'Font size to use for captcha image generation (you must \ +have the python imaging library installed to use captcha)', + 'group': 'web', 'level': 3, + }), + + ('concat-resources', + {'type' : 'yn', + 'default': False, + 'help': 'use modconcat-like URLS to concat and serve JS / CSS files', + 'group': 'web', 'level': 2, + }), + ('anonymize-jsonp-queries', + {'type': 'yn', + 'default': True, + 'help': 'anonymize the connection before executing any jsonp query.', + 'group': 'web', 'level': 1 + }), + ('generate-staticdir', + {'type': 'yn', + 'default': True, + 'help': 'Generate the static data resource directory on upgrade.', + 'group': 'web', 'level': 2, + }), + ('staticdir-path', + {'type': 'string', + 'default': None, + 'help': 'The static data resource directory path.', + 'group': 'web', 'level': 2, + }), + ('access-control-allow-origin', + {'type' : 'csv', + 'default': (), + 'help':('comma-separated list of allowed origin domains or "*" for any domain'), + 'group': 'web', 'level': 2, + }), + ('access-control-allow-methods', + {'type' : 'csv', + 'default': (), + 'help': ('comma-separated list of allowed HTTP methods'), + 'group': 'web', 'level': 2, + }), + ('access-control-max-age', + {'type' : 'int', + 'default': None, + 'help': ('maximum age of cross-origin resource sharing (in seconds)'), + 'group': 'web', 'level': 2, + }), + ('access-control-expose-headers', + {'type' : 'csv', + 'default': (), + 'help':('comma-separated list of HTTP headers the application declare in response to a preflight request'), + 'group': 'web', 'level': 2, + }), + ('access-control-allow-headers', + {'type' : 'csv', + 'default': (), + 'help':('comma-separated list of HTTP headers the application may set in the response'), + 'group': 'web', 'level': 2, + }), + )) + + def __init__(self, *args, **kwargs): + super(WebConfiguration, self).__init__(*args, **kwargs) + self.uiprops = None + self.https_uiprops = None + self.datadir_url = None + self.https_datadir_url = None + + def fckeditor_installed(self): + if self.uiprops is None: + return False + return exists(self.uiprops.get('FCKEDITOR_PATH', '')) + + def 
cwproperty_definitions(self): + for key, pdef in super(WebConfiguration, self).cwproperty_definitions(): + if key == 'ui.fckeditor' and not self.fckeditor_installed(): + continue + yield key, pdef + + @deprecated('[3.22] call req.cnx.repo.get_versions() directly') + def vc_config(self): + return self.repository().get_versions() + + def anonymous_user(self): + """return a login and password to use for anonymous users. + + None may be returned for both if anonymous connection is not + allowed or if an empty login is used in configuration + """ + try: + user = self['anonymous-user'] or None + passwd = self['anonymous-password'] + if user: + user = text_type(user) + except KeyError: + user, passwd = None, None + except UnicodeDecodeError: + raise ConfigurationError("anonymous information should only contains ascii") + return user, passwd + + @cachedproperty + def _instance_salt(self): + """This random key/salt is used to sign content to be sent back by + browsers, eg. in the error report form. + """ + return str(uuid4()).encode('ascii') + + def sign_text(self, text): + """sign some text for later checking""" + # hmac.new expect bytes + if isinstance(text, text_type): + text = text.encode('utf-8') + # replace \r\n so we do not depend on whether a browser "reencode" + # original message using \r\n or not + return hmac.new(self._instance_salt, + text.strip().replace(b'\r\n', b'\n')).hexdigest() + + def check_text_sign(self, text, signature): + """check the text signature is equal to the given signature""" + return self.sign_text(text) == signature + + def locate_resource(self, rid): + """return the (directory, filename) where the given resource + may be found + """ + return self._fs_locate(rid, 'data') + + def locate_doc_file(self, fname): + """return the directory where the given resource may be found""" + return self._fs_locate(fname, 'wdoc')[0] + + @cached + def _fs_path_locate(self, rid, rdirectory): + """return the directory where the given resource may be found""" + path = [self.apphome] + self.cubes_path() + [join(self.shared_dir())] + for directory in path: + if exists(join(directory, rdirectory, rid)): + return directory + + def _fs_locate(self, rid, rdirectory): + """return the (directory, filename) where the given resource + may be found + """ + directory = self._fs_path_locate(rid, rdirectory) + if directory is None: + return None, None + if rdirectory == 'data' and rid.endswith('.css'): + if rid == 'cubicweb.old.css': + # @import('cubicweb.css') in css + warn('[3.20] cubicweb.old.css has been renamed back to cubicweb.css', + DeprecationWarning) + rid = 'cubicweb.css' + return self.uiprops.process_resource(join(directory, rdirectory), rid), rid + return join(directory, rdirectory), rid + + def locate_all_files(self, rid, rdirectory='wdoc'): + """return all files corresponding to the given resource""" + path = [self.apphome] + self.cubes_path() + [join(self.shared_dir())] + for directory in path: + fpath = join(directory, rdirectory, rid) + if exists(fpath): + yield join(fpath) + + def load_configuration(self, **kw): + """load instance's configuration files""" + super(WebConfiguration, self).load_configuration(**kw) + # load external resources definition + self._init_base_url() + self._build_ui_properties() + + def _init_base_url(self): + # normalize base url(s) + baseurl = self['base-url'] or self.default_base_url() + if baseurl and baseurl[-1] != '/': + baseurl += '/' + if not (self.repairing or self.creating): + self.global_set_option('base-url', baseurl) + self.datadir_url = 
self['datadir-url'] + if self.datadir_url: + if self.datadir_url[-1] != '/': + self.datadir_url += '/' + if self.mode != 'test': + self.datadir_url += '%s/' % self.instance_md5_version() + self.https_datadir_url = self.datadir_url + return + httpsurl = self['https-url'] + data_relpath = self.data_relpath() + if httpsurl: + if httpsurl[-1] != '/': + httpsurl += '/' + if not self.repairing: + self.global_set_option('https-url', httpsurl) + self.https_datadir_url = httpsurl + data_relpath + self.datadir_url = baseurl + data_relpath + + def data_relpath(self): + if self.mode == 'test': + return 'data/' + return 'data/%s/' % self.instance_md5_version() + + def _build_ui_properties(self): + # self.datadir_url[:-1] to remove trailing / + from cubicweb.web.propertysheet import PropertySheet + cachedir = join(self.appdatahome, 'uicache') + self.check_writeable_uid_directory(cachedir) + self.uiprops = PropertySheet( + cachedir, + data=lambda x: self.datadir_url + x, + datadir_url=self.datadir_url[:-1]) + self._init_uiprops(self.uiprops) + if self['https-url']: + cachedir = join(self.appdatahome, 'uicachehttps') + self.check_writeable_uid_directory(cachedir) + self.https_uiprops = PropertySheet( + cachedir, + data=lambda x: self.https_datadir_url + x, + datadir_url=self.https_datadir_url[:-1]) + self._init_uiprops(self.https_uiprops) + + def _init_uiprops(self, uiprops): + libuiprops = join(self.shared_dir(), 'data', 'uiprops.py') + uiprops.load(libuiprops) + for path in reversed([self.apphome] + self.cubes_path()): + self._load_ui_properties_file(uiprops, path) + self._load_ui_properties_file(uiprops, self.apphome) + datadir_url = uiprops.context['datadir_url'] + if (datadir_url+'/cubicweb.old.css') in uiprops['STYLESHEETS']: + warn('[3.20] cubicweb.old.css has been renamed back to cubicweb.css', + DeprecationWarning) + idx = uiprops['STYLESHEETS'].index(datadir_url+'/cubicweb.old.css') + uiprops['STYLESHEETS'][idx] = datadir_url+'/cubicweb.css' + if datadir_url+'/cubicweb.reset.css' in uiprops['STYLESHEETS']: + warn('[3.20] cubicweb.reset.css is obsolete', DeprecationWarning) + uiprops['STYLESHEETS'].remove(datadir_url+'/cubicweb.reset.css') + cubicweb_js_url = datadir_url + '/cubicweb.js' + if cubicweb_js_url not in uiprops['JAVASCRIPTS']: + uiprops['JAVASCRIPTS'].insert(0, cubicweb_js_url) + + def _load_ui_properties_file(self, uiprops, path): + uipropsfile = join(path, 'uiprops.py') + if exists(uipropsfile): + self.debug('loading %s', uipropsfile) + uiprops.load(uipropsfile) + + # static files handling ################################################### + + @property + def static_directory(self): + return join(self.appdatahome, 'static') + + def static_file_exists(self, rpath): + return exists(join(self.static_directory, rpath)) + + def static_file_open(self, rpath, mode='wb'): + staticdir = self.static_directory + rdir, filename = split(rpath) + if rdir: + staticdir = join(staticdir, rdir) + if not isdir(staticdir) and 'w' in mode: + os.makedirs(staticdir) + return open(join(staticdir, filename), mode) + + def static_file_add(self, rpath, data): + stream = self.static_file_open(rpath) + stream.write(data) + stream.close() + + def static_file_del(self, rpath): + if self.static_file_exists(rpath): + os.remove(join(self.static_directory, rpath)) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/web/webctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/web/webctl.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,136 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-ctl commands and command handlers common to twisted/modpython +web configuration +""" +from __future__ import print_function + +__docformat__ = "restructuredtext en" + +import os, os.path as osp +from shutil import copy, rmtree + +from logilab.common.shellutils import ASK + +from cubicweb import ExecutionError +from cubicweb.cwctl import CWCTL +from cubicweb.cwconfig import CubicWebConfiguration as cwcfg +from cubicweb.toolsutils import Command, CommandHandler, underline_title + + +try: + from os import symlink as linkdir +except ImportError: + from shutil import copytree as linkdir + + +class WebCreateHandler(CommandHandler): + cmdname = 'create' + + def bootstrap(self, cubes, automatic=False, inputlevel=0): + """bootstrap this configuration""" + if not automatic: + print('\n' + underline_title('Generic web configuration')) + config = self.config + config.input_config('web', inputlevel) + if ASK.confirm('Allow anonymous access ?', False): + config.global_set_option('anonymous-user', 'anon') + config.global_set_option('anonymous-password', 'anon') + + def postcreate(self, *args, **kwargs): + """hooks called once instance's initialization has been completed""" + + +class GenStaticDataDirMixIn(object): + """Create a directory merging all data directory content from cubes and CW. + """ + def generate_static_dir(self, config, dest=None, ask_clean=False, repo=None): + if not dest: + dest = config['staticdir-path'] + if not dest: + dest = osp.join(config.appdatahome, 'data') + if osp.exists(dest): + if (not ask_clean or + not (config.verbosity and + ASK.confirm('Remove existing data directory %s?' % dest))): + raise ExecutionError('Directory %s already exists. ' + 'Remove it first.' 
% dest) + rmtree(dest) + config.quick_start = True # notify this is not a regular start + # list all resources (no matter their order) + resources = set() + for datadir in self._datadirs(config, repo=repo): + for dirpath, dirnames, filenames in os.walk(datadir): + rel_dirpath = dirpath[len(datadir)+1:] + resources.update(osp.join(rel_dirpath, f) for f in filenames) + # locate resources and copy them to destination + for resource in resources: + dest_resource = osp.join(dest, resource) + dirname = osp.dirname(dest_resource) + if not osp.isdir(dirname): + os.makedirs(dirname) + resource_dir, resource_path = config.locate_resource(resource) + copy(osp.join(resource_dir, resource_path), dest_resource) + # handle md5 version subdirectory + linkdir(dest, osp.join(dest, config.instance_md5_version())) + print('You can use apache rewrite rule below :\n' + 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest) + + def _datadirs(self, config, repo=None): + if repo is None: + repo = config.repository() + if config._cubes is None: + # web only config + config.init_cubes(repo.get_cubes()) + for cube in repo.get_cubes(): + cube_datadir = osp.join(cwcfg.cube_dir(cube), 'data') + if osp.isdir(cube_datadir): + yield cube_datadir + yield osp.join(config.shared_dir(), 'data') + + +class WebUpgradeHandler(CommandHandler, GenStaticDataDirMixIn): + cmdname = 'upgrade' + + def postupgrade(self, repo): + config = self.config + if not config['generate-staticdir']: + return + self.generate_static_dir(config, ask_clean=True, repo=repo) + + +class GenStaticDataDir(Command, GenStaticDataDirMixIn): + """Create a directory merging all data directory content from cubes and CW. + """ + name = 'gen-static-datadir' + arguments = ' [dirpath]' + min_args = 1 + max_args = 2 + + options = () + + def run(self, args): + appid = args.pop(0) + config = cwcfg.config_for(appid) + dest = None + if args: + dest = args[0] + self.generate_static_dir(config, dest) + + +CWCTL.register(GenStaticDataDir) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/__init__.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,63 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""This package contains all WSGI specific code for cubicweb + +NOTE: this package borrows a lot of code to Django + (http://www.djangoproject.com) and to the wsgiref module + of the python2.5's stdlib. 
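The wsgi package introduced here ties these pieces together much as `cubicweb/wsgi/server.py` below does. A minimal sketch, assuming an already created instance whose application id is the placeholder `myinstance` (a real deployment would also start the repository's looping tasks and shut the repository down, as `server.py` shows)::

    # Illustrative only: serve a CubicWeb instance through the WSGI stack.
    from wsgiref import simple_server

    from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
    from cubicweb.wsgi.handler import CubicWebWSGIApplication

    config = cwcfg.config_for('myinstance')  # placeholder appid
    application = CubicWebWSGIApplication(config)
    httpd = simple_server.make_server('127.0.0.1', 8080, application)
    httpd.serve_forever()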
+ +WSGI corresponding PEP: http://www.python.org/dev/peps/pep-0333/ + +""" +__docformat__ = "restructuredtext en" + +from email import message, message_from_string +from pprint import pformat as _pformat + +from six.moves.http_cookies import SimpleCookie + +def pformat(obj): + """pretty prints `obj` if possible""" + try: + return _pformat(obj) + except Exception: + return u'' + +def normalize_header(header): + """returns a normalized header name + + >>> normalize_header('User_Agent') + 'User-agent' + """ + return header.replace('_', '-').capitalize() + +def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0): + """ + THIS COMES FROM DJANGO + A version of shutil.copyfileobj that will not read more than 'size' bytes. + This makes it safe from clients sending more than CONTENT_LENGTH bytes of + data in the body. + """ + if not size: + return + while size > 0: + buf = fsrc.read(min(length, size)) + if not buf: + break + fdst.write(buf) + size -= len(buf) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/handler.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/handler.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,134 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""WSGI request handler for cubicweb""" + +__docformat__ = "restructuredtext en" + +from itertools import chain, repeat + +from six.moves import zip + +from cubicweb import AuthenticationError +from cubicweb.web import DirectResponse +from cubicweb.web.application import CubicWebPublisher +from cubicweb.wsgi.request import CubicWebWsgiRequest + +# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html +STATUS_CODE_TEXT = { + 100: 'CONTINUE', + 101: 'SWITCHING PROTOCOLS', + 200: 'OK', + 201: 'CREATED', + 202: 'ACCEPTED', + 203: 'NON-AUTHORITATIVE INFORMATION', + 204: 'NO CONTENT', + 205: 'RESET CONTENT', + 206: 'PARTIAL CONTENT', + 300: 'MULTIPLE CHOICES', + 301: 'MOVED PERMANENTLY', + 302: 'FOUND', + 303: 'SEE OTHER', + 304: 'NOT MODIFIED', + 305: 'USE PROXY', + 306: 'RESERVED', + 307: 'TEMPORARY REDIRECT', + 400: 'BAD REQUEST', + 401: 'UNAUTHORIZED', + 402: 'PAYMENT REQUIRED', + 403: 'FORBIDDEN', + 404: 'NOT FOUND', + 405: 'METHOD NOT ALLOWED', + 406: 'NOT ACCEPTABLE', + 407: 'PROXY AUTHENTICATION REQUIRED', + 408: 'REQUEST TIMEOUT', + 409: 'CONFLICT', + 410: 'GONE', + 411: 'LENGTH REQUIRED', + 412: 'PRECONDITION FAILED', + 413: 'REQUEST ENTITY TOO LARGE', + 414: 'REQUEST-URI TOO LONG', + 415: 'UNSUPPORTED MEDIA TYPE', + 416: 'REQUESTED RANGE NOT SATISFIABLE', + 417: 'EXPECTATION FAILED', + 500: 'INTERNAL SERVER ERROR', + 501: 'NOT IMPLEMENTED', + 502: 'BAD GATEWAY', + 503: 'SERVICE UNAVAILABLE', + 504: 'GATEWAY TIMEOUT', + 505: 'HTTP VERSION NOT SUPPORTED', +} + +class WSGIResponse(object): + """encapsulates the wsgi response parameters + (code, headers and body if there is one) + """ + def __init__(self, code, req, body=None): + text = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE') + self.status = '%s %s' % (code, text) + self.headers = list(chain(*[zip(repeat(k), v) + for k, v in req.headers_out.getAllRawHeaders()])) + self.headers = [(str(k), str(v)) for k, v in self.headers] + if body: + self.body = [body] + else: + self.body = [] + + def __iter__(self): + return iter(self.body) + + +class CubicWebWSGIApplication(object): + """This is the wsgi application which will be called by the + wsgi server with the WSGI ``environ`` and ``start_response`` + parameters. + """ + + def __init__(self, config): + self.appli = CubicWebPublisher(config.repository(), config) + self.config = config + self.base_url = self.config['base-url'] + self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter') + + def _render(self, req): + """this function performs the actual rendering + """ + try: + path = req.path + result = self.appli.handle_request(req, path) + except DirectResponse as ex: + return ex.response + return WSGIResponse(req.status_out, req, result) + + + def __call__(self, environ, start_response): + """WSGI protocol entry point""" + req = CubicWebWsgiRequest(environ, self.appli.vreg) + response = self._render(req) + start_response(response.status, response.headers) + return response.body + + + + # these are overridden by set_log_methods below + # only defining here to prevent pylint from complaining + info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(CubicWebWSGIApplication, getLogger('cubicweb.wsgi')) diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/request.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/request.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,160 @@ +# copyright 2003-2011 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""WSGI request adapter for cubicweb + +NOTE: each docstring tagged with ``COME FROM DJANGO`` means that +the code has been taken (or adapted) from Djanco source code : + http://www.djangoproject.com/ + +""" + +__docformat__ = "restructuredtext en" + +import tempfile + +from io import BytesIO + +from six.moves.urllib.parse import parse_qs + +from cubicweb.multipart import ( + copy_file, parse_form_data, parse_options_header) +from cubicweb.web import RequestError +from cubicweb.web.request import CubicWebRequestBase +from cubicweb.wsgi import pformat, normalize_header + + +class CubicWebWsgiRequest(CubicWebRequestBase): + """most of this code COMES FROM DJANGO + """ + + def __init__(self, environ, vreg): + # self.vreg is used in get_posted_data, which is called before the + # parent constructor. + self.vreg = vreg + + self.environ = environ + self.path = environ['PATH_INFO'] + self.method = environ['REQUEST_METHOD'].upper() + + # content_length "may be empty or absent" + try: + length = int(environ['CONTENT_LENGTH']) + except (KeyError, ValueError): + length = 0 + # wsgi.input is not seekable, so copy the request contents to a temporary file + if length < 100000: + self.content = BytesIO() + else: + self.content = tempfile.TemporaryFile() + copy_file(environ['wsgi.input'], self.content, maxread=length) + self.content.seek(0, 0) + environ['wsgi.input'] = self.content + + headers_in = dict((normalize_header(k[5:]), v) for k, v in self.environ.items() + if k.startswith('HTTP_')) + if 'CONTENT_TYPE' in environ: + headers_in['Content-Type'] = environ['CONTENT_TYPE'] + https = self.is_secure() + if self.path.startswith('/https/'): + self.path = self.path[6:] + self.environ['PATH_INFO'] = self.path + https = True + + post, files = self.get_posted_data() + + super(CubicWebWsgiRequest, self).__init__(vreg, https, post, + headers= headers_in) + self.content = environ['wsgi.input'] + if files is not None: + for key, part in files.items(): + self.form[key] = (part.filename, part.file) + + def __repr__(self): + # Since this is called as part of error handling, we need to be very + # robust against potentially malformed input. 
+ form = pformat(self.form) + meta = pformat(self.environ) + return '' % \ + (form, meta) + + ## cubicweb request interface ################################################ + + def http_method(self): + """returns 'POST', 'GET', 'HEAD', etc.""" + return self.method + + def relative_path(self, includeparams=True): + """return the normalized path of the request (ie at least relative + to the instance's root, but some other normalization may be needed + so that the returned path may be used to compare to generated urls + + :param includeparams: + boolean indicating if GET form parameters should be kept in the path + """ + path = self.environ['PATH_INFO'] + path = path[1:] # remove leading '/' + if includeparams: + qs = self.environ.get('QUERY_STRING') + if qs: + return '%s?%s' % (path, qs) + + return path + + ## wsgi request helpers ################################################### + + def is_secure(self): + return self.environ['wsgi.url_scheme'] == 'https' + + def get_posted_data(self): + # The WSGI spec says 'QUERY_STRING' may be absent. + post = parse_qs(self.environ.get('QUERY_STRING', '')) + files = None + if self.method == 'POST': + content_type = self.environ.get('CONTENT_TYPE') + if not content_type: + raise RequestError("Missing Content-Type") + content_type, options = parse_options_header(content_type) + if content_type in ( + 'multipart/form-data', + 'application/x-www-form-urlencoded', + 'application/x-url-encoded'): + forms, files = parse_form_data( + self.environ, strict=True, + mem_limit=self.vreg.config['max-post-length']) + post.update(forms.dict) + self.content.seek(0, 0) + return post, files + + def setup_params(self, params): + # This is a copy of CubicWebRequestBase.setup_params, but without + # converting unicode strings because it is partially done by + # get_posted_data + self.form = {} + if params is None: + return + encoding = self.encoding + for param, val in params.items(): + if isinstance(val, (tuple, list)): + if len(val) == 1: + val = val[0] + if param in self.no_script_form_params and val: + val = self.no_script_form_param(param, val) + if param == '_cwmsgid': + self.set_message_id(val) + else: + self.form[param] = val diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/server.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/server.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,46 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""dummy wsgi server for CubicWeb web instances""" + +__docformat__ = "restructuredtext en" + +from cubicweb.wsgi.handler import CubicWebWSGIApplication +from cubicweb import ConfigurationError +from wsgiref import simple_server + +from logging import getLogger +LOGGER = getLogger('cubicweb') + + +def run(config): + config.check_writeable_uid_directory(config.appdatahome) + + port = config['port'] or 8080 + interface = config['interface'] + + app = CubicWebWSGIApplication(config) + handler_cls = simple_server.WSGIRequestHandler + httpd = simple_server.WSGIServer((interface, port), handler_cls) + httpd.set_app(app) + repo = app.appli.repo + try: + repo.start_looping_tasks() + LOGGER.info('starting http server on %s', config['base-url']) + httpd.serve_forever() + finally: + repo.shutdown() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/test/requirements.txt Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,1 @@ +webtest diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/test/unittest_wsgi.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/test/unittest_wsgi.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,116 @@ +# encoding=utf-8 + +import webtest.app +from io import BytesIO + +from cubicweb.devtools.webtest import CubicWebTestTC + +from cubicweb.wsgi.request import CubicWebWsgiRequest +from cubicweb.multipart import MultipartError + + +class WSGIAppTC(CubicWebTestTC): + def test_content_type(self): + r = webtest.app.TestRequest.blank('/', {'CONTENT_TYPE': 'text/plain'}) + + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertEqual('text/plain', req.get_header('Content-Type')) + + def test_content_body(self): + r = webtest.app.TestRequest.blank('/', { + 'CONTENT_LENGTH': 12, + 'CONTENT_TYPE': 'text/plain', + 'wsgi.input': BytesIO(b'some content')}) + + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertEqual(b'some content', req.content.read()) + + def test_http_scheme(self): + r = webtest.app.TestRequest.blank('/', { + 'wsgi.url_scheme': 'http'}) + + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertFalse(req.https) + + def test_https_scheme(self): + r = webtest.app.TestRequest.blank('/', { + 'wsgi.url_scheme': 'https'}) + + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertTrue(req.https) + + def test_https_prefix(self): + r = webtest.app.TestRequest.blank('/https/', { + 'wsgi.url_scheme': 'http'}) + + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertTrue(req.https) + + def test_big_content(self): + content = b'x'*100001 + r = webtest.app.TestRequest.blank('/', { + 'CONTENT_LENGTH': len(content), + 'CONTENT_TYPE': 'text/plain', + 'wsgi.input': BytesIO(content)}) + + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertEqual(content, req.content.read()) + + def test_post(self): + self.webapp.post( + '/', + params={'__login': self.admlogin, '__password': self.admpassword}) + + def test_post_bad_form(self): + with self.assertRaises(MultipartError): + self.webapp.post( + '/', + params='badcontent', + headers={'Content-Type': 'multipart/form-data'}) + + def test_post_non_form(self): + self.webapp.post( + '/', + params='{}', + headers={'Content-Type': 'application/json'}) + + def test_get_multiple_variables(self): + r = webtest.app.TestRequest.blank('/?arg=1&arg=2') + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertEqual([u'1', u'2'], req.form['arg']) + + def test_post_multiple_variables(self): + r = 
webtest.app.TestRequest.blank('/', POST='arg=1&arg=2') + req = CubicWebWsgiRequest(r.environ, self.vreg) + + self.assertEqual([u'1', u'2'], req.form['arg']) + + def test_post_files(self): + content_type, params = self.webapp.encode_multipart( + (), (('filefield', 'aname', b'acontent'),)) + r = webtest.app.TestRequest.blank( + '/', POST=params, content_type=content_type) + req = CubicWebWsgiRequest(r.environ, self.vreg) + self.assertIn('filefield', req.form) + fieldvalue = req.form['filefield'] + self.assertEqual(u'aname', fieldvalue[0]) + self.assertEqual(b'acontent', fieldvalue[1].read()) + + def test_post_unicode_urlencoded(self): + params = 'arg=%C3%A9' + r = webtest.app.TestRequest.blank( + '/', POST=params, content_type='application/x-www-form-urlencoded') + req = CubicWebWsgiRequest(r.environ, self.vreg) + self.assertEqual(u"é", req.form['arg']) + + +if __name__ == '__main__': + import unittest + unittest.main() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/tnd.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/tnd.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,47 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""tornado wsgi server for CubicWeb web instances""" + +__docformat__ = "restructuredtext en" + + +from cubicweb.wsgi.handler import CubicWebWSGIApplication +from cubicweb import ConfigurationError +from tornado import wsgi, httpserver, ioloop + +from logging import getLogger +LOGGER = getLogger('cubicweb') + + +def run(config): + config.check_writeable_uid_directory(config.appdatahome) + + port = config['port'] or 8080 + interface = config['interface'] + + app = CubicWebWSGIApplication(config) + container = wsgi.WSGIContainer(app) + http_server = httpserver.HTTPServer(container) + http_server.listen(port, interface) + repo = app.appli.repo + try: + repo.start_looping_tasks() + LOGGER.info('starting http server on %s', config['base-url']) + ioloop.IOLoop.instance().start() + finally: + repo.shutdown() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/wsgi/wz.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/wsgi/wz.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,48 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""dummy wsgi server for CubicWeb web instances""" + +__docformat__ = "restructuredtext en" + +import socket + +from cubicweb.wsgi.handler import CubicWebWSGIApplication +from cubicweb import ConfigurationError +from werkzeug.serving import run_simple + +from logging import getLogger +LOGGER = getLogger('cubicweb') + + +def run(config): + config.check_writeable_uid_directory(config.appdatahome) + + port = config['port'] or 8080 + interface = config['interface'] + + app = CubicWebWSGIApplication(config) + repo = app.appli.repo + try: + repo.start_looping_tasks() + LOGGER.info('starting http server on %s', config['base-url']) + run_simple(interface, port, app, + threaded=True, + use_debugger=True, + processes=1) # more processes yield weird errors + finally: + repo.shutdown() diff -r 058bb3dc685f -r 0b59724cb3f2 cubicweb/xy.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cubicweb/xy.py Sat Jan 16 13:48:51 2016 +0100 @@ -0,0 +1,34 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""map standard cubicweb schema to xml vocabularies""" + +from yams import xy + +xy.register_prefix('rdf', 'http://www.w3.org/1999/02/22-rdf-syntax-ns#') +xy.register_prefix('dc', 'http://purl.org/dc/elements/1.1/') +xy.register_prefix('foaf', 'http://xmlns.com/foaf/0.1/') +xy.register_prefix('doap', 'http://usefulinc.com/ns/doap#') +xy.register_prefix('owl', 'http://www.w3.org/2002/07/owl#') +xy.register_prefix('dcterms', 'http://purl.org/dc/terms/') + +xy.add_equivalence('creation_date', 'dc:date') +xy.add_equivalence('created_by', 'dc:creator') +xy.add_equivalence('description', 'dc:description') +xy.add_equivalence('CWUser', 'foaf:Person') +xy.add_equivalence('CWUser login', 'foaf:Person dc:title') +xy.add_equivalence('CWUser surname', 'foaf:Person foaf:name') diff -r 058bb3dc685f -r 0b59724cb3f2 cwconfig.py --- a/cwconfig.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1346 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
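The ``yams.xy`` mapping above can be extended by cubes with additional vocabularies, using the same two calls; a minimal sketch, assuming a hypothetical ``Keyword`` entity type::

    from yams import xy

    xy.register_prefix('skos', 'http://www.w3.org/2004/02/skos/core#')
    xy.add_equivalence('Keyword', 'skos:Concept')
    xy.add_equivalence('Keyword name', 'skos:Concept skos:prefLabel')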
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -.. _ResourceMode: - -Resource mode -------------- - -Standard resource mode -``````````````````````````` - -A resource *mode* is a predefined set of settings for various resources -directories, such as cubes, instances, etc. to ease development with the -framework. There are two running modes with *CubicWeb*: - -* **system**: resources are searched / created in the system directories (eg - usually requiring root access): - - - instances are stored in :file:`/etc/cubicweb.d` - - temporary files (such as pid file) in :file:`/var/run/cubicweb` - - where `` is the detected installation prefix ('/usr/local' for - instance). - -* **user**: resources are searched / created in the user home directory: - - - instances are stored in :file:`~/etc/cubicweb.d` - - temporary files (such as pid file) in :file:`/tmp` - - - - -.. _CubicwebWithinVirtualEnv: - -Within virtual environment -``````````````````````````` - -If you are not administrator of you machine or if you need to play with some -specific version of |cubicweb| you can use `virtualenv`_ a tool to create -isolated Python environments. - -- instances are stored in :file:`/etc/cubicweb.d` -- temporary files (such as pid file) in :file:`/var/run/cubicweb` - -.. _`virtualenv`: http://pypi.python.org/pypi/virtualenv - -Custom resource location -```````````````````````````````` - -Notice that each resource path may be explicitly set using an environment -variable if the default doesn't suit your needs. Here are the default resource -directories that are affected according to mode: - -* **system**: :: - - CW_INSTANCES_DIR = /etc/cubicweb.d/ - CW_INSTANCES_DATA_DIR = /var/lib/cubicweb/instances/ - CW_RUNTIME_DIR = /var/run/cubicweb/ - -* **user**: :: - - CW_INSTANCES_DIR = ~/etc/cubicweb.d/ - CW_INSTANCES_DATA_DIR = ~/etc/cubicweb.d/ - CW_RUNTIME_DIR = /tmp - -Cubes search path is also affected, see the :ref:`Cube` section. - -Setting Cubicweb Mode -````````````````````` - -By default, the mode is set to 'system' for standard installation. The mode is -set to 'user' if `cubicweb is used from a mercurial repository`_. You can force -this by setting the :envvar:`CW_MODE` environment variable to either 'user' or -'system' so you can easily: - -* use system wide installation but user specific instances and all, without root - privileges on the system (`export CW_MODE=user`) - -* use local checkout of cubicweb on system wide instances (requires root - privileges on the system (`export CW_MODE=system`) - -If you've a doubt about the mode you're currently running, check the first line -outputed by the :command:`cubicweb-ctl list` command. - -.. _`cubicweb is used from a mercurial repository`: CubicwebDevelopmentMod_ - -.. _CubicwebDevelopmentMod: - -Development Mode -````````````````````` -If :file:`.hg` directory is found into the cubicweb package, there are specific resource rules. - -`` is the mercurial checkout of cubicweb: - -* main cubes directory is `/../cubes`. You can specify - another one with :envvar:`CW_INSTANCES_DIR` environment variable or simply - add some other directories by using :envvar:`CW_CUBES_PATH` - -* cubicweb migration files are searched in `/misc/migration` - instead of `/share/cubicweb/migration/`. - - -.. 
_ConfigurationEnv: - -Environment configuration -------------------------- - -Python -`````` - -If you installed *CubicWeb* by cloning the Mercurial shell repository or from source -distribution, then you will need to update the environment variable PYTHONPATH by -adding the path to `cubicweb`: - -Add the following lines to either :file:`.bashrc` or :file:`.bash_profile` to -configure your development environment :: - - export PYTHONPATH=/full/path/to/grshell-cubicweb - -If you installed *CubicWeb* with packages, no configuration is required and your -new cubes will be placed in `/usr/share/cubicweb/cubes` and your instances will -be placed in `/etc/cubicweb.d`. - - -CubicWeb -```````` - -Here are all environment variables that may be used to configure *CubicWeb*: - -.. envvar:: CW_MODE - - Resource mode: user or system, as explained in :ref:`ResourceMode`. - -.. envvar:: CW_CUBES_PATH - - Augments the default search path for cubes. You may specify several - directories using ':' as separator (';' under windows environment). - -.. envvar:: CW_INSTANCES_DIR - - Directory where cubicweb instances will be found. - -.. envvar:: CW_INSTANCES_DATA_DIR - - Directory where cubicweb instances data will be written (backup file...) - -.. envvar:: CW_RUNTIME_DIR - - Directory where pid files will be written -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import os -import stat -import logging -import logging.config -from smtplib import SMTP -from threading import Lock -from os.path import (exists, join, expanduser, abspath, normpath, - basename, isdir, dirname, splitext) -from warnings import warn, filterwarnings - -from six import text_type - -from logilab.common.decorators import cached, classproperty -from logilab.common.deprecation import deprecated -from logilab.common.logging_ext import set_log_methods, init_log -from logilab.common.configuration import (Configuration, Method, - ConfigurationMixIn, merge_options) - -from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, - ConfigurationError, Binary, _) -from cubicweb.toolsutils import create_dir - -CONFIGURATIONS = [] - -SMTP_LOCK = Lock() - - -def configuration_cls(name): - """return the configuration class registered with the given name""" - try: - return [c for c in CONFIGURATIONS if c.name == name][0] - except IndexError: - raise ConfigurationError('no such config %r (check it exists with "cubicweb-ctl list")' % name) - -def possible_configurations(directory): - """return a list of installed configurations in a directory - according to \*-ctl files - """ - return [name for name in ('repository', 'all-in-one') - if exists(join(directory, '%s.conf' % name))] - -def guess_configuration(directory): - """try to guess the configuration to use for a directory. If multiple - configurations are found, ConfigurationError is raised - """ - modes = possible_configurations(directory) - if len(modes) != 1: - raise ConfigurationError('unable to guess configuration from %r %s' - % (directory, modes)) - return modes[0] - -def _find_prefix(start_path=CW_SOFTWARE_ROOT): - """Runs along the parent directories of *start_path* (default to cubicweb source directory) - looking for one containing a 'share/cubicweb' directory. - The first matching directory is assumed as the prefix installation of cubicweb - - Returns the matching prefix or None. 
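A minimal sketch selecting user mode and the documented user-mode locations from Python, before ``cubicweb.cwconfig`` is imported (shell ``export`` statements work just as well)::

    import os

    os.environ['CW_MODE'] = 'user'
    os.environ['CW_INSTANCES_DIR'] = os.path.expanduser('~/etc/cubicweb.d/')
    os.environ['CW_RUNTIME_DIR'] = '/tmp'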
- """ - prefix = start_path - old_prefix = None - if not isdir(start_path): - prefix = dirname(start_path) - while (not isdir(join(prefix, 'share', 'cubicweb')) - or prefix.endswith('.egg')) and prefix != old_prefix: - old_prefix = prefix - prefix = dirname(prefix) - if isdir(join(prefix, 'share', 'cubicweb')): - return prefix - return sys.prefix - -# persistent options definition -PERSISTENT_OPTIONS = ( - ('encoding', - {'type' : 'string', - 'default': 'UTF-8', - 'help': _('user interface encoding'), - 'group': 'ui', 'sitewide': True, - }), - ('language', - {'type' : 'string', - 'default': 'en', - 'vocabulary': Method('available_languages'), - 'help': _('language of the user interface'), - 'group': 'ui', - }), - ('date-format', - {'type' : 'string', - 'default': '%Y/%m/%d', - 'help': _('how to format date in the ui (see this page for format description)'), - 'group': 'ui', - }), - ('datetime-format', - {'type' : 'string', - 'default': '%Y/%m/%d %H:%M', - 'help': _('how to format date and time in the ui (see this page for format description)'), - 'group': 'ui', - }), - ('time-format', - {'type' : 'string', - 'default': '%H:%M', - 'help': _('how to format time in the ui (see this page for format description)'), - 'group': 'ui', - }), - ('float-format', - {'type' : 'string', - 'default': '%.3f', - 'help': _('how to format float numbers in the ui'), - 'group': 'ui', - }), - ('default-text-format', - {'type' : 'choice', - 'choices': ('text/plain', 'text/rest', 'text/html', 'text/markdown'), - 'default': 'text/plain', - 'help': _('default text format for rich text fields.'), - 'group': 'ui', - }), - ('short-line-size', - {'type' : 'int', - 'default': 80, - 'help': _('maximum number of characters in short description'), - 'group': 'navigation', - }), - ) - -def register_persistent_options(options): - global PERSISTENT_OPTIONS - PERSISTENT_OPTIONS = merge_options(PERSISTENT_OPTIONS + options) - -CFGTYPE2ETYPE_MAP = { - 'string': 'String', - 'choice': 'String', - 'yn': 'Boolean', - 'int': 'Int', - 'float' : 'Float', - } - -_forced_mode = os.environ.get('CW_MODE') -assert _forced_mode in (None, 'system', 'user') - -# CWDEV tells whether directories such as i18n/, web/data/, etc. (ie containing -# some other resources than python libraries) are located with the python code -# or as a 'shared' cube -CWDEV = exists(join(CW_SOFTWARE_ROOT, 'i18n')) - -try: - _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX'] -except KeyError: - _INSTALL_PREFIX = _find_prefix() -_USR_INSTALL = _INSTALL_PREFIX == '/usr' - -class CubicWebNoAppConfiguration(ConfigurationMixIn): - """base class for cubicweb configuration without a specific instance directory - """ - # to set in concrete configuration - name = None - # log messages format (see logging module documentation for available keys) - log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s' - # the format below can be useful to debug multi thread issues: - # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s' - # nor remove appobjects based on unused interface [???] 
- cleanup_unused_appobjects = True - - quick_start = False - - if (CWDEV and _forced_mode != 'system'): - mode = 'user' - _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes') - else: - mode = _forced_mode or 'system' - _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes') - - CUBES_DIR = abspath(os.environ.get('CW_CUBES_DIR', _CUBES_DIR)) - CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep) - - options = ( - ('log-threshold', - {'type' : 'string', # XXX use a dedicated type? - 'default': 'WARNING', - 'help': 'server\'s log level', - 'group': 'main', 'level': 1, - }), - ('umask', - {'type' : 'int', - 'default': 0o077, - 'help': 'permission umask for files created by the server', - 'group': 'main', 'level': 2, - }), - # common configuration options which are potentially required as soon as - # you're using "base" application objects (ie to really server/web - # specific) - ('base-url', - {'type' : 'string', - 'default': None, - 'help': 'web server root url', - 'group': 'main', 'level': 1, - }), - ('allow-email-login', - {'type' : 'yn', - 'default': False, - 'help': 'allow users to login with their primary email if set', - 'group': 'main', 'level': 2, - }), - ('mangle-emails', - {'type' : 'yn', - 'default': False, - 'help': "don't display actual email addresses but mangle them if \ -this option is set to yes", - 'group': 'email', 'level': 3, - }), - ) - # static and class methods used to get instance independant resources ## - @staticmethod - def cubicweb_version(): - """return installed cubicweb version""" - from logilab.common.changelog import Version - from cubicweb import __pkginfo__ - version = __pkginfo__.numversion - assert len(version) == 3, version - return Version(version) - - @staticmethod - def persistent_options_configuration(): - return Configuration(options=PERSISTENT_OPTIONS) - - @classmethod - def shared_dir(cls): - """return the shared data directory (i.e. 
directory where standard - library views and data may be found) - """ - if CWDEV: - return join(CW_SOFTWARE_ROOT, 'web') - return cls.cube_dir('shared') - - @classmethod - def i18n_lib_dir(cls): - """return instance's i18n directory""" - if CWDEV: - return join(CW_SOFTWARE_ROOT, 'i18n') - return join(cls.shared_dir(), 'i18n') - - @classmethod - def cw_languages(cls): - for fname in os.listdir(join(cls.i18n_lib_dir())): - if fname.endswith('.po'): - yield splitext(fname)[0] - - - @classmethod - def available_cubes(cls): - import re - cubes = set() - for directory in cls.cubes_search_path(): - if not exists(directory): - cls.error('unexistant directory in cubes search path: %s' - % directory) - continue - for cube in os.listdir(directory): - if cube == 'shared': - continue - if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cube): - continue # skip invalid python package name - cubedir = join(directory, cube) - if isdir(cubedir) and exists(join(cubedir, '__init__.py')): - cubes.add(cube) - return sorted(cubes) - - @classmethod - def cubes_search_path(cls): - """return the path of directories where cubes should be searched""" - path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH - if directory.strip() and exists(directory.strip())] - if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR): - path.append(cls.CUBES_DIR) - return path - - @classproperty - def extrapath(cls): - extrapath = {} - for cubesdir in cls.cubes_search_path(): - if cubesdir != cls.CUBES_DIR: - extrapath[cubesdir] = 'cubes' - return extrapath - - @classmethod - def cube_dir(cls, cube): - """return the cube directory for the given cube id, raise - `ConfigurationError` if it doesn't exist - """ - for directory in cls.cubes_search_path(): - cubedir = join(directory, cube) - if exists(cubedir): - return cubedir - raise ConfigurationError('no cube %r in %s' % ( - cube, cls.cubes_search_path())) - - @classmethod - def cube_migration_scripts_dir(cls, cube): - """cube migration scripts directory""" - return join(cls.cube_dir(cube), 'migration') - - @classmethod - def cube_pkginfo(cls, cube): - """return the information module for the given cube""" - cube = CW_MIGRATION_MAP.get(cube, cube) - try: - parent = __import__('cubes.%s.__pkginfo__' % cube) - return getattr(parent, cube).__pkginfo__ - except Exception as ex: - raise ConfigurationError( - 'unable to find packaging information for cube %s (%s: %s)' - % (cube, ex.__class__.__name__, ex)) - - @classmethod - def cube_version(cls, cube): - """return the version of the cube located in the given directory - """ - from logilab.common.changelog import Version - version = cls.cube_pkginfo(cube).numversion - assert len(version) == 3, version - return Version(version) - - @classmethod - def _cube_deps(cls, cube, key, oldkey): - """return cubicweb cubes used by the given cube""" - pkginfo = cls.cube_pkginfo(cube) - try: - # explicit __xxx_cubes__ attribute - deps = getattr(pkginfo, key) - except AttributeError: - # deduce cubes from generic __xxx__ attribute - try: - gendeps = getattr(pkginfo, key.replace('_cubes', '')) - except AttributeError: - deps = {} - else: - deps = dict( (x[len('cubicweb-'):], v) - for x, v in gendeps.items() - if x.startswith('cubicweb-')) - for depcube in deps: - try: - newname = CW_MIGRATION_MAP[depcube] - except KeyError: - pass - else: - deps[newname] = deps.pop(depcube) - return deps - - @classmethod - def cube_depends_cubicweb_version(cls, cube): - # XXX no backward compat (see _cube_deps above) - try: - pkginfo = cls.cube_pkginfo(cube) - deps = 
getattr(pkginfo, '__depends__') - return deps.get('cubicweb') - except AttributeError: - return None - - @classmethod - def cube_dependencies(cls, cube): - """return cubicweb cubes used by the given cube""" - return cls._cube_deps(cube, '__depends_cubes__', '__use__') - - @classmethod - def cube_recommends(cls, cube): - """return cubicweb cubes recommended by the given cube""" - return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__') - - @classmethod - def expand_cubes(cls, cubes, with_recommends=False): - """expand the given list of top level cubes used by adding recursivly - each cube dependencies - """ - cubes = list(cubes) - todo = cubes[:] - if with_recommends: - available = set(cls.available_cubes()) - while todo: - cube = todo.pop(0) - for depcube in cls.cube_dependencies(cube): - if depcube not in cubes: - cubes.append(depcube) - todo.append(depcube) - if with_recommends: - for depcube in cls.cube_recommends(cube): - if depcube not in cubes and depcube in available: - cubes.append(depcube) - todo.append(depcube) - return cubes - - @classmethod - def reorder_cubes(cls, cubes): - """reorder cubes from the top level cubes to inner dependencies - cubes - """ - from logilab.common.graph import ordered_nodes, UnorderableGraph - graph = {} - for cube in cubes: - cube = CW_MIGRATION_MAP.get(cube, cube) - graph[cube] = set(dep for dep in cls.cube_dependencies(cube) - if dep in cubes) - graph[cube] |= set(dep for dep in cls.cube_recommends(cube) - if dep in cubes) - try: - return ordered_nodes(graph) - except UnorderableGraph as ex: - raise ConfigurationError(ex) - - @classmethod - def cls_adjust_sys_path(cls): - """update python path if necessary""" - cubes_parent_dir = normpath(join(cls.CUBES_DIR, '..')) - if not cubes_parent_dir in sys.path: - sys.path.insert(0, cubes_parent_dir) - try: - import cubes - cubes.__path__ = cls.cubes_search_path() - except ImportError: - return # cubes dir doesn't exists - - @classmethod - def load_available_configs(cls): - from logilab.common.modutils import load_module_from_file - for conffile in ('web/webconfig.py', 'etwist/twconfig.py', - 'server/serverconfig.py',): - if exists(join(CW_SOFTWARE_ROOT, conffile)): - load_module_from_file(join(CW_SOFTWARE_ROOT, conffile)) - - @classmethod - def load_cwctl_plugins(cls): - from logilab.common.modutils import load_module_from_file - cls.cls_adjust_sys_path() - for ctlfile in ('web/webctl.py', 'etwist/twctl.py', - 'server/serverctl.py', - 'devtools/devctl.py', 'goa/goactl.py'): - if exists(join(CW_SOFTWARE_ROOT, ctlfile)): - try: - load_module_from_file(join(CW_SOFTWARE_ROOT, ctlfile)) - except ImportError as err: - cls.error('could not import the command provider %s: %s', - ctlfile, err) - cls.info('loaded cubicweb-ctl plugin %s', ctlfile) - for cube in cls.available_cubes(): - pluginfile = join(cls.cube_dir(cube), 'ccplugin.py') - initfile = join(cls.cube_dir(cube), '__init__.py') - if exists(pluginfile): - try: - __import__('cubes.%s.ccplugin' % cube) - cls.info('loaded cubicweb-ctl plugin from %s', cube) - except Exception: - cls.exception('while loading plugin %s', pluginfile) - elif exists(initfile): - try: - __import__('cubes.%s' % cube) - except Exception: - cls.exception('while loading cube %s', cube) - else: - cls.warning('no __init__ file in cube %s', cube) - - @classmethod - def init_available_cubes(cls): - """cubes may register some sources (svnfile for instance) in their - __init__ file, so they should be loaded early in the startup process - """ - for cube in cls.available_cubes(): - 
try: - __import__('cubes.%s' % cube) - except Exception as ex: - cls.warning("can't init cube %s: %s", cube, ex) - - cubicweb_appobject_path = set(['entities']) - cube_appobject_path = set(['entities']) - - def __init__(self, debugmode=False): - if debugmode: - # in python 2.7, DeprecationWarning are not shown anymore by default - filterwarnings('default', category=DeprecationWarning) - register_stored_procedures() - self._cubes = None - super(CubicWebNoAppConfiguration, self).__init__() - self.debugmode = debugmode - self.adjust_sys_path() - self.load_defaults() - # will be properly initialized later by _gettext_init - self.translations = {'en': (text_type, lambda ctx, msgid: text_type(msgid) )} - self._site_loaded = set() - # don't register ReStructured Text directives by simple import, avoid pb - # with eg sphinx. - # XXX should be done properly with a function from cw.uicfg - try: - from cubicweb.ext.rest import cw_rest_init - except ImportError: - pass - else: - cw_rest_init() - - def adjust_sys_path(self): - # overriden in CubicWebConfiguration - self.cls_adjust_sys_path() - - def init_log(self, logthreshold=None, logfile=None, syslog=False): - """init the log service""" - if logthreshold is None: - if self.debugmode: - logthreshold = 'DEBUG' - else: - logthreshold = self['log-threshold'] - if sys.platform == 'win32': - # no logrotate on win32, so use logging rotation facilities - # for now, hard code weekly rotation every sunday, and 52 weeks kept - # idea: make this configurable? - init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format, - rotation_parameters={'when': 'W6', # every sunday - 'interval': 1, - 'backupCount': 52}) - else: - init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format) - # configure simpleTal logger - logging.getLogger('simpleTAL').setLevel(logging.ERROR) - - def appobjects_path(self): - """return a list of files or directories where the registry will look - for application objects. By default return nothing in NoApp config. - """ - return [] - - def build_appobjects_path(self, templpath, evobjpath=None, tvobjpath=None): - """given a list of directories, return a list of sub files and - directories that should be loaded by the instance objects registry. - - :param evobjpath: - optional list of sub-directories (or files without the .py ext) of - the cubicweb library that should be tested and added to the output list - if they exists. If not give, default to `cubicweb_appobject_path` class - attribute. - :param tvobjpath: - optional list of sub-directories (or files without the .py ext) of - directories given in `templpath` that should be tested and added to - the output list if they exists. If not give, default to - `cube_appobject_path` class attribute. 
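Cube dependency expansion and ordering are exposed as classmethods; a minimal sketch, assuming a cube named ``blog`` is installed and reachable through the cubes search path::

    from cubicweb.cwconfig import CubicWebNoAppConfiguration as cwcfg

    cwcfg.cls_adjust_sys_path()
    cubes = cwcfg.expand_cubes(['blog'])   # pull in dependencies recursively
    cubes = cwcfg.reorder_cubes(cubes)     # top-level cubes first, dependencies after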
- """ - vregpath = self.build_appobjects_cubicweb_path(evobjpath) - vregpath += self.build_appobjects_cube_path(templpath, tvobjpath) - return vregpath - - def build_appobjects_cubicweb_path(self, evobjpath=None): - vregpath = [] - if evobjpath is None: - evobjpath = self.cubicweb_appobject_path - # NOTE: for the order, see http://www.cubicweb.org/ticket/2330799 - # it is clearly a workaround - for subdir in sorted(evobjpath, key=lambda x:x != 'entities'): - path = join(CW_SOFTWARE_ROOT, subdir) - if exists(path): - vregpath.append(path) - return vregpath - - def build_appobjects_cube_path(self, templpath, tvobjpath=None): - vregpath = [] - if tvobjpath is None: - tvobjpath = self.cube_appobject_path - for directory in templpath: - # NOTE: for the order, see http://www.cubicweb.org/ticket/2330799 - for subdir in sorted(tvobjpath, key=lambda x:x != 'entities'): - path = join(directory, subdir) - if exists(path): - vregpath.append(path) - elif exists(path + '.py'): - vregpath.append(path + '.py') - return vregpath - - apphome = None - - def load_site_cubicweb(self, paths=None): - """load instance's specific site_cubicweb file""" - if paths is None: - paths = self.cubes_path() - if self.apphome is not None: - paths = [self.apphome] + paths - for path in reversed(paths): - sitefile = join(path, 'site_cubicweb.py') - if exists(sitefile) and not sitefile in self._site_loaded: - self._load_site_cubicweb(sitefile) - self._site_loaded.add(sitefile) - - def _load_site_cubicweb(self, sitefile): - # XXX extrapath argument to load_module_from_file only in lgc > 0.50.2 - from logilab.common.modutils import load_module_from_modpath, modpath_from_file - module = load_module_from_modpath(modpath_from_file(sitefile, self.extrapath)) - self.debug('%s loaded', sitefile) - return module - - def cwproperty_definitions(self): - cfg = self.persistent_options_configuration() - for section, options in cfg.options_by_section(): - section = section.lower() - for optname, optdict, value in options: - key = '%s.%s' % (section, optname) - type, vocab = self.map_option(optdict) - default = cfg.option_default(optname, optdict) - pdef = {'type': type, 'vocabulary': vocab, 'default': default, - 'help': optdict['help'], - 'sitewide': optdict.get('sitewide', False)} - yield key, pdef - - def map_option(self, optdict): - try: - vocab = optdict['choices'] - except KeyError: - vocab = optdict.get('vocabulary') - if isinstance(vocab, Method): - vocab = getattr(self, vocab.method, ()) - return CFGTYPE2ETYPE_MAP[optdict['type']], vocab - - def default_instance_id(self): - """return the instance identifier, useful for option which need this - as default value - """ - return None - - _cubes = None - - def init_cubes(self, cubes): - self._cubes = self.reorder_cubes(cubes) - # load cubes'__init__.py file first - for cube in cubes: - __import__('cubes.%s' % cube) - self.load_site_cubicweb() - - def cubes(self): - """return the list of cubes used by this instance - - result is ordered from the top level cubes to inner dependencies - cubes - """ - assert self._cubes is not None, 'cubes not initialized' - return self._cubes - - def cubes_path(self): - """return the list of path to cubes used by this instance, from outer - most to inner most cubes - """ - return [self.cube_dir(p) for p in self.cubes()] - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - @classmethod - def debug(cls, msg, *a, **kw): - pass - info = warning = error = critical = exception = debug - - -class 
CubicWebConfiguration(CubicWebNoAppConfiguration): - """base class for cubicweb server and web configurations""" - - if CubicWebNoAppConfiguration.mode == 'user': - _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/') - #mode == system' - elif _USR_INSTALL: - _INSTANCES_DIR = '/etc/cubicweb.d/' - else: - _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d') - - # set to true during repair (shell, migration) to allow some things which - # wouldn't be possible otherwise - repairing = False - - # set by upgrade command - verbosity = 0 - cmdline_options = None - options = CubicWebNoAppConfiguration.options + ( - ('log-file', - {'type' : 'string', - 'default': Method('default_log_file'), - 'help': 'file where output logs should be written', - 'group': 'main', 'level': 2, - }), - ('statsd-endpoint', - {'type' : 'string', - 'default': '', - 'help': 'UDP address of the statsd endpoint; it must be formatted' - 'like :; disabled is unset.', - 'group': 'main', 'level': 2, - }), - # email configuration - ('smtp-host', - {'type' : 'string', - 'default': 'mail', - 'help': 'hostname of the SMTP mail server', - 'group': 'email', 'level': 1, - }), - ('smtp-port', - {'type' : 'int', - 'default': 25, - 'help': 'listening port of the SMTP mail server', - 'group': 'email', 'level': 1, - }), - ('sender-name', - {'type' : 'string', - 'default': Method('default_instance_id'), - 'help': 'name used as HELO name for outgoing emails from the \ -repository.', - 'group': 'email', 'level': 2, - }), - ('sender-addr', - {'type' : 'string', - 'default': 'cubicweb@mydomain.com', - 'help': 'email address used as HELO address for outgoing emails from \ -the repository', - 'group': 'email', 'level': 1, - }), - ('logstat-interval', - {'type' : 'int', - 'default': 0, - 'help': 'interval (in seconds) at which stats are dumped in the logstat file; set 0 to disable', - 'group': 'main', 'level': 2, - }), - ('logstat-file', - {'type' : 'string', - 'default': Method('default_stats_file'), - 'help': 'file where stats for the instance should be written', - 'group': 'main', 'level': 2, - }), - ) - - @classmethod - def instances_dir(cls): - """return the control directory""" - return abspath(os.environ.get('CW_INSTANCES_DIR', cls._INSTANCES_DIR)) - - @classmethod - def migration_scripts_dir(cls): - """cubicweb migration scripts directory""" - if CWDEV: - return join(CW_SOFTWARE_ROOT, 'misc', 'migration') - mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration') - if not exists(mdir): - raise ConfigurationError('migration path %s doesn\'t exist' % mdir) - return mdir - - @classmethod - def config_for(cls, appid, config=None, debugmode=False, creating=False): - """return a configuration instance for the given instance identifier - """ - cls.load_available_configs() - config = config or guess_configuration(cls.instance_home(appid)) - configcls = configuration_cls(config) - return configcls(appid, debugmode, creating) - - @classmethod - def possible_configurations(cls, appid): - """return the name of possible configurations for the given - instance id - """ - home = cls.instance_home(appid) - return possible_configurations(home) - - @classmethod - def instance_home(cls, appid): - """return the home directory of the instance with the given - instance id - """ - home = join(cls.instances_dir(), appid) - if not exists(home): - raise ConfigurationError('no such instance %s (check it exists with' - ' "cubicweb-ctl list")' % appid) - return home - - MODES = ('common', 'repository', 'Any') - MCOMPAT = {'all-in-one': MODES, - 'repository': 
('common', 'repository', 'Any')} - @classmethod - def accept_mode(cls, mode): - #assert mode in cls.MODES, mode - return mode in cls.MCOMPAT[cls.name] - - # default configuration methods ########################################### - - def default_instance_id(self): - """return the instance identifier, useful for option which need this - as default value - """ - return self.appid - - def default_log_file(self): - """return default path to the log file of the instance'server""" - if self.mode == 'user': - import tempfile - basepath = join(tempfile.gettempdir(), '%s-%s' % ( - basename(self.appid), self.name)) - path = basepath + '.log' - i = 1 - while exists(path) and i < 100: # arbitrary limit to avoid infinite loop - try: - open(path, 'a') - break - except IOError: - path = '%s-%s.log' % (basepath, i) - i += 1 - return path - if _USR_INSTALL: - return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name) - else: - log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log') - return log_path % (self.appid, self.name) - - def default_stats_file(self): - """return default path to the stats file of the instance'server""" - logfile = self.default_log_file() - if logfile.endswith('.log'): - logfile = logfile[:-4] - return logfile + '.stats' - - def default_pid_file(self): - """return default path to the pid file of the instance'server""" - if self.mode == 'system': - if _USR_INSTALL: - default = '/var/run/cubicweb/' - else: - default = os.path.join(_INSTALL_PREFIX, 'var', 'run', 'cubicweb') - else: - import tempfile - default = tempfile.gettempdir() - # runtime directory created on startup if necessary, don't check it - # exists - rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default)) - return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) - - # config -> repository - - def repository(self, vreg=None): - from cubicweb.server.repository import Repository - from cubicweb.server.utils import TasksManager - return Repository(self, TasksManager(), vreg=vreg) - - # instance methods used to get instance specific resources ############# - - def __init__(self, appid, debugmode=False, creating=False): - self.appid = appid - # set to true while creating an instance - self.creating = creating - super(CubicWebConfiguration, self).__init__(debugmode) - fake_gettext = (text_type, lambda ctx, msgid: text_type(msgid)) - for lang in self.available_languages(): - self.translations[lang] = fake_gettext - self._cubes = None - self.load_file_configuration(self.main_config_file()) - - def adjust_sys_path(self): - super(CubicWebConfiguration, self).adjust_sys_path() - # adding apphome to python path is not usually necessary in production - # environments, but necessary for tests - if self.apphome and self.apphome not in sys.path: - sys.path.insert(0, self.apphome) - - @property - def apphome(self): - return join(self.instances_dir(), self.appid) - - @property - def appdatahome(self): - if self.mode == 'system': - if _USR_INSTALL: - iddir = os.path.join('/var','lib', 'cubicweb', 'instances') - else: - iddir = os.path.join(_INSTALL_PREFIX, 'var', 'lib', 'cubicweb', 'instances') - else: - iddir = self.instances_dir() - iddir = abspath(os.environ.get('CW_INSTANCES_DATA_DIR', iddir)) - return join(iddir, self.appid) - - def init_cubes(self, cubes): - super(CubicWebConfiguration, self).init_cubes(cubes) - # reload config file in cases options are defined in cubes __init__ - # or site_cubicweb files - self.load_file_configuration(self.main_config_file()) - # configuration initialization hook - 
self.load_configuration(**(self.cmdline_options or {})) - - def add_cubes(self, cubes): - """add given cubes to the list of used cubes""" - if not isinstance(cubes, list): - cubes = list(cubes) - self._cubes = self.reorder_cubes(list(self._cubes) + cubes) - self.load_site_cubicweb([self.cube_dir(cube) for cube in cubes]) - - def main_config_file(self): - """return instance's control configuration file""" - return join(self.apphome, '%s.conf' % self.name) - - def save(self): - """write down current configuration""" - with open(self.main_config_file(), 'w') as fobj: - self.generate_config(fobj) - - def check_writeable_uid_directory(self, path): - """check given directory path exists, belongs to the user running the - server process and is writeable. - - If not, try to fix this, letting exception propagate when not possible. - """ - if not exists(path): - self.info('creating %s directory', path) - try: - os.makedirs(path) - except OSError as ex: - self.warning('error while creating %s directory: %s', path, ex) - return - if self['uid']: - try: - uid = int(self['uid']) - except ValueError: - from pwd import getpwnam - uid = getpwnam(self['uid']).pw_uid - else: - try: - uid = os.getuid() - except AttributeError: # we are on windows - return - fstat = os.stat(path) - if fstat.st_uid != uid: - self.info('giving ownership of %s directory to %s', path, self['uid']) - try: - os.chown(path, uid, os.getgid()) - except OSError as ex: - self.warning('error while giving ownership of %s directory to %s: %s', - path, self['uid'], ex) - if not (fstat.st_mode & stat.S_IWUSR): - self.info('forcing write permission on directory %s', path) - try: - os.chmod(path, fstat.st_mode | stat.S_IWUSR) - except OSError as ex: - self.warning('error while forcing write permission on directory %s: %s', - path, ex) - return - - @cached - def instance_md5_version(self): - from hashlib import md5 # pylint: disable=E0611 - infos = [] - for pkg in sorted(self.cubes()): - version = self.cube_version(pkg) - infos.append('%s-%s' % (pkg, version)) - infos.append('cubicweb-%s' % str(self.cubicweb_version())) - return md5((';'.join(infos)).encode('ascii')).hexdigest() - - def load_configuration(self, **kw): - """load instance's configuration files""" - super(CubicWebConfiguration, self).load_configuration(**kw) - if self.apphome and not self.creating: - # init gettext - self._gettext_init() - - def _load_site_cubicweb(self, sitefile): - # overridden to register cube specific options - mod = super(CubicWebConfiguration, self)._load_site_cubicweb(sitefile) - if getattr(mod, 'options', None): - self.register_options(mod.options) - self.load_defaults() - - def init_log(self, logthreshold=None, force=False): - """init the log service""" - if not force and hasattr(self, '_logging_initialized'): - return - self._logging_initialized = True - super_self = super(CubicWebConfiguration, self) - super_self.init_log(logthreshold, logfile=self.get('log-file')) - # read a config file if it exists - logconfig = join(self.apphome, 'logging.conf') - if exists(logconfig): - logging.config.fileConfig(logconfig) - # set the statsd address, if any - if self.get('statsd-endpoint'): - try: - address, port = self.get('statsd-endpoint').split(':') - port = int(port) - except: - self.error('statsd-endpoint: invalid address format ({}); ' - 'it should be "ip:port"'.format(self.get('statsd-endpoint'))) - else: - import statsd_logger - statsd_logger.setup('cubicweb.%s' % self.appid, (address, port)) - - def available_languages(self, *args): - """return available 
translation for an instance, by looking for - compiled catalog - - take \*args to be usable as a vocabulary method - """ - from glob import glob - yield 'en' # ensure 'en' is yielded even if no .mo found - for path in glob(join(self.apphome, 'i18n', - '*', 'LC_MESSAGES')): - lang = path.split(os.sep)[-2] - if lang != 'en': - yield lang - - def _gettext_init(self): - """set language for gettext""" - from cubicweb.cwgettext import translation - path = join(self.apphome, 'i18n') - for language in self.available_languages(): - self.info("loading language %s", language) - try: - tr = translation('cubicweb', path, languages=[language]) - self.translations[language] = (tr.ugettext, tr.upgettext) - except (ImportError, AttributeError, IOError): - if self.mode != 'test': - # in test contexts, data/i18n does not exist, hence - # logging will only pollute the logs - self.exception('localisation support error for language %s', - language) - - def appobjects_path(self): - """return a list of files or directories where the registry will look - for application objects - """ - templpath = list(reversed(self.cubes_path())) - if self.apphome: # may be unset in tests - templpath.append(self.apphome) - return self.build_appobjects_path(templpath) - - def set_sources_mode(self, sources): - if not 'all' in sources: - print('warning: ignoring specified sources, requires a repository ' - 'configuration') - - def i18ncompile(self, langs=None): - from cubicweb import i18n - if langs is None: - langs = self.available_languages() - i18ndir = join(self.apphome, 'i18n') - if not exists(i18ndir): - create_dir(i18ndir) - sourcedirs = [join(path, 'i18n') for path in self.cubes_path()] - sourcedirs.append(self.i18n_lib_dir()) - return i18n.compile_i18n_catalogs(sourcedirs, i18ndir, langs) - - def sendmails(self, msgs, fromaddr=None): - """msgs: list of 2-uple (message object, recipients). Return False - if connection to the smtp server failed, else True. - """ - server, port = self['smtp-host'], self['smtp-port'] - if fromaddr is None: - fromaddr = '%s <%s>' % (self['sender-name'], self['sender-addr']) - SMTP_LOCK.acquire() - try: - try: - smtp = SMTP(server, port) - except Exception as ex: - self.exception("can't connect to smtp server %s:%s (%s)", - server, port, ex) - return False - for msg, recipients in msgs: - try: - smtp.sendmail(fromaddr, recipients, msg.as_string()) - except Exception as ex: - self.exception("error sending mail to %s (%s)", - recipients, ex) - smtp.close() - finally: - SMTP_LOCK.release() - return True - -set_log_methods(CubicWebNoAppConfiguration, - logging.getLogger('cubicweb.configuration')) - -# alias to get a configuration instance from an instance id -instance_configuration = CubicWebConfiguration.config_for -application_configuration = deprecated('use instance_configuration')(instance_configuration) - - -_EXT_REGISTERED = False -def register_stored_procedures(): - from logilab.database import FunctionDescr - from rql.utils import register_function, iter_funcnode_variables - from rql.nodes import SortTerm, Constant, VariableRef - - global _EXT_REGISTERED - if _EXT_REGISTERED: - return - _EXT_REGISTERED = True - - class COMMA_JOIN(FunctionDescr): - supported_backends = ('postgres', 'sqlite',) - rtype = 'String' - - def st_description(self, funcnode, mainindex, tr): - return ', '.join(sorted(term.get_description(mainindex, tr) - for term in iter_funcnode_variables(funcnode))) - - register_function(COMMA_JOIN) # XXX do not expose? 
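Cubes may follow the same registration pattern for their own backend functions; a purely illustrative sketch (the name is hypothetical and must match a function the SQL backend actually provides)::

    from logilab.database import FunctionDescr
    from rql.utils import register_function

    class MYFUNC(FunctionDescr):
        supported_backends = ('postgres', 'sqlite',)
        rtype = 'String'

    register_function(MYFUNC)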
- - - class CONCAT_STRINGS(COMMA_JOIN): - aggregat = True - - register_function(CONCAT_STRINGS) # XXX bw compat - - - class GROUP_CONCAT(CONCAT_STRINGS): - supported_backends = ('mysql', 'postgres', 'sqlite',) - - register_function(GROUP_CONCAT) - - - class LIMIT_SIZE(FunctionDescr): - supported_backends = ('postgres', 'sqlite',) - minargs = maxargs = 3 - rtype = 'String' - - def st_description(self, funcnode, mainindex, tr): - return funcnode.children[0].get_description(mainindex, tr) - - register_function(LIMIT_SIZE) - - - class TEXT_LIMIT_SIZE(LIMIT_SIZE): - supported_backends = ('mysql', 'postgres', 'sqlite',) - minargs = maxargs = 2 - - register_function(TEXT_LIMIT_SIZE) - - - class FTIRANK(FunctionDescr): - """return ranking of a variable that must be used as some has_text - relation subject in the query's restriction. Usually used to sort result - of full-text search by ranking. - """ - supported_backends = ('postgres',) - rtype = 'Float' - - def st_check_backend(self, backend, funcnode): - """overriden so that on backend not supporting fti ranking, the - function is removed when in an orderby clause, or replaced by a 1.0 - constant. - """ - if not self.supports(backend): - parent = funcnode.parent - while parent is not None and not isinstance(parent, SortTerm): - parent = parent.parent - if isinstance(parent, SortTerm): - parent.parent.remove(parent) - else: - funcnode.parent.replace(funcnode, Constant(1.0, 'Float')) - parent = funcnode - for vref in parent.iget_nodes(VariableRef): - vref.unregister_reference() - - register_function(FTIRANK) - - - class FSPATH(FunctionDescr): - """return path of some bytes attribute stored using the Bytes - File-System Storage (bfss) - """ - rtype = 'Bytes' # XXX return a String? potential pb with fs encoding - - def update_cb_stack(self, stack): - assert len(stack) == 1 - stack[0] = self.source_execute - - def as_sql(self, backend, args): - raise NotImplementedError( - 'This callback is only available for BytesFileSystemStorage ' - 'managed attribute. Is FSPATH() argument BFSS managed?') - - def source_execute(self, source, session, value): - fpath = source.binary_to_str(value) - try: - return Binary(fpath) - except OSError as ex: - source.critical("can't open %s: %s", fpath, ex) - return None - - register_function(FSPATH) diff -r 058bb3dc685f -r 0b59724cb3f2 cwctl.py --- a/cwctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1154 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""the cubicweb-ctl tool, based on logilab.common.clcommands to -provide a pluggable commands system. 
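A typical use of the ``FTIRANK`` function defined above is ordering full-text search results by relevance (effective on the postgres backend only, per its ``supported_backends``); a minimal sketch, assuming ``cnx`` is an open client connection::

    rset = cnx.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(q)s',
                       {'q': u'cubicweb'})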
-""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -# *ctl module should limit the number of import to be imported as quickly as -# possible (for cubicweb-ctl reactivity, necessary for instance for usable bash -# completion). So import locally in command helpers. -import sys -from warnings import warn, filterwarnings -from os import remove, listdir, system, pathsep -from os.path import exists, join, isfile, isdir, dirname, abspath - -try: - from os import kill, getpgid -except ImportError: - def kill(*args): - """win32 kill implementation""" - def getpgid(): - """win32 getpgid implementation""" - -from six.moves.urllib.parse import urlparse - -from logilab.common.clcommands import CommandLine -from logilab.common.shellutils import ASK -from logilab.common.configuration import merge_options - -from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage -from cubicweb.utils import support_args -from cubicweb.cwconfig import CubicWebConfiguration as cwcfg, CWDEV, CONFIGURATIONS -from cubicweb.toolsutils import Command, rm, create_dir, underline_title -from cubicweb.__pkginfo__ import version - -# don't check duplicated commands, it occurs when reloading site_cubicweb -CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.', - version=version, check_duplicated_command=False) - -def wait_process_end(pid, maxtry=10, waittime=1): - """wait for a process to actually die""" - import signal - from time import sleep - nbtry = 0 - while nbtry < maxtry: - try: - kill(pid, signal.SIGUSR1) - except (OSError, AttributeError): # XXX win32 - break - nbtry += 1 - sleep(waittime) - else: - raise ExecutionError('can\'t kill process %s' % pid) - -def list_instances(regdir): - if isdir(regdir): - return sorted(idir for idir in listdir(regdir) if isdir(join(regdir, idir))) - else: - return [] - -def detect_available_modes(templdir): - modes = [] - for fname in ('schema', 'schema.py'): - if exists(join(templdir, fname)): - modes.append('repository') - break - for fname in ('data', 'views', 'views.py'): - if exists(join(templdir, fname)): - modes.append('web ui') - break - return modes - - -class InstanceCommand(Command): - """base class for command taking 0 to n instance id as arguments - (0 meaning all registered instances) - """ - arguments = '[...]' - options = ( - ("force", - {'short': 'f', 'action' : 'store_true', - 'default': False, - 'help': 'force command without asking confirmation', - } - ), - ) - actionverb = None - - def ordered_instances(self): - """return instances in the order in which they should be started, - considering $REGISTRY_DIR/startorder file if it exists (useful when - some instances depends on another as external source). 
- - Instance used by another one should appears first in the file (one - instance per line) - """ - regdir = cwcfg.instances_dir() - _allinstances = list_instances(regdir) - if isfile(join(regdir, 'startorder')): - allinstances = [] - for line in open(join(regdir, 'startorder')): - line = line.strip() - if line and not line.startswith('#'): - try: - _allinstances.remove(line) - allinstances.append(line) - except ValueError: - print('ERROR: startorder file contains unexistant ' - 'instance %s' % line) - allinstances += _allinstances - else: - allinstances = _allinstances - return allinstances - - def run(self, args): - """run the _method on each argument (a list of instance - identifiers) - """ - if not args: - args = self.ordered_instances() - try: - askconfirm = not self.config.force - except AttributeError: - # no force option - askconfirm = False - else: - askconfirm = False - self.run_args(args, askconfirm) - - def run_args(self, args, askconfirm): - status = 0 - for appid in args: - if askconfirm: - print('*'*72) - if not ASK.confirm('%s instance %r ?' % (self.name, appid)): - continue - try: - status = max(status, self.run_arg(appid)) - except (KeyboardInterrupt, SystemExit): - sys.stderr.write('%s aborted\n' % self.name) - return 2 # specific error code - sys.exit(status) - - def run_arg(self, appid): - cmdmeth = getattr(self, '%s_instance' % self.name) - try: - status = cmdmeth(appid) - except (ExecutionError, ConfigurationError) as ex: - sys.stderr.write('instance %s not %s: %s\n' % ( - appid, self.actionverb, ex)) - status = 4 - except Exception as ex: - import traceback - traceback.print_exc() - sys.stderr.write('instance %s not %s: %s\n' % ( - appid, self.actionverb, ex)) - status = 8 - return status - -class InstanceCommandFork(InstanceCommand): - """Same as `InstanceCommand`, but command is forked in a new environment - for each argument - """ - - def run_args(self, args, askconfirm): - if len(args) > 1: - forkcmd = ' '.join(w for w in sys.argv if not w in args) - else: - forkcmd = None - for appid in args: - if askconfirm: - print('*'*72) - if not ASK.confirm('%s instance %r ?' % (self.name, appid)): - continue - if forkcmd: - status = system('%s %s' % (forkcmd, appid)) - if status: - print('%s exited with status %s' % (forkcmd, status)) - else: - self.run_arg(appid) - - -# base commands ############################################################### - -class ListCommand(Command): - """List configurations, cubes and instances. - - List available configurations, installed cubes, and registered instances. - - If given, the optional argument allows to restrict listing only a category of items. 
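For example, ``cubicweb-ctl list cubes`` restricts the output to available cubes, while ``cubicweb-ctl list instances`` only lists registered instances.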
- """ - name = 'list' - arguments = '[all|cubes|configurations|instances]' - options = ( - ('verbose', - {'short': 'v', 'action' : 'store_true', - 'help': "display more information."}), - ) - - def run(self, args): - """run the command with its specific arguments""" - if not args: - mode = 'all' - elif len(args) == 1: - mode = args[0] - else: - raise BadCommandUsage('Too many arguments') - - from cubicweb.migration import ConfigurationProblem - - if mode == 'all': - print('CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)) - print() - - if mode in ('all', 'config', 'configurations'): - print('Available configurations:') - for config in CONFIGURATIONS: - print('*', config.name) - for line in config.__doc__.splitlines(): - line = line.strip() - if not line: - continue - print(' ', line) - print() - - if mode in ('all', 'cubes'): - cfgpb = ConfigurationProblem(cwcfg) - try: - cubesdir = pathsep.join(cwcfg.cubes_search_path()) - namesize = max(len(x) for x in cwcfg.available_cubes()) - except ConfigurationError as ex: - print('No cubes available:', ex) - except ValueError: - print('No cubes available in %s' % cubesdir) - else: - print('Available cubes (%s):' % cubesdir) - for cube in cwcfg.available_cubes(): - try: - tinfo = cwcfg.cube_pkginfo(cube) - tversion = tinfo.version - cfgpb.add_cube(cube, tversion) - except (ConfigurationError, AttributeError) as ex: - tinfo = None - tversion = '[missing cube information: %s]' % ex - print('* %s %s' % (cube.ljust(namesize), tversion)) - if self.config.verbose: - if tinfo: - descr = getattr(tinfo, 'description', '') - if not descr: - descr = tinfo.__doc__ - if descr: - print(' '+ ' \n'.join(descr.splitlines())) - modes = detect_available_modes(cwcfg.cube_dir(cube)) - print(' available modes: %s' % ', '.join(modes)) - print() - - if mode in ('all', 'instances'): - try: - regdir = cwcfg.instances_dir() - except ConfigurationError as ex: - print('No instance available:', ex) - print() - return - instances = list_instances(regdir) - if instances: - print('Available instances (%s):' % regdir) - for appid in instances: - modes = cwcfg.possible_configurations(appid) - if not modes: - print('* %s (BROKEN instance, no configuration found)' % appid) - continue - print('* %s (%s)' % (appid, ', '.join(modes))) - try: - config = cwcfg.config_for(appid, modes[0]) - except Exception as exc: - print(' (BROKEN instance, %s)' % exc) - continue - else: - print('No instance available in %s' % regdir) - print() - - if mode == 'all': - # configuration management problem solving - cfgpb.solve() - if cfgpb.warnings: - print('Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)) - if cfgpb.errors: - print('Errors:') - for op, cube, version, src in cfgpb.errors: - if op == 'add': - print('* cube', cube, end=' ') - if version: - print(' version', version, end=' ') - print('is not installed, but required by %s' % src) - else: - print('* cube %s version %s is installed, but version %s is required by %s' % ( - cube, cfgpb.cubes[cube], version, src)) - -def check_options_consistency(config): - if config.automatic and config.config_level > 0: - raise BadCommandUsage('--automatic and --config-level should not be ' - 'used together') - -class CreateInstanceCommand(Command): - """Create an instance from a cube. This is a unified - command which can handle web / server / all-in-one installation - according to available parts of the software library and of the - desired cube. - - - the name of cube to use (list available cube names using - the "list" command). 
You can use several cubes by separating - them using comma (e.g. 'jpl,email') - - an identifier for the instance to create - """ - name = 'create' - arguments = ' ' - min_args = max_args = 2 - options = ( - ('automatic', - {'short': 'a', 'action' : 'store_true', - 'default': False, - 'help': 'automatic mode: never ask and use default answer to every ' - 'question. this may require that your login match a database super ' - 'user (allowed to create database & all).', - }), - ('config-level', - {'short': 'l', 'type' : 'int', 'metavar': '', - 'default': 0, - 'help': 'configuration level (0..2): 0 will ask for essential ' - 'configuration parameters only while 2 will ask for all parameters', - }), - ('config', - {'short': 'c', 'type' : 'choice', 'metavar': '', - 'choices': ('all-in-one', 'repository'), - 'default': 'all-in-one', - 'help': 'installation type, telling which part of an instance ' - 'should be installed. You can list available configurations using the' - ' "list" command. Default to "all-in-one", e.g. an installation ' - 'embedding both the RQL repository and the web server.', - }), - ('no-db-create', - {'short': 'S', - 'action': 'store_true', - 'default': False, - 'help': 'stop after creation and do not continue with db-create', - }), - ) - - def run(self, args): - """run the command with its specific arguments""" - from logilab.common.textutils import splitstrip - check_options_consistency(self.config) - configname = self.config.config - cubes, appid = args - cubes = splitstrip(cubes) - # get the configuration and helper - config = cwcfg.config_for(appid, configname, creating=True) - cubes = config.expand_cubes(cubes) - config.init_cubes(cubes) - helper = self.config_helper(config) - # check the cube exists - try: - templdirs = [cwcfg.cube_dir(cube) - for cube in cubes] - except ConfigurationError as ex: - print(ex) - print('\navailable cubes:', end=' ') - print(', '.join(cwcfg.available_cubes())) - return - # create the registry directory for this instance - print('\n'+underline_title('Creating the instance %s' % appid)) - create_dir(config.apphome) - # cubicweb-ctl configuration - if not self.config.automatic: - print('\n'+underline_title('Configuring the instance (%s.conf)' - % configname)) - config.input_config('main', self.config.config_level) - # configuration'specific stuff - print() - helper.bootstrap(cubes, self.config.automatic, self.config.config_level) - # input for cubes specific options - if not self.config.automatic: - sections = set(sect.lower() for sect, opt, odict in config.all_options() - if 'type' in odict - and odict.get('level') <= self.config.config_level) - for section in sections: - if section not in ('main', 'email', 'web'): - print('\n' + underline_title('%s options' % section)) - config.input_config(section, self.config.config_level) - # write down configuration - config.save() - self._handle_win32(config, appid) - print('-> generated config %s' % config.main_config_file()) - # handle i18n files structure - # in the first cube given - from cubicweb import i18n - langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))] - errors = config.i18ncompile(langs) - if errors: - print('\n'.join(errors)) - if self.config.automatic \ - or not ASK.confirm('error while compiling message catalogs, ' - 'continue anyway ?'): - print('creation not completed') - return - # create the additional data directory for this instance - if config.appdatahome != config.apphome: # true in dev mode - create_dir(config.appdatahome) - 
create_dir(join(config.appdatahome, 'backup')) - if config['uid']: - from logilab.common.shellutils import chown - # this directory should be owned by the uid of the server process - print('set %s as owner of the data directory' % config['uid']) - chown(config.appdatahome, config['uid']) - print('\n-> creation done for %s\n' % repr(config.apphome)[1:-1]) - if not self.config.no_db_create: - helper.postcreate(self.config.automatic, self.config.config_level) - - def _handle_win32(self, config, appid): - if sys.platform != 'win32': - return - service_template = """ -import sys -import win32serviceutil -sys.path.insert(0, r"%(CWPATH)s") - -from cubicweb.etwist.service import CWService - -classdict = {'_svc_name_': 'cubicweb-%(APPID)s', - '_svc_display_name_': 'CubicWeb ' + '%(CNAME)s', - 'instance': '%(APPID)s'} -%(CNAME)sService = type('%(CNAME)sService', (CWService,), classdict) - -if __name__ == '__main__': - win32serviceutil.HandleCommandLine(%(CNAME)sService) -""" - open(join(config.apphome, 'win32svc.py'), 'wb').write( - service_template % {'APPID': appid, - 'CNAME': appid.capitalize(), - 'CWPATH': abspath(join(dirname(__file__), '..'))}) - - -class DeleteInstanceCommand(Command): - """Delete an instance. Will remove instance's files and - unregister it. - """ - name = 'delete' - arguments = '' - min_args = max_args = 1 - options = () - - def run(self, args): - """run the command with its specific arguments""" - appid = args[0] - configs = [cwcfg.config_for(appid, configname) - for configname in cwcfg.possible_configurations(appid)] - if not configs: - raise ExecutionError('unable to guess configuration for %s' % appid) - for config in configs: - helper = self.config_helper(config, required=False) - if helper: - helper.cleanup() - # remove home - rm(config.apphome) - # remove instance data directory - try: - rm(config.appdatahome) - except OSError as ex: - import errno - if ex.errno != errno.ENOENT: - raise - confignames = ', '.join([config.name for config in configs]) - print('-> instance %s (%s) deleted.' % (appid, confignames)) - - -# instance commands ######################################################## - -class StartInstanceCommand(InstanceCommandFork): - """Start the given instances. If no instance is given, start them all. - - ... - identifiers of the instances to start. If no instance is - given, start them all. 
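The _handle_win32() helper above writes a per-instance win32svc.py by %-interpolating a template with an APPID/CNAME/CWPATH mapping. A trimmed sketch of that interpolation; the template body and the 'myblog' identifier are illustrative only, not the exact script generated by the command:

    service_template = """\
    classdict = {'_svc_name_': 'cubicweb-%(APPID)s',
                 '_svc_display_name_': 'CubicWeb ' + '%(CNAME)s',
                 'instance': '%(APPID)s'}
    """
    appid = 'myblog'  # hypothetical instance identifier
    # mapping-based %-interpolation fills every %(KEY)s placeholder
    print(service_template % {'APPID': appid, 'CNAME': appid.capitalize()})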
- """ - name = 'start' - actionverb = 'started' - options = ( - ("debug", - {'short': 'D', 'action' : 'store_true', - 'help': 'start server in debug mode.'}), - ("force", - {'short': 'f', 'action' : 'store_true', - 'default': False, - 'help': 'start the instance even if it seems to be already \ -running.'}), - ('profile', - {'short': 'P', 'type' : 'string', 'metavar': '', - 'default': None, - 'help': 'profile code and use the specified file to store stats', - }), - ('loglevel', - {'short': 'l', 'type' : 'choice', 'metavar': '', - 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), - 'help': 'debug if -D is set, error otherwise', - }), - ('param', - {'short': 'p', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2', - 'default': {}, - 'help': 'override configuration file option with .', - }), - ) - - def start_instance(self, appid): - """start the instance's server""" - try: - import twisted # noqa - except ImportError: - msg = ( - "Twisted is required by the 'start' command\n" - "Either install it, or use one of the alternative commands:\n" - "- '{ctl} wsgi {appid}'\n" - "- '{ctl} pyramid {appid}' (requires the pyramid cube)\n") - raise ExecutionError(msg.format(ctl='cubicweb-ctl', appid=appid)) - config = cwcfg.config_for(appid, debugmode=self['debug']) - # override config file values with cmdline options - config.cmdline_options = self.config.param - init_cmdline_log_threshold(config, self['loglevel']) - if self['profile']: - config.global_set_option('profile', self.config.profile) - helper = self.config_helper(config, cmdname='start') - pidf = config['pid-file'] - if exists(pidf) and not self['force']: - msg = "%s seems to be running. Remove %s by hand if necessary or use \ -the --force option." - raise ExecutionError(msg % (appid, pidf)) - if helper.start_server(config) == 1: - print('instance %s started' % appid) - - -def init_cmdline_log_threshold(config, loglevel): - if loglevel is not None: - config.global_set_option('log-threshold', loglevel.upper()) - config.init_log(config['log-threshold'], force=True) - - -class StopInstanceCommand(InstanceCommand): - """Stop the given instances. - - ... - identifiers of the instances to stop. If no instance is - given, stop them all. - """ - name = 'stop' - actionverb = 'stopped' - - def ordered_instances(self): - instances = super(StopInstanceCommand, self).ordered_instances() - instances.reverse() - return instances - - def stop_instance(self, appid): - """stop the instance's server""" - config = cwcfg.config_for(appid) - helper = self.config_helper(config, cmdname='stop') - helper.poststop() # do this anyway - pidf = config['pid-file'] - if not exists(pidf): - sys.stderr.write("%s doesn't exist.\n" % pidf) - return - import signal - pid = int(open(pidf).read().strip()) - try: - kill(pid, signal.SIGTERM) - except Exception: - sys.stderr.write("process %s seems already dead.\n" % pid) - else: - try: - wait_process_end(pid) - except ExecutionError as ex: - sys.stderr.write('%s\ntrying SIGKILL\n' % ex) - try: - kill(pid, signal.SIGKILL) - except Exception: - # probably dead now - pass - wait_process_end(pid) - try: - remove(pidf) - except OSError: - # already removed by twistd - pass - print('instance %s stopped' % appid) - - -class RestartInstanceCommand(StartInstanceCommand): - """Restart the given instances. - - ... - identifiers of the instances to restart. If no instance is - given, restart them all. 
- """ - name = 'restart' - actionverb = 'restarted' - - def run_args(self, args, askconfirm): - regdir = cwcfg.instances_dir() - if not isfile(join(regdir, 'startorder')) or len(args) <= 1: - # no specific startorder - super(RestartInstanceCommand, self).run_args(args, askconfirm) - return - print ('some specific start order is specified, will first stop all ' - 'instances then restart them.') - # get instances in startorder - for appid in args: - if askconfirm: - print('*'*72) - if not ASK.confirm('%s instance %r ?' % (self.name, appid)): - continue - StopInstanceCommand(self.logger).stop_instance(appid) - forkcmd = [w for w in sys.argv if not w in args] - forkcmd[1] = 'start' - forkcmd = ' '.join(forkcmd) - for appid in reversed(args): - status = system('%s %s' % (forkcmd, appid)) - if status: - sys.exit(status) - - def restart_instance(self, appid): - StopInstanceCommand(self.logger).stop_instance(appid) - self.start_instance(appid) - - -class ReloadConfigurationCommand(RestartInstanceCommand): - """Reload the given instances. This command is equivalent to a - restart for now. - - ... - identifiers of the instances to reload. If no instance is - given, reload them all. - """ - name = 'reload' - - def reload_instance(self, appid): - self.restart_instance(appid) - - -class StatusCommand(InstanceCommand): - """Display status information about the given instances. - - ... - identifiers of the instances to status. If no instance is - given, get status information about all registered instances. - """ - name = 'status' - options = () - - @staticmethod - def status_instance(appid): - """print running status information for an instance""" - status = 0 - for mode in cwcfg.possible_configurations(appid): - config = cwcfg.config_for(appid, mode) - print('[%s-%s]' % (appid, mode), end=' ') - try: - pidf = config['pid-file'] - except KeyError: - print('buggy instance, pid file not specified') - continue - if not exists(pidf): - print("doesn't seem to be running") - status = 1 - continue - pid = int(open(pidf).read().strip()) - # trick to guess whether or not the process is running - try: - getpgid(pid) - except OSError: - print("should be running with pid %s but the process can not be found" % pid) - status = 1 - continue - print("running with pid %s" % (pid)) - return status - -class UpgradeInstanceCommand(InstanceCommandFork): - """Upgrade an instance after cubicweb and/or component(s) upgrade. - - For repository update, you will be prompted for a login / password to use - to connect to the system database. For some upgrades, the given user - should have create or alter table permissions. - - ... - identifiers of the instances to upgrade. If no instance is - given, upgrade them all. 
- """ - name = 'upgrade' - actionverb = 'upgraded' - options = InstanceCommand.options + ( - ('force-cube-version', - {'short': 't', 'type' : 'named', 'metavar': 'cube1:X.Y.Z,cube2:X.Y.Z', - 'default': None, - 'help': 'force migration from the indicated version for the specified cube(s).'}), - - ('force-cubicweb-version', - {'short': 'e', 'type' : 'string', 'metavar': 'X.Y.Z', - 'default': None, - 'help': 'force migration from the indicated cubicweb version.'}), - - ('fs-only', - {'short': 's', 'action' : 'store_true', - 'default': False, - 'help': 'only upgrade files on the file system, not the database.'}), - - ('nostartstop', - {'short': 'n', 'action' : 'store_true', - 'default': False, - 'help': 'don\'t try to stop instance before migration and to restart it after.'}), - - ('verbosity', - {'short': 'v', 'type' : 'int', 'metavar': '<0..2>', - 'default': 1, - 'help': "0: no confirmation, 1: only main commands confirmed, 2 ask \ -for everything."}), - - ('backup-db', - {'short': 'b', 'type' : 'yn', 'metavar': '', - 'default': None, - 'help': "Backup the instance database before upgrade.\n"\ - "If the option is ommitted, confirmation will be ask.", - }), - - ('ext-sources', - {'short': 'E', 'type' : 'csv', 'metavar': '', - 'default': None, - 'help': "For multisources instances, specify to which sources the \ -repository should connect to for upgrading. When unspecified or 'migration' is \ -given, appropriate sources for migration will be automatically selected \ -(recommended). If 'all' is given, will connect to all defined sources.", - }), - ) - - def upgrade_instance(self, appid): - print('\n' + underline_title('Upgrading the instance %s' % appid)) - from logilab.common.changelog import Version - config = cwcfg.config_for(appid) - instance_running = exists(config['pid-file']) - config.repairing = True # notice we're not starting the server - config.verbosity = self.config.verbosity - set_sources_mode = getattr(config, 'set_sources_mode', None) - if set_sources_mode is not None: - set_sources_mode(self.config.ext_sources or ('migration',)) - # get instance and installed versions for the server and the componants - mih = config.migration_handler() - repo = mih.repo - vcconf = repo.get_versions() - helper = self.config_helper(config, required=False) - if self.config.force_cube_version: - for cube, version in self.config.force_cube_version.items(): - vcconf[cube] = Version(version) - toupgrade = [] - for cube in config.cubes(): - installedversion = config.cube_version(cube) - try: - applversion = vcconf[cube] - except KeyError: - config.error('no version information for %s' % cube) - continue - if installedversion > applversion: - toupgrade.append( (cube, applversion, installedversion) ) - cubicwebversion = config.cubicweb_version() - if self.config.force_cubicweb_version: - applcubicwebversion = Version(self.config.force_cubicweb_version) - vcconf['cubicweb'] = applcubicwebversion - else: - applcubicwebversion = vcconf.get('cubicweb') - if cubicwebversion > applcubicwebversion: - toupgrade.append(('cubicweb', applcubicwebversion, cubicwebversion)) - # only stop once we're sure we have something to do - if instance_running and not (CWDEV or self.config.nostartstop): - StopInstanceCommand(self.logger).stop_instance(appid) - # run cubicweb/componants migration scripts - if self.config.fs_only or toupgrade: - for cube, fromversion, toversion in toupgrade: - print('-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)) - with mih.cnx: - with mih.cnx.security_enabled(False, 
False): - mih.migrate(vcconf, reversed(toupgrade), self.config) - else: - print('-> no data migration needed for instance %s.' % appid) - # rewrite main configuration file - mih.rewrite_configuration() - mih.shutdown() - # handle i18n upgrade - if not self.i18nupgrade(config): - return - print() - if helper: - helper.postupgrade(repo) - print('-> instance migrated.') - if instance_running and not (CWDEV or self.config.nostartstop): - # restart instance through fork to get a proper environment, avoid - # uicfg pb (and probably gettext catalogs, to check...) - forkcmd = '%s start %s' % (sys.argv[0], appid) - status = system(forkcmd) - if status: - print('%s exited with status %s' % (forkcmd, status)) - print() - - def i18nupgrade(self, config): - # handle i18n upgrade: - # * install new languages - # * recompile catalogs - # XXX search available language in the first cube given - from cubicweb import i18n - templdir = cwcfg.cube_dir(config.cubes()[0]) - langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))] - errors = config.i18ncompile(langs) - if errors: - print('\n'.join(errors)) - if not ASK.confirm('Error while compiling message catalogs, ' - 'continue anyway?'): - print('-> migration not completed.') - return False - return True - - -class ListVersionsInstanceCommand(InstanceCommand): - """List versions used by an instance. - - ... - identifiers of the instances to list versions for. - """ - name = 'versions' - - def versions_instance(self, appid): - config = cwcfg.config_for(appid) - # should not raise error if db versions don't match fs versions - config.repairing = True - # no need to load all appobjects and schema - config.quick_start = True - if hasattr(config, 'set_sources_mode'): - config.set_sources_mode(('migration',)) - vcconf = config.repository().get_versions() - for key in sorted(vcconf): - print(key+': %s.%s.%s' % vcconf[key]) - -class ShellCommand(Command): - """Run an interactive migration shell on an instance. This is a python shell - with enhanced migration commands predefined in the namespace. An additional - argument may be given corresponding to a file containing commands to execute - in batch mode. - - By default it will connect to a local instance using an in memory - connection, unless a URL to a running instance is specified. - - Arguments after bare "--" string will not be processed by the shell command - You can use it to pass extra arguments to your script and expect for - them in '__args__' afterwards. - - - the identifier of the instance to connect. - """ - name = 'shell' - arguments = ' [batch command file(s)] [-- ", re.M|re.I|re.S) -def _remove_script_tags(data): - """Remove the script (usually javascript) tags to help the lxml - XMLParser / HTMLParser do their job. Without that, they choke on - tags embedded in JS strings. - """ - # Notice we may want to use lxml cleaner, but it's far too intrusive: - # - # cleaner = Cleaner(scripts=True, - # javascript=False, - # comments=False, - # style=False, - # links=False, - # meta=False, - # page_structure=False, - # processing_instructions=False, - # embedded=False, - # frames=False, - # forms=False, - # annoying_tags=False, - # remove_tags=(), - # remove_unknown_tags=False, - # safe_attrs_only=False, - # add_nofollow=False) - # >>> cleaner.clean_html('') - # '' - # >>> cleaner.clean_html('') - # '' - # >>> cleaner.clean_html('
      ') - # '
      ' - # >>> cleaner.clean_html('

      ') - # '

      ' - # >>> cleaner.clean_html('

      ') - # '

      ' - # - # using that, we'll miss most actual validation error we want to - # catch. For now, use dumb regexp - return _REM_SCRIPT_RGX.sub(b'', data) - - -class Validator(object): - """ base validator API """ - parser = None - - def parse_string(self, source): - etree = self._parse(self.preprocess_data(source)) - return PageInfo(source, etree) - - def preprocess_data(self, data): - return data - - def _parse(self, pdata): - try: - return etree.fromstring(pdata, self.parser) - except etree.XMLSyntaxError as exc: - new_exc = AssertionError(u'invalid document: %s' % exc) - new_exc.position = exc.position - raise new_exc - - -class DTDValidator(Validator): - def __init__(self): - Validator.__init__(self) - # XXX understand what's happening under windows - self.parser = etree.XMLParser(dtd_validation=sys.platform != 'win32') - - def preprocess_data(self, data): - """used to fix potential blockquote mess generated by docutils""" - if STRICT_DOCTYPE not in data: - return data - # parse using transitional DTD - data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE) - tree = self._parse(data) - namespace = tree.nsmap.get(None) - # this is the list of authorized child tags for
      nodes - expected = 'p h1 h2 h3 h4 h5 h6 div ul ol dl pre hr blockquote address ' \ - 'fieldset table form noscript ins del script'.split() - if namespace: - blockquotes = tree.findall('.//{%s}blockquote' % namespace) - expected = ['{%s}%s' % (namespace, tag) for tag in expected] - else: - blockquotes = tree.findall('.//blockquote') - # quick and dirty approach: remove all blockquotes - for blockquote in blockquotes: - parent = blockquote.getparent() - parent.remove(blockquote) - data = etree.tostring(tree) - return '%s\n%s' % ( - STRICT_DOCTYPE, data) - - -class XMLValidator(Validator): - """XML validator, checks that XML is well-formed and used XMLNS are defined""" - - def __init__(self): - Validator.__init__(self) - self.parser = etree.XMLParser() - -SaxOnlyValidator = class_renamed('SaxOnlyValidator', - XMLValidator, - '[3.17] you should use the ' - 'XMLValidator class instead') - - -class XMLSyntaxValidator(Validator): - """XML syntax validator, check XML is well-formed""" - - class MySaxErrorHandler(sax.ErrorHandler): - """override default handler to avoid choking because of unknown entity""" - def fatalError(self, exception): - # XXX check entity in htmlentitydefs - if not str(exception).endswith('undefined entity'): - raise exception - _parser = sax.make_parser() - _parser.setContentHandler(sax.handler.ContentHandler()) - _parser.setErrorHandler(MySaxErrorHandler()) - - def __init__(self): - super(XMLSyntaxValidator, self).__init__() - # XMLParser() wants xml namespaces defined - # XMLParser(recover=True) will accept almost anything - # - # -> use the later but preprocess will check xml well-formness using a - # dumb SAX parser - self.parser = etree.XMLParser(recover=True) - - def preprocess_data(self, data): - return _remove_script_tags(data) - - def _parse(self, data): - inpsrc = sax.InputSource() - inpsrc.setByteStream(BytesIO(data)) - try: - self._parser.parse(inpsrc) - except sax.SAXParseException as exc: - new_exc = AssertionError(u'invalid document: %s' % exc) - new_exc.position = (exc._linenum, exc._colnum) - raise new_exc - return super(XMLSyntaxValidator, self)._parse(data) - - -class HTMLValidator(Validator): - - def __init__(self): - Validator.__init__(self) - self.parser = etree.HTMLParser(recover=False) - - def preprocess_data(self, data): - return _remove_script_tags(data) - - -class PageInfo(object): - """holds various informations on the view's output""" - def __init__(self, source, root): - self.source = source - self.etree = root - self.raw_text = u''.join(root.xpath('//text()')) - self.namespace = self.etree.nsmap - self.default_ns = self.namespace.get(None) - self.a_tags = self.find_tag('a') - self.h1_tags = self.find_tag('h1') - self.h2_tags = self.find_tag('h2') - self.h3_tags = self.find_tag('h3') - self.h4_tags = self.find_tag('h4') - self.input_tags = self.find_tag('input') - self.title_tags = [self.h1_tags, self.h2_tags, self.h3_tags, self.h4_tags] - - def _iterstr(self, tag): - if self.default_ns is None: - return ".//%s" % tag - else: - return ".//{%s}%s" % (self.default_ns, tag) - - def matching_nodes(self, tag, **attrs): - for elt in self.etree.iterfind(self._iterstr(tag)): - eltattrs = elt.attrib - for attr, value in attrs.items(): - try: - if eltattrs[attr] != value: - break - except KeyError: - break - else: # all attributes match - yield elt - - def has_tag(self, tag, nboccurs=1, **attrs): - """returns True if tag with given attributes appears in the page - `nbtimes` (any if None) - """ - for elt in self.matching_nodes(tag, **attrs): - if 
nboccurs is None: # no need to check number of occurences - return True - if not nboccurs: # too much occurences - return False - nboccurs -= 1 - if nboccurs == 0: # correct number of occurences - return True - return False # no matching tag/attrs - - def find_tag(self, tag, gettext=True): - """return a list which contains text of all "tag" elements """ - iterstr = self._iterstr(tag) - if not gettext or tag in ('a', 'input'): - return [(elt.text, elt.attrib) - for elt in self.etree.iterfind(iterstr)] - return [u''.join(elt.xpath('.//text()')) - for elt in self.etree.iterfind(iterstr)] - - def appears(self, text): - """returns True if appears in the page""" - return text in self.raw_text - - def __contains__(self, text): - return text in self.source - - def has_title(self, text, level=None): - """returns True if text - - :param level: the title's level (1 for h1, 2 for h2, etc.) - """ - if level is None: - for hlist in self.title_tags: - if text in hlist: - return True - return False - else: - hlist = self.title_tags[level - 1] - return text in hlist - - def has_title_regexp(self, pattern, level=None): - """returns True if pattern""" - sre = re.compile(pattern) - if level is None: - for hlist in self.title_tags: - for title in hlist: - if sre.match(title): - return True - return False - else: - hlist = self.title_tags[level - 1] - for title in hlist: - if sre.match(title): - return True - return False - - def has_link(self, text, url=None): - """returns True if text was found in the page""" - for link_text, attrs in self.a_tags: - if text == link_text: - if url is None: - return True - try: - href = attrs['href'] - if href == url: - return True - except KeyError: - continue - return False - - def has_link_regexp(self, pattern, url=None): - """returns True if pattern was found in the page""" - sre = re.compile(pattern) - for link_text, attrs in self.a_tags: - if sre.match(link_text): - if url is None: - return True - try: - href = attrs['href'] - if href == url: - return True - except KeyError: - continue - return False - -VALMAP = {None: None, - 'dtd': DTDValidator, - 'xml': XMLValidator, - 'html': HTMLValidator, - } diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/httptest.py --- a/devtools/httptest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,170 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
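PageInfo.has_tag() above compares the number of matching elements against nboccurs, where None means "at least one match" and an integer means "exactly that many". The same counting rule, isolated on a plain iterable of matches (count_matches_ok is a hypothetical name):

    def count_matches_ok(matches, nboccurs=1):
        for _elt in matches:
            if nboccurs is None:
                return True          # any occurrence is enough
            if not nboccurs:
                return False         # one match too many
            nboccurs -= 1
        return nboccurs == 0         # saw exactly the expected number

    assert count_matches_ok(['a'], nboccurs=None)
    assert count_matches_ok(['a', 'b'], nboccurs=2)
    assert not count_matches_ok(['a', 'b', 'c'], nboccurs=2)
    assert not count_matches_ok([], nboccurs=1)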
-"""this module contains base classes and utilities for integration with running -http server -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import random -import threading -import socket - -from six.moves import range, http_client -from six.moves.urllib.parse import urlparse - - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools import ApptestConfiguration - - -def get_available_port(ports_scan): - """return the first available port from the given ports range - - Try to connect port by looking for refused connection (111) or transport - endpoint already connected (106) errors - - Raise a RuntimeError if no port can be found - - :type ports_range: list - :param ports_range: range of ports to test - :rtype: int - - .. see:: :func:`test.test_support.bind_port` - """ - ports_scan = list(ports_scan) - random.shuffle(ports_scan) # lower the chance of race condition - for port in ports_scan: - try: - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock = s.connect(("localhost", port)) - except socket.error as err: - if err.args[0] in (111, 106): - return port - finally: - s.close() - raise RuntimeError('get_available_port([ports_range]) cannot find an available port') - - -class CubicWebServerTC(CubicWebTC): - """Class for running a Twisted-based test web server. - """ - ports_range = range(7000, 8000) - - def start_server(self): - from twisted.internet import reactor - from cubicweb.etwist.server import run - # use a semaphore to avoid starting test while the http server isn't - # fully initilialized - semaphore = threading.Semaphore(0) - def safe_run(*args, **kwargs): - try: - run(*args, **kwargs) - finally: - semaphore.release() - - reactor.addSystemEventTrigger('after', 'startup', semaphore.release) - t = threading.Thread(target=safe_run, name='cubicweb_test_web_server', - args=(self.config, True), kwargs={'repo': self.repo}) - self.web_thread = t - t.start() - semaphore.acquire() - if not self.web_thread.isAlive(): - # XXX race condition with actual thread death - raise RuntimeError('Could not start the web server') - #pre init utils connection - parseurl = urlparse(self.config['base-url']) - assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port']) - self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname, - parseurl.port) - self._ident_cookie = None - - def stop_server(self, timeout=15): - """Stop the webserver, waiting for the thread to return""" - from twisted.internet import reactor - if self._web_test_cnx is None: - self.web_logout() - self._web_test_cnx.close() - try: - reactor.stop() - self.web_thread.join(timeout) - assert not self.web_thread.isAlive() - - finally: - reactor.__init__() - - def web_login(self, user=None, passwd=None): - """Log the current http session for the provided credential - - If no user is provided, admin connection are used. 
- """ - if user is None: - user = self.admlogin - passwd = self.admpassword - if passwd is None: - passwd = user - response = self.web_get("login?__login=%s&__password=%s" % - (user, passwd)) - assert response.status == http_client.SEE_OTHER, response.status - self._ident_cookie = response.getheader('Set-Cookie') - assert self._ident_cookie - return True - - def web_logout(self, user='admin', pwd=None): - """Log out current http user""" - if self._ident_cookie is not None: - response = self.web_get('logout') - self._ident_cookie = None - - def web_request(self, path='', method='GET', body=None, headers=None): - """Return an http_client.HTTPResponse object for the specified path - - Use available credential if available. - """ - if headers is None: - headers = {} - if self._ident_cookie is not None: - assert 'Cookie' not in headers - headers['Cookie'] = self._ident_cookie - self._web_test_cnx.request(method, '/' + path, headers=headers, body=body) - response = self._web_test_cnx.getresponse() - response.body = response.read() # to chain request - response.read = lambda : response.body - return response - - def web_get(self, path='', body=None, headers=None): - return self.web_request(path=path, body=body, headers=headers) - - def setUp(self): - super(CubicWebServerTC, self).setUp() - port = self.config['port'] or get_available_port(self.ports_range) - self.config.global_set_option('port', port) # force rewrite here - self.config.global_set_option('base-url', 'http://127.0.0.1:%d/' % port) - # call load_configuration again to let the config reset its datadir_url - self.config.load_configuration() - self.start_server() - - def tearDown(self): - from twisted.internet import error - try: - self.stop_server() - except error.ReactorNotRunning as err: - # Server could be launched manually - print(err) - super(CubicWebServerTC, self).tearDown() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/instrument.py --- a/devtools/instrument.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,225 +0,0 @@ -# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr -- mailto:contact@logilab.fr -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with this program. If not, see . 
-"""Instrumentation utilities""" -from __future__ import print_function - -import os - -try: - import pygraphviz -except ImportError: - pygraphviz = None - -from cubicweb.cwvreg import CWRegistryStore -from cubicweb.devtools.devctl import DevConfiguration - - -ALL_COLORS = [ - "00FF00", "0000FF", "FFFF00", "FF00FF", "00FFFF", "000000", - "800000", "008000", "000080", "808000", "800080", "008080", "808080", - "C00000", "00C000", "0000C0", "C0C000", "C000C0", "00C0C0", "C0C0C0", - "400000", "004000", "000040", "404000", "400040", "004040", "404040", - "200000", "002000", "000020", "202000", "200020", "002020", "202020", - "600000", "006000", "000060", "606000", "600060", "006060", "606060", - "A00000", "00A000", "0000A0", "A0A000", "A000A0", "00A0A0", "A0A0A0", - "E00000", "00E000", "0000E0", "E0E000", "E000E0", "00E0E0", "E0E0E0", - ] -_COLORS = {} -def get_color(key): - try: - return _COLORS[key] - except KeyError: - _COLORS[key] = '#'+ALL_COLORS[len(_COLORS) % len(ALL_COLORS)] - return _COLORS[key] - -def warn(msg, *args): - print('WARNING: %s' % (msg % args)) - -def info(msg): - print('INFO: ' + msg) - - -class PropagationAnalyzer(object): - """Abstract propagation analyzer, providing utility function to extract - entities involved in propagation from a schema, as well as propagation - rules from hooks (provided they use intrumentalized sets, see - :class:`CubeTracerSet`). - - Concrete classes should at least define `prop_rel` class attribute and - implements the `is_root` method. - - See `localperms` or `nosylist` cubes for example usage (`ccplugin` module). - """ - prop_rel = None # name of the propagation relation - - def init(self, cube): - """Initialize analyze for the given cube, returning the (already loaded) - vregistry and a set of entities which we're interested in. - """ - config = DevConfiguration(cube) - schema = config.load_schema() - vreg = CWRegistryStore(config) - vreg.set_schema(schema) # set_schema triggers objects registrations - eschemas = set(eschema for eschema in schema.entities() - if self.should_include(eschema)) - return vreg, eschemas - - def is_root(self, eschema): - """Return `True` if given entity schema is a root of the graph""" - raise NotImplementedError() - - def should_include(self, eschema): - """Return `True` if given entity schema should be included by the graph. - """ - - if self.prop_rel in eschema.subjrels or self.is_root(eschema): - return True - return False - - def prop_edges(self, s_rels, o_rels, eschemas): - """Return a set of edges where propagation has been detected. - - Each edge is defined by a 4-uple (from node, to node, rtype, package) - where `rtype` is the relation type bringing from to and `package` is the cube adding the rule to the propagation - control set (see see :class:`CubeTracerSet`). 
- """ - schema = iter(eschemas).next().schema - prop_edges = set() - for rtype in s_rels: - found = False - for subj, obj in schema.rschema(rtype).rdefs: - if subj in eschemas and obj in eschemas: - found = True - prop_edges.add( (subj, obj, rtype, s_rels.value_cube[rtype]) ) - if not found: - warn('no rdef match for %s', rtype) - for rtype in o_rels: - found = False - for subj, obj in schema.rschema(rtype).rdefs: - if subj in eschemas and obj in eschemas: - found = True - prop_edges.add( (obj, subj, rtype, o_rels.value_cube[rtype]) ) - if not found: - warn('no rdef match for %s', rtype) - return prop_edges - - def detect_problems(self, eschemas, edges): - """Given the set of analyzed entity schemas and edges between them, - return a set of entity schemas where a problem has been detected. - """ - problematic = set() - for eschema in eschemas: - if self.has_problem(eschema, edges): - problematic.add(eschema) - not_problematic = set(eschemas).difference(problematic) - if not_problematic: - info('nothing problematic in: %s' % - ', '.join(e.type for e in not_problematic)) - return problematic - - def has_problem(self, eschema, edges): - """Return `True` if the given schema is considered problematic, - considering base propagation rules. - """ - root = self.is_root(eschema) - has_prop_rel = self.prop_rel in eschema.subjrels - # root but no propagation relation - if root and not has_prop_rel: - warn('%s is root but miss %s', eschema, self.prop_rel) - return True - # propagated but without propagation relation / not propagated but - # with propagation relation - if not has_prop_rel and \ - any(edge for edge in edges if edge[1] == eschema): - warn("%s miss %s but is reached by propagation", - eschema, self.prop_rel) - return True - elif has_prop_rel and not root: - rdef = eschema.rdef(self.prop_rel, takefirst=True) - edges = [edge for edge in edges if edge[1] == eschema] - if not edges: - warn("%s has %s but isn't reached by " - "propagation", eschema, self.prop_rel) - return True - # require_permission relation / propagation rule not added by - # the same cube - elif not any(edge for edge in edges if edge[-1] == rdef.package): - warn('%s has %s relation / propagation rule' - ' not added by the same cube (%s / %s)', eschema, - self.prop_rel, rdef.package, edges[0][-1]) - return True - return False - - def init_graph(self, eschemas, edges, problematic): - """Initialize and return graph, adding given nodes (entity schemas) and - edges between them. - - Require pygraphviz installed. - """ - if pygraphviz is None: - raise RuntimeError('pygraphviz is not installed') - graph = pygraphviz.AGraph(strict=False, directed=True) - for eschema in eschemas: - if eschema in problematic: - params = {'color': '#ff0000', 'fontcolor': '#ff0000'} - else: - params = {}#'color': get_color(eschema.package)} - graph.add_node(eschema.type, **params) - for subj, obj, rtype, package in edges: - graph.add_edge(str(subj), str(obj), label=rtype, - color=get_color(package)) - return graph - - def add_colors_legend(self, graph): - """Add a legend of used colors to the graph.""" - for package, color in sorted(_COLORS.items()): - graph.add_node(package, color=color, fontcolor=color, shape='record') - - -class CubeTracerSet(object): - """Dumb set implementation whose purpose is to keep track of which cube is - being loaded when something is added to the set. - - Results will be found in the `value_cube` attribute dictionary. - - See `localperms` or `nosylist` cubes for example usage (`hooks` module). 
- """ - def __init__(self, vreg, wrapped): - self.vreg = vreg - self.wrapped = wrapped - self.value_cube = {} - - def add(self, value): - self.wrapped.add(value) - cube = self.vreg.currently_loading_cube - if value in self.value_cube: - warn('%s is propagated by cube %s and cube %s', - value, self.value_cube[value], cube) - else: - self.value_cube[value] = cube - - def __iter__(self): - return iter(self.wrapped) - - def __ior__(self, other): - for value in other: - self.add(value) - return self - - def __ror__(self, other): - other |= self.wrapped - return other diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/qunit.py --- a/devtools/qunit.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,293 +0,0 @@ -# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from __future__ import absolute_import - -import os, os.path as osp -import errno -from tempfile import mkdtemp -from subprocess import Popen, PIPE, STDOUT - -from six.moves.queue import Queue, Empty - -# imported by default to simplify further import statements -from logilab.common.testlib import unittest_main, with_tempdir, InnerTest, Tags -import webtest.http - -import cubicweb -from cubicweb.view import View -from cubicweb.web.controller import Controller -from cubicweb.web.views.staticcontrollers import StaticFileController, STATIC_CONTROLLERS -from cubicweb.devtools import webtest as cwwebtest - - -class FirefoxHelper(object): - - def __init__(self, url=None): - self._process = None - self._profile_dir = mkdtemp(prefix='cwtest-ffxprof-') - self.firefox_cmd = ['firefox', '-no-remote'] - if os.name == 'posix': - self.firefox_cmd = [osp.join(osp.dirname(__file__), 'data', 'xvfb-run.sh'), - '-a', '-s', '-noreset -screen 0 800x600x24'] + self.firefox_cmd - - def test(self): - try: - proc = Popen(['firefox', '--help'], stdout=PIPE, stderr=STDOUT) - stdout, _ = proc.communicate() - return proc.returncode == 0, stdout - except OSError as exc: - if exc.errno == errno.ENOENT: - msg = '[%s] %s' % (errno.errorcode[exc.errno], exc.strerror) - return False, msg - raise - - def start(self, url): - self.stop() - cmd = self.firefox_cmd + ['-silent', '--profile', self._profile_dir, - '-url', url] - with open(os.devnull, 'w') as fnull: - self._process = Popen(cmd, stdout=fnull, stderr=fnull) - - def stop(self): - if self._process is not None: - assert self._process.returncode is None, self._process.returncode - self._process.terminate() - self._process.wait() - self._process = None - - def __del__(self): - self.stop() - - -class QUnitTestCase(cwwebtest.CubicWebTestTC): - - tags = cwwebtest.CubicWebTestTC.tags | Tags(('qunit',)) - - # testfile, (dep_a, dep_b) - all_js_tests = () - - def setUp(self): - super(QUnitTestCase, self).setUp() - self.test_queue = Queue() - class 
MyQUnitResultController(QUnitResultController): - tc = self - test_queue = self.test_queue - self._qunit_controller = MyQUnitResultController - self.webapp.app.appli.vreg.register(MyQUnitResultController) - self.webapp.app.appli.vreg.register(QUnitView) - self.webapp.app.appli.vreg.register(CWDevtoolsStaticController) - self.server = webtest.http.StopableWSGIServer.create(self.webapp.app) - self.config.global_set_option('base-url', self.server.application_url) - - def tearDown(self): - self.server.shutdown() - self.webapp.app.appli.vreg.unregister(self._qunit_controller) - self.webapp.app.appli.vreg.unregister(QUnitView) - self.webapp.app.appli.vreg.unregister(CWDevtoolsStaticController) - super(QUnitTestCase, self).tearDown() - - def test_javascripts(self): - for args in self.all_js_tests: - self.assertIn(len(args), (1, 2)) - test_file = args[0] - if len(args) > 1: - depends = args[1] - else: - depends = () - for js_test in self._test_qunit(test_file, depends): - yield js_test - - @with_tempdir - def _test_qunit(self, test_file, depends=(), timeout=10): - QUnitView.test_file = test_file - QUnitView.depends = depends - - while not self.test_queue.empty(): - self.test_queue.get(False) - - browser = FirefoxHelper() - isavailable, reason = browser.test() - if not isavailable: - self.fail('firefox not available or not working properly (%s)' % reason) - browser.start(self.config['base-url'] + "?vid=qunit") - test_count = 0 - error = False - def raise_exception(cls, *data): - raise cls(*data) - while not error: - try: - result, test_name, msg = self.test_queue.get(timeout=timeout) - test_name = '%s (%s)' % (test_name, test_file) - self.set_description(test_name) - if result is None: - break - test_count += 1 - if result: - yield InnerTest(test_name, lambda : 1) - else: - yield InnerTest(test_name, self.fail, msg) - except Empty: - error = True - msg = '%s inactivity timeout (%is). %i test results received' - yield InnerTest(test_file, raise_exception, RuntimeError, - msg % (test_file, timeout, test_count)) - browser.stop() - if test_count <= 0 and not error: - yield InnerTest(test_name, raise_exception, RuntimeError, - 'No test yielded by qunit for %s' % test_file) - -class QUnitResultController(Controller): - - __regid__ = 'qunit_result' - - - # Class variables to circumvent the instantiation of a new Controller for each request. 
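The QUnit machinery above hands results from the HTTP controller back to the test through a queue: handle_test_done() puts (result, name, message) tuples, handle_done() puts a None result as the "finished" sentinel, and _test_qunit() drains the queue with a timeout. The handshake in miniature, using the Python 3 stdlib queue module directly rather than six.moves:

    import queue

    results = queue.Queue()

    # what handle_test_done() / handle_done() do, in miniature:
    results.put((True, 'module // test_ok', '0/3 assertions failed'))
    results.put((False, 'module // test_ko', '1/2 assertions failed'))
    results.put((None, None, None))  # sentinel: QUnit run finished

    while True:
        result, name, msg = results.get(timeout=5)
        if result is None:
            break
        print('PASS' if result else 'FAIL', name, '-', msg)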
- _log_stack = [] # store QUnit log messages - _current_module_name = '' # store the current QUnit module name - - def publish(self, rset=None): - event = self._cw.form['event'] - getattr(self, 'handle_%s' % event)() - return b'' - - def handle_module_start(self): - self.__class__._current_module_name = self._cw.form.get('name', '') - - def handle_test_done(self): - name = '%s // %s' % (self._current_module_name, self._cw.form.get('name', '')) - failures = int(self._cw.form.get('failures', 0)) - total = int(self._cw.form.get('total', 0)) - - self._log_stack.append('%i/%i assertions failed' % (failures, total)) - msg = '\n'.join(self._log_stack) - - if failures: - self.tc.test_queue.put((False, name, msg)) - else: - self.tc.test_queue.put((True, name, msg)) - self._log_stack[:] = [] - - def handle_done(self): - self.tc.test_queue.put((None, None, None)) - - def handle_log(self): - result = self._cw.form['result'] - message = self._cw.form.get('message', '') - actual = self._cw.form.get('actual') - expected = self._cw.form.get('expected') - source = self._cw.form.get('source') - log = '%s: %s' % (result, message) - if result == 'false' and actual is not None and expected is not None: - log += ' (got: %s, expected: %s)' % (actual, expected) - if source is not None: - log += '\n' + source - self._log_stack.append(log) - - -class QUnitView(View): - __regid__ = 'qunit' - - templatable = False - - depends = None - test_file = None - - def call(self, **kwargs): - w = self.w - req = self._cw - w(u''' - - - - - - - - ''') - w(u'') - w(u'') - w(u'') - - for dep in self.depends: - w(u' \n' % dep) - - w(u' ') - w(u' ' % self.test_file) - w(u''' - -
      - - ''') - - -class CWDevtoolsStaticController(StaticFileController): - __regid__ = 'devtools' - - def publish(self, rset=None): - staticdir = osp.join(osp.dirname(__file__), 'data') - relpath = self.relpath[len(self.__regid__) + 1:] - return self.static_file(osp.join(staticdir, relpath)) - - -STATIC_CONTROLLERS.append(CWDevtoolsStaticController) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/realdbtest.py --- a/devtools/realdbtest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,59 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from cubicweb import toolsutils -from cubicweb.devtools import DEFAULT_SOURCES, BaseApptestConfiguration - -class RealDatabaseConfiguration(BaseApptestConfiguration): - init_repository = False - sourcesdef = DEFAULT_SOURCES.copy() - - def sources(self): - """ - By default, we run tests with the sqlite DB backend. - One may use its own configuration by just creating a - 'sources' file in the test directory from wich tests are - launched. - """ - self._sources = self.sourcesdef - return self._sources - - -def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None): - """convenience function that builds a real-db configuration class""" - sourcesdef = {'system': {'adapter' : 'native', - 'db-encoding' : 'UTF-8', #'ISO-8859-1', - 'db-user' : dbuser, - 'db-password' : dbpassword, - 'db-name' : dbname, - 'db-driver' : 'postgres', - 'db-host' : dbhost, - }, - 'admin' : {'login': adminuser, - 'password': adminpassword, - }, - } - return type('MyRealDBConfig', (RealDatabaseConfiguration,), - {'sourcesdef': sourcesdef}) - - -def loadconfig(filename): - """convenience function that builds a real-db configuration class - from a file - """ - return type('MyRealDBConfig', (RealDatabaseConfiguration,), - {'sourcesdef': toolsutils.read_config(filename)}) diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/repotest.py --- a/devtools/repotest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,353 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
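buildconfig() and loadconfig() above create one-off configuration classes with type(name, bases, namespace) instead of a class statement. A minimal sketch of that pattern (BaseConfig, build_config and the connection values are stand-ins for RealDatabaseConfiguration and the real source definitions):

    class BaseConfig(object):
        sourcesdef = {}

    def build_config(dbuser, dbname):
        sourcesdef = {'system': {'db-user': dbuser, 'db-name': dbname,
                                 'db-driver': 'postgres'}}
        # type(name, bases, namespace) builds a new subclass on the fly
        return type('MyRealDBConfig', (BaseConfig,), {'sourcesdef': sourcesdef})

    cfg_cls = build_config('cwuser', 'cwdb')
    assert issubclass(cfg_cls, BaseConfig)
    assert cfg_cls.sourcesdef['system']['db-name'] == 'cwdb'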
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""some utilities to ease repository testing - -This module contains functions to initialize a new repository. -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from pprint import pprint - -from logilab.common.testlib import SkipTest - -def tuplify(mylist): - return [tuple(item) for item in mylist] - -def snippet_key(a): - # a[0] may be a dict or a key/value tuple - return (sorted(dict(a[0]).items()), [e.expression for e in a[1]]) - -def test_plan(self, rql, expected, kwargs=None): - with self.session.new_cnx() as cnx: - plan = self._prepare_plan(cnx, rql, kwargs) - self.planner.build_plan(plan) - try: - self.assertEqual(len(plan.steps), len(expected), - 'expected %s steps, got %s' % (len(expected), len(plan.steps))) - # step order is important - for i, step in enumerate(plan.steps): - compare_steps(self, step.test_repr(), expected[i]) - except AssertionError: - pprint([step.test_repr() for step in plan.steps]) - raise - -def compare_steps(self, step, expected): - try: - self.assertEqual(step[0], expected[0], 'expected step type %s, got %s' % (expected[0], step[0])) - if len(step) > 2 and isinstance(step[1], list) and isinstance(expected[1], list): - queries, equeries = step[1], expected[1] - self.assertEqual(len(queries), len(equeries), - 'expected %s queries, got %s' % (len(equeries), len(queries))) - for i, (rql, sol) in enumerate(queries): - self.assertEqual(rql, equeries[i][0]) - self.assertEqual(sorted(sorted(x.items()) for x in sol), sorted(sorted(x.items()) for x in equeries[i][1])) - idx = 2 - else: - idx = 1 - self.assertEqual(step[idx:-1], expected[idx:-1], - 'expected step characteristic \n%s\n, got\n%s' % (expected[1:-1], step[1:-1])) - self.assertEqual(len(step[-1]), len(expected[-1]), - 'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1]))) - except AssertionError: - print('error on step ', end=' ') - pprint(step[:-1]) - raise - children = step[-1] - if step[0] in ('UnionFetchStep', 'UnionStep'): - # sort children - children = sorted(children) - expectedchildren = sorted(expected[-1]) - else: - expectedchildren = expected[-1] - for i, substep in enumerate(children): - compare_steps(self, substep, expectedchildren[i]) - - -class DumbOrderedDict(list): - def __iter__(self): - return self.iterkeys() - def __contains__(self, key): - return key in self.iterkeys() - def __getitem__(self, key): - for key_, value in list.__iter__(self): - if key == key_: - return value - raise KeyError(key) - def iterkeys(self): - return (x for x, y in list.__iter__(self)) - def iteritems(self): - return (x for x in list.__iter__(self)) - def items(self): - return [x for x in list.__iter__(self)] - -class DumbOrderedDict2(object): - def __init__(self, origdict, sortkey): - self.origdict = origdict - self.sortkey = sortkey - def __getattr__(self, attr): - return getattr(self.origdict, attr) - def __iter__(self): - return iter(sorted(self.origdict, key=self.sortkey)) - -def schema_eids_idx(schema): - """return a dictionary mapping schema types to their eids so we can reread - it from the fs instead of the db (too costly) between tests - """ - schema_eids = {} - for x in schema.entities(): - schema_eids[x] = x.eid - for x in schema.relations(): - schema_eids[x] = x.eid - for rdef in x.rdefs.values(): - schema_eids[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid - return schema_eids - -def restore_schema_eids_idx(schema, 
schema_eids): - """rebuild schema eid index""" - for x in schema.entities(): - x.eid = schema_eids[x] - schema._eid_index[x.eid] = x - for x in schema.relations(): - x.eid = schema_eids[x] - schema._eid_index[x.eid] = x - for rdef in x.rdefs.values(): - rdef.eid = schema_eids[(rdef.subject, rdef.rtype, rdef.object)] - schema._eid_index[rdef.eid] = rdef - - -from logilab.common.testlib import TestCase, mock_object -from logilab.database import get_db_helper - -from rql import RQLHelper - -from cubicweb.devtools.fake import FakeRepo, FakeConfig, FakeSession -from cubicweb.server import set_debug, debugged -from cubicweb.server.querier import QuerierHelper -from cubicweb.server.session import Session -from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions - -class RQLGeneratorTC(TestCase): - schema = backend = None # set this in concrete class - - @classmethod - def setUpClass(cls): - if cls.backend is not None: - try: - cls.dbhelper = get_db_helper(cls.backend) - except ImportError as ex: - raise SkipTest(str(ex)) - - def setUp(self): - self.repo = FakeRepo(self.schema, config=FakeConfig(apphome=self.datadir)) - self.repo.system_source = mock_object(dbdriver=self.backend) - self.rqlhelper = RQLHelper(self.schema, - special_relations={'eid': 'uid', - 'has_text': 'fti'}, - backend=self.backend) - self.qhelper = QuerierHelper(self.repo, self.schema) - ExecutionPlan._check_permissions = _dummy_check_permissions - rqlannotation._select_principal = _select_principal - if self.backend is not None: - self.o = SQLGenerator(self.schema, self.dbhelper) - - def tearDown(self): - ExecutionPlan._check_permissions = _orig_check_permissions - rqlannotation._select_principal = _orig_select_principal - - def set_debug(self, debug): - set_debug(debug) - def debugged(self, debug): - return debugged(debug) - - def _prepare(self, rql): - #print '******************** prepare', rql - union = self.rqlhelper.parse(rql) - #print '********* parsed', union.as_string() - self.rqlhelper.compute_solutions(union) - #print '********* solutions', solutions - self.rqlhelper.simplify(union) - #print '********* simplified', union.as_string() - plan = self.qhelper.plan_factory(union, {}, FakeSession(self.repo)) - plan.preprocess(union) - for select in union.children: - select.solutions.sort(key=lambda x: list(x.items())) - #print '********* ppsolutions', solutions - return union - - -class BaseQuerierTC(TestCase): - repo = None # set this in concrete class - - def setUp(self): - self.o = self.repo.querier - self.session = next(iter(self.repo._sessions.values())) - self.ueid = self.session.user.eid - assert self.ueid != -1 - self.repo._type_source_cache = {} # clear cache - self.maxeid = self.get_max_eid() - do_monkey_patch() - self._dumb_sessions = [] - - def get_max_eid(self): - with self.session.new_cnx() as cnx: - return cnx.execute('Any MAX(X)')[0][0] - - def cleanup(self): - with self.session.new_cnx() as cnx: - cnx.execute('DELETE Any X WHERE X eid > %s' % self.maxeid) - cnx.commit() - - def tearDown(self): - undo_monkey_patch() - self.cleanup() - assert self.session.user.eid != -1 - - def set_debug(self, debug): - set_debug(debug) - def debugged(self, debug): - return debugged(debug) - - def _rqlhelper(self): - rqlhelper = self.repo.vreg.rqlhelper - # reset uid_func so it don't try to get type from eids - rqlhelper._analyser.uid_func = None - rqlhelper._analyser.uid_func_mapping = {} - return rqlhelper - - def _prepare_plan(self, cnx, rql, kwargs=None, simplify=True): - rqlhelper = 
self._rqlhelper() - rqlst = rqlhelper.parse(rql) - rqlhelper.compute_solutions(rqlst, kwargs=kwargs) - if simplify: - rqlhelper.simplify(rqlst) - for select in rqlst.children: - select.solutions.sort(key=lambda x: list(x.items())) - return self.o.plan_factory(rqlst, kwargs, cnx) - - def _prepare(self, cnx, rql, kwargs=None): - plan = self._prepare_plan(cnx, rql, kwargs, simplify=False) - plan.preprocess(plan.rqlst) - rqlst = plan.rqlst.children[0] - rqlst.solutions = remove_unused_solutions(rqlst, rqlst.solutions, {}, self.repo.schema)[0] - return rqlst - - def user_groups_session(self, *groups): - """lightweight session using the current user with hi-jacked groups""" - # use self.session.user.eid to get correct owned_by relation, unless explicit eid - with self.session.new_cnx() as cnx: - u = self.repo._build_user(cnx, self.session.user.eid) - u._groups = set(groups) - s = Session(u, self.repo) - return s - - def qexecute(self, rql, args=None, build_descr=True): - with self.session.new_cnx() as cnx: - try: - return self.o.execute(cnx, rql, args, build_descr) - finally: - if rql.startswith(('INSERT', 'DELETE', 'SET')): - cnx.commit() - - -class BasePlannerTC(BaseQuerierTC): - - def setup(self): - # XXX source_defs - self.o = self.repo.querier - self.session = self.repo._sessions.values()[0] - self.schema = self.o.schema - self.system = self.repo.system_source - do_monkey_patch() - self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered - - def tearDown(self): - undo_monkey_patch() - - def _prepare_plan(self, cnx, rql, kwargs=None): - rqlst = self.o.parse(rql, annotate=True) - self.o.solutions(cnx, rqlst, kwargs) - if rqlst.TYPE == 'select': - self.repo.vreg.rqlhelper.annotate(rqlst) - for select in rqlst.children: - select.solutions.sort(key=lambda x: list(x.items())) - else: - rqlst.solutions.sort(key=lambda x: list(x.items())) - return self.o.plan_factory(rqlst, kwargs, cnx) - - -# monkey patch some methods to get predictable results ####################### - -from cubicweb import rqlrewrite -_orig_iter_relations = rqlrewrite.iter_relations -_orig_insert_snippets = rqlrewrite.RQLRewriter.insert_snippets -_orig_build_variantes = rqlrewrite.RQLRewriter.build_variantes - -def _insert_snippets(self, snippets, varexistsmap=None): - _orig_insert_snippets(self, sorted(snippets, key=snippet_key), varexistsmap) - -def _build_variantes(self, newsolutions): - variantes = _orig_build_variantes(self, newsolutions) - sortedvariantes = [] - for variante in variantes: - orderedkeys = sorted((k[1], k[2], v) for k, v in variante.items()) - variante = DumbOrderedDict(sorted(variante.items(), - key=lambda a: (a[0][1], a[0][2], a[1]))) - sortedvariantes.append( (orderedkeys, variante) ) - return [v for ok, v in sorted(sortedvariantes)] - -from cubicweb.server.querier import ExecutionPlan -_orig_check_permissions = ExecutionPlan._check_permissions - -def _check_permissions(*args, **kwargs): - res, restricted = _orig_check_permissions(*args, **kwargs) - res = DumbOrderedDict(sorted(res.items(), key=lambda x: [y.items() for y in x[1]])) - return res, restricted - -def _dummy_check_permissions(self, rqlst): - return {(): rqlst.solutions}, set() - -from cubicweb.server import rqlannotation -_orig_select_principal = rqlannotation._select_principal - -def _select_principal(scope, relations): - def sort_key(something): - try: - return something.r_type - except AttributeError: - return (something[0].r_type, something[1]) - return _orig_select_principal(scope, relations, - _sort=lambda rels: 
sorted(rels, key=sort_key)) - - -def _ordered_iter_relations(stinfo): - return sorted(_orig_iter_relations(stinfo), key=lambda x:x.r_type) - -def do_monkey_patch(): - rqlrewrite.iter_relations = _ordered_iter_relations - rqlrewrite.RQLRewriter.insert_snippets = _insert_snippets - rqlrewrite.RQLRewriter.build_variantes = _build_variantes - ExecutionPlan._check_permissions = _check_permissions - ExecutionPlan.tablesinorder = None - -def undo_monkey_patch(): - rqlrewrite.iter_relations = _orig_iter_relations - rqlrewrite.RQLRewriter.insert_snippets = _orig_insert_snippets - rqlrewrite.RQLRewriter.build_variantes = _orig_build_variantes - ExecutionPlan._check_permissions = _orig_check_permissions diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/stresstester.py --- a/devtools/stresstester.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,196 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" Usage: %s [OPTIONS] - -Stress test a CubicWeb repository - -OPTIONS: - -h / --help - Display this help message and exit. - - -u / --user - Connect as instead of being prompted to give it. - -p / --password - Automatically give for authentication instead of being prompted - to give it. - - -n / --nb-times - Repeat queries times. - -t / --nb-threads - Execute queries in parallel threads. - -P / --profile - dumps profile results (hotshot) in - -o / --report-output - Write profiler report into rather than on stdout - -Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-http://www.logilab.fr/ -- mailto:contact@logilab.fr -""" -from __future__ import print_function - -import os -import sys -import threading -import getopt -import traceback -from getpass import getpass -from os.path import basename -from time import clock - -from logilab.common.fileutils import lines -from logilab.common.ureports import Table, TextWriter -from cubicweb.server.repository import Repository -from cubicweb.dbapi import Connection - -TB_LOCK = threading.Lock() - -class QueryExecutor: - def __init__(self, cursor, times, queries, reporter = None): - self._cursor = cursor - self._times = times - self._queries = queries - self._reporter = reporter - - def run(self): - cursor = self._cursor - times = self._times - while times: - for index, query in enumerate(self._queries): - start = clock() - try: - cursor.execute(query) - except Exception: - TB_LOCK.acquire() - traceback.print_exc() - TB_LOCK.release() - return - if self._reporter is not None: - self._reporter.add_proftime(clock() - start, index) - times -= 1 - -def usage(status=0): - """print usage string and exit""" - print(__doc__ % basename(sys.argv[0])) - sys.exit(status) - - -class ProfileReporter: - """a profile reporter gathers all profile informations from several - threads and can write a report that summarizes all profile informations - """ - profiler_lock = threading.Lock() - - def __init__(self, queries): - self._queries = tuple(queries) - self._profile_results = [(0., 0)] * len(self._queries) - # self._table_report = Table(3, rheaders = True) - len_max = max([len(query) for query in self._queries]) + 5 - self._query_fmt = '%%%ds' % len_max - - def add_proftime(self, elapsed_time, query_index): - """add a new time measure for query""" - ProfileReporter.profiler_lock.acquire() - cumul_time, times = self._profile_results[query_index] - cumul_time += elapsed_time - times += 1. - self._profile_results[query_index] = (cumul_time, times) - ProfileReporter.profiler_lock.release() - - def dump_report(self, output = sys.stdout): - """dump report in 'output'""" - table_elems = ['RQL Query', 'Times', 'Avg Time'] - total_time = 0. 
- for query, (cumul_time, times) in zip(self._queries, self._profile_results): - avg_time = cumul_time / float(times) - table_elems += [str(query), '%f' % times, '%f' % avg_time ] - total_time += cumul_time - table_elems.append('Total time :') - table_elems.append(str(total_time)) - table_elems.append(' ') - table_layout = Table(3, rheaders = True, children = table_elems) - TextWriter().format(table_layout, output) - # output.write('\n'.join(tmp_output)) - - -def run(args): - """run the command line tool""" - try: - opts, args = getopt.getopt(args, 'hn:t:u:p:P:o:', ['help', 'user=', 'password=', - 'nb-times=', 'nb-threads=', - 'profile', 'report-output=',]) - except Exception as ex: - print(ex) - usage(1) - repeat = 100 - threads = 1 - user = os.environ.get('USER', os.environ.get('LOGNAME')) - password = None - report_output = sys.stdout - prof_file = None - for opt, val in opts: - if opt in ('-h', '--help'): - usage() - if opt in ('-u', '--user'): - user = val - elif opt in ('-p', '--password'): - password = val - elif opt in ('-n', '--nb-times'): - repeat = int(val) - elif opt in ('-t', '--nb-threads'): - threads = int(val) - elif opt in ('-P', '--profile'): - prof_file = val - elif opt in ('-o', '--report-output'): - report_output = open(val, 'w') - if len(args) != 2: - usage(1) - queries = [query for query in lines(args[1]) if not query.startswith('#')] - if user is None: - user = raw_input('login: ') - if password is None: - password = getpass('password: ') - from cubicweb.cwconfig import instance_configuration - config = instance_configuration(args[0]) - # get local access to the repository - print("Creating repo", prof_file) - repo = Repository(config, prof_file) - cnxid = repo.connect(user, password=password) - # connection to the CubicWeb repository - repo_cnx = Connection(repo, cnxid) - repo_cursor = repo_cnx.cursor() - reporter = ProfileReporter(queries) - if threads > 1: - executors = [] - while threads: - qe = QueryExecutor(repo_cursor, repeat, queries, reporter = reporter) - executors.append(qe) - thread = threading.Thread(target=qe.run) - qe.thread = thread - thread.start() - threads -= 1 - for qe in executors: - qe.thread.join() -## for qe in executors: -## print qe.thread, repeat - qe._times, 'times' - else: - QueryExecutor(repo_cursor, repeat, queries, reporter = reporter).run() - reporter.dump_report(report_output) - - -if __name__ == '__main__': - run(sys.argv[1:]) diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/bootstrap_cubes --- a/devtools/test/data/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -person diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/cubes/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/cubes/i18ntestcube/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/cubes/i18ntestcube/__pkginfo__.py --- a/devtools/test/data/cubes/i18ntestcube/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -# pylint: disable=W0622 -"""cubicweb i18n test cube application packaging information""" - -modname = 'i18ntestcube' -distname = 'cubicweb-i18ntestcube' - -numversion = (0, 1, 0) -version = '.'.join(str(num) for num in numversion) - -license = 'LGPL' -author = 'LOGILAB S.A. 
(Paris, FRANCE)' -author_email = 'contact@logilab.fr' -description = 'forum' -web = 'http://www.cubicweb.org/project/%s' % distname - -__depends__ = {'cubicweb': '>= 3.16.4', - } -__recommends__ = {} diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/cubes/i18ntestcube/i18n/en.po.ref --- a/devtools/test/data/cubes/i18ntestcube/i18n/en.po.ref Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,182 +0,0 @@ -msgid "" -msgstr "" -"Project-Id-Version: cubicweb 3.16.5\n" -"PO-Revision-Date: 2008-03-28 18:14+0100\n" -"Last-Translator: Logilab Team \n" -"Language-Team: fr \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: cubicweb-devtools\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" - -# schema pot file, generated on 2013-07-12 16:18:12 -# -# singular and plural forms for each entity type -# subject and object forms for each relation type -# (no object form for final or symmetric relation types) -msgid "Forum" -msgstr "" - -msgid "Forum_plural" -msgstr "" - -msgid "This Forum" -msgstr "" - -msgid "This Forum:" -msgstr "" - -msgid "New Forum" -msgstr "" - -msgctxt "inlined:Forum.in_forum.object" -msgid "add a ForumThread" -msgstr "" - -msgctxt "inlined:Forum.in_forum.object" -msgid "ForumThread" -msgstr "" - -msgid "add ForumThread in_forum Forum object" -msgstr "" - -msgid "add a Forum" -msgstr "" - -msgid "add a ForumThread" -msgstr "" - -msgid "creating ForumThread (ForumThread in_forum Forum %(linkto)s)" -msgstr "" - -msgid "ForumThread" -msgstr "" - -msgid "ForumThread_plural" -msgstr "" - -msgid "This ForumThread" -msgstr "" - -msgid "This ForumThread:" -msgstr "" - -msgid "New ForumThread" -msgstr "" - -msgid "content" -msgstr "" - -msgctxt "ForumThread" -msgid "content" -msgstr "" - -msgid "content_format" -msgstr "" - -msgctxt "ForumThread" -msgid "content_format" -msgstr "" - -msgctxt "Forum" -msgid "description" -msgstr "" - -msgctxt "Forum" -msgid "description_format" -msgstr "" - -msgid "in_forum" -msgstr "" - -msgctxt "ForumThread" -msgid "in_forum" -msgstr "" - -msgctxt "Forum" -msgid "in_forum_object" -msgstr "" - -msgid "in_forum_object" -msgstr "" - -msgid "interested_in" -msgstr "" - -msgctxt "CWUser" -msgid "interested_in" -msgstr "" - -msgctxt "ForumThread" -msgid "interested_in_object" -msgstr "" - -msgctxt "Forum" -msgid "interested_in_object" -msgstr "" - -msgid "interested_in_object" -msgstr "" - -msgid "nosy_list" -msgstr "" - -msgctxt "ForumThread" -msgid "nosy_list" -msgstr "" - -msgctxt "Forum" -msgid "nosy_list" -msgstr "" - -msgctxt "CWUser" -msgid "nosy_list_object" -msgstr "" - -msgid "nosy_list_object" -msgstr "" - -msgctxt "ForumThread" -msgid "title" -msgstr "" - -msgid "topic" -msgstr "" - -msgctxt "Forum" -msgid "topic" -msgstr "" - -msgid "Topic" -msgstr "" - -msgid "Description" -msgstr "" - -msgid "Number of threads" -msgstr "" - -msgid "Last activity" -msgstr "" - -msgid "" -"a long\n" -"tranlated line\n" -"hop." -msgstr "" - -msgid "Subject" -msgstr "" - -msgid "Created" -msgstr "" - -msgid "Answers" -msgstr "" - -msgid "Last answered" -msgstr "" - -msgid "This forum does not have any thread yet." -msgstr "" diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/cubes/i18ntestcube/schema.py --- a/devtools/test/data/cubes/i18ntestcube/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr -- mailto:contact@logilab.fr -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with this program. If not, see . - -"""cubicweb-forum schema""" - -from yams.buildobjs import (String, RichString, EntityType, - RelationDefinition, SubjectRelation) -from yams.reader import context - -class Forum(EntityType): - topic = String(maxsize=50, required=True, unique=True) - description = RichString() - -class ForumThread(EntityType): - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', 'owners'), - 'delete': ('managers', 'owners') - } - title = String(required=True, fulltextindexed=True, maxsize=256) - content = RichString(required=True, fulltextindexed=True) - in_forum = SubjectRelation('Forum', cardinality='1*', inlined=True, - composite='object') -class interested_in(RelationDefinition): - subject = 'CWUser' - object = ('ForumThread', 'Forum') - -class nosy_list(RelationDefinition): - subject = ('Forum', 'ForumThread') - object = 'CWUser' diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/cubes/i18ntestcube/views.py --- a/devtools/test/data/cubes/i18ntestcube/views.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr -- mailto:contact@logilab.fr -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with this program. If not, see . 
- -"""cubicweb-forum views/forms/actions/components for web ui""" - -from cubicweb import view -from cubicweb.predicates import is_instance -from cubicweb.web.views import primary, baseviews, uicfg -from cubicweb.web.views.uicfg import autoform_section as afs - -class MyAFS(uicfg.AutoformSectionRelationTags): - __select__ = is_instance('ForumThread') - -_myafs = MyAFS() - -_myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') - -afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') - - -class ForumSameETypeListView(baseviews.SameETypeListView): - __select__ = baseviews.SameETypeListView.__select__ & is_instance('Forum') - - def call(self, **kwargs): - _ = self._cw._ - _('Topic'), _('Description') - _('Number of threads'), _('Last activity') - _('''a long -tranlated line -hop.''') - - -class ForumLastActivity(view.EntityView): - __regid__ = 'forum_last_activity' - __select__ = view.EntityView.__select__ & is_instance('Forum') - - -class ForumPrimaryView(primary.PrimaryView): - __select__ = primary.PrimaryView.__select__ & is_instance('Forum') - - def render_entity_attributes(self, entity): - _ = self._cw._ - _('Subject'), _('Created'), _('Answers'), - _('Last answered') - _('This forum does not have any thread yet.') - -class ForumThreadPrimaryView(primary.PrimaryView): - __select__ = primary.PrimaryView.__select__ & is_instance('ForumThread') diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/firstnames.txt --- a/devtools/test/data/firstnames.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1599 +0,0 @@ -ash -pasqualino -asl -benjy -wolodymyr -dionysos -launce -khaleel -sondra -maaike -lavinia -giosu -daisy -xiang -belgin -edda -olympia -treasa -katya -misi -ville -mahon -yngve -moritz -elder -gawel -horsa -blossom -deanne -imelda -deanna -cairbre -eddy -horst -gaenor -breanne -hewie -breanna -jarvis -jamin -loise -jamil -fingall -giselle -jamie -shinju -gisella -akilina -jordan -gertie -cardea -eiran -valdemar -sebestyen -galia -bride -greg -fausta -eniola -rudo -pratibha -kisha -mickey -charlotte -karp -charlotta -nunzia -nunzio -patrice -kara -hallam -collyn -kari -karl -dusan -lia -cherokee -lim -lin -yvain -madlyn -liv -lir -lis -tullio -norma -liz -lettice -kae -kaj -kai -tatyanna -kam -freddie -elton -meinir -blaise -kat -japeth -alpha -kay -mack -jayna -jayne -hormazed -lupita -humbert -vitya -neoptolemus -richardine -hallvard -diogo -larkin -ravi -louiza -hermogenes -alanis -yadira -leandra -milburga -leandro -sorin -randi -kaleb -rogerio -sanna -kalea -justice -kaleo -dijana -shprintza -randy -colby -otthild -mariamne -patrycja -darwin -christal -khalida -kaley -allegria -vidya -renaud -sisel -suibhne -lonny -julienne -calliope -rocco -alexander -aristide -edwige -xzavier -rajesh -egil -gell -mahavir -charline -sigi -theophania -maurice -afon -konnor -kiran -angie -jalila -tolly -havva -metody -engel -philander -lancelot -nathalie -leilah -dane -elm -chatzkel -keaton -ashlie -kudret -rava -danette -eachann -wilburn -jeff -kazimiera -rukmini -lauryn -femie -mahvash -berkant -alesha -daedalus -aphra -karla -tetty -agostinho -bolivar -savitri -karly -forbes -vencesl -bahija -walter -imam -iman -krzys -imad -elsa -neville -tracie -else -anthony -shevon -katherine -marylou -wojtek -oddmund -tristand -areli -valkyrie -garfield -wyatt -luanne -ossia -luanna -luciana -guido -luciano -shachar -astraea -paco -leland -avra -amenhotep -kekoa -gorden -sameera -boutros -ruaidhr -friedemann -darrell -hideaki -petar -donatien 
-fannie -eliana -iason -fedora -grant -shay -estee -marcelle -marcella -lothair -shae -ester -marcello -estev -cassian -allyson -dima -goodwin -cezar -blair -monique -elwin -ihsan -olufunmilayo -arturo -nanaia -greetje -clovia -beowulf -vassily -madail -emmeline -guendolen -nandag -eilish -sakari -elisheva -crispin -aksel -alvin -cernunnos -feardorcha -heshel -afra -iqbal -pryce -siddhartha -mikkel -alvis -myrtie -khajag -yesenia -nikki -grigory -grigore -maeve -rebeca -diederick -maeva -grigori -cheryl -rahim -marco -marci -stein -trista -olufemi -emmanuelle -nadezhda -wahid -marcy -vanda -lavra -alida -amara -hipolito -valent -renatus -moira -donny -lucretia -donna -vesta -cadoc -reetta -erma -markku -rosamond -gracia -tuyet -sieffre -gracie -kodey -debra -photine -jacek -yanick -isiah -khordad -rui -stef -rub -foma -sten -kassy -rue -nelly -merrick -ayn -macy -vincente -anargyros -rut -lenox -jenessa -faith -barnaby -manny -jyotsana -hasan -iakopa -edvard -narcisa -loredana -ida -torborg -rollo -stamatios -pero -natalya -maudie -carlton -paulina -aliyah -lanty -tadg -deiniol -dwayne -alison -fabius -rbj -latasha -maarit -roxanna -katinka -publius -augustijn -ferdy -khadiga -akosua -rees -quetzalcoatl -kristian -larry -reed -krystal -micheil -paolo -chelsey -ute -paola -hamilcar -malin -deangelo -munir -velma -malik -utz -malie -govad -chelsea -malia -willem -seetha -andrina -rupert -myrrine -theodoros -tito -ivonne -nan -beryl -nat -tawnie -korn -marzena -tinek -hermine -kora -frances -william -tianna -evan -kory -merletta -kort -nevan -naheed -heath -tyreek -shona -amyas -urjasz -katy -gu -gr -hilde -mehmud -gy -hilda -psyche -olive -nuno -vinnie -ga -kato -kata -jeunesse -kate -chandrakant -caoilainn -arik -rhonda -leocadio -euan -aric -leocadia -aria -bronwen -marcellin -vladislav -ferapont -nichole -kizzy -duilio -jafet -maas -tue -felicity -mansoor -rfhlaith -brigitta -fishke -akua -izabela -olaf -vittore -michael -skar -ryan -gretta -alvena -olav -brigitte -euterpe -barbara -aiolos -carter -khalifa -tziporah -honora -feich -marilena -onesime -theo -gunvor -sa'id -katlyn -nicholas -preeti -etzel -ekewaka -vinal -jubal -ramsey -rowley -jocelin -alfsigr -kalliope -micah -frantisek -holger -alysha -chant -derry -corin -janus -morcant -chang -corie -gena -randa -joost -vasile -clark -clare -wim -wil -clara -danika -jory -eleonoora -ayelet -caligula -zakiah -kilie -meliora -ottavio -idoya -ninette -hudson -deon -gawdat -frida -jonathan -reynold -laocadia -cerise -cosmo -hezekiah -winston -isak -allyn -noelene -trajan -vijaya -cosma -tresha -astrithr -priya -astrophel -pocahontas -eliphalet -stafford -salah -salal -pauliina -lazer -feidhlim -jackalyn -kenny -alayna -wilfried -wasim -blaine -femke -jehu -kenna -lenore -nkechi -letizia -kian -kayleigh -spartacus -manuela -leyton -lesley -georg -ferdinand -cuauhtemoc -aeron -lavrenti -nyx -ronald -yoshiko -gundula -eluf -toma -riccardo -ruadh -matylda -winter -mayson -llew -clytia -jamila -fariha -aegle -octavio -steafan -jacqui -mikelo -dovid -modestus -blake -jeanna -alessa -conway -brook -sunday -kizzie -hande -catherine -eckhard -rr -gwyneth -aukusti -placid -rufino -kyleigh -helah -benoite -eluned -sanaz -cnaeus -ettie -benaiah -brendan -wenonah -nye -candela -dragan -sanda -naveen -margar -naveed -austen -sandu -britta -brodie -morton -kamilla -sandy -guilherme -dorothea -calix -braxton -wigburg -tryphena -ricky -may -sylwia -libor -marek -ece -trinity -katsuro -tercero -'ismat -mared -jill -amato -achim -princess -jaquelyn 
-eustathios -tapio -aglea -kees -evstathios -edwyna -austin -cristian -jouko -nikandros -leonora -kaitlynn -christoph -mai -parthalan -tancredo -rosaleen -lynnette -yasamin -encarnacion -gerolt -ionut -harmon -ailbhe -islwyn -muirenn -nyah -mariana -viktor -greta -kreszentia -grete -hormazd -foka -poseidon -kazimir -ultan -ben -sudhir -bea -bee -saburo -elnora -ber -michelyne -clytemnestra -yardena -gavrel -michelangelo -wystan -odhiambo -miquel -bertha -su -berthe -alisia -kelley -leonhard -rodger -ewald -oluwaseyi -celandine -kunegunda -luisa -khayyam -iisakki -luise -ligia -zaina -tatiana -siarl -jorge -bronislaw -bronislav -montana -edric -miloslava -achilles -donaldina -wilfredo -laurens -haifa -stelian -glenice -calvino -rodica -hulda -indy -uri -laurena -tzeitel -laurene -urs -danita -platon -parker -chadwick -lorne -narinder -theodoric -florentina -ambrosine -nikephoros -kapel -aeolus -cenek -hadi -perle -alyona -cyril -perla -cicely -darby -madhav -hector -ethan -aretha -ilker -avdotya -boris -sassa -misty -bonaventure -kiefer -emmet -arkadios -farrah -tivoli -pietari -mohammed -shoshana -felipe -felipa -maurene -tancred -raymonde -sho -faron -arundhati -esteri -silvanus -nuha -aloisia -baris -tammie -fabricio -lux -luz -driskoll -tyra -luc -marsha -luk -aron -joye -ken -gethsemane -kelan -yuko -merry -proserpine -precious -suibne -mindy -vitus -olga -jia -kalysta -angharad -ciera -careen -inglebert -apphia -muadhnait -christen -rebekah -dominique -gita -tori -harmonie -anatolius -harmonia -denise -johann -johano -denisa -viktoria -padmini -johana -christer -barakat -willy -sari -fitzroy -yaw -sara -yan -quim -quin -yaa -katelin -pontus -raelene -alexus -gwandoya -venceslav -ott -artemidoros -zaynab -folant -salman -ealdgy -randal -macey -heriberto -kimball -ekin -dema -evelyn -demi -pip -simona -daniil -emmerson -kausalya -kortney -gavriil -yered -parth -fido -solange -oona -anka -renie -anke -habakkuk -linwood -teofilo -grazyna -enitan -bhaskar -finnian -perseus -mordechai -fyodor -ashley -philo -i -hecate -phile -theodor -kiaran -ashlee -dollie -savannah -upton -sofia -noak -sofie -laurel -lauren -dubaku -zacharjasz -patricio -trudi -sophus -vida -patricia -trudy -tapani -mavreena -jesper -sandrine -sonia -livio -mikolaj -laurine -livia -finnegan -oprah -waheed -lavonne -perdita -liviu -imen -attila -lincoln -fernanda -evrard -fernande -jaana -artair -fernando -candy -cande -kazimierz -kaija -shamgar -laxmi -martie -page -candi -brody -piaras -shea -herbie -shem -kristaps -sher -cleveland -carreen -margaid -phinehas -justina -wendi -linus -wenda -matrona -christiane -wendy -kerensa -roch -fergal -fanny -kamila -oswin -camilo -everette -katashi -myron -ridley -shavonne -blythe -nader -marlowe -miha -carolyn -glenn -gadar -rainard -sybella -raquel -rozabela -serhat -bashemath -jing -gobnet -yentl -sylvana -dolores -sanjit -tamsin -sanjiv -innes -daniela -daniele -margr -keysha -rogelio -ean -hj -philipp -valerian -marge -gail -margh -gaia -engelbert -kathie -artemisia -margo -stefan -pansy -swanhilda -swanhilde -alessio -beata -beate -babur -beatrice -eris -erin -maura -camryn -conan -erik -krysia -nigelia -mauri -averill -draco -eric -sophronius -mauro -diego -simcha -malachy -barth -maoilios -germaine -malachi -katariina -lianne -ferdinando -donagh -kelemen -taletta -star -gilah -faustus -lfwine -rayna -gotthard -sa'd -stan -klemen -pranay -howie -dewey -tiarnan -katherina -uzma -jabril -hakan -martin -elsie -cleve -imani -moshe -padma -inmaculada -augustine -trenton -ghislain 
-aiden -alfhild -ireneus -gottschalk -andra -jahzeel -andro -fredrik -wynter -kohar -tobin -giustino -buddy -marcos -mieszko -giustina -khalil -aur -helladius -riccarda -elettra -glykeria -yeva -trahaearn -ulisse -wilfred -sorrel -saara -ekwueme -sarita -finella -waldo -herbert -elissa -bevan -lavern -till -ruxandra -lavender -ghalib -eldon -masterman -tameka -mihajlo -mahin -neo -asim -jordon -pace -ned -giampiero -asia -nea -haze -bearach -cheng -pieter -yonah -chikako -maverick -fonsie -ozzy -meg -mitxel -filbert -mel -neves -henrik -mei -hilaire -drew -deemer -liborio -dubhghlas -bogdan -dipak -rapha -golda -maighread -masha -pranciskis -mitchell -titilayo -aydin -ippolit -toiba -omar -cindy -alexandrina -lyubov -hiltraud -joshua -moray -baptiste -bahiyya -marquita -benedicta -reagan -latifah -scevola -ardashir -pakpao -topaz -janine -omolara -janina -morag -euripides -lennart -orb -helmuth -armo -diederik -lennard -raeburn -oscar -odell -ualan -noemi -melba -berlin -lazarus -merla -meera -anastas -rhamantus -yussel -meshullam -esdras -kumar -flora -norwood -rio -apollinaris -oleg -rim -nadzeija -akio -akim -efisio -jayda -olek -rowanne -honey -karola -chetana -candelas -friede -phaedrus -frieda -joann -braidy -hitomi -kieron -dakarai -teofil -dervila -ria -pietrina -becky -alechjo -santos -egon -olwin -ove -balthazar -reeta -becka -tillo -royce -peninnah -earnestine -janis -jakab -janie -rosalba -hosanna -aharon -fife -zacharias -fifi -aleesha -murray -helena -helene -rashmi -afia -oswald -zachariah -shawnee -pius -zdenek -kichiro -melchiorre -erland -yaroslava -anushka -cree -iser -rachel -anik -fabiola -ania -aneurin -hernando -ernesto -ernesta -astor -manasseh -naphtali -shai -lorena -lazar -luce -lorenz -luca -briana -rosemary -dawid -nava -payton -linos -aida -gunne -milan -tuomas -sahar -doug -mikala -dawn -vincenza -saturninus -channah -mandy -reuven -cormag -cormac -mandi -sachie -ladonna -phuong -tasha -ramon -hashim -fachtna -euphemia -tisha -jozafat -horatius -imke -venus -rodolf -binyamin -cosmin -oluwafunmilayo -nekane -loup -kohinoor -teuvo -xue -innokenti -vincenzo -kiley -isa -hannibal -vijay -kornelia -afanasy -vittorio -tuor -adalia -damayanti -afanasi -grady -evangelos -ermete -brock -bonita -arisha -pelagia -solvej -parthenope -peggie -kierra -jozefa -garry -giuditta -ladislas -jozefo -swietoslaw -yildiz -nasira -eshe -helen -gretchen -shekhar -daren -lenuta -dymphna -daina -matteo -berjouhi -jerusha -solomon -gernot -murtagh -meaveen -godwin -ladislao -minh -hachiro -farquhar -ichabod -mina -caleb -veera -ginger -ming -jaynie -sharyn -seonag -ferdie -ilana -gabriela -gabriele -lloren -hooda -mabelle -timeus -teagan -gorka -ulrich -philadelphia -razvan -lamprecht -marit -kean -marin -mario -rhonwen -vilko -konstantin -tyr -maria -fastred -kazuki -krister -don -dom -iekika -ruben -m -calanthe -luchjo -vicki -sheryl -afanen -kirabo -dov -kristel -dot -kristen -pavao -donelle -antti -donella -katerina -liza -wladyslaw -gerlach -hrodohaidis -samnang -ashok -raelyn -tipene -kallias -kun -gebhard -folke -katica -lennie -rupinder -maryann -adolphus -lachtna -petri -monica -kyriakos -brannon -deforest -shankara -hourig -haniyya -christopher -griogair -saturn -tola -earl -decebal -bas -petra -adelia -cleto -bao -bal -bai -julien -clarette -dimitar -fioralba -tommie -domhnall -ragnhei -gunnar -ailill -juliet -pete -vasya -peta -duff -imaculada -peti -manola -kolab -petr -neriah -manolo -edoardo -onora -elisud -graciano -fayza -as'ad -romola -vernon -pluto -genevra -yahweh 
-mukesh -fiacre -sudarshana -shahriar -athanasius -una -casimir -derval -ernst -sherilyn -taranis -enzo -bedelia -winnie -kalyan -jinan -plamen -quinn -monat -alcaeus -mathieu -aindri -raffaella -armin -lovell -cyrus -chelo -sidonius -basia -tina -basil -basim -fuad -riley -tracee -chun -talia diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/schema.py --- a/devtools/test/data/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,32 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from yams.buildobjs import EntityType, SubjectRelation, String, Int, Date - -from cubes.person.schema import Person - -Person.add_relation(Date(), 'birthday') - -class Bug(EntityType): - title = String(maxsize=64, required=True, fulltextindexed=True) - severity = String(vocabulary=('important', 'normal', 'minor'), default='normal') - cost = Int() - description = String(maxsize=4096, fulltextindexed=True) - identical_to = SubjectRelation('Bug', symmetric=True) diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/dep_1.js --- a/devtools/test/data/static/js_examples/dep_1.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -a = 4; diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/deps_2.js --- a/devtools/test/data/static/js_examples/deps_2.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -b = a +2; diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/test_simple_failure.js --- a/devtools/test/data/static/js_examples/test_simple_failure.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(2, 4); - }); - - QUnit.test("test 2", function (assert) { - assert.equal('', '45'); - assert.equal('1024', '32'); - }); - - QUnit.module("able"); - QUnit.test("test 3", function (assert) { - assert.deepEqual(1, 1); - }); -}); diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/test_simple_success.js --- a/devtools/test/data/static/js_examples/test_simple_success.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(2, 2); - }); - - QUnit.test("test 2", function (assert) { - assert.equal('45', '45'); - }); - - QUnit.module("able"); - QUnit.test("test 3", function (assert) { - assert.deepEqual(1, 1); - }); -}); diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/test_with_dep.js --- 
a/devtools/test/data/static/js_examples/test_with_dep.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(a, 4); - }); - -}); diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/test_with_ordered_deps.js --- a/devtools/test/data/static/js_examples/test_with_ordered_deps.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(b, 6); - }); - -}); diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/data/static/js_examples/utils.js --- a/devtools/test/data/static/js_examples/utils.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -function datetuple(d) { - return [d.getFullYear(), d.getMonth()+1, d.getDate(), - d.getHours(), d.getMinutes()]; -} - -function pprint(obj) { - print('{'); - for(k in obj) { - print(' ' + k + ' = ' + obj[k]); - } - print('}'); -} - -function arrayrepr(array) { - return '[' + array.join(', ') + ']'; -} - -function assertArrayEquals(array1, array2) { - if (array1.length != array2.length) { - throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); - } - for (var i=0; i. -"""only for unit tests !""" - -from cubicweb.view import EntityView -from cubicweb.predicates import is_instance - -HTML_PAGE = u""" - -

-      Hello World !

      - - -""" - -class SimpleView(EntityView): - __regid__ = 'simple' - __select__ = is_instance('Bug',) - - def call(self, **kwargs): - self.cell_call(0, 0) - - def cell_call(self, row, col): - self.w(HTML_PAGE) - -class RaisingView(EntityView): - __regid__ = 'raising' - __select__ = is_instance('Bug',) - - def cell_call(self, row, col): - raise ValueError() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/requirements.txt --- a/devtools/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -Twisted -webtest -cubicweb-person diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_dbfill.py --- a/devtools/test/unittest_dbfill.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,121 +0,0 @@ -# -*- coding: iso-8859-1 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for database value generator""" - -import os.path as osp -import re -import datetime -import io - -from six.moves import range - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.devtools.fill import ValueGenerator, make_tel -from cubicweb.devtools import ApptestConfiguration - -DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data') -ISODATE_SRE = re.compile('(?P\d{4})-(?P\d{2})-(?P\d{2})$') - - -class MyValueGenerator(ValueGenerator): - - def generate_Bug_severity(self, entity, index): - return u'dangerous' - - def generate_Any_description(self, entity, index, format=None): - return u'yo' - - -class ValueGeneratorTC(TestCase): - """test case for ValueGenerator""" - - def _choice_func(self, etype, attrname): - try: - return getattr(self, '_available_%s_%s' % (etype, attrname))(etype, attrname) - except AttributeError: - return None - - def _available_Person_firstname(self, etype, attrname): - return [f.strip() for f in io.open(osp.join(DATADIR, 'firstnames.txt'), encoding='latin1')] - - def setUp(self): - config = ApptestConfiguration('data', apphome=DATADIR) - config.bootstrap_cubes() - schema = config.load_schema() - e_schema = schema.eschema('Person') - self.person_valgen = ValueGenerator(e_schema, self._choice_func) - e_schema = schema.eschema('Bug') - self.bug_valgen = MyValueGenerator(e_schema) - self.config = config - - def test_string(self): - """test string generation""" - surname = self.person_valgen.generate_attribute_value({}, 'surname', 12) - self.assertEqual(surname, u'&surname12') - - def test_domain_value(self): - """test value generation from a given domain value""" - firstname = self.person_valgen.generate_attribute_value({}, 'firstname', 12) - possible_choices = self._choice_func('Person', 'firstname') - self.assertTrue(firstname in possible_choices, - '%s not in %s' % (firstname, possible_choices)) - - def 
test_choice(self): - """test choice generation""" - # Test for random index - for index in range(5): - sx_value = self.person_valgen.generate_attribute_value({}, 'civility', index) - self.assertTrue(sx_value in ('Mr', 'Mrs', 'Ms')) - - def test_integer(self): - """test integer generation""" - # Test for random index - for index in range(5): - cost_value = self.bug_valgen.generate_attribute_value({}, 'cost', index) - self.assertIn(cost_value, list(range(index+1))) - - def test_date(self): - """test date generation""" - # Test for random index - for index in range(10): - date_value = self.person_valgen.generate_attribute_value({}, 'birthday', index) - self.assertTrue(isinstance(date_value, datetime.date)) - - def test_phone(self): - """tests make_tel utility""" - self.assertEqual(make_tel(22030405), '22 03 04 05') - - def test_customized_generation(self): - self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'severity', 12), - u'dangerous') - self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'description', 12), - u'yo') - self.assertEqual(self.person_valgen.generate_attribute_value({}, 'description', 12), - u'yo') - - -class ConstraintInsertionTC(TestCase): - - def test_writeme(self): - self.skipTest('Test automatic insertion / Schema Constraints') - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_devctl.py --- a/devtools/test/unittest_devctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for cubicweb-ctl commands from devtools""" - -import os.path as osp -import sys -import tempfile -import shutil -from subprocess import Popen, PIPE, STDOUT -from unittest import TestCase - - -class CubicWebCtlTC(TestCase): - """test case for devtools commands""" - - def test_newcube(self): - cwctl = osp.abspath(osp.join(osp.dirname(__file__), '../../bin/cubicweb-ctl')) - - tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube") - try: - cmd = [sys.executable, cwctl, 'newcube', - '--directory', tmpdir, 'foo'] - proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT) - stdout, _ = proc.communicate(b'short_desc\n') - finally: - shutil.rmtree(tmpdir, ignore_errors=True) - self.assertEqual(proc.returncode, 0, msg=stdout) - - -if __name__ == '__main__': - from unittest import main - main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_fill.py --- a/devtools/test/unittest_fill.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for cubicweb.devtools.fill module - -""" -import re - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.devtools.fill import ValueGenerator, _ValueGenerator - -ISODATE_SRE = re.compile('(?P\d{4})-(?P\d{2})-(?P\d{2})$') - - -class AutoExtendableTC(TestCase): - - def setUp(self): - self.attrvalues = dir(_ValueGenerator) - - def tearDown(self): - attrvalues = set(dir(_ValueGenerator)) - for attrname in attrvalues - set(self.attrvalues): - delattr(_ValueGenerator, attrname) - - - def test_autoextend(self): - self.assertNotIn('generate_server', dir(ValueGenerator)) - class MyValueGenerator(ValueGenerator): - def generate_server(self, index): - return attrname - self.assertIn('generate_server', dir(ValueGenerator)) - - - def test_bad_signature_detection(self): - self.assertNotIn('generate_server', dir(ValueGenerator)) - try: - class MyValueGenerator(ValueGenerator): - def generate_server(self): - pass - except TypeError: - self.assertNotIn('generate_server', dir(ValueGenerator)) - else: - self.fail('TypeError not raised') - - - def test_signature_extension(self): - self.assertNotIn('generate_server', dir(ValueGenerator)) - class MyValueGenerator(ValueGenerator): - def generate_server(self, index, foo): - pass - self.assertIn('generate_server', dir(ValueGenerator)) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_httptest.py --- a/devtools/test/unittest_httptest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unittest for cubicweb.devtools.httptest module""" - -from six.moves import http_client - -from logilab.common.testlib import Tags -from cubicweb.devtools.httptest import CubicWebServerTC - - -class TwistedCWAnonTC(CubicWebServerTC): - - def test_response(self): - try: - response = self.web_get() - except http_client.NotConnected as ex: - self.fail("Can't connection to test server: %s" % ex) - - def test_response_anon(self): - response = self.web_get() - self.assertEqual(response.status, http_client.OK) - - def test_base_url(self): - if self.config['base-url'] not in self.web_get().read(): - self.fail('no mention of base url in retrieved page') - - -class TwistedCWIdentTC(CubicWebServerTC): - test_db_id = 'httptest-cwident' - anonymous_allowed = False - tags = CubicWebServerTC.tags | Tags(('auth',)) - - def test_response_denied(self): - response = self.web_get() - self.assertEqual(response.status, http_client.FORBIDDEN) - - def test_login(self): - response = self.web_get() - if response.status != http_client.FORBIDDEN: - self.skipTest('Already authenticated, "test_response_denied" must have failed') - # login - self.web_login(self.admlogin, self.admpassword) - response = self.web_get() - self.assertEqual(response.status, http_client.OK, response.body) - # logout - self.web_logout() - response = self.web_get() - self.assertEqual(response.status, http_client.FORBIDDEN, response.body) - - - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_i18n.py --- a/devtools/test/unittest_i18n.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,76 +0,0 @@ -# -*- coding: iso-8859-1 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit tests for i18n messages generator""" - -import os, os.path as osp -import sys -import subprocess - -from unittest import TestCase, main - -from cubicweb.cwconfig import CubicWebNoAppConfiguration - -DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data') - -def load_po(fname): - """load a po file and return a set of encountered (msgid, msgctx)""" - msgs = set() - msgid = msgctxt = None - with open(fname) as fobj: - for line in fobj: - if line.strip() in ('', '#'): - continue - if line.startswith('msgstr'): - assert not (msgid, msgctxt) in msgs - msgs.add( (msgid, msgctxt) ) - msgid = msgctxt = None - elif line.startswith('msgid'): - msgid = line.split(' ', 1)[1][1:-1] - elif line.startswith('msgctx'): - msgctxt = line.split(' ', 1)[1][1: -1] - elif msgid is not None: - msgid += line[1:-1] - elif msgctxt is not None: - msgctxt += line[1:-1] - return msgs - - -class cubePotGeneratorTC(TestCase): - """test case for i18n pot file generator""" - - def test_i18ncube(self): - env = os.environ.copy() - env['CW_CUBES_PATH'] = osp.join(DATADIR, 'cubes') - if 'PYTHONPATH' in env: - env['PYTHONPATH'] += os.pathsep - else: - env['PYTHONPATH'] = '' - env['PYTHONPATH'] += DATADIR - cwctl = osp.abspath(osp.join(osp.dirname(__file__), '../../bin/cubicweb-ctl')) - with open(os.devnull, 'w') as devnull: - subprocess.check_call([sys.executable, cwctl, 'i18ncube', 'i18ntestcube'], - env=env, stdout=devnull) - cube = osp.join(DATADIR, 'cubes', 'i18ntestcube') - msgs = load_po(osp.join(cube, 'i18n', 'en.po.ref')) - newmsgs = load_po(osp.join(cube, 'i18n', 'en.po')) - self.assertEqual(msgs, newmsgs) - - -if __name__ == '__main__': - main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_qunit.py --- a/devtools/test/unittest_qunit.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,27 +0,0 @@ -from cubicweb.devtools import qunit - - -def js(name): - return '/static/js_examples/' + name - -class QUnitTestCaseTC(qunit.QUnitTestCase): - - all_js_tests = ( - (js('test_simple_success.js'),), - (js('test_with_dep.js'), (js('dep_1.js'),)), - (js('test_with_ordered_deps.js'), (js('dep_1.js'), js('deps_2.js'),)), - ) - - - def test_simple_failure(self): - js_tests = list(self._test_qunit(js('test_simple_failure.js'))) - self.assertEqual(len(js_tests), 3) - test_1, test_2, test_3 = js_tests - self.assertRaises(self.failureException, test_1[0], *test_1[1:]) - self.assertRaises(self.failureException, test_2[0], *test_2[1:]) - test_3[0](*test_3[1:]) - - -if __name__ == '__main__': - from unittest import main - main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_testlib.py --- a/devtools/test/unittest_testlib.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,298 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unittests for cw.devtools.testlib module""" - -from io import BytesIO, StringIO -from unittest import TextTestRunner - -from six import PY2 - -from logilab.common.testlib import TestSuite, TestCase, unittest_main -from logilab.common.registry import yes - -from cubicweb.devtools import htmlparser -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.pytestconf import clean_repo_test_cls - -class FakeFormTC(TestCase): - def test_fake_form(self): - class entity: - cw_etype = 'Entity' - eid = 0 - sio = BytesIO(b'hop\n') - form = CubicWebTC.fake_form('import', - {'file': ('filename.txt', sio), - 'encoding': u'utf-8', - }, [(entity, {'field': 'value'})]) - self.assertEqual(form, {'__form_id': 'import', - '__maineid': 0, - '__type:0': 'Entity', - '_cw_entity_fields:0': '__type,field', - '_cw_fields': 'encoding,file', - 'eid': [0], - 'encoding': u'utf-8', - 'field:0': 'value', - 'file': ('filename.txt', sio)}) - -class WebTestTC(TestCase): - - def setUp(self): - output = BytesIO() if PY2 else StringIO() - self.runner = TextTestRunner(stream=output) - - def test_error_raised(self): - class MyWebTest(CubicWebTC): - - def test_error_view(self): - with self.admin_access.web_request() as req: - req.create_entity('Bug', title=u"bt") - self.view('raising', req.execute('Bug B'), template=None, req=req) - - def test_correct_view(self): - with self.admin_access.web_request() as req: - self.view('primary', req.execute('CWUser U'), template=None, req=req) - - tests = [MyWebTest('test_error_view'), MyWebTest('test_correct_view')] - result = self.runner.run(TestSuite(tests)) - self.assertEqual(result.testsRun, 2) - self.assertEqual(len(result.errors), 0) - self.assertEqual(len(result.failures), 1) - clean_repo_test_cls(MyWebTest) - - -class RepoInstancesConsistencyTC(CubicWebTC): - test_db_id = 'RepoInstancesConsistencyTC' - - def pre_setup_database(self, cnx, config): - self.assertIs(cnx.repo, config.repository()) - - def test_pre_setup(self): - pass - - -HTML_PAGE = u""" - - need a title - -

-      Hello World !

      - - -""" - -HTML_PAGE2 = u""" - - need a title - -

-      Test

      -

-      Hello world !

      -

-      h2 title

      -

-      h3 title

      -

-      antoher h2 title

      -

-      h4 title

      -

-      Logilab introduces CW !

      - - -""" - -HTML_PAGE_ERROR = u""" - - need a title - - Logilab introduces CW ! - - -""" - -HTML_NON_STRICT = u""" - - need a title - -

-      title

      - - -""" - - -class HTMLPageInfoTC(TestCase): - """test cases for PageInfo""" - - def setUp(self): - parser = htmlparser.HTMLValidator() - # disable cleanup that would remove doctype - parser.preprocess_data = lambda data: data - self.page_info = parser.parse_string(HTML_PAGE2) - - def test_source1(self): - """make sure source is stored correctly""" - self.assertEqual(self.page_info.source, HTML_PAGE2) - - def test_source2(self): - """make sure source is stored correctly - raise exception""" - parser = htmlparser.DTDValidator() - self.assertRaises(AssertionError, parser.parse_string, HTML_PAGE_ERROR) - - def test_has_title_no_level(self): - """tests h? tags information""" - self.assertEqual(self.page_info.has_title('Test'), True) - self.assertEqual(self.page_info.has_title('Test '), False) - self.assertEqual(self.page_info.has_title('Tes'), False) - self.assertEqual(self.page_info.has_title('Hello world !'), True) - - def test_has_title_level(self): - """tests h? tags information""" - self.assertEqual(self.page_info.has_title('Test', level = 1), True) - self.assertEqual(self.page_info.has_title('Test', level = 2), False) - self.assertEqual(self.page_info.has_title('Test', level = 3), False) - self.assertEqual(self.page_info.has_title('Test', level = 4), False) - self.assertRaises(IndexError, self.page_info.has_title, 'Test', level = 5) - - def test_has_title_regexp_no_level(self): - """tests has_title_regexp() with no particular level specified""" - self.assertEqual(self.page_info.has_title_regexp('h[23] title'), True) - - def test_has_title_regexp_level(self): - """tests has_title_regexp() with a particular level specified""" - self.assertEqual(self.page_info.has_title_regexp('h[23] title', 2), True) - self.assertEqual(self.page_info.has_title_regexp('h[23] title', 3), True) - self.assertEqual(self.page_info.has_title_regexp('h[23] title', 4), False) - - def test_appears(self): - """tests PageInfo.appears()""" - self.assertEqual(self.page_info.appears('CW'), True) - self.assertEqual(self.page_info.appears('Logilab'), True) - self.assertEqual(self.page_info.appears('Logilab introduces'), True) - self.assertEqual(self.page_info.appears('H2 title'), False) - - def test_has_link(self): - """tests has_link()""" - self.assertEqual(self.page_info.has_link('Logilab'), True) - self.assertEqual(self.page_info.has_link('logilab'), False) - self.assertEqual(self.page_info.has_link('Logilab', 'http://www.logilab.org'), True) - self.assertEqual(self.page_info.has_link('Logilab', 'http://www.google.com'), False) - - def test_has_link_regexp(self): - """test has_link_regexp()""" - self.assertEqual(self.page_info.has_link_regexp('L[oi]gilab'), True) - self.assertEqual(self.page_info.has_link_regexp('L[ai]gilab'), False) - - -class CWUtilitiesTC(CubicWebTC): - - def test_temporary_permissions_eschema(self): - eschema = self.schema['CWUser'] - with self.temporary_permissions(CWUser={'read': ()}): - self.assertEqual(eschema.permissions['read'], ()) - self.assertTrue(eschema.permissions['add']) - self.assertTrue(eschema.permissions['read'], ()) - - def test_temporary_permissions_rdef(self): - rdef = self.schema['CWUser'].rdef('in_group') - with self.temporary_permissions((rdef, {'read': ()})): - self.assertEqual(rdef.permissions['read'], ()) - self.assertTrue(rdef.permissions['add']) - self.assertTrue(rdef.permissions['read'], ()) - - def test_temporary_permissions_rdef_with_exception(self): - rdef = self.schema['CWUser'].rdef('in_group') - try: - with self.temporary_permissions((rdef, {'read': ()})): - 
self.assertEqual(rdef.permissions['read'], ()) - self.assertTrue(rdef.permissions['add']) - raise ValueError('goto') - except ValueError: - self.assertTrue(rdef.permissions['read'], ()) - else: - self.fail('exception was caught unexpectedly') - - def test_temporary_appobjects_registered(self): - - class AnAppobject(object): - __registries__ = ('hip',) - __regid__ = 'hop' - __select__ = yes() - registered = None - - @classmethod - def __registered__(cls, reg): - cls.registered = reg - - with self.temporary_appobjects(AnAppobject): - self.assertEqual(self.vreg['hip'], AnAppobject.registered) - self.assertIn(AnAppobject, self.vreg['hip']['hop']) - self.assertNotIn(AnAppobject, self.vreg['hip']['hop']) - - def test_login(self): - """Calling login should not break hook control""" - with self.admin_access.repo_cnx() as cnx: - self.hook_executed = False - self.create_user(cnx, 'babar') - cnx.commit() - - from cubicweb.server import hook - from cubicweb.predicates import is_instance - - class MyHook(hook.Hook): - __regid__ = 'whatever' - __select__ = hook.Hook.__select__ & is_instance('CWProperty') - category = 'test-hook' - events = ('after_add_entity',) - test = self - - def __call__(self): - self.test.hook_executed = True - - with self.new_access('babar').repo_cnx() as cnx: - with self.temporary_appobjects(MyHook): - with cnx.allow_all_hooks_but('test-hook'): - prop = cnx.create_entity('CWProperty', pkey=u'ui.language', value=u'en') - cnx.commit() - self.assertFalse(self.hook_executed) - - -class RepoAccessTC(CubicWebTC): - - def test_repo_connection(self): - acc = self.new_access('admin') - with acc.repo_cnx() as cnx: - rset = cnx.execute('Any X WHERE X is CWUser') - self.assertTrue(rset) - - def test_client_connection(self): - acc = self.new_access('admin') - with acc.client_cnx() as cnx: - rset = cnx.execute('Any X WHERE X is CWUser') - self.assertTrue(rset) - - def test_web_request(self): - acc = self.new_access('admin') - with acc.web_request(elephant='babar') as req: - rset = req.execute('Any X WHERE X is CWUser') - self.assertTrue(rset) - self.assertEqual('babar', req.form['elephant']) - - def test_close(self): - acc = self.new_access('admin') - acc.close() - - def test_admin_access(self): - with self.admin_access.client_cnx() as cnx: - self.assertEqual('admin', cnx.user.login) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/test/unittest_webtest.py --- a/devtools/test/unittest_webtest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,41 +0,0 @@ -from six.moves import http_client - -from logilab.common.testlib import Tags -from cubicweb.devtools.webtest import CubicWebTestTC - - -class CWTTC(CubicWebTestTC): - def test_response(self): - response = self.webapp.get('/') - self.assertEqual(200, response.status_int) - - def test_base_url(self): - if self.config['base-url'] not in self.webapp.get('/').text: - self.fail('no mention of base url in retrieved page') - - -class CWTIdentTC(CubicWebTestTC): - test_db_id = 'webtest-ident' - anonymous_allowed = False - tags = CubicWebTestTC.tags | Tags(('auth',)) - - def test_reponse_denied(self): - res = self.webapp.get('/', expect_errors=True) - self.assertEqual(http_client.FORBIDDEN, res.status_int) - - def test_login(self): - res = self.webapp.get('/', expect_errors=True) - self.assertEqual(http_client.FORBIDDEN, res.status_int) - - self.login(self.admlogin, self.admpassword) - res = self.webapp.get('/') - self.assertEqual(http_client.OK, res.status_int) - - 
self.logout() - res = self.webapp.get('/', expect_errors=True) - self.assertEqual(http_client.FORBIDDEN, res.status_int) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/testlib.py --- a/devtools/testlib.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1335 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""this module contains base classes and utilities for cubicweb tests""" -from __future__ import print_function - -import sys -import re -from os.path import dirname, join, abspath -from math import log -from contextlib import contextmanager -from itertools import chain - -from six import text_type, string_types -from six.moves import range -from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote - -import yams.schema - -from logilab.common.testlib import TestCase, InnerTest, Tags -from logilab.common.pytest import nocoverage, pause_trace -from logilab.common.debugger import Debugger -from logilab.common.umessage import message_from_string -from logilab.common.decorators import cached, classproperty, clear_cache, iclassmethod -from logilab.common.deprecation import deprecated, class_deprecated -from logilab.common.shellutils import getlogin - -from cubicweb import (ValidationError, NoSelectableObject, AuthenticationError, - BadConnectionId) -from cubicweb import cwconfig, devtools, web, server, repoapi -from cubicweb.utils import json -from cubicweb.sobjects import notification -from cubicweb.web import Redirect, application, eid_param -from cubicweb.server.hook import SendMailOp -from cubicweb.server.session import Session -from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS -from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID - - -# low-level utilities ########################################################## - -class CubicWebDebugger(Debugger): - """special debugger class providing a 'view' function which saves some - html into a temporary file and open a web browser to examinate it. - """ - def do_view(self, arg): - import webbrowser - data = self._getval(arg) - with open('/tmp/toto.html', 'w') as toto: - toto.write(data) - webbrowser.open('file:///tmp/toto.html') - - -def line_context_filter(line_no, center, before=3, after=None): - """return true if line are in context - - if after is None: after = before - """ - if after is None: - after = before - return center - before <= line_no <= center + after - - -def unprotected_entities(schema, strict=False): - """returned a set of each non final entity type, excluding "system" entities - (eg CWGroup, CWUser...) 
- """ - if strict: - protected_entities = yams.schema.BASE_TYPES - else: - protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES) - return set(schema.entities()) - protected_entities - - -class JsonValidator(object): - def parse_string(self, data): - return json.loads(data.decode('ascii')) - - -@contextmanager -def real_error_handling(app): - """By default, CubicWebTC `app` attribute (ie the publisher) is monkey - patched so that unexpected error are raised rather than going through the - `error_handler` method. - - By using this context manager you disable this monkey-patching temporarily. - Hence when publishihng a request no error will be raised, you'll get - req.status_out set to an HTTP error status code and the generated page will - usually hold a traceback as HTML. - - >>> with real_error_handling(app): - >>> page = app.handle_request(req) - """ - # remove the monkey patched error handler - fake_error_handler = app.error_handler - del app.error_handler - # return the app - yield app - # restore - app.error_handler = fake_error_handler - - -# email handling, to test emails sent by an application ######################## - -MAILBOX = [] - - -class Email(object): - """you'll get instances of Email into MAILBOX during tests that trigger - some notification. - - * `msg` is the original message object - - * `recipients` is a list of email address which are the recipients of this - message - """ - def __init__(self, fromaddr, recipients, msg): - self.fromaddr = fromaddr - self.recipients = recipients - self.msg = msg - - @property - def message(self): - return message_from_string(self.msg) - - @property - def subject(self): - return self.message.get('Subject') - - @property - def content(self): - return self.message.get_payload(decode=True) - - def __repr__(self): - return '' % (','.join(self.recipients), - self.message.get('Subject')) - - -# the trick to get email into MAILBOX instead of actually sent: monkey patch -# cwconfig.SMTP object -class MockSMTP: - - def __init__(self, server, port): - pass - - def close(self): - pass - - def sendmail(self, fromaddr, recipients, msg): - MAILBOX.append(Email(fromaddr, recipients, msg)) - -cwconfig.SMTP = MockSMTP - - -# Repoaccess utility ###############################################3########### - -class RepoAccess(object): - """An helper to easily create object to access the repo as a specific user - - Each RepoAccess have it own session. - - A repo access can create three type of object: - - .. automethod:: cubicweb.testlib.RepoAccess.cnx - .. automethod:: cubicweb.testlib.RepoAccess.web_request - - The RepoAccess need to be closed to destroy the associated Session. - TestCase usually take care of this aspect for the user. - - .. 
automethod:: cubicweb.testlib.RepoAccess.close - """ - - def __init__(self, repo, login, requestcls): - self._repo = repo - self._login = login - self.requestcls = requestcls - self._session = self._unsafe_connect(login) - - def _unsafe_connect(self, login, **kwargs): - """ a completely unsafe connect method for the tests """ - # use an internal connection - with self._repo.internal_cnx() as cnx: - # try to get a user object - user = cnx.find('CWUser', login=login).one() - user.groups - user.properties - user.login - session = Session(user, self._repo) - self._repo._sessions[session.sessionid] = session - user._cw = user.cw_rset.req = session - with session.new_cnx() as cnx: - self._repo.hm.call_hooks('session_open', cnx) - # commit connection at this point in case write operation has been - # done during `session_open` hooks - cnx.commit() - return session - - @contextmanager - def cnx(self): - """Context manager returning a server side connection for the user""" - with self._session.new_cnx() as cnx: - yield cnx - - # aliases for bw compat - client_cnx = repo_cnx = cnx - - @contextmanager - def web_request(self, url=None, headers={}, method='GET', **kwargs): - """Context manager returning a web request pre-linked to a client cnx - - To commit and rollback use:: - - req.cnx.commit() - req.cnx.rolback() - """ - req = self.requestcls(self._repo.vreg, url=url, headers=headers, - method=method, form=kwargs) - with self._session.new_cnx() as cnx: - req.set_cnx(cnx) - yield req - - def close(self): - """Close the session associated to the RepoAccess""" - if self._session is not None: - self._repo.close(self._session.sessionid) - self._session = None - - @contextmanager - def shell(self): - from cubicweb.server.migractions import ServerMigrationHelper - with self._session.new_cnx() as cnx: - mih = ServerMigrationHelper(None, repo=self._repo, cnx=cnx, - interactive=False, - # hack so it don't try to load fs schema - schema=1) - yield mih - cnx.commit() - - -# base class for cubicweb tests requiring a full cw environments ############### - -class CubicWebTC(TestCase): - """abstract class for test using an apptest environment - - attributes: - - * `vreg`, the vregistry - * `schema`, self.vreg.schema - * `config`, cubicweb configuration - * `cnx`, repoapi connection to the repository using an admin user - * `session`, server side session associated to `cnx` - * `app`, the cubicweb publisher (for web testing) - * `repo`, the repository object - * `admlogin`, login of the admin user - * `admpassword`, password of the admin user - * `shell`, create and use shell environment - * `anonymous_allowed`: flag telling if anonymous browsing should be allowed - """ - appid = 'data' - configcls = devtools.ApptestConfiguration - requestcls = fake.FakeRequest - tags = TestCase.tags | Tags('cubicweb', 'cw_repo') - test_db_id = DEFAULT_EMPTY_DB_ID - - # anonymous is logged by default in cubicweb test cases - anonymous_allowed = True - - def __init__(self, *args, **kwargs): - self._admin_session = None - self.repo = None - self._open_access = set() - super(CubicWebTC, self).__init__(*args, **kwargs) - - # repository connection handling ########################################### - - def new_access(self, login): - """provide a new RepoAccess object for a given user - - The access is automatically closed at the end of the test.""" - login = text_type(login) - access = RepoAccess(self.repo, login, self.requestcls) - self._open_access.add(access) - return access - - def _close_access(self): - while self._open_access: - 
try: - self._open_access.pop().close() - except BadConnectionId: - continue # already closed - - @property - def session(self): - """return admin session""" - return self._admin_session - - # XXX this doesn't need to a be classmethod anymore - def _init_repo(self): - """init the repository and connection to it. - """ - # get or restore and working db. - db_handler = devtools.get_test_db_handler(self.config, self.init_config) - db_handler.build_db_cache(self.test_db_id, self.pre_setup_database) - db_handler.restore_database(self.test_db_id) - self.repo = db_handler.get_repo(startup=True) - # get an admin session (without actual login) - login = text_type(db_handler.config.default_admin_config['login']) - self.admin_access = self.new_access(login) - self._admin_session = self.admin_access._session - - # config management ######################################################## - - @classproperty - def config(cls): - """return the configuration object - - Configuration is cached on the test class. - """ - if cls is CubicWebTC: - # Prevent direct use of CubicWebTC directly to avoid database - # caching issues - return None - try: - return cls.__dict__['_config'] - except KeyError: - home = abspath(join(dirname(sys.modules[cls.__module__].__file__), cls.appid)) - config = cls._config = cls.configcls(cls.appid, apphome=home) - config.mode = 'test' - return config - - @classmethod # XXX could be turned into a regular method - def init_config(cls, config): - """configuration initialization hooks. - - You may only want to override here the configuraton logic. - - Otherwise, consider to use a different :class:`ApptestConfiguration` - defined in the `configcls` class attribute. - - This method will be called by the database handler once the config has - been properly bootstrapped. - """ - admincfg = config.default_admin_config - cls.admlogin = text_type(admincfg['login']) - cls.admpassword = admincfg['password'] - # uncomment the line below if you want rql queries to be logged - # config.global_set_option('query-log-file', - # '/tmp/test_rql_log.' 
+ `os.getpid()`) - config.global_set_option('log-file', None) - # set default-dest-addrs to a dumb email address to avoid mailbox or - # mail queue pollution - config.global_set_option('default-dest-addrs', ['whatever']) - send_to = '%s@logilab.fr' % getlogin() - config.global_set_option('sender-addr', send_to) - config.global_set_option('default-dest-addrs', send_to) - config.global_set_option('sender-name', 'cubicweb-test') - config.global_set_option('sender-addr', 'cubicweb-test@logilab.fr') - # default_base_url on config class isn't enough for TestServerConfiguration - config.global_set_option('base-url', config.default_base_url()) - # web resources - try: - config.global_set_option('embed-allowed', re.compile('.*')) - except Exception: # not in server only configuration - pass - - @property - def vreg(self): - return self.repo.vreg - - # global resources accessors ############################################### - - @property - def schema(self): - """return the application schema""" - return self.vreg.schema - - def set_option(self, optname, value): - self.config.global_set_option(optname, value) - - def set_debug(self, debugmode): - server.set_debug(debugmode) - - def debugged(self, debugmode): - return server.debugged(debugmode) - - # default test setup and teardown ######################################### - - def setUp(self): - # monkey patch send mail operation so emails are sent synchronously - self._patch_SendMailOp() - with pause_trace(): - previous_failure = self.__class__.__dict__.get('_repo_init_failed') - if previous_failure is not None: - self.skipTest('repository is not initialised: %r' % previous_failure) - try: - self._init_repo() - except Exception as ex: - self.__class__._repo_init_failed = ex - raise - self.addCleanup(self._close_access) - self.config.set_anonymous_allowed(self.anonymous_allowed) - self.setup_database() - MAILBOX[:] = [] # reset mailbox - - def tearDown(self): - # XXX hack until logilab.common.testlib is fixed - if self._admin_session is not None: - self.repo.close(self._admin_session.sessionid) - self._admin_session = None - while self._cleanups: - cleanup, args, kwargs = self._cleanups.pop(-1) - cleanup(*args, **kwargs) - self.repo.turn_repo_off() - - def _patch_SendMailOp(self): - # monkey patch send mail operation so emails are sent synchronously - _old_mail_postcommit_event = SendMailOp.postcommit_event - SendMailOp.postcommit_event = SendMailOp.sendmails - - def reverse_SendMailOp_monkey_patch(): - SendMailOp.postcommit_event = _old_mail_postcommit_event - - self.addCleanup(reverse_SendMailOp_monkey_patch) - - def setup_database(self): - """add your database setup code by overriding this method""" - - @classmethod - def pre_setup_database(cls, cnx, config): - """add your pre database setup code by overriding this method - - Do not forget to set the cls.test_db_id value to enable caching of the - result. 
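As an illustration (the cube, group and login names below are hypothetical, not taken from this file), a concrete test case might combine these hooks like so:

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC

    class MyCubeTC(CubicWebTC):
        # naming a test_db_id enables caching of the pre-populated database
        test_db_id = 'mycube-base'

        @classmethod
        def pre_setup_database(cls, cnx, config):
            # executed once, when the cached database is first built
            cnx.create_entity('CWGroup', name=u'reviewers')
            cnx.commit()

        def setup_database(self):
            # executed before every test, on top of the restored database
            with self.admin_access.repo_cnx() as cnx:
                self.create_user(cnx, u'bob', groups=('users', 'reviewers'))
                cnx.commit()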
- """ - - # user / session management ############################################### - - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def user(self, req=None): - """return the application schema""" - if req is None: - return self.request().user - else: - return req.user - - @iclassmethod # XXX turn into a class method - def create_user(self, req, login=None, groups=('users',), password=None, - email=None, commit=True, **kwargs): - """create and return a new user entity""" - if password is None: - password = login - if login is not None: - login = text_type(login) - user = req.create_entity('CWUser', login=login, - upassword=password, **kwargs) - req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' - % ','.join(repr(str(g)) for g in groups), - {'x': user.eid}) - if email is not None: - req.create_entity('EmailAddress', address=text_type(email), - reverse_primary_email=user) - user.cw_clear_relation_cache('in_group', 'subject') - if commit: - try: - req.commit() # req is a session - except AttributeError: - req.cnx.commit() - return user - - # other utilities ######################################################### - - @contextmanager - def temporary_appobjects(self, *appobjects): - self.vreg._loadedmods.setdefault(self.__module__, {}) - for obj in appobjects: - self.vreg.register(obj) - registered = getattr(obj, '__registered__', None) - if registered: - for registry in obj.__registries__: - registered(self.vreg[registry]) - try: - yield - finally: - for obj in appobjects: - self.vreg.unregister(obj) - - @contextmanager - def temporary_permissions(self, *perm_overrides, **perm_kwoverrides): - """Set custom schema permissions within context. - - There are two ways to call this method, which may be used together : - - * using positional argument(s): - - .. sourcecode:: python - - rdef = self.schema['CWUser'].rdef('login') - with self.temporary_permissions((rdef, {'read': ()})): - ... - - - * using named argument(s): - - .. sourcecode:: python - - with self.temporary_permissions(CWUser={'read': ()}): - ... - - Usually the former will be preferred to override permissions on a - relation definition, while the latter is well suited for entity types. - - The allowed keys in the permission dictionary depend on the schema type - (entity type / relation definition). Resulting permissions will be - similar to `orig_permissions.update(partial_perms)`. 
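Both forms can also be combined in a single call, for instance (sketch only, reusing the relation definition from the example above):

.. sourcecode:: python

    rdef = self.schema['CWUser'].rdef('in_group')
    with self.temporary_permissions((rdef, {'read': ()}), CWGroup={'read': ()}):
        ...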
- """ - torestore = [] - for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()): - if isinstance(erschema, string_types): - erschema = self.schema[erschema] - for action, actionperms in etypeperms.items(): - origperms = erschema.permissions[action] - erschema.set_action_permissions(action, actionperms) - torestore.append([erschema, action, origperms]) - try: - yield - finally: - for erschema, action, permissions in torestore: - if action is None: - erschema.permissions = permissions - else: - erschema.set_action_permissions(action, permissions) - - def assertModificationDateGreater(self, entity, olddate): - entity.cw_attr_cache.pop('modification_date', None) - self.assertGreater(entity.modification_date, olddate) - - def assertMessageEqual(self, req, params, expected_msg): - msg = req.session.data[params['_cwmsgid']] - self.assertEqual(expected_msg, msg) - - # workflow utilities ####################################################### - - def assertPossibleTransitions(self, entity, expected): - transitions = entity.cw_adapt_to('IWorkflowable').possible_transitions() - self.assertListEqual(sorted(tr.name for tr in transitions), - sorted(expected)) - - # views and actions registries inspection ################################## - - def pviews(self, req, rset): - return sorted((a.__regid__, a.__class__) - for a in self.vreg['views'].possible_views(req, rset=rset)) - - def pactions(self, req, rset, - skipcategories=('addrelated', 'siteactions', 'useractions', - 'footer', 'manage')): - return [(a.__regid__, a.__class__) - for a in self.vreg['actions'].poss_visible_objects(req, rset=rset) - if a.category not in skipcategories] - - def pactions_by_cats(self, req, rset, categories=('addrelated',)): - return [(a.__regid__, a.__class__) - for a in self.vreg['actions'].poss_visible_objects(req, rset=rset) - if a.category in categories] - - def pactionsdict(self, req, rset, - skipcategories=('addrelated', 'siteactions', 'useractions', - 'footer', 'manage')): - res = {} - for a in self.vreg['actions'].poss_visible_objects(req, rset=rset): - if a.category not in skipcategories: - res.setdefault(a.category, []).append(a.__class__) - return res - - def action_submenu(self, req, rset, id): - return self._test_action(self.vreg['actions'].select(id, req, rset=rset)) - - def _test_action(self, action): - class fake_menu(list): - @property - def items(self): - return self - - class fake_box(object): - def action_link(self, action, **kwargs): - return (action.title, action.url()) - submenu = fake_menu() - action.fill_menu(fake_box(), submenu) - return submenu - - def list_views_for(self, rset): - """returns the list of views that can be applied on `rset`""" - req = rset.req - only_once_vids = ('primary', 'secondary', 'text') - req.data['ex'] = ValueError("whatever") - viewsvreg = self.vreg['views'] - for vid, views in viewsvreg.items(): - if vid[0] == '_': - continue - if rset.rowcount > 1 and vid in only_once_vids: - continue - views = [view for view in views - if view.category != 'startupview' - and not issubclass(view, notification.NotificationView) - and not isinstance(view, class_deprecated)] - if views: - try: - view = viewsvreg._select_best(views, req, rset=rset) - if view is None: - raise NoSelectableObject((req,), {'rset': rset}, views) - if view.linkable(): - yield view - else: - not_selected(self.vreg, view) - # else the view is expected to be used as subview and should - # not be tested directly - except NoSelectableObject: - continue - - def list_actions_for(self, rset): - 
"""returns the list of actions that can be applied on `rset`""" - req = rset.req - for action in self.vreg['actions'].possible_objects(req, rset=rset): - yield action - - def list_boxes_for(self, rset): - """returns the list of boxes that can be applied on `rset`""" - req = rset.req - for box in self.vreg['ctxcomponents'].possible_objects(req, rset=rset): - yield box - - def list_startup_views(self): - """returns the list of startup views""" - with self.admin_access.web_request() as req: - for view in self.vreg['views'].possible_views(req, None): - if view.category == 'startupview': - yield view.__regid__ - else: - not_selected(self.vreg, view) - - # web ui testing utilities ################################################# - - @property - @cached - def app(self): - """return a cubicweb publisher""" - publisher = application.CubicWebPublisher(self.repo, self.config) - - def raise_error_handler(*args, **kwargs): - raise - - publisher.error_handler = raise_error_handler - return publisher - - @deprecated('[3.19] use the .remote_calling method') - def remote_call(self, fname, *args): - """remote json call simulation""" - dump = json.dumps - args = [dump(arg) for arg in args] - req = self.request(fname=fname, pageid='123', arg=args) - ctrl = self.vreg['controllers'].select('ajax', req) - return ctrl.publish(), req - - @contextmanager - def remote_calling(self, fname, *args): - """remote json call simulation""" - args = [json.dumps(arg) for arg in args] - with self.admin_access.web_request(fname=fname, pageid='123', arg=args) as req: - ctrl = self.vreg['controllers'].select('ajax', req) - yield ctrl.publish(), req - - def app_handle_request(self, req, path='view'): - return self.app.core_handle(req, path) - - @deprecated("[3.15] app_handle_request is the new and better way" - " (beware of small semantic changes)") - def app_publish(self, *args, **kwargs): - return self.app_handle_request(*args, **kwargs) - - def ctrl_publish(self, req, ctrl='edit', rset=None): - """call the publish method of the edit controller""" - ctrl = self.vreg['controllers'].select(ctrl, req, appli=self.app) - try: - result = ctrl.publish(rset) - req.cnx.commit() - except web.Redirect: - req.cnx.commit() - raise - return result - - @staticmethod - def fake_form(formid, field_dict=None, entity_field_dicts=()): - """Build _cw.form dictionnary to fake posting of some standard cubicweb form - - * `formid`, the form id, usually form's __regid__ - - * `field_dict`, dictionary of name:value for fields that are not tied to an entity - - * `entity_field_dicts`, list of (entity, dictionary) where dictionary contains name:value - for fields that are not tied to the given entity - """ - assert field_dict or entity_field_dicts, \ - 'field_dict and entity_field_dicts arguments must not be both unspecified' - if field_dict is None: - field_dict = {} - form = {'__form_id': formid} - fields = [] - for field, value in field_dict.items(): - fields.append(field) - form[field] = value - - def _add_entity_field(entity, field, value): - entity_fields.append(field) - form[eid_param(field, entity.eid)] = value - - for entity, field_dict in entity_field_dicts: - if '__maineid' not in form: - form['__maineid'] = entity.eid - entity_fields = [] - form.setdefault('eid', []).append(entity.eid) - _add_entity_field(entity, '__type', entity.cw_etype) - for field, value in field_dict.items(): - _add_entity_field(entity, field, value) - if entity_fields: - form[eid_param('_cw_entity_fields', entity.eid)] = ','.join(entity_fields) - if fields: - 
form['_cw_fields'] = ','.join(sorted(fields)) - return form - - @deprecated('[3.19] use .admin_request_from_url instead') - def req_from_url(self, url): - """parses `url` and builds the corresponding CW-web request - - req.form will be setup using the url's query string - """ - req = self.request(url=url) - if isinstance(url, unicode): - url = url.encode(req.encoding) # req.setup_params() expects encoded strings - querystring = urlparse(url)[-2] - params = parse_qs(querystring) - req.setup_params(params) - return req - - @contextmanager - def admin_request_from_url(self, url): - """parses `url` and builds the corresponding CW-web request - - req.form will be setup using the url's query string - """ - with self.admin_access.web_request(url=url) as req: - if isinstance(url, unicode): - url = url.encode(req.encoding) # req.setup_params() expects encoded strings - querystring = urlparse(url)[-2] - params = parse_qs(querystring) - req.setup_params(params) - yield req - - def url_publish(self, url, data=None): - """takes `url`, uses application's app_resolver to find the appropriate - controller and result set, then publishes the result. - - To simulate post of www-form-encoded data, give a `data` dictionary - containing desired key/value associations. - - This should pretty much correspond to what occurs in a real CW server - except the apache-rewriter component is not called. - """ - with self.admin_request_from_url(url) as req: - if data is not None: - req.form.update(data) - ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False)) - return self.ctrl_publish(req, ctrlid, rset) - - def http_publish(self, url, data=None): - """like `url_publish`, except this returns a http response, even in case - of errors. You may give form parameters using the `data` argument. 
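A test method might exercise both helpers along these lines (the paths and parameters are illustrative only):

.. sourcecode:: python

    # go through the application's URL resolver, as a real request would
    result = self.url_publish('view', data={'rql': 'Any X WHERE X is CWUser'})

    # same, but with real error handling enabled: failures end up in
    # req.status_out instead of being raised
    result, req = self.http_publish('view', data={'vid': 'doesnotexist'})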
- """ - with self.admin_request_from_url(url) as req: - if data is not None: - req.form.update(data) - with real_error_handling(self.app): - result = self.app_handle_request(req, req.relative_path(False)) - return result, req - - @staticmethod - def _parse_location(req, location): - try: - path, params = location.split('?', 1) - except ValueError: - path = location - params = {} - else: - cleanup = lambda p: (p[0], urlunquote(p[1])) - params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) - if path.startswith(req.base_url()): # may be relative - path = path[len(req.base_url()):] - return path, params - - def expect_redirect(self, callback, req): - """call the given callback with req as argument, expecting to get a - Redirect exception - """ - try: - callback(req) - except Redirect as ex: - return self._parse_location(req, ex.location) - else: - self.fail('expected a Redirect exception') - - def expect_redirect_handle_request(self, req, path='edit'): - """call the publish method of the application publisher, expecting to - get a Redirect exception - """ - self.app_handle_request(req, path) - self.assertTrue(300 <= req.status_out < 400, req.status_out) - location = req.get_response_header('location') - return self._parse_location(req, location) - - @deprecated("[3.15] expect_redirect_handle_request is the new and better way" - " (beware of small semantic changes)") - def expect_redirect_publish(self, *args, **kwargs): - return self.expect_redirect_handle_request(*args, **kwargs) - - def set_auth_mode(self, authmode, anonuser=None): - self.set_option('auth-mode', authmode) - self.set_option('anonymous-user', anonuser) - if anonuser is None: - self.config.anonymous_credential = None - else: - self.config.anonymous_credential = (anonuser, anonuser) - - def init_authentication(self, authmode, anonuser=None): - self.set_auth_mode(authmode, anonuser) - req = self.requestcls(self.vreg, url='login') - sh = self.app.session_handler - authm = sh.session_manager.authmanager - authm.anoninfo = self.vreg.config.anonymous_user() - authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]} - # not properly cleaned between tests - self.open_sessions = sh.session_manager._sessions = {} - return req, self.session - - def assertAuthSuccess(self, req, origsession, nbsessions=1): - sh = self.app.session_handler - session = self.app.get_session(req) - cnx = repoapi.Connection(session) - req.set_cnx(cnx) - self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions) - self.assertEqual(session.login, origsession.login) - self.assertEqual(session.anonymous_session, False) - - def assertAuthFailure(self, req, nbsessions=0): - with self.assertRaises(AuthenticationError): - self.app.get_session(req) - # +0 since we do not track the opened session - self.assertEqual(len(self.open_sessions), nbsessions) - clear_cache(req, 'get_authorization') - - # content validation ####################################################### - - # validators are used to validate (XML, DTD, whatever) view's content - # validators availables are : - # DTDValidator : validates XML + declared DTD - # SaxOnlyValidator : guarantees XML is well formed - # None : do not try to validate anything - # validators used must be imported from from.devtools.htmlparser - content_type_validators = { - # maps MIME type : validator name - # - # do not set html validators here, we need HTMLValidator for html - # snippets - # 'text/html': DTDValidator, - # 'application/xhtml+xml': DTDValidator, - 'application/xml': 
htmlparser.XMLValidator, - 'text/xml': htmlparser.XMLValidator, - 'application/json': JsonValidator, - 'text/plain': None, - 'text/comma-separated-values': None, - 'text/x-vcard': None, - 'text/calendar': None, - 'image/png': None, - } - # maps vid : validator name (override content_type_validators) - vid_validators = dict((vid, htmlparser.VALMAP[valkey]) - for vid, valkey in VIEW_VALIDATORS.items()) - - def view(self, vid, rset=None, req=None, template='main-template', - **kwargs): - """This method tests the view `vid` on `rset` using `template` - - If no error occurred while rendering the view, the HTML is analyzed - and parsed. - - :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo` - encapsulation the generated HTML - """ - if req is None: - if rset is None: - req = self.request() - else: - req = rset.req - req.form['vid'] = vid - viewsreg = self.vreg['views'] - view = viewsreg.select(vid, req, rset=rset, **kwargs) - # set explicit test description - if rset is not None: - # coerce to "bytes" on py2 because the description will be sent to - # sys.stdout/stderr which takes "bytes" on py2 and "unicode" on py3 - rql = str(rset.printable_rql()) - self.set_description("testing vid=%s defined in %s with (%s)" % ( - vid, view.__module__, rql)) - else: - self.set_description("testing vid=%s defined in %s without rset" % ( - vid, view.__module__)) - if template is None: # raw view testing, no template - viewfunc = view.render - else: - kwargs['view'] = view - viewfunc = lambda **k: viewsreg.main_template(req, template, - rset=rset, **kwargs) - return self._test_view(viewfunc, view, template, kwargs) - - def _test_view(self, viewfunc, view, template='main-template', kwargs={}): - """this method does the actual call to the view - - If no error occurred while rendering the view, the HTML is analyzed - and parsed. 
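Typical usage from a test method could look like this (a minimal sketch; the RQL query and view id are only examples):

.. sourcecode:: python

    with self.admin_access.web_request() as req:
        rset = req.execute('Any X LIMIT 1 WHERE X is CWUser')
        # returns a cubicweb.devtools.htmlparser.PageInfo instance
        page = self.view('primary', rset, req=req)
        self.assertTrue(page.source)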
- - :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo` - encapsulation the generated HTML - """ - try: - output = viewfunc(**kwargs) - except Exception: - # hijack exception: generative tests stop when the exception - # is not an AssertionError - klass, exc, tcbk = sys.exc_info() - try: - msg = '[%s in %s] %s' % (klass, view.__regid__, exc) - except Exception: - msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__) - exc = AssertionError(msg) - exc.__traceback__ = tcbk - raise exc - return self._check_html(output, view, template) - - def get_validator(self, view=None, content_type=None, output=None): - if view is not None: - try: - return self.vid_validators[view.__regid__]() - except KeyError: - if content_type is None: - content_type = view.content_type - if content_type is None: - content_type = 'text/html' - if content_type in ('text/html', 'application/xhtml+xml') and output: - if output.startswith(b''): - # only check XML well-formness since HTMLValidator isn't html5 - # compatible and won't like various other extensions - default_validator = htmlparser.XMLSyntaxValidator - elif output.startswith(b' used in progress widget, unknown in html dtd - output = re.sub('', '', output) - return self.assertWellFormed(validator, output.strip(), context=view.__regid__) - - def assertWellFormed(self, validator, content, context=None): - try: - return validator.parse_string(content) - except Exception: - # hijack exception: generative tests stop when the exception - # is not an AssertionError - klass, exc, tcbk = sys.exc_info() - if context is None: - msg = u'[%s]' % (klass,) - else: - msg = u'[%s in %s]' % (klass, context) - msg = msg.encode(sys.getdefaultencoding(), 'replace') - - try: - str_exc = str(exc) - except Exception: - str_exc = 'undisplayable exception' - msg += str_exc.encode(sys.getdefaultencoding(), 'replace') - if content is not None: - position = getattr(exc, "position", (0,))[0] - if position: - # define filter - if isinstance(content, str): - content = unicode(content, sys.getdefaultencoding(), 'replace') - content = validator.preprocess_data(content) - content = content.splitlines() - width = int(log(len(content), 10)) + 1 - line_template = " %" + ("%i" % width) + "i: %s" - # XXX no need to iterate the whole file except to get - # the line number - content = u'\n'.join(line_template % (idx + 1, line) - for idx, line in enumerate(content) - if line_context_filter(idx+1, position)) - msg += u'\nfor content:\n%s' % content - exc = AssertionError(msg) - exc.__traceback__ = tcbk - raise exc - - def assertDocTestFile(self, testfile): - # doctest returns tuple (failure_count, test_count) - with self.admin_access.shell() as mih: - result = mih.process_script(testfile) - if result[0] and result[1]: - raise self.failureException("doctest file '%s' failed" - % testfile) - - # notifications ############################################################ - - def assertSentEmail(self, subject, recipients=None, nb_msgs=None): - """test recipients in system mailbox for given email subject - - :param subject: email subject to find in mailbox - :param recipients: list of email recipients - :param nb_msgs: expected number of entries - :returns: list of matched emails - """ - messages = [email for email in MAILBOX - if email.message.get('Subject') == subject] - if recipients is not None: - sent_to = set() - for msg in messages: - sent_to.update(msg.recipients) - self.assertSetEqual(set(recipients), sent_to) - if nb_msgs is not None: - self.assertEqual(len(MAILBOX), 
nb_msgs) - return messages - - -# auto-populating test classes and utilities ################################### - -from cubicweb.devtools.fill import insert_entity_queries, make_relations_queries - -# XXX cleanup unprotected_entities & all mess - - -def how_many_dict(schema, cnx, how_many, skip): - """given a schema, compute how many entities by type we need to be able to - satisfy relations cardinality. - - The `how_many` argument tells how many entities of which type we want at - least. - - Return a dictionary with entity types as key, and the number of entities for - this type as value. - """ - relmap = {} - for rschema in schema.relations(): - if rschema.final: - continue - for subj, obj in rschema.rdefs: - card = rschema.rdef(subj, obj).cardinality - # if the relation is mandatory, we'll need at least as many subj and - # obj to satisfy it - if card[0] in '1+' and card[1] in '1?': - # subj has to be linked to at least one obj, - # but obj can be linked to only one subj - # -> we need at least as many subj as obj to satisfy - # cardinalities for this relation - relmap.setdefault((rschema, subj), []).append(str(obj)) - if card[1] in '1+' and card[0] in '1?': - # reverse subj and obj in the above explanation - relmap.setdefault((rschema, obj), []).append(str(subj)) - unprotected = unprotected_entities(schema) - for etype in skip: # XXX (syt) duh? explain or kill - unprotected.add(etype) - howmanydict = {} - # step 1, compute a base number of each entity types: number of already - # existing entities of this type + `how_many` - for etype in unprotected_entities(schema, strict=True): - howmanydict[str(etype)] = cnx.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] - if etype in unprotected: - howmanydict[str(etype)] += how_many - # step 2, augment nb entity per types to satisfy cardinality constraints, - # by recomputing for each relation that constrained an entity type: - # - # new num for etype = max(current num, sum(num for possible target etypes)) - # - # XXX we should first check there is no cycle then propagate changes - for (rschema, etype), targets in relmap.items(): - relfactor = sum(howmanydict[e] for e in targets) - howmanydict[str(etype)] = max(relfactor, howmanydict[etype]) - return howmanydict - - -class AutoPopulateTest(CubicWebTC): - """base class for test with auto-populating of the database""" - __abstract__ = True - - test_db_id = 'autopopulate' - - tags = CubicWebTC.tags | Tags('autopopulated') - - pdbclass = CubicWebDebugger - # this is a hook to be able to define a list of rql queries - # that are application dependent and cannot be guessed automatically - application_rql = [] - - no_auto_populate = () - ignored_relations = set() - - def to_test_etypes(self): - return unprotected_entities(self.schema, strict=True) - - def custom_populate(self, how_many, cnx): - pass - - def post_populate(self, cnx): - pass - - @nocoverage - def auto_populate(self, how_many): - """this method populates the database with `how_many` entities - of each possible type. 
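A cube-specific subclass typically just tunes the class attributes above, e.g. (the entity type and relation names below are made up):

.. sourcecode:: python

    from cubicweb.devtools.testlib import AutomaticWebTest  # defined further down

    class MyCubeAutomaticWebTest(AutomaticWebTest):
        # entity types that should not be auto-generated
        no_auto_populate = ('Workcase',)
        # relations that should never be randomly inserted
        ignored_relations = set(('see_also',))
        # extra application-specific result sets to render
        application_rql = ['Any B, C WHERE C comments B']

        def to_test_etypes(self):
            return set(('BlogEntry', 'Comment'))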
It also inserts random relations between them - """ - with self.admin_access.cnx() as cnx: - with cnx.security_enabled(read=False, write=False): - self._auto_populate(cnx, how_many) - cnx.commit() - - def _auto_populate(self, cnx, how_many): - self.custom_populate(how_many, cnx) - vreg = self.vreg - howmanydict = how_many_dict(self.schema, cnx, how_many, self.no_auto_populate) - for etype in unprotected_entities(self.schema): - if etype in self.no_auto_populate: - continue - nb = howmanydict.get(etype, how_many) - for rql, args in insert_entity_queries(etype, self.schema, vreg, nb): - cnx.execute(rql, args) - edict = {} - for etype in unprotected_entities(self.schema, strict=True): - rset = cnx.execute('%s X' % etype) - edict[str(etype)] = set(row[0] for row in rset.rows) - existingrels = {} - ignored_relations = SYSTEM_RELATIONS | self.ignored_relations - for rschema in self.schema.relations(): - if rschema.final or rschema in ignored_relations: - continue - rset = cnx.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema) - existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset) - q = make_relations_queries(self.schema, edict, cnx, ignored_relations, - existingrels=existingrels) - for rql, args in q: - try: - cnx.execute(rql, args) - except ValidationError as ex: - # failed to satisfy some constraint - print('error in automatic db population', ex) - cnx.commit_state = None # reset uncommitable flag - self.post_populate(cnx) - - def iter_individual_rsets(self, etypes=None, limit=None): - etypes = etypes or self.to_test_etypes() - with self.admin_access.web_request() as req: - for etype in etypes: - if limit: - rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype) - else: - rql = 'Any X WHERE X is %s' % etype - rset = req.execute(rql) - for row in range(len(rset)): - if limit and row > limit: - break - # XXX iirk - rset2 = rset.limit(limit=1, offset=row) - yield rset2 - - def iter_automatic_rsets(self, limit=10): - """generates basic resultsets for each entity type""" - etypes = self.to_test_etypes() - if not etypes: - return - with self.admin_access.web_request() as req: - for etype in etypes: - yield req.execute('Any X LIMIT %s WHERE X is %s' % (limit, etype)) - etype1 = etypes.pop() - try: - etype2 = etypes.pop() - except KeyError: - etype2 = etype1 - # test a mixed query (DISTINCT/GROUP to avoid getting duplicate - # X which make muledit view failing for instance (html validation fails - # because of some duplicate "id" attributes) - yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % - (etype1, etype2)) - # test some application-specific queries if defined - for rql in self.application_rql: - yield req.execute(rql) - - def _test_everything_for(self, rset): - """this method tries to find everything that can be tested - for `rset` and yields a callable test (as needed in generative tests) - """ - propdefs = self.vreg['propertydefs'] - # make all components visible - for k, v in propdefs.items(): - if k.endswith('visible') and not v['default']: - propdefs[k]['default'] = True - for view in self.list_views_for(rset): - backup_rset = rset.copy(rset.rows, rset.description) - yield InnerTest(self._testname(rset, view.__regid__, 'view'), - self.view, view.__regid__, rset, - rset.req.reset_headers(), 'main-template') - # We have to do this because some views modify the - # resultset's syntax tree - rset = backup_rset - for action in self.list_actions_for(rset): - yield InnerTest(self._testname(rset, action.__regid__, 'action'), - self._test_action, 
action) - for box in self.list_boxes_for(rset): - w = [].append - yield InnerTest(self._testname(rset, box.__regid__, 'box'), box.render, w) - - @staticmethod - def _testname(rset, objid, objtype): - return '%s_%s_%s' % ('_'.join(rset.column_types(0)), objid, objtype) - - -# concrete class for automated application testing ############################ - -class AutomaticWebTest(AutoPopulateTest): - """import this if you wan automatic tests to be ran""" - - tags = AutoPopulateTest.tags | Tags('web', 'generated') - - def setUp(self): - if self.__class__ is AutomaticWebTest: - # Prevent direct use of AutomaticWebTest to avoid database caching - # issues. - return - super(AutomaticWebTest, self).setUp() - - # access to self.app for proper initialization of the authentication - # machinery (else some views may fail) - self.app - - def test_one_each_config(self): - self.auto_populate(1) - for rset in self.iter_automatic_rsets(limit=1): - for testargs in self._test_everything_for(rset): - yield testargs - - def test_ten_each_config(self): - self.auto_populate(10) - for rset in self.iter_automatic_rsets(limit=10): - for testargs in self._test_everything_for(rset): - yield testargs - - def test_startup_views(self): - for vid in self.list_startup_views(): - with self.admin_access.web_request() as req: - yield self.view, vid, None, req - - -# registry instrumentization ################################################### - -def not_selected(vreg, appobject): - try: - vreg._selected[appobject.__class__] -= 1 - except (KeyError, AttributeError): - pass - - -# def vreg_instrumentize(testclass): -# # XXX broken -# from cubicweb.devtools.apptest import TestEnvironment -# env = testclass._env = TestEnvironment('data', configcls=testclass.configcls) -# for reg in env.vreg.values(): -# reg._selected = {} -# try: -# orig_select_best = reg.__class__.__orig_select_best -# except Exception: -# orig_select_best = reg.__class__._select_best -# def instr_select_best(self, *args, **kwargs): -# selected = orig_select_best(self, *args, **kwargs) -# try: -# self._selected[selected.__class__] += 1 -# except KeyError: -# self._selected[selected.__class__] = 1 -# except AttributeError: -# pass # occurs on reg used to restore database -# return selected -# reg.__class__._select_best = instr_select_best -# reg.__class__.__orig_select_best = orig_select_best - - -# def print_untested_objects(testclass, skipregs=('hooks', 'etypes')): -# for regname, reg in testclass._env.vreg.items(): -# if regname in skipregs: -# continue -# for appobjects in reg.values(): -# for appobject in appobjects: -# if not reg._selected.get(appobject): -# print 'not tested', regname, appobject diff -r 058bb3dc685f -r 0b59724cb3f2 devtools/webtest.py --- a/devtools/webtest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -from __future__ import absolute_import - -import webtest - -from cubicweb.wsgi import handler -from cubicweb.devtools.testlib import CubicWebTC - - -class CubicWebTestTC(CubicWebTC): - def setUp(self): - super(CubicWebTestTC, self).setUp() - self.config.global_set_option('base-url', 'http://localhost.local/') - # call load_configuration again to let the config reset its datadir_url - self.config.load_configuration() - webapp = handler.CubicWebWSGIApplication(self.config) - self.webapp = webtest.TestApp(webapp) - - def tearDown(self): - del self.webapp - super(CubicWebTestTC, self).tearDown() - - def login(self, user=None, password=None, **args): - if user is None: - user = self.admlogin - 
if password is None: - password = self.admpassword if user == self.admlogin else user - args.update({ - '__login': user, - '__password': password - }) - return self.webapp.get('/login', args) - - def logout(self): - return self.webapp.get('/logout') diff -r 058bb3dc685f -r 0b59724cb3f2 doc/Makefile --- a/doc/Makefile Mon Jan 04 18:40:30 2016 +0100 +++ b/doc/Makefile Sat Jan 16 13:48:51 2016 +0100 @@ -6,7 +6,7 @@ PAPER = #BUILDDIR = build BUILDDIR = _build -CWDIR = .. +CWDIR = ../cubicweb JSDIR = ${CWDIR}/web/data JSTORST = tools/pyjsrest.py BUILDJS = js_api diff -r 058bb3dc685f -r 0b59724cb3f2 doc/conf.py --- a/doc/conf.py Mon Jan 04 18:40:30 2016 +0100 +++ b/doc/conf.py Sat Jan 16 13:48:51 2016 +0100 @@ -36,9 +36,10 @@ path = __file__ path = osp.dirname(path) # ./doc path = osp.dirname(path) # ./ -path = osp.join(path, '__pkginfo__.py') # ./__pkginfo__.py +path = osp.join(path, 'cubicweb', '__pkginfo__.py') # ./__pkginfo__.py cw = {} -execfile(path, {}, cw) +with open(path) as f: + exec(f.read(), cw) # If your extensions are in another directory, add it here. If the directory # is relative to the documentation root, use os.path.abspath to make it diff -r 058bb3dc685f -r 0b59724cb3f2 entities/__init__.py --- a/entities/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,208 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""base application's entities class implementation: `AnyEntity`""" - -__docformat__ = "restructuredtext en" - -from warnings import warn - -from six import text_type, string_types - -from logilab.common.decorators import classproperty -from logilab.common.deprecation import deprecated - -from cubicweb import Unauthorized -from cubicweb.entity import Entity - - -class AnyEntity(Entity): - """an entity instance has e_schema automagically set on the class and - instances have access to their issuing cursor - """ - __regid__ = 'Any' - - @classproperty - def cw_etype(cls): - """entity type as a unicode string""" - return text_type(cls.__regid__) - - @classmethod - def cw_create_url(cls, req, **kwargs): - """ return the url of the entity creation form for this entity type""" - return req.build_url('add/%s' % cls.__regid__, **kwargs) - - @classmethod - @deprecated('[3.22] use cw_fti_index_rql_limit instead') - def cw_fti_index_rql_queries(cls, req): - """return the list of rql queries to fetch entities to FT-index - - The default is to fetch all entities at once and to prefetch - indexable attributes but one could imagine iterating over - "smaller" resultsets if the table is very big or returning - a subset of entities that match some business-logic condition. 
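For instance, assuming a hypothetical Card entity type whose indexable attributes are content and title, the default implementation would return a single query of the form:

.. sourcecode:: python

    ['Any X, CONTENT, TITLE WHERE X is Card, X content CONTENT, X title TITLE']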
- """ - restrictions = ['X is %s' % cls.__regid__] - selected = ['X'] - for attrschema in sorted(cls.e_schema.indexable_attributes()): - varname = attrschema.type.upper() - restrictions.append('X %s %s' % (attrschema, varname)) - selected.append(varname) - return ['Any %s WHERE %s' % (', '.join(selected), - ', '.join(restrictions))] - - @classmethod - def cw_fti_index_rql_limit(cls, req, limit=1000): - """generate rsets of entities to FT-index - - By default, each successive result set is limited to 1000 entities - """ - if cls.cw_fti_index_rql_queries.__func__ != AnyEntity.cw_fti_index_rql_queries.__func__: - warn("[3.22] cw_fti_index_rql_queries is replaced by cw_fti_index_rql_limit", - DeprecationWarning) - for rql in cls.cw_fti_index_rql_queries(req): - yield req.execute(rql) - return - restrictions = ['X is %s' % cls.__regid__] - selected = ['X'] - start = 0 - for attrschema in sorted(cls.e_schema.indexable_attributes()): - varname = attrschema.type.upper() - restrictions.append('X %s %s' % (attrschema, varname)) - selected.append(varname) - while True: - q_restrictions = restrictions + ['X eid > %s' % start] - rset = req.execute('Any %s ORDERBY X LIMIT %s WHERE %s' % - (', '.join(selected), - limit, - ', '.join(q_restrictions))) - if rset: - start = rset[-1][0] - yield rset - else: - break - - # meta data api ########################################################### - - def dc_title(self): - """return a suitable *unicode* title for this entity""" - for rschema, attrschema in self.e_schema.attribute_definitions(): - if rschema.meta: - continue - value = self.cw_attr_value(rschema.type) - if value is not None: - # make the value printable (dates, floats, bytes, etc.) - return self.printable_value(rschema.type, value, attrschema.type, - format='text/plain') - return u'%s #%s' % (self.dc_type(), self.eid) - - def dc_long_title(self): - """return a more detailled title for this entity""" - return self.dc_title() - - def dc_description(self, format='text/plain'): - """return a suitable description for this entity""" - if 'description' in self.e_schema.subjrels: - return self.printable_value('description', format=format) - return u'' - - def dc_authors(self): - """return a suitable description for the author(s) of the entity""" - try: - return ', '.join(u.name() for u in self.owned_by) - except Unauthorized: - return u'' - - def dc_creator(self): - """return a suitable description for the creator of the entity""" - if self.creator: - return self.creator.name() - return u'' - - def dc_date(self, date_format=None):# XXX default to ISO 8601 ? - """return latest modification date of this entity""" - return self._cw.format_date(self.modification_date, date_format=date_format) - - def dc_type(self, form=''): - """return the display name for the type of this entity (translated)""" - return self.e_schema.display_name(self._cw, form) - - def dc_language(self): - """return language used by this entity (translated)""" - # check if entities has internationalizable attributes - # XXX one is enough or check if all String attributes are internationalizable? 
- for rschema, attrschema in self.e_schema.attribute_definitions(): - if rschema.rdef(self.e_schema, attrschema).internationalizable: - return self._cw._(self._cw.user.property_value('ui.language')) - return self._cw._(self._cw.vreg.property_value('ui.language')) - - @property - def creator(self): - """return the CWUser entity which has created this entity, or None if - unknown or if the curent user doesn't has access to this euser - """ - try: - return self.created_by[0] - except (Unauthorized, IndexError): - return None - - # abstractions making the whole things (well, some at least) working ###### - - def sortvalue(self, rtype=None): - """return a value which can be used to sort this entity or given - entity's attribute - """ - if rtype is None: - return self.dc_title().lower() - value = self.cw_attr_value(rtype) - # do not restrict to `unicode` because Bytes will return a `str` value - if isinstance(value, string_types): - return self.printable_value(rtype, format='text/plain').lower() - return value - - -def fetch_config(fetchattrs, mainattr=None, pclass=AnyEntity, order='ASC'): - """function to ease basic configuration of an entity class ORM. Basic usage - is: - - .. sourcecode:: python - - class MyEntity(AnyEntity): - - fetch_attrs, cw_fetch_order = fetch_config(['attr1', 'attr2']) - # uncomment line below if you want the same sorting for 'unrelated' entities - # cw_fetch_unrelated_order = cw_fetch_order - - Using this, when using ORM methods retrieving this type of entity, 'attr1' - and 'attr2' will be automatically prefetched and results will be sorted on - 'attr1' ascending (ie the first attribute in the list). - - This function will automatically add to fetched attributes those defined in - parent class given using the `pclass` argument. - - Also, You can use `mainattr` and `order` argument to have a different - sorting. - """ - if pclass is not None: - fetchattrs += pclass.fetch_attrs - if mainattr is None: - mainattr = fetchattrs[0] - @classmethod - def fetch_order(cls, select, attr, var): - if attr == mainattr: - select.add_sort_var(var, order=='ASC') - return fetchattrs, fetch_order diff -r 058bb3dc685f -r 0b59724cb3f2 entities/adapters.py --- a/entities/adapters.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,427 +0,0 @@ -# copyright 2010-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""some basic entity adapter implementations, for interfaces used in the -framework itself. 
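As a usage reminder (illustrative snippet), such adapters are looked up on an entity through cw_adapt_to() and may be absent, so callers should check for None:

.. sourcecode:: python

    adapter = entity.cw_adapt_to('IEmailable')
    if adapter is not None:
        address = adapter.get_email()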
-""" -from cubicweb import _ - -from itertools import chain -from hashlib import md5 - -from logilab.mtconverter import TransformError -from logilab.common.decorators import cached - -from cubicweb import ValidationError, view, ViolatedConstraint, UniqueTogetherError -from cubicweb.predicates import is_instance, relation_possible, match_exception - - -class IEmailableAdapter(view.EntityAdapter): - __regid__ = 'IEmailable' - __select__ = relation_possible('primary_email') | relation_possible('use_email') - - def get_email(self): - if getattr(self.entity, 'primary_email', None): - return self.entity.primary_email[0].address - if getattr(self.entity, 'use_email', None): - return self.entity.use_email[0].address - return None - - def allowed_massmail_keys(self): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but you might - override this method to allow extra keys. For instance, a Person - class might want to return a `companyname` key. - """ - return set(rschema.type - for rschema, attrtype in self.entity.e_schema.attribute_definitions() - if attrtype.type not in ('Password', 'Bytes')) - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. - """ - return dict((attr, getattr(self.entity, attr)) - for attr in self.allowed_massmail_keys()) - - -class INotifiableAdapter(view.EntityAdapter): - __regid__ = 'INotifiable' - __select__ = is_instance('Any') - - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message - identifiers of previously sent email(s) - """ - itree = self.entity.cw_adapt_to('ITree') - if itree is not None: - return itree.path()[:-1] - if view.msgid_timestamp: - return (self.entity.eid,) - return () - - -class IFTIndexableAdapter(view.EntityAdapter): - """standard adapter to handle fulltext indexing - - .. automethod:: cubicweb.entities.adapters.IFTIndexableAdapter.fti_containers - .. 
automethod:: cubicweb.entities.adapters.IFTIndexableAdapter.get_words - """ - __regid__ = 'IFTIndexable' - __select__ = is_instance('Any') - - def fti_containers(self, _done=None): - """return the list of entities to index when handling ``self.entity`` - - The actual list of entities depends on ``fulltext_container`` usage - in the datamodel definition - """ - if _done is None: - _done = set() - entity = self.entity - _done.add(entity.eid) - containers = tuple(entity.e_schema.fulltext_containers()) - if containers: - for rschema, role in containers: - if role == 'object': - targets = getattr(entity, rschema.type) - else: - targets = getattr(entity, 'reverse_%s' % rschema) - for target in targets: - if target.eid in _done: - continue - for container in target.cw_adapt_to('IFTIndexable').fti_containers(_done): - yield container - else: - yield entity - - # weight in ABCD - entity_weight = 1.0 - attr_weight = {} - - def get_words(self): - """used by the full text indexer to get words to index - - this method should only be used on the repository side since it depends - on the logilab.database package - - :rtype: list - :return: the list of indexable word of this entity - """ - from logilab.database.fti import tokenize - # take care to cases where we're modyfying the schema - entity = self.entity - pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) - words = {} - for rschema in entity.e_schema.indexable_attributes(): - if (entity.e_schema, rschema) in pending: - continue - weight = self.attr_weight.get(rschema, 'C') - try: - value = entity.printable_value(rschema, format=u'text/plain') - except TransformError: - continue - except Exception: - self.exception("can't add value of %s to text index for entity %s", - rschema, entity.eid) - continue - if value: - words.setdefault(weight, []).extend(tokenize(value)) - for rschema, role in entity.e_schema.fulltext_relations(): - if role == 'subject': - for entity_ in getattr(entity, rschema.type): - merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) - else: # if role == 'object': - for entity_ in getattr(entity, 'reverse_%s' % rschema.type): - merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) - return words - - -def merge_weight_dict(maindict, newdict): - for weight, words in newdict.items(): - maindict.setdefault(weight, []).extend(words) - - -class IDownloadableAdapter(view.EntityAdapter): - """interface for downloadable entities""" - __regid__ = 'IDownloadable' - __abstract__ = True - - def download_url(self, **kwargs): # XXX not really part of this interface - """return a URL to download entity's content - - It should be a unicode object containing url-encoded ASCII. - """ - raise NotImplementedError - - def download_content_type(self): - """return MIME type (unicode) of the downloadable content""" - raise NotImplementedError - - def download_encoding(self): - """return encoding (unicode) of the downloadable content""" - raise NotImplementedError - - def download_file_name(self): - """return file name (unicode) of the downloadable content""" - raise NotImplementedError - - def download_data(self): - """return actual data (bytes) of the downloadable content""" - raise NotImplementedError - - -# XXX should propose to use two different relations for children/parent -class ITreeAdapter(view.EntityAdapter): - """This adapter provides a tree interface. 
- - It has to be overriden to be configured using the tree_relation, - child_role and parent_role class attributes to benefit from this default - implementation. - - This class provides the following methods: - - .. automethod: iterparents - .. automethod: iterchildren - .. automethod: prefixiter - - .. automethod: is_leaf - .. automethod: is_root - - .. automethod: root - .. automethod: parent - .. automethod: children - .. automethod: different_type_children - .. automethod: same_type_children - .. automethod: children_rql - .. automethod: path - """ - __regid__ = 'ITree' - __abstract__ = True - - child_role = 'subject' - parent_role = 'object' - - def children_rql(self): - """Returns RQL to get the children of the entity.""" - return self.entity.cw_related_rql(self.tree_relation, self.parent_role) - - def different_type_children(self, entities=True): - """Return children entities of different type as this entity. - - According to the `entities` parameter, return entity objects or the - equivalent result set. - """ - res = self.entity.related(self.tree_relation, self.parent_role, - entities=entities) - eschema = self.entity.e_schema - if entities: - return [e for e in res if e.e_schema != eschema] - return res.filtered_rset(lambda x: x.e_schema != eschema, self.entity.cw_col) - - def same_type_children(self, entities=True): - """Return children entities of the same type as this entity. - - According to the `entities` parameter, return entity objects or the - equivalent result set. - """ - res = self.entity.related(self.tree_relation, self.parent_role, - entities=entities) - eschema = self.entity.e_schema - if entities: - return [e for e in res if e.e_schema == eschema] - return res.filtered_rset(lambda x: x.e_schema is eschema, self.entity.cw_col) - - def is_leaf(self): - """Returns True if the entity does not have any children.""" - return len(self.children()) == 0 - - def is_root(self): - """Returns true if the entity is root of the tree (e.g. has no parent). - """ - return self.parent() is None - - def root(self): - """Return the root entity of the tree.""" - return self._cw.entity_from_eid(self.path()[0]) - - def parent(self): - """Returns the parent entity if any, else None (e.g. if we are on the - root). - """ - try: - return self.entity.related(self.tree_relation, self.child_role, - entities=True)[0] - except (KeyError, IndexError): - return None - - def children(self, entities=True, sametype=False): - """Return children entities. - - According to the `entities` parameter, return entity objects or the - equivalent result set. 
- """ - if sametype: - return self.same_type_children(entities) - else: - return self.entity.related(self.tree_relation, self.parent_role, - entities=entities) - - def iterparents(self, strict=True): - """Return an iterator on the parents of the entity.""" - def _uptoroot(self): - curr = self - while True: - curr = curr.parent() - if curr is None: - break - yield curr - curr = curr.cw_adapt_to('ITree') - if not strict: - return chain([self.entity], _uptoroot(self)) - return _uptoroot(self) - - def iterchildren(self, _done=None): - """Return an iterator over the item's children.""" - if _done is None: - _done = set() - for child in self.children(): - if child.eid in _done: - self.error('loop in %s tree: %s', child.cw_etype.lower(), child) - continue - yield child - _done.add(child.eid) - - def prefixiter(self, _done=None): - """Return an iterator over the item's descendants in a prefixed order.""" - if _done is None: - _done = set() - if self.entity.eid in _done: - return - _done.add(self.entity.eid) - yield self.entity - for child in self.same_type_children(): - for entity in child.cw_adapt_to('ITree').prefixiter(_done): - yield entity - - @cached - def path(self): - """Returns the list of eids from the root object to this object.""" - path = [] - adapter = self - entity = adapter.entity - while entity is not None: - if entity.eid in path: - self.error('loop in %s tree: %s', entity.cw_etype.lower(), entity) - break - path.append(entity.eid) - try: - # check we are not jumping to another tree - if (adapter.tree_relation != self.tree_relation or - adapter.child_role != self.child_role): - break - entity = adapter.parent() - adapter = entity.cw_adapt_to('ITree') - except AttributeError: - break - path.reverse() - return path - - -class ISerializableAdapter(view.EntityAdapter): - """Adapter to serialize an entity to a bare python structure that may be - directly serialized to e.g. JSON. 
- """ - - __regid__ = 'ISerializable' - __select__ = is_instance('Any') - - def serialize(self): - entity = self.entity - entity.complete() - data = { - 'cw_etype': entity.cw_etype, - 'cw_source': entity.cw_metainformation()['source']['uri'], - 'eid': entity.eid, - } - for rschema, __ in entity.e_schema.attribute_definitions(): - attr = rschema.type - try: - value = entity.cw_attr_cache[attr] - except KeyError: - # Bytes - continue - data[attr] = value - return data - - -# error handling adapters ###################################################### - - -class IUserFriendlyError(view.EntityAdapter): - __regid__ = 'IUserFriendlyError' - __abstract__ = True - - def __init__(self, *args, **kwargs): - self.exc = kwargs.pop('exc') - super(IUserFriendlyError, self).__init__(*args, **kwargs) - - -class IUserFriendlyUniqueTogether(IUserFriendlyError): - __select__ = match_exception(UniqueTogetherError) - - def raise_user_exception(self): - rtypes = self.exc.rtypes - errors = {} - msgargs = {} - i18nvalues = [] - for rtype in rtypes: - errors[rtype] = _('%(KEY-rtype)s is part of violated unicity constraint') - msgargs[rtype + '-rtype'] = rtype - i18nvalues.append(rtype + '-rtype') - errors[''] = _('some relations violate a unicity constraint') - raise ValidationError(self.entity.eid, errors, msgargs=msgargs, i18nvalues=i18nvalues) - - -class IUserFriendlyCheckConstraint(IUserFriendlyError): - __select__ = match_exception(ViolatedConstraint) - - def raise_user_exception(self): - cstrname = self.exc.cstrname - eschema = self.entity.e_schema - for rschema, attrschema in eschema.attribute_definitions(): - rdef = rschema.rdef(eschema, attrschema) - for constraint in rdef.constraints: - if cstrname == 'cstr' + md5( - (eschema.type + rschema.type + constraint.type() + - (constraint.serialize() or '')).encode('ascii')).hexdigest(): - break - else: - continue - break - else: - assert 0 - key = rschema.type + '-subject' - msg, args = constraint.failed_message(key, self.entity.cw_edited[rschema.type]) - raise ValidationError(self.entity.eid, {key: msg}, args) diff -r 058bb3dc685f -r 0b59724cb3f2 entities/authobjs.py --- a/entities/authobjs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,188 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""entity classes user and group entities""" - -__docformat__ = "restructuredtext en" - -from six import string_types - -from logilab.common.decorators import cached - -from cubicweb import Unauthorized -from cubicweb.entities import AnyEntity, fetch_config - -class CWGroup(AnyEntity): - __regid__ = 'CWGroup' - fetch_attrs, cw_fetch_order = fetch_config(['name']) - cw_fetch_unrelated_order = cw_fetch_order - - def dc_long_title(self): - name = self.name - trname = self._cw._(name) - if trname != name: - return '%s (%s)' % (name, trname) - return name - - @cached - def num_users(self): - """return the number of users in this group""" - return self._cw.execute('Any COUNT(U) WHERE U in_group G, G eid %(g)s', - {'g': self.eid})[0][0] - - -class CWUser(AnyEntity): - __regid__ = 'CWUser' - fetch_attrs, cw_fetch_order = fetch_config(['login', 'firstname', 'surname']) - cw_fetch_unrelated_order = cw_fetch_order - - # used by repository to check if the user can log in or not - AUTHENTICABLE_STATES = ('activated',) - - # low level utilities ##################################################### - def __init__(self, *args, **kwargs): - groups = kwargs.pop('groups', None) - properties = kwargs.pop('properties', None) - super(CWUser, self).__init__(*args, **kwargs) - if groups is not None: - self._groups = groups - if properties is not None: - self._properties = properties - - @property - def groups(self): - try: - return self._groups - except AttributeError: - self._groups = set(g.name for g in self.in_group) - return self._groups - - @property - def properties(self): - try: - return self._properties - except AttributeError: - self._properties = dict( - self._cw.execute( - 'Any K, V WHERE P for_user U, U eid %(userid)s, ' - 'P pkey K, P value V', - {'userid': self.eid})) - return self._properties - - def prefered_language(self, language=None): - """return language used by this user, if explicitly defined (eg not - using http negociation) - """ - language = language or self.property_value('ui.language') - vreg = self._cw.vreg - try: - vreg.config.translations[language] - except KeyError: - language = vreg.property_value('ui.language') - assert language in vreg.config.translations[language], language - return language - - def property_value(self, key): - try: - # properties stored on the user aren't correctly typed - # (e.g. 
all values are unicode string) - return self._cw.vreg.typed_value(key, self.properties[key]) - except KeyError: - pass - except ValueError: - self.warning('incorrect value for eproperty %s of user %s', - key, self.login) - return self._cw.vreg.property_value(key) - - def set_property(self, pkey, value): - value = unicode(value) - try: - prop = self._cw.execute( - 'CWProperty X WHERE X pkey %(k)s, X for_user U, U eid %(u)s', - {'k': pkey, 'u': self.eid}).get_entity(0, 0) - except Exception: - kwargs = dict(pkey=unicode(pkey), value=value) - if self.is_in_group('managers'): - kwargs['for_user'] = self - self._cw.create_entity('CWProperty', **kwargs) - else: - prop.cw_set(value=value) - - def matching_groups(self, groups): - """return the number of the given group(s) in which the user is - - :type groups: str or iterable(str) - :param groups: a group name or an iterable on group names - """ - if isinstance(groups, string_types): - groups = frozenset((groups,)) - elif isinstance(groups, (tuple, list)): - groups = frozenset(groups) - return len(groups & self.groups) # XXX return the resulting set instead of its size - - def is_in_group(self, group): - """convience / shortcut method to test if the user belongs to `group` - """ - return group in self.groups - - def is_anonymous(self): - """ checks if user is an anonymous user""" - #FIXME on the web-side anonymous user is detected according - # to config['anonymous-user'], we don't have this info on - # the server side. - return self.groups == frozenset(('guests', )) - - def owns(self, eid): - try: - return self._cw.execute( - 'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - {'x': eid, 'u': self.eid}) - except Unauthorized: - return False - owns = cached(owns, keyarg=1) - - # presentation utilities ################################################## - - def name(self): - """construct a name using firstname / surname or login if not defined""" - - if self.firstname and self.surname: - return self._cw._('%(firstname)s %(surname)s') % { - 'firstname': self.firstname, 'surname' : self.surname} - if self.firstname: - return self.firstname - return self.login - - def dc_title(self): - return self.login - - dc_long_title = name - - def __call__(self, *args, **kwargs): - """ugly hack for compatibility betweeb dbapi and repo api - - In the dbapi, Connection and Session have a ``user`` method to - generated a user for a request In the repo api, Connection and Session - have a user attribute inherited from SessionRequestBase prototype. This - ugly hack allows to not break user of the user method. - - XXX Deprecate me ASAP""" - return self - -from logilab.common.deprecation import class_renamed -EUser = class_renamed('EUser', CWUser) -EGroup = class_renamed('EGroup', CWGroup) diff -r 058bb3dc685f -r 0b59724cb3f2 entities/lib.py --- a/entities/lib.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,149 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""entity classes for optional library entities""" - -__docformat__ = "restructuredtext en" -from warnings import warn -from datetime import datetime - -from six.moves import range -from six.moves.urllib.parse import urlsplit, urlunsplit - -from logilab.mtconverter import xml_escape - -from cubicweb import UnknownProperty -from cubicweb.entity import _marker -from cubicweb.entities import AnyEntity, fetch_config - -def mangle_email(address): - try: - name, host = address.split('@', 1) - except ValueError: - return address - return '%s at %s' % (name, host.replace('.', ' dot ')) - - -class EmailAddress(AnyEntity): - __regid__ = 'EmailAddress' - fetch_attrs, cw_fetch_order = fetch_config(['address', 'alias']) - rest_attr = 'eid' - - def dc_title(self): - if self.alias: - return '%s <%s>' % (self.alias, self.display_address()) - return self.display_address() - - @property - def email_of(self): - return self.reverse_use_email and self.reverse_use_email[0] or None - - @property - def prefered(self): - return self.prefered_form and self.prefered_form[0] or self - - def related_emails(self, skipeids=None): - # XXX move to eemail - # check email relations are in the schema first - subjrels = self.e_schema.object_relations() - if not ('sender' in subjrels and 'recipients' in subjrels): - return - rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC ' - 'WHERE X sender Y or X recipients Y, ' - 'X subject S, X date D, Y eid %(y)s', - {'y': self.eid}) - if skipeids is None: - skipeids = set() - for i in range(len(rset)): - eid = rset[i][0] - if eid in skipeids: - continue - skipeids.add(eid) - yield rset.get_entity(i, 0) - - def display_address(self): - if self._cw.vreg.config['mangle-emails']: - return mangle_email(self.address) - return self.address - - def printable_value(self, attr, value=_marker, attrtype=None, - format='text/html'): - """overriden to return displayable address when necessary""" - if attr == 'address': - address = self.display_address() - if format == 'text/html': - address = xml_escape(address) - return address - return super(EmailAddress, self).printable_value(attr, value, attrtype, format) - - -class Bookmark(AnyEntity): - """customized class for Bookmark entities""" - __regid__ = 'Bookmark' - fetch_attrs, cw_fetch_order = fetch_config(['title', 'path']) - - def actual_url(self): - url = self._cw.build_url(self.path) - if self.title: - urlparts = list(urlsplit(url)) - if urlparts[3]: - urlparts[3] += '&vtitle=%s' % self._cw.url_quote(self.title) - else: - urlparts[3] = 'vtitle=%s' % self._cw.url_quote(self.title) - url = urlunsplit(urlparts) - return url - - def action_url(self): - return self.absolute_url() + '/follow' - - -class CWProperty(AnyEntity): - __regid__ = 'CWProperty' - - fetch_attrs, cw_fetch_order = fetch_config(['pkey', 'value']) - rest_attr = 'pkey' - - def typed_value(self): - return self._cw.vreg.typed_value(self.pkey, self.value) - - def dc_description(self, format='text/plain'): - try: - return self._cw._(self._cw.vreg.property_info(self.pkey)['help']) - except UnknownProperty: - return u'' - - -class CWCache(AnyEntity): - """Cache""" - __regid__ = 'CWCache' - fetch_attrs, cw_fetch_order = fetch_config(['name']) - - def __init__(self, *args, **kwargs): - warn('[3.19] CWCache entity type is going away soon. 
' - 'Other caching mechanisms can be used more reliably ' - 'to the same effect.', - DeprecationWarning) - super(CWCache, self).__init__(*args, **kwargs) - - def touch(self): - self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s', - {'t': datetime.now(), 'x': self.eid}) - - def valid(self, date): - if date: - return date > self.timestamp - return False diff -r 058bb3dc685f -r 0b59724cb3f2 entities/schemaobjs.py --- a/entities/schemaobjs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,178 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""schema definition related entities""" - -__docformat__ = "restructuredtext en" - -from logilab.common.decorators import cached - -from yams.schema import role_name - -from cubicweb import ValidationError -from cubicweb.schema import ERQLExpression, RRQLExpression - -from cubicweb.entities import AnyEntity, fetch_config - - -class CWEType(AnyEntity): - __regid__ = 'CWEType' - fetch_attrs, cw_fetch_order = fetch_config(['name']) - - def dc_title(self): - return u'%s (%s)' % (self.name, self._cw._(self.name)) - - def dc_long_title(self): - stereotypes = [] - _ = self._cw._ - if self.final: - stereotypes.append(_('final')) - if stereotypes: - return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes)) - return self.dc_title() - - -class CWRType(AnyEntity): - __regid__ = 'CWRType' - fetch_attrs, cw_fetch_order = fetch_config(['name']) - - def dc_title(self): - return u'%s (%s)' % (self.name, self._cw._(self.name)) - - def dc_long_title(self): - stereotypes = [] - _ = self._cw._ - if self.symmetric: - stereotypes.append(_('symmetric')) - if self.inlined: - stereotypes.append(_('inlined')) - if self.final: - stereotypes.append(_('final')) - if stereotypes: - return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes)) - return self.dc_title() - - def check_inlined_allowed(self): - """check inlining is possible, raise ValidationError if not possible - """ - # don't use the persistent schema, we may miss cardinality changes - # in the same transaction - for rdef in self.reverse_relation_type: - card = rdef.cardinality[0] - if not card in '?1': - qname = role_name('inlined', 'subject') - rtype = self.name - stype = rdef.stype - otype = rdef.otype - msg = self._cw._("can't set inlined=True, " - "%(stype)s %(rtype)s %(otype)s " - "has cardinality=%(card)s") - raise ValidationError(self.eid, {qname: msg % locals()}) - - -class CWRelation(AnyEntity): - __regid__ = 'CWRelation' - fetch_attrs = fetch_config(['cardinality'])[0] - - def dc_title(self): - return u'%s %s %s' % ( - self.from_entity[0].name, - self.relation_type[0].name, - self.to_entity[0].name) - - def dc_long_title(self): - card = self.cardinality - scard, ocard = u'', u'' - if card[0] != '1': - scard = '[%s]' 
% card[0] - if card[1] != '1': - ocard = '[%s]' % card[1] - return u'%s %s%s%s %s' % ( - self.from_entity[0].name, - scard, self.relation_type[0].name, ocard, - self.to_entity[0].name) - - @property - def rtype(self): - return self.relation_type[0] - - @property - def stype(self): - return self.from_entity[0] - - @property - def otype(self): - return self.to_entity[0] - - def yams_schema(self): - rschema = self._cw.vreg.schema.rschema(self.rtype.name) - return rschema.rdefs[(self.stype.name, self.otype.name)] - - -class CWAttribute(CWRelation): - __regid__ = 'CWAttribute' - - def dc_long_title(self): - card = self.cardinality - scard = u'' - if card[0] == '1': - scard = '+' - return u'%s %s%s %s' % ( - self.from_entity[0].name, - scard, self.relation_type[0].name, - self.to_entity[0].name) - - -class CWConstraint(AnyEntity): - __regid__ = 'CWConstraint' - fetch_attrs, cw_fetch_order = fetch_config(['value']) - - def dc_title(self): - return '%s(%s)' % (self.cstrtype[0].name, self.value or u'') - - @property - def type(self): - return self.cstrtype[0].name - - -class RQLExpression(AnyEntity): - __regid__ = 'RQLExpression' - fetch_attrs, cw_fetch_order = fetch_config(['exprtype', 'mainvars', 'expression']) - - def dc_title(self): - return self.expression or u'' - - def dc_long_title(self): - return '%s(%s)' % (self.exprtype, self.expression or u'') - - @property - def expression_of(self): - for rel in ('read_permission', 'add_permission', 'delete_permission', - 'update_permission', 'condition'): - values = getattr(self, 'reverse_%s' % rel) - if values: - return values[0] - - @cached - def _rqlexpr(self): - if self.exprtype == 'ERQLExpression': - return ERQLExpression(self.expression, self.mainvars, self.eid) - #if self.exprtype == 'RRQLExpression': - return RRQLExpression(self.expression, self.mainvars, self.eid) - - def check_expression(self, *args, **kwargs): - return self._rqlexpr().check(*args, **kwargs) diff -r 058bb3dc685f -r 0b59724cb3f2 entities/sources.py --- a/entities/sources.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,184 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""data source related entities""" - -__docformat__ = "restructuredtext en" - -import re -from socket import gethostname -import logging - -from logilab.common.textutils import text_to_dict -from logilab.common.configuration import OptionError -from logilab.mtconverter import xml_escape - -from cubicweb.entities import AnyEntity, fetch_config - -class _CWSourceCfgMixIn(object): - @property - def dictconfig(self): - return self.config and text_to_dict(self.config) or {} - - def update_config(self, skip_unknown=False, **config): - from cubicweb.server import SOURCE_TYPES - from cubicweb.server.serverconfig import (SourceConfiguration, - generate_source_config) - cfg = self.dictconfig - cfg.update(config) - options = SOURCE_TYPES[self.type].options - sconfig = SourceConfiguration(self._cw.vreg.config, options=options) - for opt, val in cfg.items(): - try: - sconfig.set_option(opt, val) - except OptionError: - if skip_unknown: - continue - raise - cfgstr = unicode(generate_source_config(sconfig), self._cw.encoding) - self.cw_set(config=cfgstr) - - -class CWSource(_CWSourceCfgMixIn, AnyEntity): - __regid__ = 'CWSource' - fetch_attrs, cw_fetch_order = fetch_config(['name', 'type']) - - @property - def host_config(self): - dictconfig = self.dictconfig - host = gethostname() - for hostcfg in self.host_configs: - if hostcfg.match(host): - self.info('matching host config %s for source %s', - hostcfg.match_host, self.name) - dictconfig.update(hostcfg.dictconfig) - return dictconfig - - @property - def host_configs(self): - return self.reverse_cw_host_config_of - - def init_mapping(self, mapping): - for key, options in mapping: - if isinstance(key, tuple): # relation definition - assert len(key) == 3 - restrictions = ['X relation_type RT, RT name %(rt)s'] - kwargs = {'rt': key[1]} - if key[0] != '*': - restrictions.append('X from_entity FT, FT name %(ft)s') - kwargs['ft'] = key[0] - if key[2] != '*': - restrictions.append('X to_entity TT, TT name %(tt)s') - kwargs['tt'] = key[2] - rql = 'Any X WHERE %s' % ','.join(restrictions) - schemarset = self._cw.execute(rql, kwargs) - elif key[0].isupper(): # entity type - schemarset = self._cw.execute('CWEType X WHERE X name %(et)s', - {'et': key}) - else: # relation type - schemarset = self._cw.execute('CWRType X WHERE X name %(rt)s', - {'rt': key}) - for schemaentity in schemarset.entities(): - self._cw.create_entity('CWSourceSchemaConfig', - cw_for_source=self, - cw_schema=schemaentity, - options=options) - - @property - def repo_source(self): - """repository only property, not available from the web side (eg - self._cw is expected to be a server session) - """ - return self._cw.repo.sources_by_eid[self.eid] - - -class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity): - __regid__ = 'CWSourceHostConfig' - fetch_attrs, cw_fetch_order = fetch_config(['match_host', 'config']) - - @property - def cwsource(self): - return self.cw_host_config_of[0] - - def match(self, hostname): - return re.match(self.match_host, hostname) - - -class CWSourceSchemaConfig(AnyEntity): - __regid__ = 'CWSourceSchemaConfig' - fetch_attrs, cw_fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options']) - - def dc_title(self): - return self._cw._(self.cw_etype) + ' #%s' % self.eid - - @property - def schema(self): - return self.cw_schema[0] - - @property - def cwsource(self): - return self.cw_for_source[0] - - -class CWDataImport(AnyEntity): - __regid__ = 'CWDataImport' - repo_source = _logs = None # please pylint - - def init(self): - self._logs = [] - self.repo_source = 
self.cwsource.repo_source - - def dc_title(self): - return '%s [%s]' % (self.printable_value('start_timestamp'), - self.printable_value('status')) - - @property - def cwsource(self): - return self.cw_import_of[0] - - def record_debug(self, msg, path=None, line=None): - self._log(logging.DEBUG, msg, path, line) - self.repo_source.debug(msg) - - def record_info(self, msg, path=None, line=None): - self._log(logging.INFO, msg, path, line) - self.repo_source.info(msg) - - def record_warning(self, msg, path=None, line=None): - self._log(logging.WARNING, msg, path, line) - self.repo_source.warning(msg) - - def record_error(self, msg, path=None, line=None): - self._status = u'failed' - self._log(logging.ERROR, msg, path, line) - self.repo_source.error(msg) - - def record_fatal(self, msg, path=None, line=None): - self._status = u'failed' - self._log(logging.FATAL, msg, path, line) - self.repo_source.fatal(msg) - - def _log(self, severity, msg, path=None, line=None): - encodedmsg = u'%s\t%s\t%s\t%s
      ' % (severity, path or u'', - line or u'', xml_escape(msg)) - self._logs.append(encodedmsg) - - def write_log(self, session, **kwargs): - if 'status' not in kwargs: - kwargs['status'] = getattr(self, '_status', u'success') - self.cw_set(log=u'
      '.join(self._logs), **kwargs) - self._logs = [] diff -r 058bb3dc685f -r 0b59724cb3f2 entities/test/data/migration/postcreate.py --- a/entities/test/data/migration/postcreate.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,19 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -wf = add_workflow(u'bmk wf', 'Bookmark') -wf.add_state(u'hop', initial=True) diff -r 058bb3dc685f -r 0b59724cb3f2 entities/test/data/schema.py --- a/entities/test/data/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""entities tests schema""" - -from yams.buildobjs import EntityType, String, RichString, Int -from cubicweb.schema import make_workflowable - -class Company(EntityType): - order = Int() - name = String() - description = RichString() - -class Division(Company): - __specializes_schema__ = True - -class SubDivision(Division): - __specializes_schema__ = True - - -from cubicweb.schemas import bootstrap, Bookmark -make_workflowable(bootstrap.CWGroup) -make_workflowable(Bookmark.Bookmark) diff -r 058bb3dc685f -r 0b59724cb3f2 entities/test/requirements.txt --- a/entities/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -docutils diff -r 058bb3dc685f -r 0b59724cb3f2 entities/test/unittest_base.py --- a/entities/test/unittest_base.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for cubicweb.entities.base module -""" - -from logilab.common.testlib import unittest_main -from logilab.common.decorators import clear_cache -from logilab.common.registry import yes - -from cubicweb.devtools.testlib import CubicWebTC - -from cubicweb.entities import AnyEntity - - -class BaseEntityTC(CubicWebTC): - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.membereid = self.create_user(cnx, 'member').eid - cnx.commit() - - -class MetadataTC(BaseEntityTC): - - def test_creator(self): - with self.new_access('member').repo_cnx() as cnx: - entity = cnx.create_entity('Bookmark', title=u"hello", path=u'project/cubicweb') - cnx.commit() - self.assertEqual(entity.creator.eid, self.membereid) - self.assertEqual(entity.dc_creator(), u'member') - - def test_type(self): - # dc_type may be translated - with self.admin_access.client_cnx() as cnx: - member = cnx.entity_from_eid(self.membereid) - self.assertEqual(member.dc_type(), 'CWUser') - - def test_cw_etype(self): - # cw_etype is never translated - with self.admin_access.client_cnx() as cnx: - member = cnx.entity_from_eid(self.membereid) - self.assertEqual(member.cw_etype, 'CWUser') - - def test_entity_meta_attributes(self): - # XXX move to yams - self.assertEqual(self.schema['CWUser'].meta_attributes(), {}) - self.assertEqual(dict((str(k), v) - for k, v in self.schema['State'].meta_attributes().items()), - {'description_format': ('format', 'description')}) - - def test_fti_rql_method(self): - class EmailAddress(AnyEntity): - __regid__ = 'EmailAddress' - __select__ = AnyEntity.__select__ & yes(2) - - @classmethod - def cw_fti_index_rql_queries(cls, req): - return ['EmailAddress Y'] - - with self.admin_access.web_request() as req: - req.create_entity('EmailAddress', address=u'foo@bar.com') - eclass = self.vreg['etypes'].etype_class('EmailAddress') - # deprecated - self.assertEqual(['Any X, ADDRESS, ALIAS WHERE X is EmailAddress, ' - 'X address ADDRESS, X alias ALIAS'], - eclass.cw_fti_index_rql_queries(req)) - - self.assertEqual(['Any X, ADDRESS, ALIAS ORDERBY X LIMIT 1000 WHERE X is EmailAddress, ' - 'X address ADDRESS, X alias ALIAS, X eid > 0'], - [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)]) - - # test backwards compatibility with custom method - with self.temporary_appobjects(EmailAddress): - self.vreg['etypes'].clear_caches() - eclass = self.vreg['etypes'].etype_class('EmailAddress') - self.assertEqual(['EmailAddress Y'], - [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)]) - - -class EmailAddressTC(BaseEntityTC): - - def test_canonical_form(self): - with self.admin_access.repo_cnx() as cnx: - email1 = cnx.execute('INSERT EmailAddress X: ' - 'X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) - email2 = cnx.execute('INSERT EmailAddress X: ' - 'X address "maarten@philips.com"').get_entity(0, 0) - email3 = cnx.execute('INSERT EmailAddress X: ' - 'X address "toto@logilab.fr"').get_entity(0, 0) - email1.cw_set(prefered_form=email2) - self.assertEqual(email1.prefered.eid, email2.eid) - self.assertEqual(email2.prefered.eid, email2.eid) - self.assertEqual(email3.prefered.eid, email3.eid) - - def 
test_mangling(self): - query = 'INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"' - with self.admin_access.repo_cnx() as cnx: - email = cnx.execute(query).get_entity(0, 0) - self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com') - self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com') - self.vreg.config.global_set_option('mangle-emails', True) - try: - self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com') - self.assertEqual(email.printable_value('address'), - 'maarten.ter.huurne at philips dot com') - email = cnx.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0) - self.assertEqual(email.display_address(), 'syt') - self.assertEqual(email.printable_value('address'), 'syt') - finally: - self.vreg.config.global_set_option('mangle-emails', False) - - def test_printable_value_escape(self): - with self.admin_access.repo_cnx() as cnx: - email = cnx.execute('INSERT EmailAddress X: ' - 'X address "maarten&ter@philips.com"').get_entity(0, 0) - self.assertEqual(email.printable_value('address'), - 'maarten&ter@philips.com') - self.assertEqual(email.printable_value('address', format='text/plain'), - 'maarten&ter@philips.com') - - -class CWUserTC(BaseEntityTC): - - def test_complete(self): - with self.admin_access.repo_cnx() as cnx: - e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) - e.complete() - - def test_matching_groups(self): - with self.admin_access.repo_cnx() as cnx: - e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) - self.assertTrue(e.matching_groups('managers')) - self.assertFalse(e.matching_groups('xyz')) - self.assertTrue(e.matching_groups(('xyz', 'managers'))) - self.assertFalse(e.matching_groups(('xyz', 'abcd'))) - - def test_dc_title_and_name(self): - with self.admin_access.repo_cnx() as cnx: - e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - self.assertEqual(e.dc_title(), 'member') - self.assertEqual(e.name(), 'member') - e.cw_set(firstname=u'bouah') - self.assertEqual(e.dc_title(), 'member') - self.assertEqual(e.name(), u'bouah') - e.cw_set(surname=u'lôt') - self.assertEqual(e.dc_title(), 'member') - self.assertEqual(e.name(), u'bouah lôt') - - def test_falsey_dc_title(self): - with self.admin_access.repo_cnx() as cnx: - e = cnx.create_entity('Company', order=0, name=u'pythonian') - cnx.commit() - self.assertEqual(u'0', e.dc_title()) - - def test_allowed_massmail_keys(self): - with self.admin_access.repo_cnx() as cnx: - e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - # Bytes/Password attributes should be omitted - self.assertEqual( - e.cw_adapt_to('IEmailable').allowed_massmail_keys(), - set(('surname', 'firstname', 'login', 'last_login_time', - 'creation_date', 'modification_date', 'cwuri', 'eid')) - ) - - def test_cw_instantiate_object_relation(self): - """ a weird non regression test """ - with self.admin_access.repo_cnx() as cnx: - e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - cnx.create_entity('CWGroup', name=u'logilab', reverse_in_group=e) - - -class HTMLtransformTC(BaseEntityTC): - - def test_sanitized_html(self): - with self.admin_access.repo_cnx() as cnx: - c = cnx.create_entity('Company', name=u'Babar', - description=u""" -Title -===== - -Elephant management best practices. - -.. 
raw:: html - - -""", description_format=u'text/rest') - cnx.commit() - c.cw_clear_all_caches() - self.assertIn('alert', - c.printable_value('description', format='text/plain')) - self.assertNotIn('alert', - c.printable_value('description', format='text/html')) - - -class SpecializedEntityClassesTC(CubicWebTC): - - def select_eclass(self, etype): - # clear selector cache - clear_cache(self.vreg['etypes'], 'etype_class') - return self.vreg['etypes'].etype_class(etype) - - def test_etype_class_selection_and_specialization(self): - # no specific class for Subdivisions, the default one should be selected - eclass = self.select_eclass('SubDivision') - self.assertTrue(eclass.__autogenerated__) - # self.assertEqual(eclass.__bases__, (AnyEntity,)) - # build class from most generic to most specific and make - # sure the most specific is always selected - self.vreg._loadedmods[__name__] = {} - for etype in ('Company', 'Division', 'SubDivision'): - class Foo(AnyEntity): - __regid__ = etype - self.vreg.register(Foo) - eclass = self.select_eclass('SubDivision') - self.assertTrue(eclass.__autogenerated__) - self.assertFalse(eclass is Foo) - if etype == 'SubDivision': - self.assertEqual(eclass.__bases__, (Foo,)) - else: - self.assertEqual(eclass.__bases__[0].__bases__, (Foo,)) - # check Division eclass is still selected for plain Division entities - eclass = self.select_eclass('Division') - self.assertEqual(eclass.cw_etype, 'Division') - - -class ISerializableTC(CubicWebTC): - - def test_serialization(self): - with self.admin_access.repo_cnx() as cnx: - entity = cnx.create_entity('CWGroup', name=u'tmp') - cnx.commit() - serializer = entity.cw_adapt_to('ISerializable') - expected = { - 'cw_etype': u'CWGroup', - 'cw_source': 'system', - 'eid': entity.eid, - 'cwuri': u'http://testing.fr/cubicweb/%s' % entity.eid, - 'creation_date': entity.creation_date, - 'modification_date': entity.modification_date, - 'name': u'tmp', - } - self.assertEqual(serializer.serialize(), expected) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,705 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from cubicweb import ValidationError -from cubicweb.devtools.testlib import CubicWebTC - -def add_wf(shell, etype, name=None, default=False): - if name is None: - name = etype - return shell.add_workflow(name, etype, default=default, - ensure_workflowable=False) - -def parse_hist(wfhist): - return [(ti.previous_state.name, ti.new_state.name, - ti.transition and ti.transition.name, ti.comment) - for ti in wfhist] - - -class WorkflowBuildingTC(CubicWebTC): - - def test_wf_construction(self): - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'Company') - foo = wf.add_state(u'foo', initial=True) - bar = wf.add_state(u'bar') - self.assertEqual(wf.state_by_name('bar').eid, bar.eid) - self.assertEqual(wf.state_by_name('barrr'), None) - baz = wf.add_transition(u'baz', (foo,), bar, ('managers',)) - self.assertEqual(wf.transition_by_name('baz').eid, baz.eid) - self.assertEqual(len(baz.require_group), 1) - self.assertEqual(baz.require_group[0].name, 'managers') - - def test_duplicated_state(self): - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'Company') - wf.add_state(u'foo', initial=True) - shell.commit() - with self.assertRaises(ValidationError) as cm: - wf.add_state(u'foo') - self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', - 'state_of': u'%(KEY-rtype)s is part of violated unicity constraint', - '': u'some relations violate a unicity constraint'}, - cm.exception.errors) - shell.rollback() - # no pb if not in the same workflow - wf2 = add_wf(shell, 'Company') - foo = wf2.add_state(u'foo', initial=True) - shell.commit() - # gnark gnark - bar = wf.add_state(u'bar') - shell.commit() - with self.assertRaises(ValidationError) as cm: - bar.cw_set(name=u'foo') - shell.rollback() - self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', - 'state_of': u'%(KEY-rtype)s is part of violated unicity constraint', - '': u'some relations violate a unicity constraint'}, - cm.exception.errors) - - def test_duplicated_transition(self): - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'Company') - foo = wf.add_state(u'foo', initial=True) - bar = wf.add_state(u'bar') - wf.add_transition(u'baz', (foo,), bar, ('managers',)) - with self.assertRaises(ValidationError) as cm: - wf.add_transition(u'baz', (bar,), foo) - self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', - 'transition_of': u'%(KEY-rtype)s is part of violated unicity constraint', - '': u'some relations violate a unicity constraint'}, - cm.exception.errors) - shell.rollback() - # no pb if not in the same workflow - wf2 = add_wf(shell, 'Company') - foo = wf2.add_state(u'foo', initial=True) - bar = wf2.add_state(u'bar') - wf2.add_transition(u'baz', (foo,), bar, ('managers',)) - shell.commit() - # gnark gnark - biz = wf2.add_transition(u'biz', (bar,), foo) - shell.commit() - with self.assertRaises(ValidationError) as cm: - biz.cw_set(name=u'baz') - shell.rollback() - self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint', - 'transition_of': u'%(KEY-rtype)s is part of violated unicity constraint', - '': u'some relations violate a unicity constraint'}, - cm.exception.errors) - - -class WorkflowTC(CubicWebTC): - - def setup_database(self): - rschema = self.schema['in_state'] - for rdef in rschema.rdefs.values(): - self.assertEqual(rdef.cardinality, '1*') - with self.admin_access.client_cnx() as cnx: - self.member_eid = self.create_user(cnx, 'member').eid - cnx.commit() - - def test_workflow_base(self): 
- with self.admin_access.web_request() as req: - e = self.create_user(req, 'toto') - iworkflowable = e.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'activated') - iworkflowable.change_state('deactivated', u'deactivate 1') - req.cnx.commit() - iworkflowable.change_state('activated', u'activate 1') - req.cnx.commit() - iworkflowable.change_state('deactivated', u'deactivate 2') - req.cnx.commit() - e.cw_clear_relation_cache('wf_info_for', 'object') - self.assertEqual([tr.comment for tr in e.reverse_wf_info_for], - ['deactivate 1', 'activate 1', 'deactivate 2']) - self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2') - - def test_possible_transitions(self): - with self.admin_access.web_request() as req: - user = req.execute('CWUser X').get_entity(0, 0) - iworkflowable = user.cw_adapt_to('IWorkflowable') - trs = list(iworkflowable.possible_transitions()) - self.assertEqual(len(trs), 1) - self.assertEqual(trs[0].name, u'deactivate') - self.assertEqual(trs[0].destination(None).name, u'deactivated') - # test a std user get no possible transition - with self.new_access('member').web_request() as req: - # fetch the entity using the new session - trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions()) - self.assertEqual(len(trs), 0) - - def _test_manager_deactivate(self, user): - iworkflowable = user.cw_adapt_to('IWorkflowable') - user.cw_clear_relation_cache('in_state', 'subject') - self.assertEqual(len(user.in_state), 1) - self.assertEqual(iworkflowable.state, 'deactivated') - trinfo = iworkflowable.latest_trinfo() - self.assertEqual(trinfo.previous_state.name, 'activated') - self.assertEqual(trinfo.new_state.name, 'deactivated') - self.assertEqual(trinfo.comment, 'deactivate user') - self.assertEqual(trinfo.comment_format, 'text/plain') - return trinfo - - def test_change_state(self): - with self.admin_access.client_cnx() as cnx: - user = cnx.user - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.change_state('deactivated', comment=u'deactivate user') - trinfo = self._test_manager_deactivate(user) - self.assertEqual(trinfo.transition, None) - - def test_set_in_state_bad_wf(self): - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWUser') - s = wf.add_state(u'foo', initial=True) - shell.commit() - with self.admin_access.repo_cnx() as cnx: - with cnx.security_enabled(write=False): - with self.assertRaises(ValidationError) as cm: - cnx.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': cnx.user.eid, 's': s.eid}) - self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. 
" - "You may want to set a custom workflow for this entity first."}) - - def test_fire_transition(self): - with self.admin_access.client_cnx() as cnx: - user = cnx.user - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate', comment=u'deactivate user') - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'deactivated') - self._test_manager_deactivate(user) - trinfo = self._test_manager_deactivate(user) - self.assertEqual(trinfo.transition.name, 'deactivate') - - def test_goback_transition(self): - with self.admin_access.web_request() as req: - wf = req.user.cw_adapt_to('IWorkflowable').current_workflow - asleep = wf.add_state('asleep') - wf.add_transition('rest', (wf.state_by_name('activated'), - wf.state_by_name('deactivated')), - asleep) - wf.add_transition('wake up', asleep) - user = self.create_user(req, 'stduser') - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('rest') - req.cnx.commit() - iworkflowable.fire_transition('wake up') - req.cnx.commit() - self.assertEqual(iworkflowable.state, 'activated') - iworkflowable.fire_transition('deactivate') - req.cnx.commit() - iworkflowable.fire_transition('rest') - req.cnx.commit() - iworkflowable.fire_transition('wake up') - req.cnx.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'deactivated') - - # XXX test managers can change state without matching transition - - def _test_stduser_deactivate(self): - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, 'tutu') - with self.new_access('tutu').web_request() as req: - iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable') - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('deactivate') - self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) - with self.new_access('member').web_request() as req: - iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - req.cnx.commit() - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('activate') - self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) - - def test_fire_transition_owned_by(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' - 'X expression "X owned_by U", T condition X ' - 'WHERE T name "deactivate"') - cnx.commit() - self._test_stduser_deactivate() - - def test_fire_transition_has_update_perm(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' - 'X expression "U has_update_permission X", T condition X ' - 'WHERE T name "deactivate"') - cnx.commit() - self._test_stduser_deactivate() - - def test_swf_base(self): - """subworkflow - - +-----------+ tr1 +-----------+ - | swfstate1 | ------>| swfstate2 | - +-----------+ +-----------+ - | tr2 +-----------+ - `------>| swfstate3 | - +-----------+ - - main workflow - - +--------+ swftr1 +--------+ - | state1 | -------[swfstate2]->| state2 | - +--------+ | +--------+ - | +--------+ - `-[swfstate3]-->| state3 | - +--------+ - """ - # sub-workflow - with self.admin_access.shell() as shell: - swf = add_wf(shell, 'CWGroup', name='subworkflow') - swfstate1 = swf.add_state(u'swfstate1', initial=True) - swfstate2 = swf.add_state(u'swfstate2') - swfstate3 = swf.add_state(u'swfstate3') - tr1 = 
swf.add_transition(u'tr1', (swfstate1,), swfstate2) - tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3) - # main workflow - mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True) - state1 = mwf.add_state(u'state1', initial=True) - state2 = mwf.add_state(u'state2') - state3 = mwf.add_state(u'state3') - swftr1 = mwf.add_wftransition(u'swftr1', swf, state1, - [(swfstate2, state2), (swfstate3, state3)]) - swf.cw_clear_all_caches() - self.assertEqual(swftr1.destination(None).eid, swfstate1.eid) - # workflows built, begin test - with self.admin_access.web_request() as req: - group = req.create_entity('CWGroup', name=u'grp1') - req.cnx.commit() - iworkflowable = group.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.current_state.eid, state1.eid) - self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.subworkflow_input_transition(), None) - iworkflowable.fire_transition('swftr1', u'go') - req.cnx.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid) - self.assertEqual(iworkflowable.current_workflow.eid, swf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) - iworkflowable.fire_transition('tr1', u'go') - req.cnx.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_state.eid, state2.eid) - self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.subworkflow_input_transition(), None) - # force back to swfstate1 is impossible since we can't any more find - # subworkflow input transition - with self.assertRaises(ValidationError) as cm: - iworkflowable.change_state(swfstate1, u'gadget') - self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) - req.cnx.rollback() - # force back to state1 - iworkflowable.change_state('state1', u'gadget') - iworkflowable.fire_transition('swftr1', u'au') - group.cw_clear_all_caches() - iworkflowable.fire_transition('tr2', u'chapeau') - req.cnx.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_state.eid, state3.eid) - self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertListEqual(parse_hist(iworkflowable.workflow_history), - [('state1', 'swfstate1', 'swftr1', 'go'), - ('swfstate1', 'swfstate2', 'tr1', 'go'), - ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), - ('state2', 'state1', None, 'gadget'), - ('state1', 'swfstate1', 'swftr1', 'au'), - ('swfstate1', 'swfstate3', 'tr2', 'chapeau'), - ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'), - ]) - - def test_swf_exit_consistency(self): - with self.admin_access.shell() as shell: - # sub-workflow - swf = add_wf(shell, 'CWGroup', name='subworkflow') - swfstate1 = swf.add_state(u'swfstate1', initial=True) - swfstate2 = swf.add_state(u'swfstate2') - tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2) - # main workflow - mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True) - state1 = mwf.add_state(u'state1', initial=True) - state2 = mwf.add_state(u'state2') - state3 = mwf.add_state(u'state3') - mwf.add_wftransition(u'swftr1', swf, state1, - [(swfstate2, state2), (swfstate2, state3)]) - with 
self.assertRaises(ValidationError) as cm: - shell.commit() - self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"}) - - def test_swf_fire_in_a_row(self): - with self.admin_access.shell() as shell: - # sub-workflow - subwf = add_wf(shell, 'CWGroup', name='subworkflow') - xsigning = subwf.add_state('xsigning', initial=True) - xaborted = subwf.add_state('xaborted') - xsigned = subwf.add_state('xsigned') - xabort = subwf.add_transition('xabort', (xsigning,), xaborted) - xsign = subwf.add_transition('xsign', (xsigning,), xsigning) - xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned, - type=u'auto') - # main workflow - twf = add_wf(shell, 'CWGroup', name='mainwf', default=True) - created = twf.add_state(_('created'), initial=True) - identified = twf.add_state(_('identified')) - released = twf.add_state(_('released')) - closed = twf.add_state(_('closed')) - twf.add_wftransition(_('identify'), subwf, (created,), - [(xsigned, identified), (xaborted, created)]) - twf.add_wftransition(_('release'), subwf, (identified,), - [(xsigned, released), (xaborted, identified)]) - twf.add_wftransition(_('close'), subwf, (released,), - [(xsigned, closed), (xaborted, released)]) - shell.commit() - with self.admin_access.repo_cnx() as cnx: - group = cnx.create_entity('CWGroup', name=u'grp1') - cnx.commit() - iworkflowable = group.cw_adapt_to('IWorkflowable') - for trans in ('identify', 'release', 'close'): - iworkflowable.fire_transition(trans) - cnx.commit() - - - def test_swf_magic_tr(self): - with self.admin_access.shell() as shell: - # sub-workflow - subwf = add_wf(shell, 'CWGroup', name='subworkflow') - xsigning = subwf.add_state('xsigning', initial=True) - xaborted = subwf.add_state('xaborted') - xsigned = subwf.add_state('xsigned') - xabort = subwf.add_transition('xabort', (xsigning,), xaborted) - xsign = subwf.add_transition('xsign', (xsigning,), xsigned) - # main workflow - twf = add_wf(shell, 'CWGroup', name='mainwf', default=True) - created = twf.add_state(_('created'), initial=True) - identified = twf.add_state(_('identified')) - released = twf.add_state(_('released')) - twf.add_wftransition(_('identify'), subwf, created, - [(xaborted, None), (xsigned, identified)]) - twf.add_wftransition(_('release'), subwf, identified, - [(xaborted, None)]) - shell.commit() - with self.admin_access.web_request() as req: - group = req.create_entity('CWGroup', name=u'grp1') - req.cnx.commit() - iworkflowable = group.cw_adapt_to('IWorkflowable') - for trans, nextstate in (('identify', 'xsigning'), - ('xabort', 'created'), - ('identify', 'xsigning'), - ('xsign', 'identified'), - ('release', 'xsigning'), - ('xabort', 'identified') - ): - iworkflowable.fire_transition(trans) - req.cnx.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, nextstate) - - def test_replace_state(self): - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWGroup', name='groupwf', default=True) - s_new = wf.add_state('new', initial=True) - s_state1 = wf.add_state('state1') - wf.add_transition('tr', (s_new,), s_state1) - shell.commit() - - with self.admin_access.repo_cnx() as cnx: - group = cnx.create_entity('CWGroup', name=u'grp1') - cnx.commit() - - iwf = group.cw_adapt_to('IWorkflowable') - iwf.fire_transition('tr') - cnx.commit() - group.cw_clear_all_caches() - - wf = cnx.entity_from_eid(wf.eid) - wf.add_state('state2') - with cnx.security_enabled(write=False): - wf.replace_state('state1', 'state2') - cnx.commit() - - 
self.assertEqual(iwf.state, 'state2') - self.assertEqual(iwf.latest_trinfo().to_state[0].name, 'state2') - - -class CustomWorkflowTC(CubicWebTC): - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.member_eid = self.create_user(cnx, 'member').eid - - def test_custom_wf_replace_state_no_history(self): - """member in inital state with no previous history, state is simply - redirected when changing workflow - """ - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWUser') - wf.add_state('asleep', initial=True) - with self.admin_access.web_request() as req: - req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member_eid}) - member = req.entity_from_eid(self.member_eid) - iworkflowable = member.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'activated') # no change before commit - req.cnx.commit() - member.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual(iworkflowable.workflow_history, ()) - - def test_custom_wf_replace_state_keep_history(self): - """member in inital state with some history, state is redirected and - state change is recorded to history - """ - with self.admin_access.web_request() as req: - member = req.entity_from_eid(self.member_eid) - iworkflowable = member.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - iworkflowable.fire_transition('activate') - req.cnx.commit() - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWUser') - wf.add_state('asleep', initial=True) - shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member_eid}) - with self.admin_access.web_request() as req: - member = req.entity_from_eid(self.member_eid) - iworkflowable = member.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('activated', 'deactivated', 'deactivate', None), - ('deactivated', 'activated', 'activate', None), - ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) - - def test_custom_wf_no_initial_state(self): - """try to set a custom workflow which has no initial state""" - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWUser') - wf.add_state('asleep') - shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member_eid}) - with self.assertRaises(ValidationError) as cm: - shell.commit() - self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'}) - - def test_custom_wf_bad_etype(self): - """try to set a custom workflow which doesn't apply to entity type""" - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'Company') - wf.add_state('asleep', initial=True) - shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member_eid}) - with self.assertRaises(ValidationError) as cm: - shell.commit() - self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"}) - - def test_del_custom_wf(self): - """member in some state shared by the new workflow, nothing has to be - done - """ - with self.admin_access.web_request() as req: - member = req.entity_from_eid(self.member_eid) - iworkflowable = 
member.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - req.cnx.commit() - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWUser') - wf.add_state('asleep', initial=True) - shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member_eid}) - shell.commit() - with self.admin_access.web_request() as req: - req.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member_eid}) - member = req.entity_from_eid(self.member_eid) - iworkflowable = member.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'asleep')# no change before commit - req.cnx.commit() - member.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_workflow.name, "default user workflow") - self.assertEqual(iworkflowable.state, 'activated') - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('activated', 'deactivated', 'deactivate', None), - ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), - ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) - - -class AutoTransitionTC(CubicWebTC): - - def setup_custom_wf(self): - with self.admin_access.shell() as shell: - wf = add_wf(shell, 'CWUser') - asleep = wf.add_state('asleep', initial=True) - dead = wf.add_state('dead') - wf.add_transition('rest', asleep, asleep) - wf.add_transition('sick', asleep, dead, type=u'auto', - conditions=({'expr': u'X surname "toto"', - 'mainvars': u'X'},)) - return wf - - def test_auto_transition_fired(self): - wf = self.setup_custom_wf() - with self.admin_access.web_request() as req: - user = self.create_user(req, 'member') - iworkflowable = user.cw_adapt_to('IWorkflowable') - req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': user.eid}) - req.cnx.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual([t.name for t in iworkflowable.possible_transitions()], - ['rest']) - iworkflowable.fire_transition('rest') - req.cnx.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual([t.name for t in iworkflowable.possible_transitions()], - ['rest']) - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('asleep', 'asleep', 'rest', None)]) - user.cw_set(surname=u'toto') # fulfill condition - req.cnx.commit() - iworkflowable.fire_transition('rest') - req.cnx.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'dead') - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('asleep', 'asleep', 'rest', None), - ('asleep', 'asleep', 'rest', None), - ('asleep', 'dead', 'sick', None),]) - - def test_auto_transition_custom_initial_state_fired(self): - wf = self.setup_custom_wf() - with self.admin_access.web_request() as req: - user = self.create_user(req, 'member', surname=u'toto') - req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': user.eid}) - req.cnx.commit() - user.cw_clear_all_caches() - iworkflowable = user.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'dead') - - def test_auto_transition_initial_state_fired(self): - with self.admin_access.web_request() as req: - wf = req.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0) - dead = wf.add_state('dead') - wf.add_transition('sick', wf.state_by_name('activated'), dead, - type=u'auto', 
conditions=({'expr': u'X surname "toto"', - 'mainvars': u'X'},)) - req.cnx.commit() - with self.admin_access.web_request() as req: - user = self.create_user(req, 'member', surname=u'toto') - req.cnx.commit() - iworkflowable = user.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'dead') - - -class WorkflowHooksTC(CubicWebTC): - - def setUp(self): - CubicWebTC.setUp(self) - with self.admin_access.web_request() as req: - self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow - self.s_activated = self.wf.state_by_name('activated').eid - self.s_deactivated = self.wf.state_by_name('deactivated').eid - self.s_dummy = self.wf.add_state(u'dummy').eid - self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy) - ueid = self.create_user(req, 'stduser', commit=False).eid - # test initial state is set - rset = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', - {'x' : ueid}) - self.assertFalse(rset, rset.rows) - req.cnx.commit() - initialstate = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', - {'x' : ueid})[0][0] - self.assertEqual(initialstate, u'activated') - # give access to users group on the user's wf transitions - # so we can test wf enforcing on euser (managers don't have anymore this - # enforcement - req.execute('SET X require_group G ' - 'WHERE G name "users", X transition_of WF, WF eid %(wf)s', - {'wf': self.wf.eid}) - req.cnx.commit() - - # XXX currently, we've to rely on hooks to set initial state, or to use execute - # def test_initial_state(self): - # cnx = self.login('stduser') - # cu = cnx.cursor() - # self.assertRaises(ValidationError, cu.execute, - # 'INSERT CWUser X: X login "badaboum", X upassword %(pwd)s, ' - # 'X in_state S WHERE S name "deactivated"', {'pwd': 'oops'}) - # cnx.close() - # # though managers can do whatever he want - # self.execute('INSERT CWUser X: X login "badaboum", X upassword %(pwd)s, ' - # 'X in_state S, X in_group G WHERE S name "deactivated", G name "users"', {'pwd': 'oops'}) - # self.commit() - - # test that the workflow is correctly enforced - - def _cleanup_msg(self, msg): - """remove the variable part of one specific error message""" - lmsg = msg.split() - lmsg.pop(1) - lmsg.pop() - return ' '.join(lmsg) - - def test_transition_checking1(self): - with self.new_access('stduser').repo_cnx() as cnx: - user = cnx.user - iworkflowable = user.cw_adapt_to('IWorkflowable') - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('activate') - self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), - u"transition isn't allowed from") - - def test_transition_checking2(self): - with self.new_access('stduser').repo_cnx() as cnx: - user = cnx.user - iworkflowable = user.cw_adapt_to('IWorkflowable') - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('dummy') - self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), - u"transition isn't allowed from") - - def test_transition_checking3(self): - with self.new_access('stduser').repo_cnx() as cnx: - user = cnx.user - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - cnx.commit() - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('deactivate') - self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), - u"transition isn't allowed from") - cnx.rollback() - # get back now - iworkflowable.fire_transition('activate') - cnx.commit() - - -if __name__ == 
'__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 entities/wfobjs.py --- a/entities/wfobjs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,589 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""workflow handling: - -* entity types defining workflow (Workflow, State, Transition...) -* workflow history (TrInfo) -* adapter for workflowable entities (IWorkflowableAdapter) -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from six import text_type, string_types - -from logilab.common.decorators import cached, clear_cache -from logilab.common.deprecation import deprecated - -from cubicweb.entities import AnyEntity, fetch_config -from cubicweb.view import EntityAdapter -from cubicweb.predicates import relation_possible - - -try: - from cubicweb import server -except ImportError: - # We need to lookup DEBUG from there, - # however a pure dbapi client may not have it. - class server(object): pass - server.DEBUG = False - - -class WorkflowException(Exception): pass - -class Workflow(AnyEntity): - __regid__ = 'Workflow' - - @property - def initial(self): - """return the initial state for this workflow""" - return self.initial_state and self.initial_state[0] or None - - def is_default_workflow_of(self, etype): - """return True if this workflow is the default workflow for the given - entity type - """ - return any(et for et in self.reverse_default_workflow - if et.name == etype) - - def iter_workflows(self, _done=None): - """return an iterator on actual workflows, eg this workflow and its - subworkflows - """ - # infinite loop safety belt - if _done is None: - _done = set() - yield self - _done.add(self.eid) - for tr in self._cw.execute('Any T WHERE T is WorkflowTransition, ' - 'T transition_of WF, WF eid %(wf)s', - {'wf': self.eid}).entities(): - if tr.subwf.eid in _done: - continue - for subwf in tr.subwf.iter_workflows(_done): - yield subwf - - # state / transitions accessors ############################################ - - def state_by_name(self, statename): - rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, ' - 'S state_of WF, WF eid %(wf)s', - {'n': statename, 'wf': self.eid}) - if rset: - return rset.get_entity(0, 0) - return None - - def state_by_eid(self, eid): - rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, ' - 'S state_of WF, WF eid %(wf)s', - {'s': eid, 'wf': self.eid}) - if rset: - return rset.get_entity(0, 0) - return None - - def transition_by_name(self, trname): - rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, ' - 'T transition_of WF, WF eid %(wf)s', - {'n': text_type(trname), 'wf': self.eid}) - if rset: - return rset.get_entity(0, 0) - return 
None - - def transition_by_eid(self, eid): - rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, ' - 'T transition_of WF, WF eid %(wf)s', - {'t': eid, 'wf': self.eid}) - if rset: - return rset.get_entity(0, 0) - return None - - # wf construction methods ################################################## - - def add_state(self, name, initial=False, **kwargs): - """add a state to this workflow""" - state = self._cw.create_entity('State', name=text_type(name), **kwargs) - self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', - {'s': state.eid, 'wf': self.eid}) - if initial: - assert not self.initial, "Initial state already defined as %s" % self.initial - self._cw.execute('SET WF initial_state S ' - 'WHERE S eid %(s)s, WF eid %(wf)s', - {'s': state.eid, 'wf': self.eid}) - return state - - def _add_transition(self, trtype, name, fromstates, - requiredgroups=(), conditions=(), **kwargs): - tr = self._cw.create_entity(trtype, name=text_type(name), **kwargs) - self._cw.execute('SET T transition_of WF ' - 'WHERE T eid %(t)s, WF eid %(wf)s', - {'t': tr.eid, 'wf': self.eid}) - assert fromstates, fromstates - if not isinstance(fromstates, (tuple, list)): - fromstates = (fromstates,) - for state in fromstates: - if hasattr(state, 'eid'): - state = state.eid - self._cw.execute('SET S allowed_transition T ' - 'WHERE S eid %(s)s, T eid %(t)s', - {'s': state, 't': tr.eid}) - tr.set_permissions(requiredgroups, conditions, reset=False) - return tr - - def add_transition(self, name, fromstates, tostate=None, - requiredgroups=(), conditions=(), **kwargs): - """add a transition to this workflow from some state(s) to another""" - tr = self._add_transition('Transition', name, fromstates, - requiredgroups, conditions, **kwargs) - if tostate is not None: - if hasattr(tostate, 'eid'): - tostate = tostate.eid - self._cw.execute('SET T destination_state S ' - 'WHERE S eid %(s)s, T eid %(t)s', - {'t': tr.eid, 's': tostate}) - return tr - - def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(), - requiredgroups=(), conditions=(), **kwargs): - """add a workflow transition to this workflow""" - tr = self._add_transition('WorkflowTransition', name, fromstates, - requiredgroups, conditions, **kwargs) - if hasattr(subworkflow, 'eid'): - subworkflow = subworkflow.eid - assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s', - {'t': tr.eid, 'wf': subworkflow}) - for fromstate, tostate in exitpoints: - tr.add_exit_point(fromstate, tostate) - return tr - - def replace_state(self, todelstate, replacement): - """migration convenience method""" - if not hasattr(todelstate, 'eid'): - todelstate = self.state_by_name(todelstate) - if not hasattr(replacement, 'eid'): - replacement = self.state_by_name(replacement) - args = {'os': todelstate.eid, 'ns': replacement.eid} - execute = self._cw.execute - execute('SET X in_state NS WHERE X in_state OS, ' - 'NS eid %(ns)s, OS eid %(os)s', args) - execute('SET X from_state NS WHERE X from_state OS, ' - 'OS eid %(os)s, NS eid %(ns)s', args) - execute('SET X to_state NS WHERE X to_state OS, ' - 'OS eid %(os)s, NS eid %(ns)s', args) - todelstate.cw_delete() - - -class BaseTransition(AnyEntity): - """customized class for abstract transition - - provides a specific may_be_fired method to check if the relation may be - fired by the logged user - """ - __regid__ = 'BaseTransition' - fetch_attrs, cw_fetch_order = fetch_config(['name', 'type']) - - def __init__(self, *args, **kwargs): - if self.cw_etype == 'BaseTransition': - raise 
WorkflowException('should not be instantiated') - super(BaseTransition, self).__init__(*args, **kwargs) - - @property - def workflow(self): - return self.transition_of[0] - - def has_input_state(self, state): - if hasattr(state, 'eid'): - state = state.eid - return any(s for s in self.reverse_allowed_transition if s.eid == state) - - def may_be_fired(self, eid): - """return true if the logged user may fire this transition - - `eid` is the eid of the object on which we may fire the transition - """ - DBG = False - if server.DEBUG & server.DBG_SEC: - if 'transition' in server._SECURITY_CAPS: - DBG = True - user = self._cw.user - # check user is at least in one of the required groups if any - groups = frozenset(g.name for g in self.require_group) - if groups: - matches = user.matching_groups(groups) - if matches: - if DBG: - print('may_be_fired: %r may fire: user matches %s' % (self.name, groups)) - return matches - if 'owners' in groups and user.owns(eid): - if DBG: - print('may_be_fired: %r may fire: user is owner' % self.name) - return True - # check one of the rql expression conditions matches if any - if self.condition: - if DBG: - print('my_be_fired: %r: %s' % - (self.name, [(rqlexpr.expression, - rqlexpr.check_expression(self._cw, eid)) - for rqlexpr in self.condition])) - for rqlexpr in self.condition: - if rqlexpr.check_expression(self._cw, eid): - return True - if self.condition or groups: - return False - return True - - def set_permissions(self, requiredgroups=(), conditions=(), reset=True): - """set or add (if `reset` is False) groups and conditions for this - transition - """ - if reset: - self._cw.execute('DELETE T require_group G WHERE T eid %(x)s', - {'x': self.eid}) - self._cw.execute('DELETE T condition R WHERE T eid %(x)s', - {'x': self.eid}) - for gname in requiredgroups: - rset = self._cw.execute('SET T require_group G ' - 'WHERE T eid %(x)s, G name %(gn)s', - {'x': self.eid, 'gn': text_type(gname)}) - assert rset, '%s is not a known group' % gname - if isinstance(conditions, string_types): - conditions = (conditions,) - for expr in conditions: - if isinstance(expr, string_types): - kwargs = {'expr': text_type(expr)} - else: - assert isinstance(expr, dict) - kwargs = expr - kwargs['x'] = self.eid - kwargs.setdefault('mainvars', u'X') - self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' - 'X expression %(expr)s, X mainvars %(mainvars)s, ' - 'T condition X WHERE T eid %(x)s', kwargs) - # XXX clear caches? 
- - -class Transition(BaseTransition): - """customized class for Transition entities""" - __regid__ = 'Transition' - - def dc_long_title(self): - return '%s (%s)' % (self.name, self._cw._(self.name)) - - def destination(self, entity): - try: - return self.destination_state[0] - except IndexError: - return entity.cw_adapt_to('IWorkflowable').latest_trinfo().previous_state - - def potential_destinations(self): - try: - yield self.destination_state[0] - except IndexError: - for incomingstate in self.reverse_allowed_transition: - for tr in incomingstate.reverse_destination_state: - for previousstate in tr.reverse_allowed_transition: - yield previousstate - - -class WorkflowTransition(BaseTransition): - """customized class for WorkflowTransition entities""" - __regid__ = 'WorkflowTransition' - - @property - def subwf(self): - return self.subworkflow[0] - - def destination(self, entity): - return self.subwf.initial - - def potential_destinations(self): - yield self.subwf.initial - - def add_exit_point(self, fromstate, tostate): - if hasattr(fromstate, 'eid'): - fromstate = fromstate.eid - if tostate is None: - self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' - 'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s', - {'t': self.eid, 'fs': fromstate}) - else: - if hasattr(tostate, 'eid'): - tostate = tostate.eid - self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' - 'X subworkflow_state FS, X destination_state TS ' - 'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s', - {'t': self.eid, 'fs': fromstate, 'ts': tostate}) - - def get_exit_point(self, entity, stateeid): - """if state is an exit point, return its associated destination state""" - if hasattr(stateeid, 'eid'): - stateeid = stateeid.eid - try: - tostateeid = self.exit_points()[stateeid] - except KeyError: - return None - if tostateeid is None: - # go back to state from which we've entered the subworkflow - return entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo().previous_state - return self._cw.entity_from_eid(tostateeid) - - @cached - def exit_points(self): - result = {} - for ep in self.subworkflow_exit: - result[ep.subwf_state.eid] = ep.destination and ep.destination.eid - return result - - def cw_clear_all_caches(self): - super(WorkflowTransition, self).cw_clear_all_caches() - clear_cache(self, 'exit_points') - - -class SubWorkflowExitPoint(AnyEntity): - """customized class for SubWorkflowExitPoint entities""" - __regid__ = 'SubWorkflowExitPoint' - - @property - def subwf_state(self): - return self.subworkflow_state[0] - - @property - def destination(self): - return self.destination_state and self.destination_state[0] or None - - -class State(AnyEntity): - """customized class for State entities""" - __regid__ = 'State' - fetch_attrs, cw_fetch_order = fetch_config(['name']) - rest_attr = 'eid' - - def dc_long_title(self): - return '%s (%s)' % (self.name, self._cw._(self.name)) - - @property - def workflow(self): - # take care, may be missing in multi-sources configuration - return self.state_of and self.state_of[0] or None - - -class TrInfo(AnyEntity): - """customized class for Transition information entities - """ - __regid__ = 'TrInfo' - fetch_attrs, cw_fetch_order = fetch_config(['creation_date', 'comment'], - pclass=None) # don't want modification_date - @property - def for_entity(self): - return self.wf_info_for[0] - - @property - def previous_state(self): - return self.from_state[0] - - @property - def new_state(self): - return self.to_state[0] - - @property - def 
transition(self): - return self.by_transition and self.by_transition[0] or None - - - -class IWorkflowableAdapter(EntityAdapter): - """base adapter providing workflow helper methods for workflowable entities. - """ - __regid__ = 'IWorkflowable' - __select__ = relation_possible('in_state') - - @cached - def cwetype_workflow(self): - """return the default workflow for entities of this type""" - # XXX CWEType method - wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': text_type(self.entity.cw_etype)}) - if wfrset: - return wfrset.get_entity(0, 0) - self.warning("can't find any workflow for %s", self.entity.cw_etype) - return None - - @property - def main_workflow(self): - """return current workflow applied to this entity""" - if self.entity.custom_workflow: - return self.entity.custom_workflow[0] - return self.cwetype_workflow() - - @property - def current_workflow(self): - """return current workflow applied to this entity""" - return self.current_state and self.current_state.workflow or self.main_workflow - - @property - def current_state(self): - """return current state entity""" - return self.entity.in_state and self.entity.in_state[0] or None - - @property - def state(self): - """return current state name""" - try: - return self.current_state.name - except AttributeError: - self.warning('entity %s has no state', self.entity) - return None - - @property - def printable_state(self): - """return current state name translated to context's language""" - state = self.current_state - if state: - return self._cw._(state.name) - return u'' - - @property - def workflow_history(self): - """return the workflow history for this entity (eg ordered list of - TrInfo entities) - """ - return self.entity.reverse_wf_info_for - - def latest_trinfo(self): - """return the latest transition information for this entity""" - try: - return self.workflow_history[-1] - except IndexError: - return None - - def possible_transitions(self, type='normal'): - """generates transition that MAY be fired for the given entity, - expected to be in this state - used only by the UI - """ - if self.current_state is None or self.current_workflow is None: - return - rset = self._cw.execute( - 'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, ' - 'T type TT, T type %(type)s, ' - 'T name TN, T transition_of WF, WF eid %(wfeid)s', - {'x': self.current_state.eid, 'type': text_type(type), - 'wfeid': self.current_workflow.eid}) - for tr in rset.entities(): - if tr.may_be_fired(self.entity.eid): - yield tr - - def subworkflow_input_trinfo(self): - """return the TrInfo which has be recorded when this entity went into - the current sub-workflow - """ - if self.main_workflow.eid == self.current_workflow.eid: - return # doesn't make sense - subwfentries = [] - for trinfo in self.workflow_history: - if (trinfo.transition and - trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): - # entering or leaving a subworkflow - if (subwfentries and - subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and - subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): - # leave - del subwfentries[-1] - else: - # enter - subwfentries.append(trinfo) - if not subwfentries: - return None - return subwfentries[-1] - - def subworkflow_input_transition(self): - """return the transition which has went through the current sub-workflow - """ - return getattr(self.subworkflow_input_trinfo(), 'transition', None) - - def _add_trinfo(self, comment, 
commentformat, treid=None, tseid=None): - kwargs = {} - if comment is not None: - kwargs['comment'] = comment - if commentformat is not None: - kwargs['comment_format'] = commentformat - kwargs['wf_info_for'] = self.entity - if treid is not None: - kwargs['by_transition'] = self._cw.entity_from_eid(treid) - if tseid is not None: - kwargs['to_state'] = self._cw.entity_from_eid(tseid) - return self._cw.create_entity('TrInfo', **kwargs) - - def _get_transition(self, tr): - assert self.current_workflow - if isinstance(tr, string_types): - _tr = self.current_workflow.transition_by_name(tr) - assert _tr is not None, 'not a %s transition: %s' % ( - self.__regid__, tr) - tr = _tr - return tr - - def fire_transition(self, tr, comment=None, commentformat=None): - """change the entity's state by firing given transition (name or entity) - in entity's workflow - """ - tr = self._get_transition(tr) - return self._add_trinfo(comment, commentformat, tr.eid) - - def fire_transition_if_possible(self, tr, comment=None, commentformat=None): - """change the entity's state by firing given transition (name or entity) - in entity's workflow if this transition is possible - """ - tr = self._get_transition(tr) - if any(tr_ for tr_ in self.possible_transitions() - if tr_.eid == tr.eid): - self.fire_transition(tr, comment, commentformat) - - def change_state(self, statename, comment=None, commentformat=None, tr=None): - """change the entity's state to the given state (name or entity) in - entity's workflow. This method should only by used by manager to fix an - entity's state when their is no matching transition, otherwise - fire_transition should be used. - """ - assert self.current_workflow - if hasattr(statename, 'eid'): - stateeid = statename.eid - else: - state = self.current_workflow.state_by_name(statename) - if state is None: - raise WorkflowException('not a %s state: %s' % (self.__regid__, - statename)) - stateeid = state.eid - # XXX try to find matching transition? - return self._add_trinfo(comment, commentformat, tr and tr.eid, stateeid) - - def set_initial_state(self, statename): - """set a newly created entity's state to the given state (name or entity) - in entity's workflow. This is useful if you don't want it to be the - workflow's initial state. - """ - assert self.current_workflow - if hasattr(statename, 'eid'): - stateeid = statename.eid - else: - state = self.current_workflow.state_by_name(statename) - if state is None: - raise WorkflowException('not a %s state: %s' % (self.__regid__, - statename)) - stateeid = state.eid - self._cw.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': self.entity.eid, 's': stateeid}) diff -r 058bb3dc685f -r 0b59724cb3f2 entity.py --- a/entity.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1403 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Base class for entity objects manipulated in clients""" - -__docformat__ = "restructuredtext en" - -from warnings import warn -from functools import partial - -from six import text_type, string_types, integer_types -from six.moves import range - -from logilab.common.decorators import cached -from logilab.common.deprecation import deprecated -from logilab.common.registry import yes -from logilab.mtconverter import TransformData, xml_escape - -from rql.utils import rqlvar_maker -from rql.stmts import Select -from rql.nodes import (Not, VariableRef, Constant, make_relation, - Relation as RqlRelation) - -from cubicweb import Unauthorized, neg_role -from cubicweb.utils import support_args -from cubicweb.rset import ResultSet -from cubicweb.appobject import AppObject -from cubicweb.schema import (RQLVocabularyConstraint, RQLConstraint, - GeneratedConstraint) -from cubicweb.rqlrewrite import RQLRewriter - -from cubicweb.uilib import soup2xhtml -from cubicweb.mttransforms import ENGINE - -_marker = object() - -def greater_card(rschema, subjtypes, objtypes, index): - for subjtype in subjtypes: - for objtype in objtypes: - card = rschema.rdef(subjtype, objtype).cardinality[index] - if card in '+*': - return card - return '1' - -def can_use_rest_path(value): - """return True if value can be used at the end of a Rest URL path""" - if value is None: - return False - value = text_type(value) - # the check for ?, /, & are to prevent problems when running - # behind Apache mod_proxy - if value == u'' or u'?' in value or u'/' in value or u'&' in value: - return False - return True - -def rel_vars(rel): - return ((isinstance(rel.children[0], VariableRef) - and rel.children[0].variable or None), - (isinstance(rel.children[1].children[0], VariableRef) - and rel.children[1].children[0].variable or None) - ) - -def rel_matches(rel, rtype, role, varname, operator='='): - if rel.r_type == rtype and rel.children[1].operator == operator: - same_role_var_idx = 0 if role == 'subject' else 1 - variables = rel_vars(rel) - if variables[same_role_var_idx].name == varname: - return variables[1 - same_role_var_idx] - -def build_cstr_with_linkto_infos(cstr, args, searchedvar, evar, - lt_infos, eidvars): - """restrict vocabulary as much as possible in entity creation, - based on infos provided by __linkto form param. 
- - Example based on following schema: - - class works_in(RelationDefinition): - subject = 'CWUser' - object = 'Lab' - cardinality = '1*' - constraints = [RQLConstraint('S in_group G, O welcomes G')] - - class welcomes(RelationDefinition): - subject = 'Lab' - object = 'CWGroup' - - If you create a CWUser in the "scientists" CWGroup you can show - only the labs that welcome them using : - - lt_infos = {('in_group', 'subject'): 321} - - You get following restriction : 'O welcomes G, G eid 321' - - """ - st = cstr.snippet_rqlst.copy() - # replace relations in ST by eid infos from linkto where possible - for (info_rtype, info_role), eids in lt_infos.items(): - eid = eids[0] # NOTE: we currently assume a pruned lt_info with only 1 eid - for rel in st.iget_nodes(RqlRelation): - targetvar = rel_matches(rel, info_rtype, info_role, evar.name) - if targetvar is not None: - if targetvar.name in eidvars: - rel.parent.remove(rel) - else: - eidrel = make_relation( - targetvar, 'eid', (targetvar.name, 'Substitute'), - Constant) - rel.parent.replace(rel, eidrel) - args[targetvar.name] = eid - eidvars.add(targetvar.name) - # if modified ST still contains evar references we must discard the - # constraint, otherwise evar is unknown in the final rql query which can - # lead to a SQL table cartesian product and multiple occurences of solutions - evarname = evar.name - for rel in st.iget_nodes(RqlRelation): - for variable in rel_vars(rel): - if variable and evarname == variable.name: - return - # else insert snippets into the global tree - return GeneratedConstraint(st, cstr.mainvars - set(evarname)) - -def pruned_lt_info(eschema, lt_infos): - pruned = {} - for (lt_rtype, lt_role), eids in lt_infos.items(): - # we can only use lt_infos describing relation with a cardinality - # of value 1 towards the linked entity - if not len(eids) == 1: - continue - lt_card = eschema.rdef(lt_rtype, lt_role).cardinality[ - 0 if lt_role == 'subject' else 1] - if lt_card not in '?1': - continue - pruned[(lt_rtype, lt_role)] = eids - return pruned - - -class Entity(AppObject): - """an entity instance has e_schema automagically set on - the class and instances has access to their issuing cursor. - - A property is set for each attribute and relation on each entity's type - class. Becare that among attributes, 'eid' is *NEITHER* stored in the - dict containment (which acts as a cache for other attributes dynamically - fetched) - - :type e_schema: `cubicweb.schema.EntitySchema` - :ivar e_schema: the entity's schema - - :type rest_attr: str - :cvar rest_attr: indicates which attribute should be used to build REST urls - If `None` is specified (the default), the first unique attribute will - be used ('eid' if none found) - - :type cw_skip_copy_for: list - :cvar cw_skip_copy_for: a list of couples (rtype, role) for each relation - that should be skipped when copying this kind of entity. Note that some - relations such as composite relations or relations that have '?1' as - object cardinality are always skipped. 
- """ - __registry__ = 'etypes' - __select__ = yes() - - # class attributes that must be set in class definition - rest_attr = None - fetch_attrs = None - skip_copy_for = () # bw compat (< 3.14), use cw_skip_copy_for instead - cw_skip_copy_for = [('in_state', 'subject')] - # class attributes set automatically at registration time - e_schema = None - - @classmethod - def __initialize__(cls, schema): - """initialize a specific entity class by adding descriptors to access - entity type's attributes and relations - """ - etype = cls.__regid__ - assert etype != 'Any', etype - cls.e_schema = eschema = schema.eschema(etype) - for rschema, _ in eschema.attribute_definitions(): - if rschema.type == 'eid': - continue - setattr(cls, rschema.type, Attribute(rschema.type)) - mixins = [] - for rschema, _, role in eschema.relation_definitions(): - if role == 'subject': - attr = rschema.type - else: - attr = 'reverse_%s' % rschema.type - setattr(cls, attr, Relation(rschema, role)) - - fetch_attrs = ('modification_date',) - - @classmethod - def cw_fetch_order(cls, select, attr, var): - """This class method may be used to control sort order when multiple - entities of this type are fetched through ORM methods. Its arguments - are: - - * `select`, the RQL syntax tree - - * `attr`, the attribute being watched - - * `var`, the variable through which this attribute's value may be - accessed in the query - - When you want to do some sorting on the given attribute, you should - modify the syntax tree accordingly. For instance: - - .. sourcecode:: python - - from rql import nodes - - class Version(AnyEntity): - __regid__ = 'Version' - - fetch_attrs = ('num', 'description', 'in_state') - - @classmethod - def cw_fetch_order(cls, select, attr, var): - if attr == 'num': - func = nodes.Function('version_sort_value') - func.append(nodes.variable_ref(var)) - sterm = nodes.SortTerm(func, asc=False) - select.add_sort_term(sterm) - - The default implementation call - :meth:`~cubicweb.entity.Entity.cw_fetch_unrelated_order` - """ - cls.cw_fetch_unrelated_order(select, attr, var) - - @classmethod - def cw_fetch_unrelated_order(cls, select, attr, var): - """This class method may be used to control sort order when multiple entities of - this type are fetched to use in edition (e.g. propose them to create a - new relation on an edited entity). - - See :meth:`~cubicweb.entity.Entity.cw_fetch_unrelated_order` for a - description of its arguments and usage. - - By default entities will be listed on their modification date descending, - i.e. you'll get entities recently modified first. 
- """ - if attr == 'modification_date': - select.add_sort_var(var, asc=False) - - @classmethod - def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X', - settype=True, ordermethod='fetch_order'): - st = cls.fetch_rqlst(user, mainvar=mainvar, fetchattrs=fetchattrs, - settype=settype, ordermethod=ordermethod) - rql = st.as_string() - if restriction: - # cannot use RQLRewriter API to insert 'X rtype %(x)s' restriction - warn('[3.14] fetch_rql: use of `restriction` parameter is ' - 'deprecated, please use fetch_rqlst and supply a syntax' - 'tree with your restriction instead', DeprecationWarning) - insert = ' WHERE ' + ','.join(restriction) - if ' WHERE ' in rql: - select, where = rql.split(' WHERE ', 1) - rql = select + insert + ',' + where - else: - rql += insert - return rql - - @classmethod - def fetch_rqlst(cls, user, select=None, mainvar='X', fetchattrs=None, - settype=True, ordermethod='fetch_order'): - if select is None: - select = Select() - mainvar = select.get_variable(mainvar) - select.add_selected(mainvar) - elif isinstance(mainvar, string_types): - assert mainvar in select.defined_vars - mainvar = select.get_variable(mainvar) - # eases string -> syntax tree test transition: please remove once stable - select._varmaker = rqlvar_maker(defined=select.defined_vars, - aliases=select.aliases, index=26) - if settype: - rel = select.add_type_restriction(mainvar, cls.__regid__) - # should use 'is_instance_of' instead of 'is' so we retrieve - # subclasses instances as well - rel.r_type = 'is_instance_of' - if fetchattrs is None: - fetchattrs = cls.fetch_attrs - cls._fetch_restrictions(mainvar, select, fetchattrs, user, ordermethod) - return select - - @classmethod - def _fetch_ambiguous_rtypes(cls, select, var, fetchattrs, subjtypes, schema): - """find rtypes in `fetchattrs` that relate different subject etypes - taken from (`subjtypes`) to different target etypes; these so called - "ambiguous" relations, are added directly to the `select` syntax tree - selection but removed from `fetchattrs` to avoid the fetch recursion - because we have to choose only one targettype for the recursion and - adding its own fetch attrs to the selection -when we recurse- would - filter out the other possible target types from the result set - """ - for attr in fetchattrs.copy(): - rschema = schema.rschema(attr) - if rschema.final: - continue - ttypes = None - for subjtype in subjtypes: - cur_ttypes = set(rschema.objects(subjtype)) - if ttypes is None: - ttypes = cur_ttypes - elif cur_ttypes != ttypes: - # we found an ambiguous relation: remove it from fetchattrs - fetchattrs.remove(attr) - # ... 
and add it to the selection - targetvar = select.make_variable() - select.add_selected(targetvar) - rel = make_relation(var, attr, (targetvar,), VariableRef) - select.add_restriction(rel) - break - - @classmethod - def _fetch_restrictions(cls, mainvar, select, fetchattrs, - user, ordermethod='fetch_order', visited=None): - eschema = cls.e_schema - if visited is None: - visited = set((eschema.type,)) - elif eschema.type in visited: - # avoid infinite recursion - return - else: - visited.add(eschema.type) - _fetchattrs = [] - for attr in sorted(fetchattrs): - try: - rschema = eschema.subjrels[attr] - except KeyError: - cls.warning('skipping fetch_attr %s defined in %s (not found in schema)', - attr, cls.__regid__) - continue - # XXX takefirst=True to remove warning triggered by ambiguous inlined relations - rdef = eschema.rdef(attr, takefirst=True) - if not user.matching_groups(rdef.get_groups('read')): - continue - if rschema.final or rdef.cardinality[0] in '?1': - var = select.make_variable() - select.add_selected(var) - rel = make_relation(mainvar, attr, (var,), VariableRef) - select.add_restriction(rel) - else: - cls.warning('bad relation %s specified in fetch attrs for %s', - attr, cls) - continue - if not rschema.final: - # XXX we need outer join in case the relation is not mandatory - # (card == '?') *or if the entity is being added*, since in - # that case the relation may still be missing. As we miss this - # later information here, systematically add it. - rel.change_optional('right') - targettypes = rschema.objects(eschema.type) - vreg = user._cw.vreg # XXX user._cw.vreg iiiirk - etypecls = vreg['etypes'].etype_class(targettypes[0]) - if len(targettypes) > 1: - # find fetch_attrs common to all destination types - fetchattrs = vreg['etypes'].fetch_attrs(targettypes) - # ... and handle ambiguous relations - cls._fetch_ambiguous_rtypes(select, var, fetchattrs, - targettypes, vreg.schema) - else: - fetchattrs = etypecls.fetch_attrs - etypecls._fetch_restrictions(var, select, fetchattrs, - user, None, visited=visited) - if ordermethod is not None: - try: - cmeth = getattr(cls, ordermethod) - warn('[3.14] %s %s class method should be renamed to cw_%s' - % (cls.__regid__, ordermethod, ordermethod), - DeprecationWarning) - except AttributeError: - cmeth = getattr(cls, 'cw_' + ordermethod) - if support_args(cmeth, 'select'): - cmeth(select, attr, var) - else: - warn('[3.14] %s should now take (select, attr, var) and ' - 'modify the syntax tree when desired instead of ' - 'returning something' % cmeth, DeprecationWarning) - orderterm = cmeth(attr, var.name) - if orderterm is not None: - try: - var, order = orderterm.split() - except ValueError: - if '(' in orderterm: - cls.error('ignore %s until %s is upgraded', - orderterm, cmeth) - orderterm = None - elif not ' ' in orderterm.strip(): - var = orderterm - order = 'ASC' - if orderterm is not None: - select.add_sort_var(select.get_variable(var), - order=='ASC') - - @classmethod - @cached - def cw_rest_attr_info(cls): - """this class method return an attribute name to be used in URL for - entities of this type and a boolean flag telling if its value should be - checked for uniqness. 
- - The attribute returned is, in order of priority: - - * class's `rest_attr` class attribute - * an attribute defined as unique in the class'schema - * 'eid' - """ - mainattr, needcheck = 'eid', True - if cls.rest_attr: - mainattr = cls.rest_attr - needcheck = not cls.e_schema.has_unique_values(mainattr) - else: - for rschema in cls.e_schema.subject_relations(): - if (rschema.final - and rschema not in ('eid', 'cwuri') - and cls.e_schema.has_unique_values(rschema) - and cls.e_schema.rdef(rschema.type).cardinality[0] == '1'): - mainattr = str(rschema) - needcheck = False - break - if mainattr == 'eid': - needcheck = False - return mainattr, needcheck - - @classmethod - def _cw_build_entity_query(cls, kwargs): - relations = [] - restrictions = set() - pendingrels = [] - eschema = cls.e_schema - qargs = {} - attrcache = {} - for attr, value in kwargs.items(): - if attr.startswith('reverse_'): - attr = attr[len('reverse_'):] - role = 'object' - else: - role = 'subject' - assert eschema.has_relation(attr, role), '%s %s not found on %s' % (attr, role, eschema) - rschema = eschema.subjrels[attr] if role == 'subject' else eschema.objrels[attr] - if not rschema.final and isinstance(value, (tuple, list, set, frozenset)): - if len(value) == 0: - continue # avoid crash with empty IN clause - elif len(value) == 1: - value = next(iter(value)) - else: - # prepare IN clause - pendingrels.append( (attr, role, value) ) - continue - if rschema.final: # attribute - relations.append('X %s %%(%s)s' % (attr, attr)) - attrcache[attr] = value - elif value is None: - pendingrels.append( (attr, role, value) ) - else: - rvar = attr.upper() - if role == 'object': - relations.append('%s %s X' % (rvar, attr)) - else: - relations.append('X %s %s' % (attr, rvar)) - restriction = '%s eid %%(%s)s' % (rvar, attr) - if not restriction in restrictions: - restrictions.add(restriction) - if hasattr(value, 'eid'): - value = value.eid - qargs[attr] = value - rql = u'' - if relations: - rql += ', '.join(relations) - if restrictions: - rql += ' WHERE %s' % ', '.join(restrictions) - return rql, qargs, pendingrels, attrcache - - @classmethod - def _cw_handle_pending_relations(cls, eid, pendingrels, execute): - for attr, role, values in pendingrels: - if role == 'object': - restr = 'Y %s X' % attr - else: - restr = 'X %s Y' % attr - if values is None: - execute('DELETE %s WHERE X eid %%(x)s' % restr, {'x': eid}) - continue - execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( - restr, ','.join(str(getattr(r, 'eid', r)) for r in values)), - {'x': eid}, build_descr=False) - - @classmethod - def cw_instantiate(cls, execute, **kwargs): - """add a new entity of this given type - - Example (in a shell session): - - >>> companycls = vreg['etypes'].etype_class('Company') - >>> personcls = vreg['etypes'].etype_class('Person') - >>> c = companycls.cw_instantiate(session.execute, name=u'Logilab') - >>> p = personcls.cw_instantiate(session.execute, firstname=u'John', lastname=u'Doe', - ... works_for=c) - - You can also set relations where the entity has 'object' role by - prefixing the relation name by 'reverse_'. Also, relation values may be - an entity or eid, a list of entities or eids. 
- """ - rql, qargs, pendingrels, attrcache = cls._cw_build_entity_query(kwargs) - if rql: - rql = 'INSERT %s X: %s' % (cls.__regid__, rql) - else: - rql = 'INSERT %s X' % (cls.__regid__) - try: - created = execute(rql, qargs).get_entity(0, 0) - except IndexError: - raise Exception('could not create a %r with %r (%r)' % - (cls.__regid__, rql, qargs)) - created._cw_update_attr_cache(attrcache) - cls._cw_handle_pending_relations(created.eid, pendingrels, execute) - return created - - def __init__(self, req, rset=None, row=None, col=0): - AppObject.__init__(self, req, rset=rset, row=row, col=col) - self._cw_related_cache = {} - self._cw_adapters_cache = {} - if rset is not None: - self.eid = rset[row][col] - else: - self.eid = None - self._cw_is_saved = True - self.cw_attr_cache = {} - - def __repr__(self): - return '' % ( - self.e_schema, self.eid, list(self.cw_attr_cache), id(self)) - - def __lt__(self, other): - raise NotImplementedError('comparison not implemented for %s' % self.__class__) - - def __eq__(self, other): - if isinstance(self.eid, integer_types): - return self.eid == other.eid - return self is other - - def __hash__(self): - if isinstance(self.eid, integer_types): - return self.eid - return super(Entity, self).__hash__() - - def _cw_update_attr_cache(self, attrcache): - trdata = self._cw.transaction_data - uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set()) - uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set())) - for attr in uncached_attrs: - attrcache.pop(attr, None) - self.cw_attr_cache.pop(attr, None) - self.cw_attr_cache.update(attrcache) - - def _cw_dont_cache_attribute(self, attr, repo_side=False): - """Called when some attribute has been transformed by a *storage*, - hence the original value should not be cached **by anyone**. - - For example we have a special "fs_importing" mode in BFSS - where a file path is given as attribute value and stored as is - in the data base. Later access to the attribute will provide - the content of the file at the specified path. We do not want - the "filepath" value to be cached. - - """ - trdata = self._cw.transaction_data - trdata.setdefault('%s.dont-cache-attrs' % self.eid, set()).add(attr) - if repo_side: - trdata.setdefault('%s.storage-special-process-attrs' % self.eid, set()).add(attr) - - def __json_encode__(self): - """custom json dumps hook to dump the entity's eid - which is not part of dict structure itself - """ - dumpable = self.cw_attr_cache.copy() - dumpable['eid'] = self.eid - return dumpable - - def cw_adapt_to(self, interface): - """return an adapter the entity to the given interface name. - - return None if it can not be adapted. - """ - cache = self._cw_adapters_cache - try: - return cache[interface] - except KeyError: - adapter = self._cw.vreg['adapters'].select_or_none( - interface, self._cw, entity=self) - cache[interface] = adapter - return adapter - - def has_eid(self): # XXX cw_has_eid - """return True if the entity has an attributed eid (False - meaning that the entity has to be created - """ - try: - int(self.eid) - return True - except (ValueError, TypeError): - return False - - def cw_is_saved(self): - """during entity creation, there is some time during which the entity - has an eid attributed though it's not saved (eg during - 'before_add_entity' hooks). You can use this method to ensure the entity - has an eid *and* is saved in its source. 
- """ - return self.has_eid() and self._cw_is_saved - - @cached - def cw_metainformation(self): - metas = self._cw.entity_metas(self.eid) - metas['source'] = self._cw.source_defs()[metas['source']] - return metas - - def cw_check_perm(self, action): - self.e_schema.check_perm(self._cw, action, eid=self.eid) - - def cw_has_perm(self, action): - return self.e_schema.has_perm(self._cw, action, eid=self.eid) - - def view(self, __vid, __registry='views', w=None, initargs=None, **kwargs): # XXX cw_view - """shortcut to apply a view on this entity""" - if initargs is None: - initargs = kwargs - else: - initargs.update(kwargs) - view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset, - row=self.cw_row, col=self.cw_col, - **initargs) - return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs) - - def absolute_url(self, *args, **kwargs): # XXX cw_url - """return an absolute url to view this entity""" - # use *args since we don't want first argument to be "anonymous" to - # avoid potential clash with kwargs - if args: - assert len(args) == 1, 'only 0 or 1 non-named-argument expected' - method = args[0] - else: - method = None - # in linksearch mode, we don't want external urls else selecting - # the object for use in the relation is tricky - # XXX search_state is web specific - use_ext_id = False - if 'base_url' not in kwargs and \ - getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': - sourcemeta = self.cw_metainformation()['source'] - if sourcemeta.get('use-cwuri-as-url'): - return self.cwuri # XXX consider kwargs? - if sourcemeta.get('base-url'): - kwargs['base_url'] = sourcemeta['base-url'] - use_ext_id = True - if method in (None, 'view'): - kwargs['_restpath'] = self.rest_path(use_ext_id) - else: - kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid - return self._cw.build_url(method, **kwargs) - - def rest_path(self, use_ext_eid=False): # XXX cw_rest_path - """returns a REST-like (relative) path for this entity""" - mainattr, needcheck = self.cw_rest_attr_info() - etype = str(self.e_schema) - path = etype.lower() - fallback = False - if mainattr != 'eid': - value = getattr(self, mainattr) - if not can_use_rest_path(value): - mainattr = 'eid' - path = None - elif needcheck: - # make sure url is not ambiguous - try: - nbresults = self.__unique - except AttributeError: - rql = 'Any COUNT(X) WHERE X is %s, X %s %%(value)s' % ( - etype, mainattr) - nbresults = self.__unique = self._cw.execute(rql, {'value' : value})[0][0] - if nbresults != 1: # ambiguity? - mainattr = 'eid' - path = None - if mainattr == 'eid': - if use_ext_eid: - value = self.cw_metainformation()['extid'] - else: - value = self.eid - if path is None: - # fallback url: / url is used as cw entities uri, - # prefer it to //eid/ - return text_type(value) - return u'%s/%s' % (path, self._cw.url_quote(value)) - - def cw_attr_metadata(self, attr, metadata): - """return a metadata for an attribute (None if unspecified)""" - value = getattr(self, '%s_%s' % (attr, metadata), None) - if value is None and metadata == 'encoding': - value = self._cw.vreg.property_value('ui.encoding') - return value - - def printable_value(self, attr, value=_marker, attrtype=None, - format='text/html', displaytime=True): # XXX cw_printable_value - """return a displayable value (i.e. 
unicode string) which may contains - html tags - """ - attr = str(attr) - if value is _marker: - value = getattr(self, attr) - if isinstance(value, string_types): - value = value.strip() - if value is None or value == '': # don't use "not", 0 is an acceptable value - return u'' - if attrtype is None: - attrtype = self.e_schema.destination(attr) - props = self.e_schema.rdef(attr) - if attrtype == 'String': - # internalinalized *and* formatted string such as schema - # description... - if props.internationalizable: - value = self._cw._(value) - attrformat = self.cw_attr_metadata(attr, 'format') - if attrformat: - return self._cw_mtc_transform(value, attrformat, format, - self._cw.encoding) - elif attrtype == 'Bytes': - attrformat = self.cw_attr_metadata(attr, 'format') - if attrformat: - encoding = self.cw_attr_metadata(attr, 'encoding') - return self._cw_mtc_transform(value.getvalue(), attrformat, format, - encoding) - return u'' - value = self._cw.printable_value(attrtype, value, props, - displaytime=displaytime) - if format == 'text/html': - value = xml_escape(value) - return value - - def _cw_mtc_transform(self, data, format, target_format, encoding, - _engine=ENGINE): - trdata = TransformData(data, format, encoding, appobject=self) - data = _engine.convert(trdata, target_format).decode() - if target_format == 'text/html': - data = soup2xhtml(data, self._cw.encoding) - return data - - # entity cloning ########################################################## - - def copy_relations(self, ceid): # XXX cw_copy_relations - """copy relations of the object with the given eid on this - object (this method is called on the newly created copy, and - ceid designates the original entity). - - By default meta and composite relations are skipped. - Overrides this if you want another behaviour - """ - assert self.has_eid() - execute = self._cw.execute - skip_copy_for = {'subject': set(), 'object': set()} - for rtype in self.skip_copy_for: - skip_copy_for['subject'].add(rtype) - warn('[3.14] skip_copy_for on entity classes (%s) is deprecated, ' - 'use cw_skip_for instead with list of couples (rtype, role)' % self.cw_etype, - DeprecationWarning) - for rtype, role in self.cw_skip_copy_for: - assert role in ('subject', 'object'), role - skip_copy_for[role].add(rtype) - for rschema in self.e_schema.subject_relations(): - if rschema.type in skip_copy_for['subject']: - continue - if rschema.final or rschema.meta: - continue - # skip already defined relations - if getattr(self, rschema.type): - continue - # XXX takefirst=True to remove warning triggered by ambiguous relations - rdef = self.e_schema.rdef(rschema, takefirst=True) - # skip composite relation - if rdef.composite: - continue - # skip relation with card in ?1 else we either change the copied - # object (inlined relation) or inserting some inconsistency - if rdef.cardinality[1] in '?1': - continue - rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( - rschema.type, rschema.type) - execute(rql, {'x': self.eid, 'y': ceid}) - self.cw_clear_relation_cache(rschema.type, 'subject') - for rschema in self.e_schema.object_relations(): - if rschema.meta: - continue - # skip already defined relations - if self.related(rschema.type, 'object'): - continue - if rschema.type in skip_copy_for['object']: - continue - # XXX takefirst=True to remove warning triggered by ambiguous relations - rdef = self.e_schema.rdef(rschema, 'object', takefirst=True) - # skip composite relation - if rdef.composite: - continue - # skip relation with card in ?1 else we 
either change the copied - # object (inlined relation) or inserting some inconsistency - if rdef.cardinality[0] in '?1': - continue - rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( - rschema.type, rschema.type) - execute(rql, {'x': self.eid, 'y': ceid}) - self.cw_clear_relation_cache(rschema.type, 'object') - - # data fetching methods ################################################### - - @cached - def as_rset(self): # XXX .cw_as_rset - """returns a resultset containing `self` information""" - rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', - {'x': self.eid}, [(self.cw_etype,)]) - rset.req = self._cw - return rset - - def _cw_to_complete_relations(self): - """by default complete final relations to when calling .complete()""" - for rschema in self.e_schema.subject_relations(): - if rschema.final: - continue - targets = rschema.objects(self.e_schema) - if rschema.inlined: - matching_groups = self._cw.user.matching_groups - if all(matching_groups(e.get_groups('read')) and - rschema.rdef(self.e_schema, e).get_groups('read') - for e in targets): - yield rschema, 'subject' - - def _cw_to_complete_attributes(self, skip_bytes=True, skip_pwd=True): - for rschema, attrschema in self.e_schema.attribute_definitions(): - # skip binary data by default - if skip_bytes and attrschema.type == 'Bytes': - continue - attr = rschema.type - if attr == 'eid': - continue - # password retrieval is blocked at the repository server level - rdef = rschema.rdef(self.e_schema, attrschema) - if not self._cw.user.matching_groups(rdef.get_groups('read')) \ - or (attrschema.type == 'Password' and skip_pwd): - self.cw_attr_cache[attr] = None - continue - yield attr - - _cw_completed = False - def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): # XXX cw_complete - """complete this entity by adding missing attributes (i.e. query the - repository to fill the entity) - - :type skip_bytes: bool - :param skip_bytes: - if true, attribute of type Bytes won't be considered - """ - assert self.has_eid() - if self._cw_completed: - return - if attributes is None: - self._cw_completed = True - varmaker = rqlvar_maker() - V = next(varmaker) - rql = ['WHERE %s eid %%(x)s' % V] - selected = [] - for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)): - # if attribute already in entity, nothing to do - if attr in self.cw_attr_cache: - continue - # case where attribute must be completed, but is not yet in entity - var = next(varmaker) - rql.append('%s %s %s' % (V, attr, var)) - selected.append((attr, var)) - # +1 since this doesn't include the main variable - lastattr = len(selected) + 1 - # don't fetch extra relation if attributes specified or of the entity is - # coming from an external source (may lead to error) - if attributes is None and self.cw_metainformation()['source']['uri'] == 'system': - # fetch additional relations (restricted to 0..1 relations) - for rschema, role in self._cw_to_complete_relations(): - rtype = rschema.type - if self.cw_relation_cached(rtype, role): - continue - # at this point we suppose that: - # * this is a inlined relation - # * entity (self) is the subject - # * user has read perm on the relation and on the target entity - assert rschema.inlined - assert role == 'subject' - var = next(varmaker) - # keep outer join anyway, we don't want .complete to crash on - # missing mandatory relation (see #1058267) - rql.append('%s %s %s?' 
% (V, rtype, var)) - selected.append(((rtype, role), var)) - if selected: - # select V, we need it as the left most selected variable - # if some outer join are included to fetch inlined relations - rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected), - ','.join(rql)) - try: - rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0] - except IndexError: - raise Exception('unable to fetch attributes for entity with eid %s' - % self.eid) - # handle attributes - for i in range(1, lastattr): - self.cw_attr_cache[str(selected[i-1][0])] = rset[i] - # handle relations - for i in range(lastattr, len(rset)): - rtype, role = selected[i-1][0] - value = rset[i] - if value is None: - rrset = ResultSet([], rql, {'x': self.eid}) - rrset.req = self._cw - else: - rrset = self._cw.eid_rset(value) - self.cw_set_relation_cache(rtype, role, rrset) - - def cw_attr_value(self, name): - """get value for the attribute relation , query the repository - to get the value if necessary. - - :type name: str - :param name: name of the attribute to get - """ - try: - return self.cw_attr_cache[name] - except KeyError: - if not self.cw_is_saved(): - return None - rql = "Any A WHERE X eid %%(x)s, X %s A" % name - try: - rset = self._cw.execute(rql, {'x': self.eid}) - except Unauthorized: - self.cw_attr_cache[name] = value = None - else: - assert rset.rowcount <= 1, (self, rql, rset.rowcount) - try: - self.cw_attr_cache[name] = value = rset.rows[0][0] - except IndexError: - # probably a multisource error - self.critical("can't get value for attribute %s of entity with eid %s", - name, self.eid) - if self.e_schema.destination(name) == 'String': - self.cw_attr_cache[name] = value = self._cw._('unaccessible') - else: - self.cw_attr_cache[name] = value = None - return value - - def related(self, rtype, role='subject', limit=None, entities=False, # XXX .cw_related - safe=False, targettypes=None): - """returns a resultset of related entities - - :param rtype: - the name of the relation, aka relation type - :param role: - the role played by 'self' in the relation ('subject' or 'object') - :param limit: - resultset's maximum size - :param entities: - if True, the entites are returned; if False, a result set is returned - :param safe: - if True, an empty rset/list of entities will be returned in case of - :exc:`Unauthorized`, else (the default), the exception is propagated - :param targettypes: - a tuple of target entity types to restrict the query - """ - rtype = str(rtype) - # Caching restricted/limited results is best avoided. 
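A short usage sketch for the `related()` call documented above, assuming a CubicWeb shell session; the `person` variable and the 'works_for' relation are illustrative only:

    >>> person.related('works_for')                       # subject role by default, returns a (cached) ResultSet
    >>> person.related('works_for', entities=True)        # same query, returned as a list of entities
    >>> person.related('works_for', 'object', limit=5)    # limit given, so the relation cache is bypassed
    >>> person.related('works_for', safe=True)            # empty rset instead of propagating Unauthorized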
- cacheable = limit is None and targettypes is None - if cacheable: - cache_key = '%s_%s' % (rtype, role) - if cache_key in self._cw_related_cache: - return self._cw_related_cache[cache_key][entities] - if not self.has_eid(): - if entities: - return [] - return self._cw.empty_rset() - rql = self.cw_related_rql(rtype, role, limit=limit, targettypes=targettypes) - try: - rset = self._cw.execute(rql, {'x': self.eid}) - except Unauthorized: - if not safe: - raise - rset = self._cw.empty_rset() - if entities: - if cacheable: - self.cw_set_relation_cache(rtype, role, rset) - return self.related(rtype, role, entities=entities) - return list(rset.entities()) - else: - return rset - - def cw_related_rql(self, rtype, role='subject', targettypes=None, limit=None): - vreg = self._cw.vreg - rschema = vreg.schema[rtype] - select = Select() - mainvar, evar = select.get_variable('X'), select.get_variable('E') - select.add_selected(mainvar) - if limit is not None: - select.set_limit(limit) - select.add_eid_restriction(evar, 'x', 'Substitute') - if role == 'subject': - rel = make_relation(evar, rtype, (mainvar,), VariableRef) - select.add_restriction(rel) - if targettypes is None: - targettypes = rschema.objects(self.e_schema) - else: - select.add_constant_restriction(mainvar, 'is', - targettypes, 'etype') - gcard = greater_card(rschema, (self.e_schema,), targettypes, 0) - else: - rel = make_relation(mainvar, rtype, (evar,), VariableRef) - select.add_restriction(rel) - if targettypes is None: - targettypes = rschema.subjects(self.e_schema) - else: - select.add_constant_restriction(mainvar, 'is', targettypes, - 'etype') - gcard = greater_card(rschema, targettypes, (self.e_schema,), 1) - etypecls = vreg['etypes'].etype_class(targettypes[0]) - if len(targettypes) > 1: - fetchattrs = vreg['etypes'].fetch_attrs(targettypes) - self._fetch_ambiguous_rtypes(select, mainvar, fetchattrs, - targettypes, vreg.schema) - else: - fetchattrs = etypecls.fetch_attrs - etypecls.fetch_rqlst(self._cw.user, select, mainvar, fetchattrs, - settype=False) - # optimisation: remove ORDERBY if cardinality is 1 or ? (though - # greater_card return 1 for those both cases) - if gcard == '1': - select.remove_sort_terms() - elif not select.orderby: - # if modification_date is already retrieved, we use it instead - # of adding another variable for sorting. This should not be - # problematic, but it is with sqlserver, see ticket #694445 - for rel in select.where.get_nodes(RqlRelation): - if (rel.r_type == 'modification_date' - and rel.children[0].variable == mainvar - and rel.children[1].operator == '='): - var = rel.children[1].children[0].variable - select.add_sort_var(var, asc=False) - break - else: - mdvar = select.make_variable() - rel = make_relation(mainvar, 'modification_date', - (mdvar,), VariableRef) - select.add_restriction(rel) - select.add_sort_var(mdvar, asc=False) - return select.as_string() - - # generic vocabulary methods ############################################## - - def cw_linkable_rql(self, rtype, targettype, role, ordermethod=None, - vocabconstraints=True, lt_infos={}, limit=None): - """build a rql to fetch targettype entities either related or unrelated - to this entity using (rtype, role) relation. - - Consider relation permissions so that returned entities may be actually - linked by `rtype`. - - `lt_infos` are supplementary informations, usually coming from __linkto - parameter, that can help further restricting the results in case current - entity is not yet created. 
It is a dict describing entities the current - entity will be linked to, which keys are (rtype, role) tuples and values - are a list of eids. - """ - return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None, - vocabconstraints=vocabconstraints, - lt_infos=lt_infos, limit=limit, - unrelated_only=False) - - def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None, - vocabconstraints=True, lt_infos={}, limit=None): - """build a rql to fetch `targettype` entities unrelated to this entity - using (rtype, role) relation. - - Consider relation permissions so that returned entities may be actually - linked by `rtype`. - - `lt_infos` are supplementary informations, usually coming from __linkto - parameter, that can help further restricting the results in case current - entity is not yet created. It is a dict describing entities the current - entity will be linked to, which keys are (rtype, role) tuples and values - are a list of eids. - """ - return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None, - vocabconstraints=vocabconstraints, - lt_infos=lt_infos, limit=limit, - unrelated_only=True) - - def _cw_compute_linkable_rql(self, rtype, targettype, role, ordermethod=None, - vocabconstraints=True, lt_infos={}, limit=None, - unrelated_only=False): - """build a rql to fetch `targettype` entities that may be related to - this entity using the (rtype, role) relation. - - By default (unrelated_only=False), this includes the already linked - entities as well as the unrelated ones. If `unrelated_only` is True, the - rql filters out the already related entities. - """ - ordermethod = ordermethod or 'fetch_unrelated_order' - rschema = self._cw.vreg.schema.rschema(rtype) - rdef = rschema.role_rdef(self.e_schema, targettype, role) - rewriter = RQLRewriter(self._cw) - select = Select() - # initialize some variables according to the `role` of `self` in the - # relation (variable names must respect constraints conventions): - # * variable for myself (`evar`) - # * variable for searched entities (`searchvedvar`) - if role == 'subject': - evar = subjvar = select.get_variable('S') - searchedvar = objvar = select.get_variable('O') - else: - searchedvar = subjvar = select.get_variable('S') - evar = objvar = select.get_variable('O') - select.add_selected(searchedvar) - if limit is not None: - select.set_limit(limit) - # initialize some variables according to `self` existence - if rdef.role_cardinality(neg_role(role)) in '?1': - # if cardinality in '1?', we want a target entity which isn't - # already linked using this relation - variable = select.make_variable() - if role == 'subject': - rel = make_relation(variable, rtype, (searchedvar,), VariableRef) - else: - rel = make_relation(searchedvar, rtype, (variable,), VariableRef) - select.add_restriction(Not(rel)) - elif self.has_eid() and unrelated_only: - # elif we have an eid, we don't want a target entity which is - # already linked to ourself through this relation - rel = make_relation(subjvar, rtype, (objvar,), VariableRef) - select.add_restriction(Not(rel)) - if self.has_eid(): - rel = make_relation(evar, 'eid', ('x', 'Substitute'), Constant) - select.add_restriction(rel) - args = {'x': self.eid} - if role == 'subject': - sec_check_args = {'fromeid': self.eid} - else: - sec_check_args = {'toeid': self.eid} - existant = None # instead of 'SO', improve perfs - else: - args = {} - sec_check_args = {} - existant = searchedvar.name - # undefine unused evar, or the type resolver will consider it - 
select.undefine_variable(evar) - # retrieve entity class for targettype to compute base rql - etypecls = self._cw.vreg['etypes'].etype_class(targettype) - etypecls.fetch_rqlst(self._cw.user, select, searchedvar, - ordermethod=ordermethod) - # from now on, we need variable type resolving - self._cw.vreg.solutions(self._cw, select, args) - # insert RQL expressions for schema constraints into the rql syntax tree - if vocabconstraints: - cstrcls = (RQLVocabularyConstraint, RQLConstraint) - else: - cstrcls = RQLConstraint - lt_infos = pruned_lt_info(self.e_schema, lt_infos or {}) - # if there are still lt_infos, use set to keep track of added eid - # relations (adding twice the same eid relation is incorrect RQL) - eidvars = set() - for cstr in rdef.constraints: - # consider constraint.mainvars to check if constraint apply - if isinstance(cstr, cstrcls) and searchedvar.name in cstr.mainvars: - if not self.has_eid(): - if lt_infos: - # we can perhaps further restrict with linkto infos using - # a custom constraint built from cstr and lt_infos - cstr = build_cstr_with_linkto_infos( - cstr, args, searchedvar, evar, lt_infos, eidvars) - if cstr is None: - continue # could not build constraint -> discard - elif evar.name in cstr.mainvars: - continue - # compute a varmap suitable to RQLRewriter.rewrite argument - varmap = dict((v, v) for v in (searchedvar.name, evar.name) - if v in select.defined_vars and v in cstr.mainvars) - # rewrite constraint by constraint since we want a AND between - # expressions. - rewriter.rewrite(select, [(varmap, (cstr,))], args, existant) - # insert security RQL expressions granting the permission to 'add' the - # relation into the rql syntax tree, if necessary - rqlexprs = rdef.get_rqlexprs('add') - if not self.has_eid(): - rqlexprs = [rqlexpr for rqlexpr in rqlexprs - if searchedvar.name in rqlexpr.mainvars] - if rqlexprs and not rdef.has_perm(self._cw, 'add', **sec_check_args): - # compute a varmap suitable to RQLRewriter.rewrite argument - varmap = dict((v, v) for v in (searchedvar.name, evar.name) - if v in select.defined_vars) - # rewrite all expressions at once since we want a OR between them. - rewriter.rewrite(select, [(varmap, rqlexprs)], args, existant) - # ensure we have an order defined - if not select.orderby: - select.add_sort_var(select.defined_vars[searchedvar.name]) - # we're done, turn the rql syntax tree as a string - rql = select.as_string() - return rql, args - - def unrelated(self, rtype, targettype, role='subject', limit=None, - ordermethod=None, lt_infos={}): # XXX .cw_unrelated - """return a result set of target type objects that may be related - by a given relation, with self as subject or object - """ - try: - rql, args = self.cw_unrelated_rql(rtype, targettype, role, limit=limit, - ordermethod=ordermethod, lt_infos=lt_infos) - except Unauthorized: - return self._cw.empty_rset() - return self._cw.execute(rql, args) - - # relations cache handling ################################################# - - def cw_relation_cached(self, rtype, role): - """return None if the given relation isn't already cached on the - instance, else the content of the cache (a 2-uple (rset, entities)). 
- """ - return self._cw_related_cache.get('%s_%s' % (rtype, role)) - - def cw_set_relation_cache(self, rtype, role, rset): - """set cached values for the given relation""" - if rset: - related = list(rset.entities(0)) - rschema = self._cw.vreg.schema.rschema(rtype) - if role == 'subject': - rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1] - target = 'object' - else: - rcard = rschema.rdef(related[0].e_schema, self.e_schema).cardinality[0] - target = 'subject' - if rcard in '?1': - for rentity in related: - rentity._cw_related_cache['%s_%s' % (rtype, target)] = ( - self.as_rset(), (self,)) - else: - related = () - self._cw_related_cache['%s_%s' % (rtype, role)] = (rset, related) - - def cw_clear_relation_cache(self, rtype=None, role=None): - """clear cached values for the given relation or the entire cache if - no relation is given - """ - if rtype is None: - self._cw_related_cache.clear() - self._cw_adapters_cache.clear() - else: - assert role - self._cw_related_cache.pop('%s_%s' % (rtype, role), None) - - def cw_clear_all_caches(self): - """flush all caches on this entity. Further attributes/relations access - will triggers new database queries to get back values. - - If you use custom caches on your entity class (take care to @cached!), - you should override this method to clear them as well. - """ - # clear attributes cache - self._cw_completed = False - self.cw_attr_cache.clear() - # clear relations cache - self.cw_clear_relation_cache() - # rest path unique cache - try: - del self.__unique - except AttributeError: - pass - - # raw edition utilities ################################################### - - def cw_set(self, **kwargs): - """update this entity using given attributes / relation, working in the - same fashion as :meth:`cw_instantiate`. - - Example (in a shell session): - - >>> c = rql('Any X WHERE X is Company').get_entity(0, 0) - >>> p = rql('Any X WHERE X is Person').get_entity(0, 0) - >>> c.cw_set(name=u'Logilab') - >>> p.cw_set(firstname=u'John', lastname=u'Doe', works_for=c) - - You can also set relations where the entity has 'object' role by - prefixing the relation name by 'reverse_'. Also, relation values may be - an entity or eid, a list of entities or eids, or None (meaning that all - relations of the given type from or to this object should be deleted). 
- """ - assert kwargs - assert self.cw_is_saved(), "should not call set_attributes while entity "\ - "hasn't been saved yet" - rql, qargs, pendingrels, attrcache = self._cw_build_entity_query(kwargs) - if rql: - rql = 'SET ' + rql - qargs['x'] = self.eid - if ' WHERE ' in rql: - rql += ', X eid %(x)s' - else: - rql += ' WHERE X eid %(x)s' - self._cw.execute(rql, qargs) - # update current local object _after_ the rql query to avoid - # interferences between the query execution itself and the cw_edited / - # skip_security machinery - self._cw_update_attr_cache(attrcache) - self._cw_handle_pending_relations(self.eid, pendingrels, self._cw.execute) - # XXX update relation cache - - def cw_delete(self, **kwargs): - assert self.has_eid(), self.eid - self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, - {'x': self.eid}, **kwargs) - - # server side utilities #################################################### - - def _cw_clear_local_perm_cache(self, action): - for rqlexpr in self.e_schema.get_rqlexprs(action): - self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) - - # deprecated stuff ######################################################### - - @deprecated('[3.16] use cw_set() instead of set_attributes()') - def set_attributes(self, **kwargs): # XXX cw_set_attributes - if kwargs: - self.cw_set(**kwargs) - - @deprecated('[3.16] use cw_set() instead of set_relations()') - def set_relations(self, **kwargs): # XXX cw_set_relations - """add relations to the given object. To set a relation where this entity - is the object of the relation, use 'reverse_' as argument name. - - Values may be an entity or eid, a list of entities or eids, or None - (meaning that all relations of the given type from or to this object - should be deleted). - """ - if kwargs: - self.cw_set(**kwargs) - - @deprecated('[3.13] use entity.cw_clear_all_caches()') - def clear_all_caches(self): - return self.cw_clear_all_caches() - - -# attribute and relation descriptors ########################################## - -class Attribute(object): - """descriptor that controls schema attribute access""" - - def __init__(self, attrname): - assert attrname != 'eid' - self._attrname = attrname - - def __get__(self, eobj, eclass): - if eobj is None: - return self - return eobj.cw_attr_value(self._attrname) - - @deprecated('[3.10] assign to entity.cw_attr_cache[attr] or entity.cw_edited[attr]') - def __set__(self, eobj, value): - if hasattr(eobj, 'cw_edited') and not eobj.cw_edited.saved: - eobj.cw_edited[self._attrname] = value - else: - eobj.cw_attr_cache[self._attrname] = value - - -class Relation(object): - """descriptor that controls schema relation access""" - - def __init__(self, rschema, role): - self._rtype = rschema.type - self._role = role - - def __get__(self, eobj, eclass): - if eobj is None: - raise AttributeError('%s can only be accessed from instances' - % self._rtype) - return eobj.related(self._rtype, self._role, entities=True) - - def __set__(self, eobj, value): - raise NotImplementedError - - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(Entity, getLogger('cubicweb.entity')) diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/__init__.py --- a/etwist/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" CW - nevow/twisted client - -""" diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/http.py --- a/etwist/http.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,43 +0,0 @@ -"""twisted server for CubicWeb web instances - -:organization: Logilab -:copyright: 2001-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -__docformat__ = "restructuredtext en" - -class HTTPResponse(object): - """An object representing an HTTP Response to be sent to the client. - """ - def __init__(self, twisted_request, code=None, headers=None, stream=None): - self._headers_out = headers - self._twreq = twisted_request - self._stream = stream - self._code = code - - self._init_headers() - self._finalize() - - def _init_headers(self): - if self._headers_out is None: - return - # initialize headers - for k, values in self._headers_out.getAllRawHeaders(): - self._twreq.responseHeaders.setRawHeaders(k, values) - # add content-length if not present - if (self._headers_out.getHeader('content-length') is None - and self._stream is not None): - self._twreq.setHeader('content-length', len(self._stream)) - - def _finalize(self): - # we must set code before writing anything, else it's too late - if self._code is not None: - self._twreq.setResponseCode(self._code) - if self._stream is not None: - self._twreq.write(str(self._stream)) - self._twreq.finish() - - def __repr__(self): - return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code) diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/request.py --- a/etwist/request.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,59 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
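To make the header and finalisation flow of `HTTPResponse` above concrete, here is a self-contained sketch driving it with hand-written stubs, assuming CubicWeb itself is importable; the stub classes are illustrative stand-ins for the real twisted request and outgoing headers objects and only implement the methods `_init_headers()` and `_finalize()` actually call:

    from cubicweb.etwist.http import HTTPResponse

    class StubHeaders(object):
        """stand-in for the outgoing headers object"""
        def __init__(self, raw):
            self.raw = raw                        # dict: header name -> list of values
        def getAllRawHeaders(self):
            return self.raw.items()
        def getHeader(self, name):
            values = self.raw.get(name)
            return values[0] if values else None

    class StubRequest(object):
        """stand-in for twisted.web.http.Request, recording what is done to it"""
        def __init__(self):
            self.headers = {}
            self.responseHeaders = self           # setRawHeaders calls land on this object as well
            self.code = None
            self.written = []
            self.finished = False
        def setRawHeaders(self, name, values):
            self.headers[name] = list(values)
        def setHeader(self, name, value):
            self.headers[name] = [value]
        def setResponseCode(self, code):
            self.code = code
        def write(self, data):
            self.written.append(data)
        def finish(self):
            self.finished = True

    req = StubRequest()
    HTTPResponse(req, code=500, headers=StubHeaders({'content-type': ['text/plain']}),
                 stream='internal error')
    assert req.code == 500 and req.finished
    assert req.headers['content-length'] == [len('internal error')]   # added because it was missing
    assert req.written == ['internal error']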
-"""Twisted request handler for CubicWeb""" - -__docformat__ = "restructuredtext en" - - -from cubicweb.web.request import CubicWebRequestBase - - -class CubicWebTwistedRequestAdapter(CubicWebRequestBase): - """ from twisted .req to cubicweb .form - req.files are put into .form[] - """ - def __init__(self, req, vreg, https): - self._twreq = req - super(CubicWebTwistedRequestAdapter, self).__init__( - vreg, https, req.args, headers=req.received_headers) - for key, name_stream_list in req.files.items(): - for name, stream in name_stream_list: - if name is not None: - name = unicode(name, self.encoding) - self.form.setdefault(key, []).append((name, stream)) - # 3.16.4 backward compat - if len(self.form[key]) == 1: - self.form[key] = self.form[key][0] - self.content = self._twreq.content # stream - - def http_method(self): - """returns 'POST', 'GET', 'HEAD', etc.""" - return self._twreq.method - - def relative_path(self, includeparams=True): - """return the normalized path of the request (ie at least relative to - the instance's root, but some other normalization may be needed so that - the returned path may be used to compare to generated urls - - :param includeparams: - boolean indicating if GET form parameters should be kept in the path - """ - path = self._twreq.uri[1:] # remove the root '/' - if not includeparams: - path = path.split('?', 1)[0] - return path diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/server.py --- a/etwist/server.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,298 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""twisted server for CubicWeb web instances""" -__docformat__ = "restructuredtext en" - -import sys -import select -import traceback -import threading -from cgi import FieldStorage, parse_header - -from six.moves.urllib.parse import urlsplit, urlunsplit - -from cubicweb.statsd_logger import statsd_timeit - -from twisted.internet import reactor, task, threads -from twisted.web import http, server -from twisted.web import resource -from twisted.web.server import NOT_DONE_YET - - -from logilab.mtconverter import xml_escape -from logilab.common.decorators import monkeypatch - -from cubicweb import ConfigurationError, CW_EVENT_MANAGER -from cubicweb.utils import json_dumps -from cubicweb.web import DirectResponse -from cubicweb.web.application import CubicWebPublisher -from cubicweb.etwist.request import CubicWebTwistedRequestAdapter -from cubicweb.etwist.http import HTTPResponse - -def start_task(interval, func): - lc = task.LoopingCall(func) - # wait until interval has expired to actually start the task, else we have - # to wait all tasks to be finished for the server to be actually started - lc.start(interval, now=False) - - -class CubicWebRootResource(resource.Resource): - def __init__(self, config, repo): - resource.Resource.__init__(self) - self.config = config - # instantiate publisher here and not in init_publisher to get some - # checks done before daemonization (eg versions consistency) - self.appli = CubicWebPublisher(repo, config) - self.base_url = config['base-url'] - self.https_url = config['https-url'] - global MAX_POST_LENGTH - MAX_POST_LENGTH = config['max-post-length'] - - def init_publisher(self): - config = self.config - # when we have an in-memory repository, clean unused sessions every XX - # seconds and properly shutdown the server - if config['repository-uri'] == 'inmemory://': - if config.mode != 'test': - reactor.addSystemEventTrigger('before', 'shutdown', - self.shutdown_event) - self.appli.repo.start_looping_tasks() - self.set_url_rewriter() - CW_EVENT_MANAGER.bind('after-registry-reload', self.set_url_rewriter) - - def start_service(self): - start_task(self.appli.session_handler.clean_sessions_interval, - self.appli.session_handler.clean_sessions) - - def set_url_rewriter(self): - self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter') - - def shutdown_event(self): - """callback fired when the server is shutting down to properly - clean opened sessions - """ - self.appli.repo.shutdown() - - def getChild(self, path, request): - """Indicate which resource to use to process down the URL's path""" - return self - - def render(self, request): - """Render a page from the root resource""" - # reload modified files in debug mode - if self.config.debugmode: - self.config.uiprops.reload_if_needed() - if self.https_url: - self.config.https_uiprops.reload_if_needed() - self.appli.vreg.reload_if_needed() - if self.config['profile']: # default profiler don't trace threads - return self.render_request(request) - else: - deferred = threads.deferToThread(self.render_request, request) - return NOT_DONE_YET - - @statsd_timeit - def render_request(self, request): - try: - # processing HUGE files (hundred of megabytes) in http.processReceived - # blocks other HTTP requests processing - # due to the clumsy & slow parsing algorithm of cgi.FieldStorage - # so we deferred that part to the cubicweb thread - request.process_multipart() - return self._render_request(request) - except Exception: - trace = traceback.format_exc() - return HTTPResponse(stream='
      <pre>%s</pre>
      ' % xml_escape(trace), - code=500, twisted_request=request) - - def _render_request(self, request): - origpath = request.path - host = request.host - # dual http/https access handling: expect a rewrite rule to prepend - # 'https' to the path to detect https access - https = False - if origpath.split('/', 2)[1] == 'https': - origpath = origpath[6:] - request.uri = request.uri[6:] - https = True - if self.url_rewriter is not None: - # XXX should occur before authentication? - path = self.url_rewriter.rewrite(host, origpath, request) - request.uri.replace(origpath, path, 1) - else: - path = origpath - req = CubicWebTwistedRequestAdapter(request, self.appli.vreg, https) - try: - ### Try to generate the actual request content - content = self.appli.handle_request(req, path) - except DirectResponse as ex: - return ex.response - # at last: create twisted object - return HTTPResponse(code = req.status_out, - headers = req.headers_out, - stream = content, - twisted_request=req._twreq) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - @classmethod - def debug(cls, msg, *a, **kw): - pass - info = warning = error = critical = exception = debug - - -JSON_PATHS = set(('json',)) -FRAME_POST_PATHS = set(('validateform',)) - -orig_gotLength = http.Request.gotLength -@monkeypatch(http.Request) -def gotLength(self, length): - orig_gotLength(self, length) - if length > MAX_POST_LENGTH: # length is 0 on GET - path = self.channel._path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1] - self.clientproto = 'HTTP/1.1' # not yet initialized - self.channel.persistent = 0 # force connection close on cleanup - self.setResponseCode(http.REQUEST_ENTITY_TOO_LARGE) - if path in JSON_PATHS: # XXX better json path detection - self.setHeader('content-type',"application/json") - body = json_dumps({'reason': 'request max size exceeded'}) - elif path in FRAME_POST_PATHS: # XXX better frame post path detection - self.setHeader('content-type',"text/html") - body = ('' % json_dumps( (False, 'request max size exceeded', None) )) - else: - self.setHeader('content-type',"text/html") - body = ("Processing Failed" - "request max size exceeded") - self.setHeader('content-length', str(len(body))) - self.write(body) - # see request.finish(). Done here since we get error due to not full - # initialized request - self.finished = 1 - if not self.queued: - self._cleanup() - for d in self.notifications: - d.callback(None) - self.notifications = [] - -@monkeypatch(http.Request) -def requestReceived(self, command, path, version): - """Called by channel when all data has been received. - - This method is not intended for users. 
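The oversized-request handling monkeypatched into `gotLength` above picks the error body from the last segment of the request path; a standalone sketch of that dispatch, with the standard library's json module standing in for cubicweb.utils.json_dumps and the html/script envelope of the real handler left out for brevity:

    from json import dumps as json_dumps       # stand-in for cubicweb.utils.json_dumps

    JSON_PATHS = set(('json',))                 # same sets as defined above
    FRAME_POST_PATHS = set(('validateform',))

    def error_payload(request_path):
        """return (content-type, body) for a request exceeding max-post-length"""
        path = request_path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1]
        if path in JSON_PATHS:
            return 'application/json', json_dumps({'reason': 'request max size exceeded'})
        elif path in FRAME_POST_PATHS:
            # the real handler wraps this in a <script> notifying the parent frame
            return 'text/html', json_dumps((False, 'request max size exceeded', None))
        return 'text/html', 'request max size exceeded'

    print(error_payload('/json?fname=reledit_form'))    # ('application/json', ...)
    print(error_payload('/validateform'))               # ('text/html', ...) for the hidden iframe
    print(error_payload('/view'))                       # plain text/html fallback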
- """ - self.content.seek(0, 0) - self.args = {} - self.files = {} - self.stack = [] - self.method, self.uri = command, path - self.clientproto = version - x = self.uri.split('?', 1) - if len(x) == 1: - self.path = self.uri - else: - self.path, argstring = x - self.args = http.parse_qs(argstring, 1) - # cache the client and server information, we'll need this later to be - # serialized and sent with the request so CGIs will work remotely - self.client = self.channel.transport.getPeer() - self.host = self.channel.transport.getHost() - # Argument processing - ctype = self.getHeader('content-type') - self._do_process_multipart = False - if self.method == "POST" and ctype: - key, pdict = parse_header(ctype) - if key == 'application/x-www-form-urlencoded': - self.args.update(http.parse_qs(self.content.read(), 1)) - self.content.seek(0) - elif key == 'multipart/form-data': - # defer this as it can be extremely time consumming - # with big files - self._do_process_multipart = True - self.process() - -@monkeypatch(http.Request) -def process_multipart(self): - if not self._do_process_multipart: - return - form = FieldStorage(self.content, self.received_headers, - environ={'REQUEST_METHOD': 'POST'}, - keep_blank_values=1, - strict_parsing=1) - for key in form: - values = form[key] - if not isinstance(values, list): - values = [values] - for value in values: - if value.filename: - if value.done != -1: # -1 is transfer has been interrupted - self.files.setdefault(key, []).append((value.filename, value.file)) - else: - self.files.setdefault(key, []).append((None, None)) - else: - self.args.setdefault(key, []).append(value.value) - -from logging import getLogger -from cubicweb import set_log_methods -LOGGER = getLogger('cubicweb.twisted') -set_log_methods(CubicWebRootResource, LOGGER) - -def run(config, debug=None, repo=None): - # repo may by passed during test. - # - # Test has already created a repo object so we should not create a new one. - # Explicitly passing the repo object avoid relying on the fragile - # config.repository() cache. We could imagine making repo a mandatory - # argument and receives it from the starting command directly. 
- if debug is not None: - config.debugmode = debug - config.check_writeable_uid_directory(config.appdatahome) - # create the site - if repo is None: - repo = config.repository() - root_resource = CubicWebRootResource(config, repo) - website = server.Site(root_resource) - # serve it via standard HTTP on port set in the configuration - port = config['port'] or 8080 - interface = config['interface'] - reactor.suggestThreadPoolSize(config['webserver-threadpool-size']) - reactor.listenTCP(port, website, interface=interface) - if not config.debugmode: - if sys.platform == 'win32': - raise ConfigurationError("Under windows, you must use the service management " - "commands (e.g : 'net start my_instance)'") - from logilab.common.daemon import daemonize - LOGGER.info('instance started in the background on %s', root_resource.base_url) - whichproc = daemonize(config['pid-file'], umask=config['umask']) - if whichproc: # 1 = orig process, 2 = first fork, None = second fork (eg daemon process) - return whichproc # parent process - root_resource.init_publisher() # before changing uid - if config['uid'] is not None: - from logilab.common.daemon import setugid - setugid(config['uid']) - root_resource.start_service() - LOGGER.info('instance started on %s', root_resource.base_url) - # avoid annoying warnign if not in Main Thread - signals = threading.currentThread().getName() == 'MainThread' - if config['profile']: - import cProfile - cProfile.runctx('reactor.run(installSignalHandlers=%s)' % signals, - globals(), locals(), config['profile']) - else: - reactor.run(installSignalHandlers=signals) diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/service.py --- a/etwist/service.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,99 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from __future__ import print_function - -import os -import sys - -try: - import win32serviceutil - import win32service -except ImportError: - print('Win32 extensions for Python are likely not installed.') - sys.exit(3) - -from os.path import join - -from cubicweb.etwist.server import (CubicWebRootResource, reactor, server) - -from logilab.common.shellutils import rm - -import logging -from logging import getLogger, handlers -from cubicweb import set_log_methods -from cubicweb.cwconfig import CubicWebConfiguration as cwcfg - -def _check_env(env): - env_vars = ('CW_INSTANCES_DIR', 'CW_INSTANCES_DATA_DIR', 'CW_RUNTIME_DIR') - for var in env_vars: - if var not in env: - raise Exception('The environment variables %s must be set.' 
% \ - ', '.join(env_vars)) - if not env.get('USERNAME'): - env['USERNAME'] = 'cubicweb' - -class CWService(object, win32serviceutil.ServiceFramework): - _svc_name_ = None - _svc_display_name_ = None - instance = None - - def __init__(self, *args, **kwargs): - win32serviceutil.ServiceFramework.__init__(self, *args, **kwargs) - cwcfg.load_cwctl_plugins() - logger = getLogger('cubicweb') - set_log_methods(CubicWebRootResource, logger) - - def SvcStop(self): - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) - logger = getLogger('cubicweb.twisted') - logger.info('stopping %s service' % self.instance) - reactor.stop() - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) - - def SvcDoRun(self): - self.ReportServiceStatus(win32service.SERVICE_START_PENDING) - logger = getLogger('cubicweb.twisted') - handler = handlers.NTEventLogHandler('cubicweb') - handler.setLevel(logging.INFO) - logger.addHandler(handler) - logger.info('starting %s service' % self.instance) - try: - _check_env(os.environ) - # create the site - config = cwcfg.config_for(self.instance) - config.init_log(force=True) - config.debugmode = False - logger.info('starting cubicweb instance %s ', self.instance) - config.info('clear ui caches') - for cachedir in ('uicache', 'uicachehttps'): - rm(join(config.appdatahome, cachedir, '*')) - root_resource = CubicWebRootResource(config, config.repository()) - website = server.Site(root_resource) - # serve it via standard HTTP on port set in the configuration - port = config['port'] or 8080 - logger.info('listening on port %s' % port) - reactor.listenTCP(port, website) - root_resource.init_publisher() - root_resource.start_service() - logger.info('instance started on %s', root_resource.base_url) - self.ReportServiceStatus(win32service.SERVICE_RUNNING) - reactor.run() - except Exception as e: - logger.error('service %s stopped (cause: %s)' % (self.instance, e)) - logger.exception('what happened ...') - self.ReportServiceStatus(win32service.SERVICE_STOPPED) diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/test/data/views.py --- a/etwist/test/data/views.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""only for unit tests !""" - -from cubicweb.view import View -from cubicweb.predicates import match_http_method - -class PutView(View): - __regid__ = 'put' - __select__ = match_http_method('PUT') | match_http_method('POST') - binary = True - - def call(self): - self.w(self._cw.content.read()) diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/test/requirements.txt --- a/etwist/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Twisted diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/test/unittest_server.py --- a/etwist/test/unittest_server.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -import os, os.path as osp, glob -import urllib - -from cubicweb.devtools.httptest import CubicWebServerTC - - -class ETwistHTTPTC(CubicWebServerTC): - def test_put_content(self): - data = {'hip': 'hop'} - headers = {'Content-Type': 'application/x-www-form-urlencoded'} - body = urllib.urlencode(data) - response = self.web_request('?vid=put', method='PUT', body=body) - self.assertEqual(body, response.body) - response = self.web_request('?vid=put', method='POST', body=body, - headers=headers) - self.assertEqual(body, response.body) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/twconfig.py --- a/etwist/twconfig.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,115 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""twisted server configurations: - -* the "all-in-one" configuration to get a web instance running in a twisted - web server integrating a repository server in the same process (only available - if the repository part of the software is installed -""" -__docformat__ = "restructuredtext en" - -from os.path import join - -from logilab.common.configuration import Method, merge_options - -from cubicweb.cwconfig import CONFIGURATIONS -from cubicweb.web.webconfig import WebConfiguration - - -class WebConfigurationBase(WebConfiguration): - """web instance (in a twisted web server) client of a RQL server""" - - options = merge_options(( - # ctl configuration - ('port', - {'type' : 'int', - 'default': None, - 'help': 'http server port number (default to 8080)', - 'group': 'web', 'level': 0, - }), - ('interface', - {'type' : 'string', - 'default': "", - 'help': 'http server address on which to listen (default to everywhere)', - 'group': 'web', 'level': 1, - }), - ('max-post-length', - {'type' : 'bytes', - 'default': '100MB', - 'help': 'maximum length of HTTP request. Default to 100 MB.', - 'group': 'web', 'level': 1, - }), - ('profile', - {'type' : 'string', - 'default': None, - 'help': 'profile code and use the specified file to store stats if this option is set', - 'group': 'web', 'level': 3, - }), - ('host', - {'type' : 'string', - 'default': None, - 'help': 'host name if not correctly detectable through gethostname', - 'group': 'main', 'level': 1, - }), - ('pid-file', - {'type' : 'string', - 'default': Method('default_pid_file'), - 'help': 'repository\'s pid file', - 'group': 'main', 'level': 2, - }), - ('uid', - {'type' : 'string', - 'default': None, - 'help': 'if this option is set, use the specified user to start \ -the repository rather than the user running the command', - 'group': 'main', 'level': WebConfiguration.mode == 'system' - }), - ('webserver-threadpool-size', - {'type': 'int', - 'default': 4, - 'help': "size of twisted's reactor threadpool. It should probably be not too \ -much greater than connection-poolsize", - 'group': 'web', 'level': 3, - }), - ) + WebConfiguration.options) - - def server_file(self): - return join(self.apphome, '%s-%s.py' % (self.appid, self.name)) - - def default_base_url(self): - from socket import getfqdn - return 'http://%s:%s/' % (self['host'] or getfqdn().lower(), self['port'] or 8080) - - -try: - from cubicweb.server.serverconfig import ServerConfiguration - - class AllInOneConfiguration(WebConfigurationBase, ServerConfiguration): - """repository and web instance in the same twisted process""" - name = 'all-in-one' - options = merge_options(WebConfigurationBase.options - + ServerConfiguration.options) - - cubicweb_appobject_path = WebConfigurationBase.cubicweb_appobject_path | ServerConfiguration.cubicweb_appobject_path - cube_appobject_path = WebConfigurationBase.cube_appobject_path | ServerConfiguration.cube_appobject_path - - - CONFIGURATIONS.append(AllInOneConfiguration) - -except ImportError: - pass diff -r 058bb3dc685f -r 0b59724cb3f2 etwist/twctl.py --- a/etwist/twctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,79 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb-clt handlers for twisted""" - -from cubicweb.toolsutils import CommandHandler -from cubicweb.web.webctl import WebCreateHandler, WebUpgradeHandler - -# trigger configuration registration -import cubicweb.etwist.twconfig # pylint: disable=W0611 - -class TWCreateHandler(WebCreateHandler): - cfgname = 'twisted' - -class TWStartHandler(CommandHandler): - cmdname = 'start' - cfgname = 'twisted' - - def start_server(self, config): - from cubicweb.etwist import server - return server.run(config) - -class TWStopHandler(CommandHandler): - cmdname = 'stop' - cfgname = 'twisted' - - def poststop(self): - pass - -class TWUpgradeHandler(WebUpgradeHandler): - cfgname = 'twisted' - - -try: - from cubicweb.server import serverctl - class AllInOneCreateHandler(serverctl.RepositoryCreateHandler, - TWCreateHandler): - """configuration to get an instance running in a twisted web server - integrating a repository server in the same process - """ - cfgname = 'all-in-one' - - def bootstrap(self, cubes, automatic=False, inputlevel=0): - """bootstrap this configuration""" - serverctl.RepositoryCreateHandler.bootstrap(self, cubes, automatic, inputlevel) - TWCreateHandler.bootstrap(self, cubes, automatic, inputlevel) - - class AllInOneStartHandler(TWStartHandler): - cmdname = 'start' - cfgname = 'all-in-one' - subcommand = 'cubicweb-twisted' - - class AllInOneStopHandler(CommandHandler): - cmdname = 'stop' - cfgname = 'all-in-one' - subcommand = 'cubicweb-twisted' - - def poststop(self): - pass - - class AllInOneUpgradeHandler(TWUpgradeHandler): - cfgname = 'all-in-one' - -except ImportError: - pass diff -r 058bb3dc685f -r 0b59724cb3f2 ext/__init__.py --- a/ext/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 ext/html4zope.py --- a/ext/html4zope.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,174 +0,0 @@ -# Author: David Goodger -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -# Contact: goodger@users.sourceforge.net -# Revision: $Revision: 1.2 $ -# Date: $Date: 2005-07-04 16:36:50 $ -# Copyright: This module has been placed in the public domain. - -""" -Simple HyperText Markup Language document tree Writer. - -The output conforms to the HTML 4.01 Transitional DTD and to the Extensible -HTML version 1.0 Transitional DTD (*almost* strict). The output contains a -minimum of formatting information. A cascading style sheet ("default.css" by -default) is required for proper viewing with a modern graphical browser. - -http://cvs.zope.org/Zope/lib/python/docutils/writers/Attic/html4zope.py?rev=1.1.2.2&only_with_tag=ajung-restructuredtext-integration-branch&content-type=text/vnd.viewcvs-markup -""" - -__docformat__ = 'reStructuredText' - -import os - -from logilab.mtconverter import xml_escape - -from docutils import nodes -from docutils.writers.html4css1 import Writer as CSS1Writer -from docutils.writers.html4css1 import HTMLTranslator as CSS1HTMLTranslator - -default_level = int(os.environ.get('STX_DEFAULT_LEVEL', 3)) - -class Writer(CSS1Writer): - """css writer using our html translator""" - def __init__(self, base_url): - CSS1Writer.__init__(self) - self.translator_class = URLBinder(base_url, HTMLTranslator) - - def apply_template(self): - """overriding this is necessary with docutils >= 0.5""" - return self.visitor.astext() - -class URLBinder: - def __init__(self, url, klass): - self.base_url = url - self.translator_class = HTMLTranslator - - def __call__(self, document): - translator = self.translator_class(document) - translator.base_url = self.base_url - return translator - -class HTMLTranslator(CSS1HTMLTranslator): - """ReST tree to html translator""" - - def astext(self): - """return the extracted html""" - return ''.join(self.body) - - def visit_title(self, node): - """Only 6 section levels are supported by HTML.""" - if isinstance(node.parent, nodes.topic): - self.body.append( - self.starttag(node, 'p', '', CLASS='topic-title')) - if node.parent.hasattr('id'): - self.body.append( - self.starttag({}, 'a', '', name=node.parent['id'])) - self.context.append('

</a></p>\n') - else: - self.context.append('</p>
\n') - elif self.section_level == 0: - # document title - self.head.append('<title>%s</title>\n' - % self.encode(node.astext())) - self.body.append(self.starttag(node, 'h%d' % default_level, '', - CLASS='title')) - self.context.append('</h%d>\n' % default_level) - else: - self.body.append( - self.starttag(node, 'h%s' % ( - default_level+self.section_level-1), '')) - atts = {} - if node.hasattr('refid'): - atts['class'] = 'toc-backref' - atts['href'] = '%s#%s' % (self.base_url, node['refid']) - self.body.append(self.starttag({}, 'a', '', **atts)) - self.context.append('</a></h%s>\n' % ( - default_level+self.section_level-1)) - - def visit_subtitle(self, node): - """format a subtitle""" - if isinstance(node.parent, nodes.sidebar): - self.body.append(self.starttag(node, 'p', '', - CLASS='sidebar-subtitle')) - self.context.append('</p>
\n') - else: - self.body.append( - self.starttag(node, 'h%s' % (default_level+1), '', - CLASS='subtitle')) - self.context.append('</h%s>\n' % (default_level+1)) - - def visit_document(self, node): - """syt: i don't want the enclosing <div class="document">
""" - def depart_document(self, node): - """syt: i don't want the enclosing <div class="document">
      """ - - def visit_reference(self, node): - """syt: i want absolute urls""" - if 'refuri' in node: - href = node['refuri'] - if ( self.settings.cloak_email_addresses - and href.startswith('mailto:')): - href = self.cloak_mailto(href) - self.in_mailto = 1 - else: - assert 'refid' in node, \ - 'References must have "refuri" or "refid" attribute.' - href = '%s#%s' % (self.base_url, node['refid']) - atts = {'href': href, 'class': 'reference'} - if not isinstance(node.parent, nodes.TextElement): - assert len(node) == 1 and isinstance(node[0], nodes.image) - atts['class'] += ' image-reference' - self.body.append(self.starttag(node, 'a', '', **atts)) - - ## override error messages to avoid XHTML problems ######################## - def visit_problematic(self, node): - pass - - def depart_problematic(self, node): - pass - - def visit_system_message(self, node): - backref_text = '' - if len(node['backrefs']): - backrefs = node['backrefs'] - if len(backrefs) == 1: - backref_text = '; backlink' - else: - i = 1 - backlinks = [] - for backref in backrefs: - backlinks.append(str(i)) - i += 1 - backref_text = ('; backlinks: %s' - % ', '.join(backlinks)) - if node.hasattr('line'): - line = ', line %s' % node['line'] - else: - line = '' - a_start = a_end = '' - error = u'System Message: %s%s/%s%s (%s %s)%s

      \n' % ( - a_start, node['type'], node['level'], a_end, - self.encode(node['source']), line, backref_text) - self.body.append(u'
      ReST / HTML errors:%s
      ' % xml_escape(error)) - - def depart_system_message(self, node): - pass diff -r 058bb3dc685f -r 0b59724cb3f2 ext/markdown.py --- a/ext/markdown.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,27 +0,0 @@ -from __future__ import absolute_import -import markdown - -import logging - -log = logging.getLogger(__name__) - - -def markdown_publish(context, data): - """publish a string formatted as MarkDown Text to HTML - - :type context: a cubicweb application object - - :type data: str - :param data: some MarkDown text - - :rtype: unicode - :return: - the data formatted as HTML or the original data if an error occurred - """ - md = markdown.Markdown() - try: - return md.convert(data) - except: - import traceback; traceback.print_exc() - log.exception("Error while converting Markdown to HTML") - return data diff -r 058bb3dc685f -r 0b59724cb3f2 ext/rest.py --- a/ext/rest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,469 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""rest publishing functions - -contains some functions and setup of docutils for cubicweb. Provides the -following ReST directives: - -* `eid`, create link to entity in the repository by their eid - -* `card`, create link to card entity in the repository by their wikiid - (proposing to create it when the refered card doesn't exist yet) - -* `winclude`, reference to a web documentation file (in wdoc/ directories) - -* `sourcecode` (if pygments is installed), source code colorization - -* `rql-table`, create a table from a RQL query - -""" -__docformat__ = "restructuredtext en" - -import sys -from itertools import chain -from logging import getLogger -from os.path import join - -from six import text_type -from six.moves.urllib.parse import urlsplit - -from docutils import statemachine, nodes, utils, io -from docutils.core import Publisher -from docutils.parsers.rst import Parser, states, directives, Directive -from docutils.parsers.rst.roles import register_canonical_role, set_classes - -from logilab.mtconverter import ESC_UCAR_TABLE, ESC_CAR_TABLE, xml_escape - -from cubicweb import UnknownEid -from cubicweb.ext.html4zope import Writer - -from cubicweb.web.views import vid_from_rset # XXX better not to import c.w.views here... 
- -# We provide our own parser as an attempt to get rid of -# state machine reinstanciation - -import re -# compile states.Body patterns -for k, v in states.Body.patterns.items(): - if isinstance(v, str): - states.Body.patterns[k] = re.compile(v) - -# register ReStructured Text mimetype / extensions -import mimetypes -mimetypes.add_type('text/rest', '.rest') -mimetypes.add_type('text/rest', '.rst') - - -LOGGER = getLogger('cubicweb.rest') - - -def eid_reference_role(role, rawtext, text, lineno, inliner, - options={}, content=[]): - try: - try: - eid_num, rest = text.split(u':', 1) - except ValueError: - eid_num, rest = text, '#'+text - eid_num = int(eid_num) - if eid_num < 0: - raise ValueError - except ValueError: - msg = inliner.reporter.error( - 'EID number must be a positive number; "%s" is invalid.' - % text, line=lineno) - prb = inliner.problematic(rawtext, rawtext, msg) - return [prb], [msg] - # Base URL mainly used by inliner.pep_reference; so this is correct: - context = inliner.document.settings.context - try: - refedentity = context._cw.entity_from_eid(eid_num) - except UnknownEid: - ref = '#' - rest += u' ' + context._cw._('(UNEXISTANT EID)') - else: - ref = refedentity.absolute_url() - set_classes(options) - return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref, - **options)], [] - - -def rql_role(role, rawtext, text, lineno, inliner, options={}, content=[]): - """``:rql:```` or ``:rql:`:``` - - Example: ``:rql:`Any X,Y WHERE X is CWUser, X login Y:table``` - - Replace the directive with the output of applying the view to the resultset - returned by the query. - - "X eid %(userid)s" can be used in the RQL query for this query will be - executed with the argument {'userid': _cw.user.eid}. - """ - _cw = inliner.document.settings.context._cw - text = text.strip() - if ':' in text: - rql, vid = text.rsplit(u':', 1) - rql = rql.strip() - else: - rql, vid = text, None - _cw.ensure_ro_rql(rql) - try: - rset = _cw.execute(rql, {'userid': _cw.user.eid}) - if rset: - if vid is None: - vid = vid_from_rset(_cw, rset, _cw.vreg.schema) - else: - vid = 'noresult' - view = _cw.vreg['views'].select(vid, _cw, rset=rset) - content = view.render() - except Exception as exc: - content = 'an error occurred while interpreting this rql directive: %r' % exc - set_classes(options) - return [nodes.raw('', content, format='html')], [] - - -def bookmark_role(role, rawtext, text, lineno, inliner, options={}, content=[]): - """``:bookmark:```` or ``:bookmark:`:``` - - Example: ``:bookmark:`1234:table``` - - Replace the directive with the output of applying the view to the resultset - returned by the query stored in the bookmark. By default, the view is the one - stored in the bookmark, but it can be overridden by the directive as in the - example above. - - "X eid %(userid)s" can be used in the RQL query stored in the Bookmark, for - this query will be executed with the argument {'userid': _cw.user.eid}. - """ - _cw = inliner.document.settings.context._cw - text = text.strip() - try: - if ':' in text: - eid, vid = text.rsplit(u':', 1) - eid = int(eid) - else: - eid, vid = int(text), None - except ValueError: - msg = inliner.reporter.error( - 'EID number must be a positive number; "%s" is invalid.' - % text, line=lineno) - prb = inliner.problematic(rawtext, rawtext, msg) - return [prb], [msg] - try: - bookmark = _cw.entity_from_eid(eid) - except UnknownEid: - msg = inliner.reporter.error('Unknown EID %s.' 
% text, line=lineno) - prb = inliner.problematic(rawtext, rawtext, msg) - return [prb], [msg] - try: - params = dict(_cw.url_parse_qsl(urlsplit(bookmark.path).query)) - rql = params['rql'] - if vid is None: - vid = params.get('vid') - except (ValueError, KeyError) as exc: - msg = inliner.reporter.error('Could not parse bookmark path %s [%s].' - % (bookmark.path, exc), line=lineno) - prb = inliner.problematic(rawtext, rawtext, msg) - return [prb], [msg] - try: - rset = _cw.execute(rql, {'userid': _cw.user.eid}) - if rset: - if vid is None: - vid = vid_from_rset(_cw, rset, _cw.vreg.schema) - else: - vid = 'noresult' - view = _cw.vreg['views'].select(vid, _cw, rset=rset) - content = view.render() - except Exception as exc: - content = 'An error occurred while interpreting directive bookmark: %r' % exc - set_classes(options) - return [nodes.raw('', content, format='html')], [] - - -def winclude_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - """Include a reST file as part of the content of this reST file. - - same as standard include directive but using config.locate_doc_resource to - get actual file to include. - - Most part of this implementation is copied from `include` directive defined - in `docutils.parsers.rst.directives.misc` - """ - context = state.document.settings.context - cw = context._cw - source = state_machine.input_lines.source( - lineno - state_machine.input_offset - 1) - #source_dir = os.path.dirname(os.path.abspath(source)) - fid = arguments[0] - for lang in chain((cw.lang, cw.vreg.property_value('ui.language')), - cw.vreg.config.available_languages()): - rid = '%s_%s.rst' % (fid, lang) - resourcedir = cw.vreg.config.locate_doc_file(rid) - if resourcedir: - break - else: - severe = state_machine.reporter.severe( - 'Problems with "%s" directive path:\nno resource matching %s.' - % (name, fid), - nodes.literal_block(block_text, block_text), line=lineno) - return [severe] - path = join(resourcedir, rid) - encoding = options.get('encoding', state.document.settings.input_encoding) - try: - state.document.settings.record_dependencies.add(path) - include_file = io.FileInput( - source_path=path, encoding=encoding, - error_handler=state.document.settings.input_encoding_error_handler, - handle_io_errors=None) - except IOError as error: - severe = state_machine.reporter.severe( - 'Problems with "%s" directive path:\n%s: %s.' - % (name, error.__class__.__name__, error), - nodes.literal_block(block_text, block_text), line=lineno) - return [severe] - try: - include_text = include_file.read() - except UnicodeError as error: - severe = state_machine.reporter.severe( - 'Problem with "%s" directive:\n%s: %s' - % (name, error.__class__.__name__, error), - nodes.literal_block(block_text, block_text), line=lineno) - return [severe] - if 'literal' in options: - literal_block = nodes.literal_block(include_text, include_text, - source=path) - literal_block.line = 1 - return literal_block - else: - include_lines = statemachine.string2lines(include_text, - convert_whitespace=1) - state_machine.insert_input(include_lines, path) - return [] - -winclude_directive.arguments = (1, 0, 1) -winclude_directive.options = {'literal': directives.flag, - 'encoding': directives.encoding} - - -class RQLTableDirective(Directive): - """rql-table directive - - Example: - - .. rql-table:: - :vid: mytable - :headers: , , progress - :colvids: 2=progress - - Any X,U,X WHERE X is Project, X url U - - All fields but the RQL string are optionnal. 
The ``:headers:`` option can - contain empty column names. - """ - - required_arguments = 0 - optional_arguments = 0 - has_content= True - final_argument_whitespace = True - option_spec = {'vid': directives.unchanged, - 'headers': directives.unchanged, - 'colvids': directives.unchanged} - - def run(self): - errid = "rql-table directive" - self.assert_has_content() - if self.arguments: - raise self.warning('%s does not accept arguments' % errid) - rql = ' '.join([l.strip() for l in self.content]) - _cw = self.state.document.settings.context._cw - _cw.ensure_ro_rql(rql) - try: - rset = _cw.execute(rql) - except Exception as exc: - raise self.error("fail to execute RQL query in %s: %r" % - (errid, exc)) - if not rset: - raise self.warning("empty result set") - vid = self.options.get('vid', 'table') - try: - view = _cw.vreg['views'].select(vid, _cw, rset=rset) - except Exception as exc: - raise self.error("fail to select '%s' view in %s: %r" % - (vid, errid, exc)) - headers = None - if 'headers' in self.options: - headers = [h.strip() for h in self.options['headers'].split(',')] - while headers.count(''): - headers[headers.index('')] = None - if len(headers) != len(rset[0]): - raise self.error("the number of 'headers' does not match the " - "number of columns in %s" % errid) - cellvids = None - if 'colvids' in self.options: - cellvids = {} - for f in self.options['colvids'].split(','): - try: - idx, vid = f.strip().split('=') - except ValueError: - raise self.error("malformatted 'colvids' option in %s" % - errid) - cellvids[int(idx.strip())] = vid.strip() - try: - content = view.render(headers=headers, cellvids=cellvids) - except Exception as exc: - raise self.error("Error rendering %s (%s)" % (errid, exc)) - return [nodes.raw('', content, format='html')] - - -try: - from pygments import highlight - from pygments.lexers import get_lexer_by_name - from pygments.formatters.html import HtmlFormatter -except ImportError: - pygments_directive = None -else: - _PYGMENTS_FORMATTER = HtmlFormatter() - - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - lexer = get_lexer_by_name('text') - parsed = highlight(u'\n'.join(content), lexer, _PYGMENTS_FORMATTER) - # don't fail if no context set on the sourcecode directive - try: - context = state.document.settings.context - context._cw.add_css('pygments.css') - except AttributeError: - # used outside cubicweb XXX use hasattr instead - pass - return [nodes.raw('', parsed, format='html')] - - pygments_directive.arguments = (1, 0, 1) - pygments_directive.content = 1 - - -class CubicWebReSTParser(Parser): - """The (customized) reStructuredText parser.""" - - def __init__(self): - self.initial_state = 'Body' - self.state_classes = states.state_classes - self.inliner = states.Inliner() - self.statemachine = states.RSTStateMachine( - state_classes=self.state_classes, - initial_state=self.initial_state, - debug=0) - - def parse(self, inputstring, document): - """Parse `inputstring` and populate `document`, a document tree.""" - self.setup_parse(inputstring, document) - inputlines = statemachine.string2lines(inputstring, - convert_whitespace=1) - self.statemachine.run(inputlines, document, inliner=self.inliner) - self.finish_parse() - - -# XXX docutils keep a ref on context, can't find a correct way to remove it -class CWReSTPublisher(Publisher): - def __init__(self, context, settings, **kwargs): - 
Publisher.__init__(self, **kwargs) - self.set_components('standalone', 'restructuredtext', 'pseudoxml') - self.process_programmatic_settings(None, settings, None) - self.settings.context = context - - -def rest_publish(context, data): - """publish a string formatted as ReStructured Text to HTML - - :type context: a cubicweb application object - - :type data: str - :param data: some ReST text - - :rtype: unicode - :return: - the data formatted as HTML or the original data if an error occurred - """ - req = context._cw - if isinstance(data, text_type): - encoding = 'unicode' - # remove unprintable characters unauthorized in xml - data = data.translate(ESC_UCAR_TABLE) - else: - encoding = req.encoding - # remove unprintable characters unauthorized in xml - data = data.translate(ESC_CAR_TABLE) - settings = {'input_encoding': encoding, 'output_encoding': 'unicode', - 'warning_stream': False, - 'traceback': True, # don't sys.exit - 'stylesheet': None, # don't try to embed stylesheet (may cause - # obscure bug due to docutils computing - # relative path according to the directory - # used *at import time* - # dunno what's the max, severe is 4, and we never want a crash - # (though try/except may be a better option...). May be the - # above traceback option will avoid this? - 'halt_level': 10, - # disable stupid switch to colspan=2 if field name is above a size limit - 'field_name_limit': sys.maxsize, - } - if context: - if hasattr(req, 'url'): - base_url = req.url() - elif hasattr(context, 'absolute_url'): - base_url = context.absolute_url() - else: - base_url = req.base_url() - else: - base_url = None - try: - pub = CWReSTPublisher(context, settings, - parser=CubicWebReSTParser(), - writer=Writer(base_url=base_url), - source_class=io.StringInput, - destination_class=io.StringOutput) - pub.set_source(data) - pub.set_destination() - res = pub.publish(enable_exit_status=None) - # necessary for proper garbage collection, else a ref is kept somewhere in docutils... - del pub.settings.context - return res - except BaseException: - LOGGER.exception('error while publishing ReST text') - if not isinstance(data, text_type): - data = text_type(data, encoding, 'replace') - return xml_escape(req._('error while publishing ReST text') - + '\n\n' + data) - - -_INITIALIZED = False -def cw_rest_init(): - global _INITIALIZED - if _INITIALIZED: - return - _INITIALIZED = True - register_canonical_role('eid', eid_reference_role) - register_canonical_role('rql', rql_role) - register_canonical_role('bookmark', bookmark_role) - directives.register_directive('winclude', winclude_directive) - if pygments_directive is not None: - directives.register_directive('sourcecode', pygments_directive) - directives.register_directive('rql-table', RQLTableDirective) diff -r 058bb3dc685f -r 0b59724cb3f2 ext/tal.py --- a/ext/tal.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,273 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""provides simpleTAL extensions for CubicWeb - -""" - -__docformat__ = "restructuredtext en" - -import sys -import re -from os.path import exists, isdir, join -from logging import getLogger -from StringIO import StringIO - -from simpletal import simpleTAL, simpleTALES - -from logilab.common.decorators import cached - -LOGGER = getLogger('cubicweb.tal') - - -class LoggerAdapter(object): - def __init__(self, tal_logger): - self.tal_logger = tal_logger - - def debug(self, msg): - LOGGER.debug(msg) - - def warn(self, msg): - LOGGER.warning(msg) - - def __getattr__(self, attrname): - return getattr(self.tal_logger, attrname) - - -class CubicWebContext(simpleTALES.Context): - """add facilities to access entity / resultset""" - - def __init__(self, options=None, allowPythonPath=1): - simpleTALES.Context.__init__(self, options, allowPythonPath) - self.log = LoggerAdapter(self.log) - - def update(self, context): - for varname, value in context.items(): - self.addGlobal(varname, value) - - def addRepeat(self, name, var, initialValue): - simpleTALES.Context.addRepeat(self, name, var, initialValue) - -# XXX FIXME need to find a clean to define OPCODE values for extensions -I18N_CONTENT = 18 -I18N_REPLACE = 19 -RQL_EXECUTE = 20 -# simpleTAL uses the OPCODE values to define priority over commands. -# TAL_ITER should have the same priority than TAL_REPEAT (i.e. 3), but -# we can't use the same OPCODE for two different commands without changing -# the simpleTAL implementation. Another solution would be to totally override -# the REPEAT implementation with the ITER one, but some specific operations -# (involving len() for instance) are not implemented for ITER, so we prefer -# to keep both implementations for now, and to fool simpleTAL by using a float -# number between 3 and 4 -TAL_ITER = 3.1 - - -# FIX simpleTAL HTML 4.01 stupidity -# (simpleTAL never closes tags like INPUT, IMG, HR ...) -simpleTAL.HTML_FORBIDDEN_ENDTAG.clear() - -class CubicWebTemplateCompiler(simpleTAL.HTMLTemplateCompiler): - """extends default compiler by adding i18n:content commands""" - - def __init__(self): - simpleTAL.HTMLTemplateCompiler.__init__(self) - self.commandHandler[I18N_CONTENT] = self.compile_cmd_i18n_content - self.commandHandler[I18N_REPLACE] = self.compile_cmd_i18n_replace - self.commandHandler[RQL_EXECUTE] = self.compile_cmd_rql - self.commandHandler[TAL_ITER] = self.compile_cmd_tal_iter - - def setTALPrefix(self, prefix): - simpleTAL.TemplateCompiler.setTALPrefix(self, prefix) - self.tal_attribute_map['i18n:content'] = I18N_CONTENT - self.tal_attribute_map['i18n:replace'] = I18N_REPLACE - self.tal_attribute_map['rql:execute'] = RQL_EXECUTE - self.tal_attribute_map['tal:iter'] = TAL_ITER - - def compile_cmd_i18n_content(self, argument): - # XXX tal:content structure=, text= should we support this ? - structure_flag = 0 - return (I18N_CONTENT, (argument, False, structure_flag, self.endTagSymbol)) - - def compile_cmd_i18n_replace(self, argument): - # XXX tal:content structure=, text= should we support this ? 
- structure_flag = 0 - return (I18N_CONTENT, (argument, True, structure_flag, self.endTagSymbol)) - - def compile_cmd_rql(self, argument): - return (RQL_EXECUTE, (argument, self.endTagSymbol)) - - def compile_cmd_tal_iter(self, argument): - original_id, (var_name, expression, end_tag_symbol) = \ - simpleTAL.HTMLTemplateCompiler.compileCmdRepeat(self, argument) - return (TAL_ITER, (var_name, expression, self.endTagSymbol)) - - def getTemplate(self): - return CubicWebTemplate(self.commandList, self.macroMap, self.symbolLocationTable) - - def compileCmdAttributes (self, argument): - """XXX modified to support single attribute - definition ending by a ';' - - backport this to simpleTAL - """ - # Compile tal:attributes into attribute command - # Argument: [(attributeName, expression)] - - # Break up the list of attribute settings first - commandArgs = [] - # We only want to match semi-colons that are not escaped - argumentSplitter = re.compile(r'(?. - - -from cubicweb.web.views import tableview - -class CustomRsetTableView(tableview.RsetTableView): - __regid__ = 'mytable' diff -r 058bb3dc685f -r 0b59724cb3f2 ext/test/requirements.txt --- a/ext/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -docutils diff -r 058bb3dc685f -r 0b59724cb3f2 ext/test/unittest_rest.py --- a/ext/test/unittest_rest.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,244 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from six import PY3 - -from logilab.common.testlib import unittest_main -from cubicweb.devtools.testlib import CubicWebTC - -from cubicweb.ext.rest import rest_publish - -class RestTC(CubicWebTC): - - def context(self, req): - return req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) - - def test_eid_role(self): - with self.admin_access.web_request() as req: - context = self.context(req) - self.assertEqual(rest_publish(context, ':eid:`%s`' % context.eid), - '

      ' - '#%s

      \n' % context.eid) - self.assertEqual(rest_publish(context, ':eid:`%s:some text`' % context.eid), - '

      ' - 'some text

      \n') - - def test_bad_rest_no_crash(self): - with self.admin_access.web_request() as req: - rest_publish(self.context(req), ''' -| card | implication | --------------------------- -| 1-1 | N1 = N2 | -| 1-? | N1 <= N2 | -| 1-+ | N1 >= N2 | -| 1-* | N1>0 => N2>0 | --------------------------- -| ?-? | N1 # N2 | -| ?-+ | N1 >= N2 | -| ?-* | N1 # N2 | --------------------------- -| +-+ | N1>0 => N2>0 et | -| | N2>0 => N1>0 | -| +-* | N1>+ => N2>0 | --------------------------- -| *-* | N1#N2 | --------------------------- - -''') - - def test_disable_field_name_colspan(self): - with self.admin_access.web_request() as req: - context = self.context(req) - value = rest_publish(context, '''my field list: - -:a long dumb param name: value -''') - self.assertNotIn('colspan', value) - - def test_rql_role_with_vid(self): - with self.admin_access.web_request() as req: - context = self.context(req) - out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:table`') - self.assertTrue(out.endswith('anon\n' - '

      \n')) - - def test_rql_role_with_vid_empty_rset(self): - with self.admin_access.web_request() as req: - context = self.context(req) - out = rest_publish(context, ':rql:`Any X WHERE X is CWUser, X login "nono":table`') - self.assertTrue(out.endswith('

      ' - 'No result matching query
      \n

      \n')) - - def test_rql_role_with_unknown_vid(self): - with self.admin_access.web_request() as req: - context = self.context(req) - out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`') - self.assertTrue(out.startswith("

      an error occurred while interpreting this " - "rql directive: ObjectNotFound(%s'toto',)

      " % - ('' if PY3 else 'u')), - out) - - def test_rql_role_without_vid(self): - with self.admin_access.web_request() as req: - context = self.context(req) - out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`') - self.assertEqual(out, u'

      CWUser_plural

      ' - 'admin' - '
      ' - 'anon' - '

      \n') - - def test_bookmark_role(self): - with self.admin_access.web_request() as req: - context = self.context(req) - rset = req.execute('INSERT Bookmark X: X title "hello", X path ' - '"/view?rql=Any X WHERE X is CWUser"') - eid = rset[0][0] - out = rest_publish(context, ':bookmark:`%s`' % eid) - self.assertEqual(out, u'

      CWUser_plural

      \n') - - def test_rqltable_nocontent(self): - with self.admin_access.web_request() as req: - context = self.context(req) - out = rest_publish(context, """.. rql-table::""") - self.assertIn("System Message: ERROR", out) - self.assertIn("Content block expected for the "rql-table" " - "directive; none found" , out) - - def test_rqltable_norset(self): - with self.admin_access.web_request() as req: - context = self.context(req) - rql = "Any X WHERE X is CWUser, X firstname 'franky'" - out = rest_publish( - context, """\ -.. rql-table:: - - %(rql)s""" % {'rql': rql}) - self.assertIn("System Message: WARNING", out) - self.assertIn("empty result set", out) - - def test_rqltable_nooptions(self): - with self.admin_access.web_request() as req: - rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - out = rest_publish( - self.context(req), """\ -.. rql-table:: - - %(rql)s - """ % {'rql': rql}) - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - self.assertEqual(view.render(w=None)[49:], out[49:]) - - def test_rqltable_vid(self): - with self.admin_access.web_request() as req: - rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - vid = 'mytable' - out = rest_publish( - self.context(req), """\ -.. rql-table:: - :vid: %(vid)s - - %(rql)s - """ % {'rql': rql, 'vid': vid}) - view = self.vreg['views'].select(vid, req, rset=req.execute(rql)) - self.assertEqual(view.render(w=None)[49:], out[49:]) - self.assertIn(vid, out[:49]) - - def test_rqltable_badvid(self): - with self.admin_access.web_request() as req: - rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - vid = 'mytabel' - out = rest_publish( - self.context(req), """\ -.. rql-table:: - :vid: %(vid)s - - %(rql)s - """ % {'rql': rql, 'vid': vid}) - self.assertIn("fail to select '%s' view" % vid, out) - - def test_rqltable_headers(self): - with self.admin_access.web_request() as req: - rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - headers = ["nom", "prenom", "identifiant"] - out = rest_publish( - self.context(req), """\ -.. rql-table:: - :headers: %(headers)s - - %(rql)s - """ % {'rql': rql, 'headers': ', '.join(headers)}) - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.headers = headers - self.assertEqual(view.render(w=None)[49:], out[49:]) - - def test_rqltable_headers_missing(self): - with self.admin_access.web_request() as req: - rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - headers = ["nom", "", "identifiant"] - out = rest_publish( - self.context(req), """\ -.. rql-table:: - :headers: %(headers)s - - %(rql)s - """ % {'rql': rql, 'headers': ', '.join(headers)}) - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.headers = [headers[0], None, headers[2]] - self.assertEqual(view.render(w=None)[49:], out[49:]) - - def test_rqltable_headers_missing_edges(self): - with self.admin_access.web_request() as req: - rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - headers = [" ", "prenom", ""] - out = rest_publish( - self.context(req), """\ -.. 
rql-table:: - :headers: %(headers)s - - %(rql)s - """ % {'rql': rql, 'headers': ', '.join(headers)}) - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.headers = [None, headers[1], None] - self.assertEqual(view.render(w=None)[49:], out[49:]) - - def test_rqltable_colvids(self): - with self.admin_access.web_request() as req: - rql = "Any X,S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" - colvids = {0: "oneline"} - out = rest_publish( - self.context(req), """\ -.. rql-table:: - :colvids: %(colvids)s - - %(rql)s - """ % {'rql': rql, - 'colvids': ', '.join(["%d=%s" % (k, v) - for k, v in colvids.items()]) - }) - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.cellvids = colvids - self.assertEqual(view.render(w=None)[49:], out[49:]) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/__init__.py --- a/hooks/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,84 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
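For reference, the ``rql-table`` directive documented and tested above is normally exercised through ``rest_publish()``. A minimal sketch of such a call, assuming the same ``CubicWebTC`` test context as ``unittest_rest.py`` uses (the class name, RQL query and header below are illustrative placeholders, not taken from this patch)::

    from cubicweb.devtools.testlib import CubicWebTC
    from cubicweb.ext.rest import rest_publish

    class RqlTableSketchTC(CubicWebTC):
        def test_rqltable_sketch(self):
            with self.admin_access.web_request() as req:
                # same "context" entity as in the tests above
                context = req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
                html = rest_publish(context, '\n'.join([
                    '.. rql-table::',
                    '   :vid: table',
                    '   :headers: login',
                    '',
                    '   Any L WHERE X is CWUser, X login L',
                ]))
                # the directive is replaced by the rendered table view
                self.assertIn('table', html)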
-"""core hooks registering some maintainance tasks as server startup time""" - -__docformat__ = "restructuredtext en" - -from datetime import timedelta, datetime - -from cubicweb.server import hook - -class TransactionsCleanupStartupHook(hook.Hook): - """start task to cleanup transaction data""" - __regid__ = 'cw.looping-tasks.transactions-cleanup' - events = ('server_startup',) - - def __call__(self): - # XXX use named args and inner functions to avoid referencing globals - # which may cause reloading pb - lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime']) - def cleanup_old_transactions(repo=self.repo, lifetime=lifetime): - mindate = datetime.utcnow() - lifetime - with repo.internal_cnx() as cnx: - cnx.system_sql( - 'DELETE FROM transactions WHERE tx_time < %(time)s', - {'time': mindate}) - cnx.commit() - if self.repo.config['undo-enabled']: - self.repo.looping_task(60*60*24, cleanup_old_transactions, - self.repo) - -class UpdateFeedsStartupHook(hook.Hook): - """start task to update datafeed based sources""" - __regid__ = 'cw.looping-tasks.update-feeds' - events = ('server_startup',) - - def __call__(self): - def update_feeds(repo): - # take a list to avoid iterating on a dictionary whose size may - # change - for uri, source in list(repo.sources_by_uri.items()): - if (uri == 'system' - or not repo.config.source_enabled(source) - or not source.config['synchronize']): - continue - with repo.internal_cnx() as cnx: - try: - source.pull_data(cnx) - except Exception as exc: - cnx.exception('while trying to update feed %s', source) - self.repo.looping_task(60, update_feeds, self.repo) - - -class DataImportsCleanupStartupHook(hook.Hook): - """start task to cleanup old data imports (ie datafeed import logs)""" - __regid__ = 'cw.looping-tasks.dataimports-cleanup' - events = ('server_startup',) - - def __call__(self): - def expire_dataimports(repo=self.repo): - for uri, source in repo.sources_by_uri.items(): - if (uri == 'system' - or not repo.config.source_enabled(source)): - continue - with repo.internal_cnx() as cnx: - mindate = datetime.utcnow() - timedelta(seconds=source.config['logs-lifetime']) - cnx.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s', - {'time': mindate}) - cnx.commit() - self.repo.looping_task(60*60*24, expire_dataimports, self.repo) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/bookmark.py --- a/hooks/bookmark.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,42 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
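The startup hooks above share one pattern: a ``Hook`` bound to the ``server_startup`` event that registers a periodic callback with ``repo.looping_task()``. A minimal sketch of that pattern, assuming a hypothetical cube-level hook (the regid, interval and cleanup query are illustrative only)::

    from cubicweb.server import hook

    class BookmarkCleanupStartupHook(hook.Hook):
        """illustrative startup hook: register a daily maintenance task"""
        __regid__ = 'myapp.looping-tasks.bookmark-cleanup'  # hypothetical identifier
        events = ('server_startup',)

        def __call__(self):
            def cleanup(repo=self.repo):
                # open an internal connection, do the work, commit
                with repo.internal_cnx() as cnx:
                    cnx.execute('DELETE Bookmark X WHERE NOT X bookmarked_by U')
                    cnx.commit()
            # run once a day, mirroring the hooks above
            self.repo.looping_task(60 * 60 * 24, cleanup, self.repo)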
-"""bookmark related hooks""" - -__docformat__ = "restructuredtext en" - -from cubicweb.server import hook - - -class AutoDeleteBookmarkOp(hook.Operation): - bookmark = None # make pylint happy - def precommit_event(self): - if not self.cnx.deleted_in_transaction(self.bookmark.eid): - if not self.bookmark.bookmarked_by: - self.bookmark.cw_delete() - - -class DelBookmarkedByHook(hook.Hook): - """ensure user logins are stripped""" - __regid__ = 'autodelbookmark' - __select__ = hook.Hook.__select__ & hook.match_rtype('bookmarked_by',) - category = 'bookmark' - events = ('after_delete_relation',) - - def __call__(self): - AutoDeleteBookmarkOp(self._cw, - bookmark=self._cw.entity_from_eid(self.eidfrom)) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/email.py --- a/hooks/email.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,80 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""hooks to ensure use_email / primary_email relations consistency""" - -__docformat__ = "restructuredtext en" - -from cubicweb.server import hook - - -class SetUseEmailRelationOp(hook.Operation): - """delay this operation to commit to avoid conflict with a late rql query - already setting the relation - """ - rtype = 'use_email' - entity = email = None # make pylint happy - - def condition(self): - """check entity has use_email set for the email address""" - return not any(e for e in self.entity.use_email - if self.email.eid == e.eid) - - def precommit_event(self): - if self.cnx.deleted_in_transaction(self.entity.eid): - return - if self.cnx.deleted_in_transaction(self.email.eid): - return - if self.condition(): - self.cnx.execute( - 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, - {'x': self.entity.eid, 'y': self.email.eid}) - - -class SetPrimaryEmailRelationOp(SetUseEmailRelationOp): - rtype = 'primary_email' - - def condition(self): - """check entity has no primary_email set""" - return not self.entity.primary_email - - -class SetPrimaryEmailHook(hook.Hook): - """notify when a bug or story or version has its state modified""" - __regid__ = 'setprimaryemail' - __select__ = hook.Hook.__select__ & hook.match_rtype('use_email') - category = 'email' - events = ('after_add_relation',) - - def __call__(self): - entity = self._cw.entity_from_eid(self.eidfrom) - if 'primary_email' in entity.e_schema.subject_relations(): - SetPrimaryEmailRelationOp(self._cw, entity=entity, - email=self._cw.entity_from_eid(self.eidto)) - -class SetUseEmailHook(hook.Hook): - """notify when a bug or story or version has its state modified""" - __regid__ = 'setprimaryemail' - __select__ = hook.Hook.__select__ & hook.match_rtype('primary_email') - category = 'email' - events = ('after_add_relation',) - - def __call__(self): - entity = 
self._cw.entity_from_eid(self.eidfrom) - if 'use_email' in entity.e_schema.subject_relations(): - SetUseEmailRelationOp(self._cw, entity=entity, - email=self._cw.entity_from_eid(self.eidto)) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/integrity.py --- a/hooks/integrity.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,347 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Core hooks: check for data integrity according to the instance'schema -validity -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from threading import Lock - -from six import text_type - -from cubicweb import validation_error, neg_role -from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES, - RQLConstraint, RQLUniqueConstraint) -from cubicweb.predicates import is_instance, composite_etype -from cubicweb.uilib import soup2xhtml -from cubicweb.server import hook - -# special relations that don't have to be checked for integrity, usually -# because they are handled internally by hooks (so we trust ourselves) -DONT_CHECK_RTYPES_ON_ADD = META_RTYPES | WORKFLOW_RTYPES -DONT_CHECK_RTYPES_ON_DEL = META_RTYPES | WORKFLOW_RTYPES - -_UNIQUE_CONSTRAINTS_LOCK = Lock() -_UNIQUE_CONSTRAINTS_HOLDER = None - - -def _acquire_unique_cstr_lock(cnx): - """acquire the _UNIQUE_CONSTRAINTS_LOCK for the cnx. 
- - This lock used to avoid potential integrity pb when checking - RQLUniqueConstraint in two different transactions, as explained in - https://extranet.logilab.fr/3577926 - """ - if 'uniquecstrholder' in cnx.transaction_data: - return - _UNIQUE_CONSTRAINTS_LOCK.acquire() - cnx.transaction_data['uniquecstrholder'] = True - # register operation responsible to release the lock on commit/rollback - _ReleaseUniqueConstraintsOperation(cnx) - -def _release_unique_cstr_lock(cnx): - if 'uniquecstrholder' in cnx.transaction_data: - del cnx.transaction_data['uniquecstrholder'] - _UNIQUE_CONSTRAINTS_LOCK.release() - -class _ReleaseUniqueConstraintsOperation(hook.Operation): - def postcommit_event(self): - _release_unique_cstr_lock(self.cnx) - def rollback_event(self): - _release_unique_cstr_lock(self.cnx) - - -class _CheckRequiredRelationOperation(hook.DataOperationMixIn, - hook.LateOperation): - """checking relation cardinality has to be done after commit in case the - relation is being replaced - """ - containercls = list - role = key = base_rql = None - - def precommit_event(self): - cnx = self.cnx - pendingeids = cnx.transaction_data.get('pendingeids', ()) - pendingrtypes = cnx.transaction_data.get('pendingrtypes', ()) - for eid, rtype in self.get_data(): - # recheck pending eids / relation types - if eid in pendingeids: - continue - if rtype in pendingrtypes: - continue - if not cnx.execute(self.base_rql % rtype, {'x': eid}): - etype = cnx.entity_metas(eid)['type'] - msg = _('at least one relation %(rtype)s is required on ' - '%(etype)s (%(eid)s)') - raise validation_error(eid, {(rtype, self.role): msg}, - {'rtype': rtype, 'etype': etype, 'eid': eid}, - ['rtype', 'etype']) - - -class _CheckSRelationOp(_CheckRequiredRelationOperation): - """check required subject relation""" - role = 'subject' - base_rql = 'Any O WHERE S eid %%(x)s, S %s O' - -class _CheckORelationOp(_CheckRequiredRelationOperation): - """check required object relation""" - role = 'object' - base_rql = 'Any S WHERE O eid %%(x)s, S %s O' - - -class IntegrityHook(hook.Hook): - __abstract__ = True - category = 'integrity' - - -class _EnsureSymmetricRelationsAdd(hook.Hook): - """ ensure X r Y => Y r X iff r is symmetric """ - __regid__ = 'cw.add_ensure_symmetry' - __abstract__ = True - category = 'activeintegrity' - events = ('after_add_relation',) - # __select__ is set in the registration callback - - def __call__(self): - self._cw.repo.system_source.add_relation(self._cw, self.eidto, - self.rtype, self.eidfrom) - - -class _EnsureSymmetricRelationsDelete(hook.Hook): - """ ensure X r Y => Y r X iff r is symmetric """ - __regid__ = 'cw.delete_ensure_symmetry' - __abstract__ = True - category = 'activeintegrity' - events = ('after_delete_relation',) - # __select__ is set in the registration callback - - def __call__(self): - self._cw.repo.system_source.delete_relation(self._cw, self.eidto, - self.rtype, self.eidfrom) - - -class CheckCardinalityHookBeforeDeleteRelation(IntegrityHook): - """check cardinalities are satisfied""" - __regid__ = 'checkcard_before_delete_relation' - events = ('before_delete_relation',) - - def __call__(self): - rtype = self.rtype - if rtype in DONT_CHECK_RTYPES_ON_DEL: - return - cnx = self._cw - eidfrom, eidto = self.eidfrom, self.eidto - rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto) - if (rdef.subject, rtype, rdef.object) in cnx.transaction_data.get('pendingrdefs', ()): - return - card = rdef.cardinality - if card[0] in '1+' and not cnx.deleted_in_transaction(eidfrom): - 
_CheckSRelationOp.get_instance(cnx).add_data((eidfrom, rtype)) - if card[1] in '1+' and not cnx.deleted_in_transaction(eidto): - _CheckORelationOp.get_instance(cnx).add_data((eidto, rtype)) - - -class CheckCardinalityHookAfterAddEntity(IntegrityHook): - """check cardinalities are satisfied""" - __regid__ = 'checkcard_after_add_entity' - events = ('after_add_entity',) - - def __call__(self): - eid = self.entity.eid - eschema = self.entity.e_schema - for rschema, targetschemas, role in eschema.relation_definitions(): - # skip automatically handled relations - if rschema.type in DONT_CHECK_RTYPES_ON_ADD: - continue - rdef = rschema.role_rdef(eschema, targetschemas[0], role) - if rdef.role_cardinality(role) in '1+': - if role == 'subject': - op = _CheckSRelationOp.get_instance(self._cw) - else: - op = _CheckORelationOp.get_instance(self._cw) - op.add_data((eid, rschema.type)) - - -class _CheckConstraintsOp(hook.DataOperationMixIn, hook.LateOperation): - """ check a new relation satisfy its constraints """ - containercls = list - def precommit_event(self): - cnx = self.cnx - for values in self.get_data(): - eidfrom, rtype, eidto, constraints = values - # first check related entities have not been deleted in the same - # transaction - if cnx.deleted_in_transaction(eidfrom): - continue - if cnx.deleted_in_transaction(eidto): - continue - for constraint in constraints: - # XXX - # * lock RQLConstraint as well? - # * use a constraint id to use per constraint lock and avoid - # unnecessary commit serialization ? - if isinstance(constraint, RQLUniqueConstraint): - _acquire_unique_cstr_lock(cnx) - try: - constraint.repo_check(cnx, eidfrom, rtype, eidto) - except NotImplementedError: - self.critical('can\'t check constraint %s, not supported', - constraint) - - -class CheckConstraintHook(IntegrityHook): - """check the relation satisfy its constraints - - this is delayed to a precommit time operation since other relation which - will make constraint satisfied (or unsatisfied) may be added later. - """ - __regid__ = 'checkconstraint' - events = ('after_add_relation',) - - def __call__(self): - # XXX get only RQL[Unique]Constraints? - rdef = self._cw.rtype_eids_rdef(self.rtype, self.eidfrom, self.eidto) - constraints = rdef.constraints - if constraints: - _CheckConstraintsOp.get_instance(self._cw).add_data( - (self.eidfrom, self.rtype, self.eidto, constraints)) - - -class CheckAttributeConstraintHook(IntegrityHook): - """check the attribute relation satisfy its constraints - - this is delayed to a precommit time operation since other relation which - will make constraint satisfied (or unsatisfied) may be added later. 
- """ - __regid__ = 'checkattrconstraint' - events = ('after_add_entity', 'after_update_entity') - - def __call__(self): - eschema = self.entity.e_schema - for attr in self.entity.cw_edited: - if eschema.subjrels[attr].final: - constraints = [c for c in eschema.rdef(attr).constraints - if isinstance(c, (RQLUniqueConstraint, RQLConstraint))] - if constraints: - _CheckConstraintsOp.get_instance(self._cw).add_data( - (self.entity.eid, attr, None, constraints)) - - -class CheckUniqueHook(IntegrityHook): - __regid__ = 'checkunique' - events = ('before_add_entity', 'before_update_entity') - - def __call__(self): - entity = self.entity - eschema = entity.e_schema - for attr, val in entity.cw_edited.items(): - if eschema.subjrels[attr].final and eschema.has_unique_values(attr): - if val is None: - continue - rql = '%s X WHERE X %s %%(val)s' % (entity.e_schema, attr) - rset = self._cw.execute(rql, {'val': val}) - if rset and rset[0][0] != entity.eid: - msg = _('the value "%s" is already used, use another one') - raise validation_error(entity, {(attr, 'subject'): msg}, - (val,)) - - -class DontRemoveOwnersGroupHook(IntegrityHook): - """delete the composed of a composite relation when this relation is deleted - """ - __regid__ = 'checkownersgroup' - __select__ = IntegrityHook.__select__ & is_instance('CWGroup') - events = ('before_delete_entity', 'before_update_entity') - - def __call__(self): - entity = self.entity - if self.event == 'before_delete_entity' and entity.name == 'owners': - raise validation_error(entity, {None: _("can't be deleted")}) - elif self.event == 'before_update_entity' \ - and 'name' in entity.cw_edited: - oldname, newname = entity.cw_edited.oldnewvalue('name') - if oldname == 'owners' and newname != oldname: - raise validation_error(entity, {('name', 'subject'): _("can't be changed")}) - - -class TidyHtmlFields(IntegrityHook): - """tidy HTML in rich text strings""" - __regid__ = 'htmltidy' - events = ('before_add_entity', 'before_update_entity') - - def __call__(self): - entity = self.entity - metaattrs = entity.e_schema.meta_attributes() - edited = entity.cw_edited - for metaattr, (metadata, attr) in metaattrs.items(): - if metadata == 'format' and attr in edited: - try: - value = edited[attr] - except KeyError: - continue # no text to tidy - if isinstance(value, text_type): # filter out None and Binary - if getattr(entity, str(metaattr)) == 'text/html': - edited[attr] = soup2xhtml(value, self._cw.encoding) - - -class StripCWUserLoginHook(IntegrityHook): - """ensure user logins are stripped""" - __regid__ = 'stripuserlogin' - __select__ = IntegrityHook.__select__ & is_instance('CWUser') - events = ('before_add_entity', 'before_update_entity',) - - def __call__(self): - login = self.entity.cw_edited.get('login') - if login: - self.entity.cw_edited['login'] = login.strip() - - -class DeleteCompositeOrphanHook(hook.Hook): - """Delete the composed of a composite relation when the composite is - deleted (this is similar to the cascading ON DELETE CASCADE - semantics of sql). 
- """ - __regid__ = 'deletecomposite' - __select__ = hook.Hook.__select__ & composite_etype() - events = ('before_delete_entity',) - category = 'activeintegrity' - # give the application's before_delete_entity hooks a chance to run before we cascade - order = 99 - - def __call__(self): - eid = self.entity.eid - for rdef, role in self.entity.e_schema.composite_rdef_roles: - rtype = rdef.rtype.type - target = getattr(rdef, neg_role(role)) - expr = ('C %s X' % rtype) if role == 'subject' else ('X %s C' % rtype) - self._cw.execute('DELETE %s X WHERE C eid %%(c)s, %s' % (target, expr), - {'c': eid}) - - -def registration_callback(vreg): - vreg.register_all(globals().values(), __name__) - symmetric_rtypes = [rschema.type for rschema in vreg.schema.relations() - if rschema.symmetric] - class EnsureSymmetricRelationsAdd(_EnsureSymmetricRelationsAdd): - __select__ = _EnsureSymmetricRelationsAdd.__select__ & hook.match_rtype(*symmetric_rtypes) - vreg.register(EnsureSymmetricRelationsAdd) - class EnsureSymmetricRelationsDelete(_EnsureSymmetricRelationsDelete): - __select__ = _EnsureSymmetricRelationsDelete.__select__ & hook.match_rtype(*symmetric_rtypes) - vreg.register(EnsureSymmetricRelationsDelete) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/logstats.py --- a/hooks/logstats.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,59 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -"""looping task for dumping instance's stats in a file -""" - -__docformat__ = "restructuredtext en" - -from datetime import datetime -import json - -from cubicweb.server import hook - -class LogStatsStartHook(hook.Hook): - """register task to regularly dump instance's stats in a file - - data are stored as one json entry per row - """ - __regid__ = 'cubicweb.hook.logstats.start' - events = ('server_startup',) - - def __call__(self): - interval = self.repo.config.get('logstat-interval', 0) - if interval <= 0: - return - - def dump_stats(repo): - statsfile = repo.config.get('logstat-file') - with repo.internal_cnx() as cnx: - stats = cnx.call_service('repo_stats') - gcstats = cnx.call_service('repo_gc_stats', nmax=5) - - allstats = {'resources': stats, - 'memory': gcstats, - 'timestamp': datetime.utcnow().isoformat(), - } - try: - with open(statsfile, 'ab') as ofile: - json.dump(allstats, ofile) - ofile.write('\n') - except IOError: - repo.warning('Cannot open stats file for writing: %s', statsfile) - - self.repo.looping_task(interval, dump_stats, self.repo) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/metadata.py --- a/hooks/metadata.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,219 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Core hooks: set generic metadata""" - -__docformat__ = "restructuredtext en" - -from datetime import datetime -from base64 import b64encode - -from pytz import utc - -from cubicweb.predicates import is_instance -from cubicweb.server import hook -from cubicweb.server.edition import EditedEntity - - -class MetaDataHook(hook.Hook): - __abstract__ = True - category = 'metadata' - - -class InitMetaAttrsHook(MetaDataHook): - """before create a new entity -> set creation and modification date - - this is a conveniency hook, you shouldn't have to disable it - """ - __regid__ = 'metaattrsinit' - events = ('before_add_entity',) - - def __call__(self): - timestamp = datetime.now(utc) - edited = self.entity.cw_edited - if not edited.get('creation_date'): - edited['creation_date'] = timestamp - if not edited.get('modification_date'): - edited['modification_date'] = timestamp - if not self._cw.transaction_data.get('do-not-insert-cwuri'): - cwuri = u'%s%s' % (self._cw.base_url(), self.entity.eid) - edited.setdefault('cwuri', cwuri) - - -class UpdateMetaAttrsHook(MetaDataHook): - """update an entity -> set modification date""" - __regid__ = 'metaattrsupdate' - events = ('before_update_entity',) - - def __call__(self): - # repairing is true during c-c upgrade/shell and similar commands. We - # usually don't want to update modification date in such cases. - # - # XXX to be really clean, we should turn off modification_date update - # explicitly on each command where we do not want that behaviour. 
- if not self._cw.vreg.config.repairing: - self.entity.cw_edited.setdefault('modification_date', datetime.now(utc)) - - -class SetCreatorOp(hook.DataOperationMixIn, hook.Operation): - - def precommit_event(self): - cnx = self.cnx - relations = [(eid, cnx.user.eid) for eid in self.get_data() - # don't consider entities that have been created and deleted in - # the same transaction, nor ones where created_by has been - # explicitly set - if not cnx.deleted_in_transaction(eid) and \ - not cnx.entity_from_eid(eid).created_by] - cnx.add_relations([('created_by', relations)]) - - -class SetOwnershipHook(MetaDataHook): - """create a new entity -> set owner and creator metadata""" - __regid__ = 'setowner' - events = ('after_add_entity',) - - def __call__(self): - if not self._cw.is_internal_session: - self._cw.add_relation(self.entity.eid, 'owned_by', self._cw.user.eid) - SetCreatorOp.get_instance(self._cw).add_data(self.entity.eid) - - -class SyncOwnersOp(hook.DataOperationMixIn, hook.Operation): - def precommit_event(self): - for compositeeid, composedeid in self.get_data(): - if self.cnx.deleted_in_transaction(compositeeid): - continue - if self.cnx.deleted_in_transaction(composedeid): - continue - self.cnx.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' - 'NOT EXISTS(X owned_by U, X eid %(x)s)', - {'c': compositeeid, 'x': composedeid}) - - -class SyncCompositeOwner(MetaDataHook): - """when adding composite relation, the composed should have the same owners - has the composite - """ - __regid__ = 'synccompositeowner' - events = ('after_add_relation',) - - def __call__(self): - if self.rtype == 'wf_info_for': - # skip this special composite relation # XXX (syt) why? - return - eidfrom, eidto = self.eidfrom, self.eidto - composite = self._cw.rtype_eids_rdef(self.rtype, eidfrom, eidto).composite - if composite == 'subject': - SyncOwnersOp.get_instance(self._cw).add_data( (eidfrom, eidto) ) - elif composite == 'object': - SyncOwnersOp.get_instance(self._cw).add_data( (eidto, eidfrom) ) - - -class FixUserOwnershipHook(MetaDataHook): - """when a user has been created, add owned_by relation on itself""" - __regid__ = 'fixuserowner' - __select__ = MetaDataHook.__select__ & is_instance('CWUser') - events = ('after_add_entity',) - - def __call__(self): - self._cw.add_relation(self.entity.eid, 'owned_by', self.entity.eid) - - -class UpdateFTIHook(MetaDataHook): - """sync fulltext index text index container when a relation with - fulltext_container set is added / removed - """ - __regid__ = 'updateftirel' - events = ('after_add_relation', 'after_delete_relation') - - def __call__(self): - rtype = self.rtype - cnx = self._cw - ftcontainer = cnx.vreg.schema.rschema(rtype).fulltext_container - if ftcontainer == 'subject': - cnx.repo.system_source.index_entity( - cnx, cnx.entity_from_eid(self.eidfrom)) - elif ftcontainer == 'object': - cnx.repo.system_source.index_entity( - cnx, cnx.entity_from_eid(self.eidto)) - - - -# entity source handling ####################################################### - -class ChangeEntitySourceUpdateCaches(hook.Operation): - oldsource = newsource = entity = None # make pylint happy - - def postcommit_event(self): - self.oldsource.reset_caches() - repo = self.cnx.repo - entity = self.entity - extid = entity.cw_metainformation()['extid'] - repo._type_source_cache[entity.eid] = ( - entity.cw_etype, None, self.newsource.uri) - repo._extid_cache[extid] = -entity.eid - - -class ChangeEntitySourceDeleteHook(MetaDataHook): - """support for moving an entity from an external 
source by watching 'Any - cw_source CWSource' relation - """ - - __regid__ = 'cw.metadata.source-change' - __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source') - events = ('before_delete_relation',) - - def __call__(self): - if (self._cw.deleted_in_transaction(self.eidfrom) - or self._cw.deleted_in_transaction(self.eidto)): - return - schange = self._cw.transaction_data.setdefault('cw_source_change', {}) - schange[self.eidfrom] = self.eidto - - -class ChangeEntitySourceAddHook(MetaDataHook): - __regid__ = 'cw.metadata.source-change' - __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source') - events = ('before_add_relation',) - - def __call__(self): - schange = self._cw.transaction_data.get('cw_source_change') - if schange is not None and self.eidfrom in schange: - newsource = self._cw.entity_from_eid(self.eidto) - if newsource.name != 'system': - raise Exception('changing source to something else than the ' - 'system source is unsupported') - syssource = newsource.repo_source - oldsource = self._cw.entity_from_eid(schange[self.eidfrom]) - entity = self._cw.entity_from_eid(self.eidfrom) - # we don't want the moved entity to be reimported later. To - # distinguish this state, move the record from the 'entities' table - # to 'moved_entities'. External source will then have consider - # case where `extid2eid` returns a negative eid as 'this entity was - # known but has been moved, ignore it'. - extid = self._cw.entity_metas(entity.eid)['extid'] - assert extid is not None - attrs = {'eid': entity.eid, 'extid': b64encode(extid).decode('ascii')} - self._cw.system_sql(syssource.sqlgen.insert('moved_entities', attrs), attrs) - attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None, - 'asource': 'system'} - self._cw.system_sql(syssource.sqlgen.update('entities', attrs, ['eid']), attrs) - # register an operation to update repository/sources caches - ChangeEntitySourceUpdateCaches(self._cw, entity=entity, - oldsource=oldsource.repo_source, - newsource=syssource) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/notification.py --- a/hooks/notification.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,244 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
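The hook modules moved by this changeset (metadata, notification, security, synccomputed, syncschema) all lean on the deferral idiom visible in SetCreatorOp and SyncOwnersOp above: each hook invocation merely records an eid on a per-transaction operation obtained through get_instance(), and that single operation does the real work once the transaction is being committed. Below is a minimal sketch of the idiom with hypothetical hook and operation names, assuming only the cubicweb.server.hook API already used in the removed files; it is an illustration, not part of the changeset.

from cubicweb.server import hook


class _TouchedEntitiesOp(hook.DataOperationMixIn, hook.Operation):
    # hypothetical operation: eids recorded by the hook below are processed
    # only once, at precommit time, for the whole transaction
    def precommit_event(self):
        for eid in self.get_data():
            if self.cnx.deleted_in_transaction(eid):
                continue  # entity created and deleted in the same transaction
            entity = self.cnx.entity_from_eid(eid)
            # ... do the actual (possibly expensive) work on `entity` here


class TouchedEntityHook(hook.Hook):
    # hypothetical hook: fired once per added entity, it only records the eid
    __regid__ = 'myapp.touched-entity'
    events = ('after_add_entity',)

    def __call__(self):
        # get_instance() returns the single operation instance attached to
        # the current transaction, creating it on first use
        _TouchedEntitiesOp.get_instance(self._cw).add_data(self.entity.eid)

Because every hook call feeds the same per-transaction operation, the expensive work is postponed until commit time and never runs for transactions that are rolled back before then.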
-"""some hooks to handle notification on entity's changes""" - -__docformat__ = "restructuredtext en" - -from logilab.common.textutils import normalize_text -from logilab.common.deprecation import deprecated - -from cubicweb import RegistryNotFound -from cubicweb.predicates import is_instance -from cubicweb.server import hook -from cubicweb.sobjects.supervising import SupervisionMailOp - - -@deprecated('[3.17] use notify_on_commit instead') -def RenderAndSendNotificationView(cnx, view, viewargs=None): - notify_on_commit(cnx, view, viewargs) - - -def notify_on_commit(cnx, view, viewargs=None): - """register a notification view (see - :class:`~cubicweb.sobjects.notification.NotificationView`) to be sent at - post-commit time, ie only if the transaction has succeeded. - - `viewargs` is an optional dictionary containing extra argument to be given - to :meth:`~cubicweb.sobjects.notification.NotificationView.render_and_send` - """ - if viewargs is None: - viewargs = {} - notif_op = _RenderAndSendNotificationOp.get_instance(cnx) - notif_op.add_data((view, viewargs)) - - -class _RenderAndSendNotificationOp(hook.DataOperationMixIn, hook.Operation): - """End of the notification chain. Do render and send views after commit - - All others Operations end up adding data to this Operation. - The notification are done on ``postcommit_event`` to make sure to prevent - sending notification about rolled back data. - """ - - containercls = list - - def postcommit_event(self): - deleted = self.cnx.deleted_in_transaction - for view, viewargs in self.get_data(): - if view.cw_rset is not None: - if not view.cw_rset: - # entity added and deleted in the same transaction - # (cache effect) - continue - elif deleted(view.cw_rset[view.cw_row or 0][view.cw_col or 0]): - # entity added and deleted in the same transaction - continue - try: - view.render_and_send(**viewargs) - except Exception: - # error in post commit are not propagated - # We keep this logic here to prevent a small notification error - # to prevent them all. - self.exception('Notification failed') - - -class NotificationHook(hook.Hook): - __abstract__ = True - category = 'notification' - - def select_view(self, vid, rset, row=0, col=0): - try: - return self._cw.vreg['views'].select_or_none(vid, self._cw, rset=rset, - row=row, col=col) - except RegistryNotFound: # can happen in some config - # (e.g. 
repo only config with no - # notification views registered by - # the instance's cubes) - return None - - -class StatusChangeHook(NotificationHook): - """notify when a workflowable entity has its state modified""" - __regid__ = 'notifystatuschange' - __select__ = NotificationHook.__select__ & is_instance('TrInfo') - events = ('after_add_entity',) - - def __call__(self): - entity = self.entity - if not entity.from_state: # not a transition - return - rset = entity.related('wf_info_for') - view = self.select_view('notif_status_change', rset=rset, row=0) - if view is None: - return - comment = entity.printable_value('comment', format='text/plain') - # XXX don't try to wrap rest until we've a proper transformation (see - # #103822) - if comment and entity.comment_format != 'text/rest': - comment = normalize_text(comment, 80) - viewargs = {'comment': comment, - 'previous_state': entity.previous_state.name, - 'current_state': entity.new_state.name} - notify_on_commit(self._cw, view, viewargs=viewargs) - -class RelationChangeHook(NotificationHook): - __regid__ = 'notifyrelationchange' - events = ('before_add_relation', 'after_add_relation', - 'before_delete_relation', 'after_delete_relation') - - def __call__(self): - """if a notification view is defined for the event, send notification - email defined by the view - """ - rset = self._cw.eid_rset(self.eidfrom) - view = self.select_view('notif_%s_%s' % (self.event, self.rtype), - rset=rset, row=0) - if view is None: - return - notify_on_commit(self._cw, view) - - -class EntityChangeHook(NotificationHook): - """if a notification view is defined for the event, send notification - email defined by the view - """ - __regid__ = 'notifyentitychange' - events = ('after_add_entity', 'after_update_entity') - - def __call__(self): - rset = self.entity.as_rset() - view = self.select_view('notif_%s' % self.event, rset=rset, row=0) - if view is None: - return - notify_on_commit(self._cw, view) - - -class EntityUpdatedNotificationOp(hook.SingleLastOperation): - """scrap all changed entity to prepare a Notification Operation for them""" - - def precommit_event(self): - # precommit event that creates postcommit operation - cnx = self.cnx - for eid in cnx.transaction_data['changes']: - view = cnx.vreg['views'].select('notif_entity_updated', cnx, - rset=cnx.eid_rset(eid), - row=0) - notify_on_commit(self.cnx, view, - viewargs={'changes': cnx.transaction_data['changes'][eid]}) - - -class EntityUpdateHook(NotificationHook): - __regid__ = 'notifentityupdated' - __abstract__ = True # do not register by default - __select__ = NotificationHook.__select__ & hook.issued_from_user_query() - events = ('before_update_entity',) - skip_attrs = set() - - def __call__(self): - cnx = self._cw - if cnx.added_in_transaction(self.entity.eid): - return # entity is being created - # then compute changes - attrs = [k for k in self.entity.cw_edited - if not k in self.skip_attrs] - if not attrs: - return - changes = cnx.transaction_data.setdefault('changes', {}) - thisentitychanges = changes.setdefault(self.entity.eid, set()) - rqlsel, rqlrestr = [], ['X eid %(x)s'] - for i, attr in enumerate(attrs): - var = chr(65+i) - rqlsel.append(var) - rqlrestr.append('X %s %s' % (attr, var)) - rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) - rset = cnx.execute(rql, {'x': self.entity.eid}) - for i, attr in enumerate(attrs): - oldvalue = rset[0][i] - newvalue = self.entity.cw_edited[attr] - if oldvalue != newvalue: - thisentitychanges.add((attr, oldvalue, newvalue)) - if 
thisentitychanges: - EntityUpdatedNotificationOp(cnx) - - -# supervising ################################################################## - -class SomethingChangedHook(NotificationHook): - __regid__ = 'supervising' - __select__ = NotificationHook.__select__ & hook.issued_from_user_query() - events = ('before_add_relation', 'before_delete_relation', - 'after_add_entity', 'before_update_entity') - - def __call__(self): - dest = self._cw.vreg.config['supervising-addrs'] - if not dest: # no supervisors, don't do this for nothing... - return - if self._call(): - SupervisionMailOp(self._cw) - - def _call(self): - event = self.event.split('_', 1)[1] - if event == 'update_entity': - if self._cw.added_in_transaction(self.entity.eid): - return False - if self.entity.e_schema == 'CWUser': - if not (frozenset(self.entity.cw_edited) - - frozenset(('eid', 'modification_date', - 'last_login_time'))): - # don't record last_login_time update which are done - # automatically at login time - return False - self._cw.transaction_data.setdefault('pendingchanges', []).append( - (event, self)) - return True - - -class EntityDeleteHook(SomethingChangedHook): - __regid__ = 'supervisingentitydel' - events = ('before_delete_entity',) - - def _call(self): - try: - title = self.entity.dc_title() - except Exception: - # may raise an error during deletion process, for instance due to - # missing required relation - title = '#%s' % self.entity.eid - self._cw.transaction_data.setdefault('pendingchanges', []).append( - ('delete_entity', (self.entity.eid, self.entity.cw_etype, title))) - return True diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/security.py --- a/hooks/security.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,209 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
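notify_on_commit(), removed just above as part of hooks/notification.py (now cubicweb/hooks/notification.py), is the helper cubes use to defer notification e-mails to post-commit time. A minimal sketch of a cube-side hook using it follows, with a hypothetical entity type, regid and view id, and assuming the corresponding NotificationView is registered elsewhere; again this is illustrative, not part of the changeset.

from cubicweb.predicates import is_instance
from cubicweb.server import hook
from cubicweb.hooks.notification import notify_on_commit


class MyThingUpdatedHook(hook.Hook):
    # hypothetical hook: queue a notification about MyThing updates; the
    # e-mail is rendered and sent only if the transaction actually commits
    __regid__ = 'mycube.notify-mything-updated'
    __select__ = hook.Hook.__select__ & is_instance('MyThing')  # hypothetical etype
    events = ('after_update_entity',)
    category = 'notification'

    def __call__(self):
        rset = self.entity.as_rset()
        view = self._cw.vreg['views'].select_or_none(
            'notif_mything_updated', self._cw, rset=rset, row=0)  # hypothetical view id
        if view is None:
            return  # no notification view registered, nothing to send
        notify_on_commit(self._cw, view)

The deferral matters because _RenderAndSendNotificationOp above only sends in postcommit_event(), so no mail goes out for changes that end up rolled back.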
-"""Security hooks: check permissions to add/delete/update entities according to -the connected user -""" - -__docformat__ = "restructuredtext en" -from warnings import warn - -from logilab.common.registry import objectify_predicate - -from yams import buildobjs - -from cubicweb import Unauthorized -from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS, hook - - - -def check_entity_attributes(cnx, entity, action, editedattrs=None): - eid = entity.eid - eschema = entity.e_schema - if action == 'delete': - eschema.check_perm(session, action, eid=eid) - return - # ._cw_skip_security_attributes is there to bypass security for attributes - # set by hooks by modifying the entity's dictionary - if editedattrs is None: - editedattrs = entity.cw_edited - dontcheck = editedattrs.skip_security - etypechecked = False - for attr in editedattrs: - if attr in dontcheck: - continue - rdef = eschema.rdef(attr, takefirst=True) - if rdef.final: # non final relation are checked by standard hooks - perms = rdef.permissions.get(action) - # comparison below works because the default update perm is: - # - # ('managers', ERQLExpression(Any X WHERE U has_update_permission X, - # X eid %(x)s, U eid %(u)s)) - # - # is deserialized in this order (groups first), and ERQLExpression - # implements comparison by rql expression. - if perms == buildobjs.DEFAULT_ATTRPERMS[action]: - # The default rule is to delegate to the entity - # rule. This needs to be checked only once. - if not etypechecked: - entity.cw_check_perm(action) - etypechecked = True - continue - if perms == (): - # That means an immutable attribute; as an optimization, avoid - # going through check_perm. - raise Unauthorized(action, str(rdef)) - rdef.check_perm(cnx, action, eid=eid) - - if action == 'add' and not etypechecked: - # think about cnx.create_entity('Foo') - # the standard metadata were inserted by a hook - # with a bypass ... 
we conceptually need to check - # the eid attribute at *creation* time - entity.cw_check_perm(action) - - -class CheckEntityPermissionOp(hook.DataOperationMixIn, hook.LateOperation): - def precommit_event(self): - cnx = self.cnx - for eid, action, edited in self.get_data(): - entity = cnx.entity_from_eid(eid) - check_entity_attributes(cnx, entity, action, edited) - - -class CheckRelationPermissionOp(hook.DataOperationMixIn, hook.LateOperation): - def precommit_event(self): - cnx = self.cnx - for action, rschema, eidfrom, eidto in self.get_data(): - rdef = rschema.rdef(cnx.entity_metas(eidfrom)['type'], - cnx.entity_metas(eidto)['type']) - rdef.check_perm(cnx, action, fromeid=eidfrom, toeid=eidto) - - -@objectify_predicate -def write_security_enabled(cls, req, **kwargs): - if req is None or not req.write_security: - return 0 - return 1 - -class SecurityHook(hook.Hook): - __abstract__ = True - category = 'security' - __select__ = hook.Hook.__select__ & write_security_enabled() - - -class AfterAddEntitySecurityHook(SecurityHook): - __regid__ = 'securityafteraddentity' - events = ('after_add_entity',) - - def __call__(self): - CheckEntityPermissionOp.get_instance(self._cw).add_data( - (self.entity.eid, 'add', self.entity.cw_edited) ) - - -class AfterUpdateEntitySecurityHook(SecurityHook): - __regid__ = 'securityafterupdateentity' - events = ('after_update_entity',) - - def __call__(self): - # save back editedattrs in case the entity is reedited later in the - # same transaction, which will lead to cw_edited being - # overwritten - action = 'add' if self._cw.added_in_transaction(self.entity.eid) else 'update' - CheckEntityPermissionOp.get_instance(self._cw).add_data( - (self.entity.eid, action, self.entity.cw_edited) ) - - -class BeforeDelEntitySecurityHook(SecurityHook): - __regid__ = 'securitybeforedelentity' - events = ('before_delete_entity',) - - def __call__(self): - self.entity.cw_check_perm('delete') - - -def skip_inlined_relation_security(cnx, rschema, eid): - """return True if security for the given inlined relation should be skipped, - in case where the relation has been set through modification of - `entity.cw_edited` in a hook - """ - assert rschema.inlined - try: - entity = cnx.entity_cache(eid) - except KeyError: - return False - edited = getattr(entity, 'cw_edited', None) - if edited is None: - return False - return rschema.type in edited.skip_security - - -class BeforeAddRelationSecurityHook(SecurityHook): - __regid__ = 'securitybeforeaddrelation' - events = ('before_add_relation',) - - def __call__(self): - if self.rtype in BEFORE_ADD_RELATIONS: - nocheck = self._cw.transaction_data.get('skip-security', ()) - if (self.eidfrom, self.rtype, self.eidto) in nocheck: - return - rschema = self._cw.repo.schema[self.rtype] - if rschema.inlined and skip_inlined_relation_security( - self._cw, rschema, self.eidfrom): - return - rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], - self._cw.entity_metas(self.eidto)['type']) - rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) - - -class AfterAddRelationSecurityHook(SecurityHook): - __regid__ = 'securityafteraddrelation' - events = ('after_add_relation',) - - def __call__(self): - if self.rtype not in BEFORE_ADD_RELATIONS: - nocheck = self._cw.transaction_data.get('skip-security', ()) - if (self.eidfrom, self.rtype, self.eidto) in nocheck: - return - rschema = self._cw.repo.schema[self.rtype] - if rschema.inlined and skip_inlined_relation_security( - self._cw, rschema, self.eidfrom): - return - if 
self.rtype in ON_COMMIT_ADD_RELATIONS: - CheckRelationPermissionOp.get_instance(self._cw).add_data( - ('add', rschema, self.eidfrom, self.eidto) ) - else: - rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], - self._cw.entity_metas(self.eidto)['type']) - rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) - - -class BeforeDeleteRelationSecurityHook(SecurityHook): - __regid__ = 'securitybeforedelrelation' - events = ('before_delete_relation',) - - def __call__(self): - nocheck = self._cw.transaction_data.get('skip-security', ()) - if (self.eidfrom, self.rtype, self.eidto) in nocheck: - return - rschema = self._cw.repo.schema[self.rtype] - if rschema.inlined and skip_inlined_relation_security( - self._cw, rschema, self.eidfrom): - return - rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], - self._cw.entity_metas(self.eidto)['type']) - rdef.check_perm(self._cw, 'delete', fromeid=self.eidfrom, toeid=self.eidto) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/synccomputed.py --- a/hooks/synccomputed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,227 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Hooks for synchronizing computed attributes""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from collections import defaultdict - -from rql import nodes - -from cubicweb.server import hook - - -class RecomputeAttributeOperation(hook.DataOperationMixIn, hook.Operation): - """Operation to recompute caches of computed attribute at commit time, - depending on what's have been modified in the transaction and avoiding to - recompute twice the same attribute - """ - containercls = dict - def add_data(self, computed_attribute, eid=None): - try: - self._container[computed_attribute].add(eid) - except KeyError: - self._container[computed_attribute] = set((eid,)) - - def precommit_event(self): - for computed_attribute_rdef, eids in self.get_data().items(): - attr = computed_attribute_rdef.rtype - formula = computed_attribute_rdef.formula - select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0] - xvar = select.get_variable('X') - select.add_selected(xvar, index=0) - select.add_group_var(xvar, index=0) - if None in eids: - select.add_type_restriction(xvar, computed_attribute_rdef.subject) - else: - select.add_eid_restriction(xvar, eids) - update_rql = 'SET X %s %%(value)s WHERE X eid %%(x)s' % attr - for eid, value in self.cnx.execute(select.as_string()): - self.cnx.execute(update_rql, {'value': value, 'x': eid}) - - -class EntityWithCACreatedHook(hook.Hook): - """When creating an entity that has some computed attribute, those - attributes have to be computed. - - Concret class of this hook are generated at registration time by - introspecting the schema. 
- """ - __abstract__ = True - events = ('after_add_entity',) - # list of computed attribute rdefs that have to be recomputed - computed_attributes = None - - def __call__(self): - for rdef in self.computed_attributes: - RecomputeAttributeOperation.get_instance(self._cw).add_data( - rdef, self.entity.eid) - - -class RelationInvolvedInCAModifiedHook(hook.Hook): - """When some relation used in a computed attribute is updated, those - attributes have to be recomputed. - - Concret class of this hook are generated at registration time by - introspecting the schema. - """ - __abstract__ = True - events = ('after_add_relation', 'before_delete_relation') - # list of (computed attribute rdef, optimize_on) that have to be recomputed - optimized_computed_attributes = None - - def __call__(self): - for rdef, optimize_on in self.optimized_computed_attributes: - if optimize_on is None: - eid = None - else: - eid = getattr(self, optimize_on) - RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef, eid) - - -class AttributeInvolvedInCAModifiedHook(hook.Hook): - """When some attribute used in a computed attribute is updated, those - attributes have to be recomputed. - - Concret class of this hook are generated at registration time by - introspecting the schema. - """ - __abstract__ = True - events = ('after_update_entity',) - # list of (computed attribute rdef, attributes of this entity type involved) - # that may have to be recomputed - attributes_computed_attributes = None - - def __call__(self): - edited_attributes = frozenset(self.entity.cw_edited) - for rdef, used_attributes in self.attributes_computed_attributes.items(): - if edited_attributes.intersection(used_attributes): - # XXX optimize if the modified attributes belong to the same - # entity as the computed attribute - RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef) - - -# code generation at registration time ######################################### - -def _optimize_on(formula_select, rtype): - """Given a formula and some rtype, tells whether on update of the given - relation, formula may be recomputed only for rhe relation's subject - ('eidfrom' returned), object ('eidto' returned) or None. - - Optimizing is only possible when X is used as direct subject/object of this - relation, else we may miss some necessary update. - """ - for rel in formula_select.get_nodes(nodes.Relation): - if rel.r_type == rtype: - sub = rel.get_variable_parts()[0] - obj = rel.get_variable_parts()[1] - if sub.name == 'X': - return 'eidfrom' - elif obj.name == 'X': - return 'eidto' - else: - return None - - -class _FormulaDependenciesMatrix(object): - """This class computes and represents the dependencies of computed attributes - towards relations and attributes - """ - - def __init__(self, schema): - """Analyzes the schema to compute the dependencies""" - # entity types holding some computed attribute {etype: [computed rdefs]} - self.computed_attribute_by_etype = defaultdict(list) - # depending entity types {dep. etype: {computed rdef: dep. etype attributes}} - self.computed_attribute_by_etype_attrs = defaultdict(lambda: defaultdict(set)) - # depending relations def {dep. 
rdef: [computed rdefs] - self.computed_attribute_by_relation = defaultdict(list) # by rdef - # Walk through all attributes definitions - for rdef in schema.iter_computed_attributes(): - self.computed_attribute_by_etype[rdef.subject.type].append(rdef) - # extract the relations it depends upon - `rdef.formula_select` is - # expected to have been set by finalize_computed_attributes - select = rdef.formula_select - for rel_node in select.get_nodes(nodes.Relation): - if rel_node.is_types_restriction(): - continue - rschema = schema.rschema(rel_node.r_type) - lhs, rhs = rel_node.get_variable_parts() - for sol in select.solutions: - subject_etype = sol[lhs.name] - if isinstance(rhs, nodes.VariableRef): - object_etypes = set(sol[rhs.name] for sol in select.solutions) - else: - object_etypes = rschema.objects(subject_etype) - for object_etype in object_etypes: - if rschema.final: - attr_for_computations = self.computed_attribute_by_etype_attrs[subject_etype] - attr_for_computations[rdef].add(rschema.type) - else: - depend_on_rdef = rschema.rdefs[subject_etype, object_etype] - self.computed_attribute_by_relation[depend_on_rdef].append(rdef) - - def generate_entity_creation_hooks(self): - for etype, computed_attributes in self.computed_attribute_by_etype.items(): - regid = 'computed_attribute.%s_created' % etype - selector = hook.is_instance(etype) - yield type('%sCreatedHook' % etype, - (EntityWithCACreatedHook,), - {'__regid__': regid, - '__select__': hook.Hook.__select__ & selector, - 'computed_attributes': computed_attributes}) - - def generate_relation_change_hooks(self): - for rdef, computed_attributes in self.computed_attribute_by_relation.items(): - regid = 'computed_attribute.%s_modified' % rdef.rtype - selector = hook.match_rtype(rdef.rtype.type, - frometypes=(rdef.subject.type,), - toetypes=(rdef.object.type,)) - optimized_computed_attributes = [] - for computed_rdef in computed_attributes: - optimized_computed_attributes.append( - (computed_rdef, - _optimize_on(computed_rdef.formula_select, rdef.rtype)) - ) - yield type('%sModifiedHook' % rdef.rtype, - (RelationInvolvedInCAModifiedHook,), - {'__regid__': regid, - '__select__': hook.Hook.__select__ & selector, - 'optimized_computed_attributes': optimized_computed_attributes}) - - def generate_entity_update_hooks(self): - for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.items(): - regid = 'computed_attribute.%s_updated' % etype - selector = hook.is_instance(etype) - yield type('%sModifiedHook' % etype, - (AttributeInvolvedInCAModifiedHook,), - {'__regid__': regid, - '__select__': hook.Hook.__select__ & selector, - 'attributes_computed_attributes': attributes_computed_attributes}) - - -def registration_callback(vreg): - vreg.register_all(globals().values(), __name__) - dependencies = _FormulaDependenciesMatrix(vreg.schema) - for hook_class in dependencies.generate_entity_creation_hooks(): - vreg.register(hook_class) - for hook_class in dependencies.generate_relation_change_hooks(): - vreg.register(hook_class) - for hook_class in dependencies.generate_entity_update_hooks(): - vreg.register(hook_class) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/syncschema.py --- a/hooks/syncschema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1417 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""schema hooks: - -- synchronize the living schema object with the persistent schema -- perform physical update on the source when necessary - -checking for schema consistency is done in hooks.py -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import json -from copy import copy -from hashlib import md5 - -from yams.schema import (BASE_TYPES, BadSchemaDefinition, - RelationSchema, RelationDefinitionSchema) -from yams import buildobjs as ybo, convert_default_value - -from logilab.common.decorators import clear_cache - -from cubicweb import validation_error -from cubicweb.predicates import is_instance -from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES, - CONSTRAINTS, UNIQUE_CONSTRAINTS, ETYPE_NAME_MAP) -from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.hooks.synccomputed import RecomputeAttributeOperation - -# core entity and relation types which can't be removed -CORE_TYPES = BASE_TYPES | SCHEMA_TYPES | META_RTYPES | set( - ('CWUser', 'CWGroup','login', 'upassword', 'name', 'in_group')) - - -def get_constraints(cnx, entity): - constraints = [] - for cstreid in cnx.transaction_data.get(entity.eid, ()): - cstrent = cnx.entity_from_eid(cstreid) - cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value) - cstr.eid = cstreid - constraints.append(cstr) - return constraints - -def group_mapping(cw): - try: - return cw.transaction_data['groupmap'] - except KeyError: - cw.transaction_data['groupmap'] = gmap = ss.group_mapping(cw) - return gmap - -def add_inline_relation_column(cnx, etype, rtype): - """add necessary column and index for an inlined relation""" - attrkey = '%s.%s' % (etype, rtype) - createdattrs = cnx.transaction_data.setdefault('createdattrs', set()) - if attrkey in createdattrs: - return - createdattrs.add(attrkey) - table = SQL_PREFIX + etype - column = SQL_PREFIX + rtype - try: - cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)' % (table, column)), - rollback_on_failure=False) - cnx.info('added column %s to table %s', column, table) - except Exception: - # silent exception here, if this error has not been raised because the - # column already exists, index creation will fail anyway - cnx.exception('error while adding column %s to table %s', - table, column) - # create index before alter table which may expectingly fail during test - # (sqlite) while index creation should never fail (test for index existence - # is done by the dbhelper) - cnx.repo.system_source.create_index(cnx, table, column) - cnx.info('added index on %s(%s)', table, column) - - -def insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props): - # XXX 'infered': True/False, not clear actually - props.update({'constraints': rdefdef.constraints, - 'description': rdefdef.description, - 'cardinality': rdefdef.cardinality, - 
'permissions': rdefdef.get_permissions(), - 'order': rdefdef.order, - 'infered': False, 'eid': None - }) - cstrtypemap = ss.cstrtype_mapping(cnx) - groupmap = group_mapping(cnx) - object = rschema.schema.eschema(rdefdef.object) - for specialization in eschema.specialized_by(False): - if (specialization, rdefdef.object) in rschema.rdefs: - continue - sperdef = RelationDefinitionSchema(specialization, rschema, - object, None, values=props) - ss.execschemarql(cnx.execute, sperdef, - ss.rdef2rql(sperdef, cstrtypemap, groupmap)) - - -def check_valid_changes(cnx, entity, ro_attrs=('name', 'final')): - errors = {} - # don't use getattr(entity, attr), we would get the modified value if any - for attr in entity.cw_edited: - if attr in ro_attrs: - origval, newval = entity.cw_edited.oldnewvalue(attr) - if newval != origval: - errors[attr] = _("can't change this attribute") - if errors: - raise validation_error(entity, errors) - - -class _MockEntity(object): # XXX use a named tuple with python 2.6 - def __init__(self, eid): - self.eid = eid - - -class SyncSchemaHook(hook.Hook): - """abstract class for schema synchronization hooks (in the `syncschema` - category) - """ - __abstract__ = True - category = 'syncschema' - - -# operations for low-level database alteration ################################ - -class DropTable(hook.Operation): - """actually remove a database from the instance's schema""" - table = None # make pylint happy - def precommit_event(self): - dropped = self.cnx.transaction_data.setdefault('droppedtables', - set()) - if self.table in dropped: - return # already processed - dropped.add(self.table) - self.cnx.system_sql('DROP TABLE %s' % self.table) - self.info('dropped table %s', self.table) - - # XXX revertprecommit_event - - -class DropRelationTable(DropTable): - def __init__(self, cnx, rtype): - super(DropRelationTable, self).__init__( - cnx, table='%s_relation' % rtype) - cnx.transaction_data.setdefault('pendingrtypes', set()).add(rtype) - - -class DropColumn(hook.DataOperationMixIn, hook.Operation): - """actually remove the attribut's column from entity table in the system - database - """ - def precommit_event(self): - cnx = self.cnx - for etype, attr in self.get_data(): - table = SQL_PREFIX + etype - column = SQL_PREFIX + attr - source = cnx.repo.system_source - # drop index if any - source.drop_index(cnx, table, column) - if source.dbhelper.alter_column_support: - cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column), - rollback_on_failure=False) - self.info('dropped column %s from table %s', column, table) - else: - # not supported by sqlite for instance - self.error('dropping column not supported by the backend, handle ' - 'it yourself (%s.%s)', table, column) - - # XXX revertprecommit_event - - -# base operations for in-memory schema synchronization ######################## - -class MemSchemaNotifyChanges(hook.SingleLastOperation): - """the update schema operation: - - special operation which should be called once and after all other schema - operations. It will trigger internal structures rebuilding to consider - schema changes. 
- """ - - def __init__(self, cnx): - hook.SingleLastOperation.__init__(self, cnx) - - def precommit_event(self): - for eschema in self.cnx.repo.schema.entities(): - if not eschema.final: - clear_cache(eschema, 'ordered_relations') - - def postcommit_event(self): - repo = self.cnx.repo - # commit event should not raise error, while set_schema has chances to - # do so because it triggers full vreg reloading - try: - repo.schema.rebuild_infered_relations() - # trigger vreg reload - repo.set_schema(repo.schema) - # CWUser class might have changed, update current session users - cwuser_cls = self.cnx.vreg['etypes'].etype_class('CWUser') - for session in repo._sessions.values(): - session.user.__class__ = cwuser_cls - except Exception: - self.critical('error while setting schema', exc_info=True) - - def rollback_event(self): - self.precommit_event() - - -class MemSchemaOperation(hook.Operation): - """base class for schema operations""" - def __init__(self, cnx, **kwargs): - hook.Operation.__init__(self, cnx, **kwargs) - # every schema operation is triggering a schema update - MemSchemaNotifyChanges(cnx) - - -# operations for high-level source database alteration ######################## - -class CWETypeAddOp(MemSchemaOperation): - """after adding a CWEType entity: - * add it to the instance's schema - * create the necessary table - * set creation_date and modification_date by creating the necessary - CWAttribute entities - * add relation by creating the necessary CWRelation entity - """ - entity = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - entity = self.entity - schema = cnx.vreg.schema - etype = ybo.EntityType(eid=entity.eid, name=entity.name, - description=entity.description) - eschema = schema.add_entity_type(etype) - # create the necessary table - tablesql = y2sql.eschema2sql(cnx.repo.system_source.dbhelper, - eschema, prefix=SQL_PREFIX) - for sql in tablesql.split(';'): - if sql.strip(): - cnx.system_sql(sql) - # add meta relations - gmap = group_mapping(cnx) - cmap = ss.cstrtype_mapping(cnx) - for rtype in (META_RTYPES - VIRTUAL_RTYPES): - try: - rschema = schema[rtype] - except KeyError: - self.critical('rtype %s was not handled at cwetype creation time', rtype) - continue - if not rschema.rdefs: - self.warning('rtype %s has no relation definition yet', rtype) - continue - sampletype = rschema.subjects()[0] - desttype = rschema.objects()[0] - try: - rdef = copy(rschema.rdef(sampletype, desttype)) - except KeyError: - # this combo does not exist because this is not a universal META_RTYPE - continue - rdef.subject = _MockEntity(eid=entity.eid) - mock = _MockEntity(eid=None) - ss.execschemarql(cnx.execute, mock, ss.rdef2rql(rdef, cmap, gmap)) - - def revertprecommit_event(self): - # revert changes on in memory schema - self.cnx.vreg.schema.del_entity_type(self.entity.name) - # revert changes on database - self.cnx.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name)) - - -class CWETypeRenameOp(MemSchemaOperation): - """this operation updates physical storage accordingly""" - oldname = newname = None # make pylint happy - - def rename(self, oldname, newname): - self.cnx.vreg.schema.rename_entity_type(oldname, newname) - # we need sql to operate physical changes on the system database - sqlexec = self.cnx.system_sql - dbhelper = self.cnx.repo.system_source.dbhelper - sql = dbhelper.sql_rename_table(SQL_PREFIX+oldname, - SQL_PREFIX+newname) - sqlexec(sql) - self.info('renamed table %s to %s', oldname, newname) - sqlexec('UPDATE entities SET 
type=%(newname)s WHERE type=%(oldname)s', - {'newname': newname, 'oldname': oldname}) - for eid, (etype, extid, auri) in self.cnx.repo._type_source_cache.items(): - if etype == oldname: - self.cnx.repo._type_source_cache[eid] = (newname, extid, auri) - # XXX transaction records - - def precommit_event(self): - self.rename(self.oldname, self.newname) - - def revertprecommit_event(self): - self.rename(self.newname, self.oldname) - - -class CWRTypeUpdateOp(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = entity = values = None # make pylint happy - oldvalues = None - - def precommit_event(self): - rschema = self.rschema - if rschema.final: - return # watched changes to final relation type are unexpected - cnx = self.cnx - if 'fulltext_container' in self.values: - op = UpdateFTIndexOp.get_instance(cnx) - for subjtype, objtype in rschema.rdefs: - if self.values['fulltext_container'] == 'subject': - op.add_data(subjtype) - op.add_data(objtype) - else: - op.add_data(objtype) - op.add_data(subjtype) - # update the in-memory schema first - self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values) - self.rschema.__dict__.update(self.values) - # then make necessary changes to the system source database - if 'inlined' not in self.values: - return # nothing to do - inlined = self.values['inlined'] - # check in-lining is possible when inlined - if inlined: - self.entity.check_inlined_allowed() - # inlined changed, make necessary physical changes! - sqlexec = self.cnx.system_sql - rtype = rschema.type - eidcolumn = SQL_PREFIX + 'eid' - if not inlined: - # need to create the relation if it has not been already done by - # another event of the same transaction - if not rschema.type in cnx.transaction_data.get('createdtables', ()): - tablesql = y2sql.rschema2sql(rschema) - # create the necessary table - for sql in tablesql.split(';'): - if sql.strip(): - sqlexec(sql) - cnx.transaction_data.setdefault('createdtables', []).append( - rschema.type) - # copy existant data - column = SQL_PREFIX + rtype - for etype in rschema.subjects(): - table = SQL_PREFIX + str(etype) - sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL' - % (rtype, eidcolumn, column, table, column)) - # drop existant columns - #if cnx.repo.system_source.dbhelper.alter_column_support: - for etype in rschema.subjects(): - DropColumn.get_instance(cnx).add_data((str(etype), rtype)) - else: - for etype in rschema.subjects(): - try: - add_inline_relation_column(cnx, str(etype), rtype) - except Exception as ex: - # the column probably already exists. this occurs when the - # entity's type has just been added or if the column has not - # been previously dropped (eg sqlite) - self.error('error while altering table %s: %s', etype, ex) - # copy existant data. 
- # XXX don't use, it's not supported by sqlite (at least at when i tried it) - #sqlexec('UPDATE %(etype)s SET %(rtype)s=eid_to ' - # 'FROM %(rtype)s_relation ' - # 'WHERE %(etype)s.eid=%(rtype)s_relation.eid_from' - # % locals()) - table = SQL_PREFIX + str(etype) - cursor = sqlexec('SELECT eid_from, eid_to FROM %(table)s, ' - '%(rtype)s_relation WHERE %(table)s.%(eidcolumn)s=' - '%(rtype)s_relation.eid_from' % locals()) - args = [{'val': eid_to, 'x': eid} for eid, eid_to in cursor.fetchall()] - if args: - column = SQL_PREFIX + rtype - cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE %s=%%(x)s' - % (table, column, eidcolumn), args) - # drop existant table - DropRelationTable(cnx, rtype) - - def revertprecommit_event(self): - # revert changes on in memory schema - self.rschema.__dict__.update(self.oldvalues) - # XXX revert changes on database - - -class CWComputedRTypeUpdateOp(MemSchemaOperation): - """actually update some properties of a computed relation definition""" - rschema = entity = rule = None # make pylint happy - old_rule = None - - def precommit_event(self): - # update the in-memory schema first - self.old_rule = self.rschema.rule - self.rschema.rule = self.rule - - def revertprecommit_event(self): - # revert changes on in memory schema - self.rschema.rule = self.old_rule - - -class CWAttributeAddOp(MemSchemaOperation): - """an attribute relation (CWAttribute) has been added: - * add the necessary column - * set default on this column if any and possible - * register an operation to add the relation definition to the - instance's schema on commit - - constraints are handled by specific hooks - """ - entity = None # make pylint happy - - def init_rdef(self, **kwargs): - entity = self.entity - fromentity = entity.stype - rdefdef = self.rdefdef = ybo.RelationDefinition( - str(fromentity.name), entity.rtype.name, str(entity.otype.name), - description=entity.description, cardinality=entity.cardinality, - constraints=get_constraints(self.cnx, entity), - order=entity.ordernum, eid=entity.eid, **kwargs) - try: - self.cnx.vreg.schema.add_relation_def(rdefdef) - except BadSchemaDefinition: - # rdef has been infered then explicitly added (current consensus is - # not clear at all versus infered relation handling (and much - # probably buggy) - rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object] - assert rdef.infered - else: - rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object] - - self.cnx.execute('SET X ordernum Y+1 ' - 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' - 'X ordernum >= %(order)s, NOT X eid %(x)s', - {'x': entity.eid, 'se': fromentity.eid, - 'order': entity.ordernum or 0}) - return rdefdef, rdef - - def precommit_event(self): - cnx = self.cnx - entity = self.entity - # entity.defaultval is a Binary or None, but we need a correctly typed - # value - default = entity.defaultval - if default is not None: - default = default.unzpickle() - props = {'default': default, - 'indexed': entity.indexed, - 'fulltextindexed': entity.fulltextindexed, - 'internationalizable': entity.internationalizable} - if entity.extra_props: - props.update(json.loads(entity.extra_props.getvalue().decode('ascii'))) - # entity.formula may not exist yet if we're migrating to 3.20 - if hasattr(entity, 'formula'): - props['formula'] = entity.formula - # update the in-memory schema first - rdefdef, rdef = self.init_rdef(**props) - # then make necessary changes to the system source database - syssource = cnx.repo.system_source 
- attrtype = y2sql.type_from_rdef(syssource.dbhelper, rdef) - # XXX should be moved somehow into lgdb: sqlite doesn't support to - # add a new column with UNIQUE, it should be added after the ALTER TABLE - # using ADD INDEX - if syssource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: - extra_unique_index = True - attrtype = attrtype.replace(' UNIQUE', '') - else: - extra_unique_index = False - # added some str() wrapping query since some backend (eg psycopg) don't - # allow unicode queries - table = SQL_PREFIX + rdefdef.subject - column = SQL_PREFIX + rdefdef.name - try: - cnx.system_sql(str('ALTER TABLE %s ADD %s %s' - % (table, column, attrtype)), - rollback_on_failure=False) - self.info('added column %s to table %s', column, table) - except Exception as ex: - # the column probably already exists. this occurs when - # the entity's type has just been added or if the column - # has not been previously dropped - self.error('error while altering table %s: %s', table, ex) - if extra_unique_index or entity.indexed: - try: - syssource.create_index(cnx, table, column, - unique=extra_unique_index) - except Exception as ex: - self.error('error while creating index for %s.%s: %s', - table, column, ex) - # final relations are not infered, propagate - schema = cnx.vreg.schema - try: - eschema = schema.eschema(rdefdef.subject) - except KeyError: - return # entity type currently being added - # propagate attribute to children classes - rschema = schema.rschema(rdefdef.name) - # if relation type has been inserted in the same transaction, its final - # attribute is still set to False, so we've to ensure it's False - rschema.final = True - insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props) - # update existing entities with the default value of newly added attribute - if default is not None: - default = convert_default_value(self.rdefdef, default) - cnx.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), - {'default': default}) - # if attribute is computed, compute it - if getattr(entity, 'formula', None): - # add rtype attribute for RelationDefinitionSchema api compat, this - # is what RecomputeAttributeOperation expect - rdefdef.rtype = rdefdef.name - RecomputeAttributeOperation.get_instance(cnx).add_data(rdefdef) - - def revertprecommit_event(self): - # revert changes on in memory schema - if getattr(self, 'rdefdef', None) is None: - return - self.cnx.vreg.schema.del_relation_def( - self.rdefdef.subject, self.rdefdef.name, self.rdefdef.object) - # XXX revert changes on database - - -class CWRelationAddOp(CWAttributeAddOp): - """an actual relation has been added: - - * add the relation definition to the instance's schema - - * if this is an inlined relation, add the necessary column else if it's the - first instance of this relation type, add the necessary table and set - default permissions - - constraints are handled by specific hooks - """ - entity = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - entity = self.entity - # update the in-memory schema first - rdefdef, rdef = self.init_rdef(composite=entity.composite) - # then make necessary changes to the system source database - schema = cnx.vreg.schema - rtype = rdefdef.name - rschema = schema.rschema(rtype) - # this have to be done before permissions setting - if rschema.inlined: - # need to add a column if the relation is inlined and if this is the - # first occurence of "Subject relation Something" whatever Something - if len(rschema.objects(rdefdef.subject)) == 1: - 
add_inline_relation_column(cnx, rdefdef.subject, rtype) - eschema = schema[rdefdef.subject] - insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, - {'composite': entity.composite}) - else: - if rschema.symmetric: - # for symmetric relations, rdefs will store relation definitions - # in both ways (i.e. (subj -> obj) and (obj -> subj)) - relation_already_defined = len(rschema.rdefs) > 2 - else: - relation_already_defined = len(rschema.rdefs) > 1 - # need to create the relation if no relation definition in the - # schema and if it has not been added during other event of the same - # transaction - if not (relation_already_defined or - rtype in cnx.transaction_data.get('createdtables', ())): - rschema = schema.rschema(rtype) - # create the necessary table - for sql in y2sql.rschema2sql(rschema).split(';'): - if sql.strip(): - cnx.system_sql(sql) - cnx.transaction_data.setdefault('createdtables', []).append( - rtype) - - # XXX revertprecommit_event - - -class RDefDelOp(MemSchemaOperation): - """an actual relation has been removed""" - rdef = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - rdef = self.rdef - rschema = rdef.rtype - # make necessary changes to the system source database first - rdeftype = rschema.final and 'CWAttribute' or 'CWRelation' - execute = cnx.execute - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' - 'R eid %%(x)s' % rdeftype, {'x': rschema.eid}) - lastrel = rset[0][0] == 0 - # we have to update physical schema systematically for final and inlined - # relations, but only if it's the last instance for this relation type - # for other relations - if (rschema.final or rschema.inlined): - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' - 'R eid %%(r)s, X from_entity E, E eid %%(e)s' - % rdeftype, - {'r': rschema.eid, 'e': rdef.subject.eid}) - if rset[0][0] == 0 and not cnx.deleted_in_transaction(rdef.subject.eid): - ptypes = cnx.transaction_data.setdefault('pendingrtypes', set()) - ptypes.add(rschema.type) - DropColumn.get_instance(cnx).add_data((str(rdef.subject), str(rschema))) - elif rschema.inlined: - cnx.system_sql('UPDATE %s%s SET %s%s=NULL WHERE ' - 'EXISTS(SELECT 1 FROM entities ' - ' WHERE eid=%s%s AND type=%%(to_etype)s)' - % (SQL_PREFIX, rdef.subject, SQL_PREFIX, rdef.rtype, - SQL_PREFIX, rdef.rtype), - {'to_etype': rdef.object.type}) - elif lastrel: - DropRelationTable(cnx, str(rschema)) - else: - cnx.system_sql('DELETE FROM %s_relation WHERE ' - 'EXISTS(SELECT 1 FROM entities ' - ' WHERE eid=eid_from AND type=%%(from_etype)s)' - ' AND EXISTS(SELECT 1 FROM entities ' - ' WHERE eid=eid_to AND type=%%(to_etype)s)' - % rschema, - {'from_etype': rdef.subject.type, 'to_etype': rdef.object.type}) - # then update the in-memory schema - if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: - rschema.del_relation_def(rdef.subject, rdef.object) - # if this is the last relation definition of this type, drop associated - # relation type - if lastrel and not cnx.deleted_in_transaction(rschema.eid): - execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rschema.eid}) - - def revertprecommit_event(self): - # revert changes on in memory schema - # - # Note: add_relation_def takes a RelationDefinition, not a - # RelationDefinitionSchema, needs to fake it - rdef = self.rdef - rdef.name = str(rdef.rtype) - if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: - self.cnx.vreg.schema.add_relation_def(rdef) - - - -class RDefUpdateOp(MemSchemaOperation): - """actually update 
some properties of a relation definition""" - rschema = rdefkey = values = None # make pylint happy - rdef = oldvalues = None - indexed_changed = null_allowed_changed = False - - def precommit_event(self): - cnx = self.cnx - rdef = self.rdef = self.rschema.rdefs[self.rdefkey] - # update the in-memory schema first - self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values) - rdef.update(self.values) - # then make necessary changes to the system source database - syssource = cnx.repo.system_source - if 'indexed' in self.values: - syssource.update_rdef_indexed(cnx, rdef) - self.indexed_changed = True - if 'cardinality' in self.values and rdef.rtype.final \ - and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]: - syssource.update_rdef_null_allowed(self.cnx, rdef) - self.null_allowed_changed = True - if 'fulltextindexed' in self.values: - UpdateFTIndexOp.get_instance(cnx).add_data(rdef.subject) - if 'formula' in self.values: - RecomputeAttributeOperation.get_instance(cnx).add_data(rdef) - - def revertprecommit_event(self): - if self.rdef is None: - return - # revert changes on in memory schema - self.rdef.update(self.oldvalues) - # revert changes on database - syssource = self.cnx.repo.system_source - if self.indexed_changed: - syssource.update_rdef_indexed(self.cnx, self.rdef) - if self.null_allowed_changed: - syssource.update_rdef_null_allowed(self.cnx, self.rdef) - - -def _set_modifiable_constraints(rdef): - # for proper in-place modification of in-memory schema: if rdef.constraints - # is already a list, reuse it (we're updating multiple constraints of the - # same rdef in the same transaction) - if not isinstance(rdef.constraints, list): - rdef.constraints = list(rdef.constraints) - - -class CWConstraintDelOp(MemSchemaOperation): - """actually remove a constraint of a relation definition""" - rdef = oldcstr = newcstr = None # make pylint happy - size_cstr_changed = unique_changed = False - - def precommit_event(self): - cnx = self.cnx - rdef = self.rdef - # in-place modification of in-memory schema first - _set_modifiable_constraints(rdef) - if self.oldcstr in rdef.constraints: - rdef.constraints.remove(self.oldcstr) - else: - self.critical('constraint %s for rdef %s was missing or already removed', - self.oldcstr, rdef) - if cnx.deleted_in_transaction(rdef.eid): - # don't try to alter a table that's going away (or is already gone) - return - # then update database: alter the physical schema on size/unique - # constraint changes - syssource = cnx.repo.system_source - cstrtype = self.oldcstr.type() - if cstrtype == 'SizeConstraint': - # if the size constraint is being replaced with a new max size, we'll - # call update_rdef_column in CWConstraintAddOp, skip it here - for cstr in cnx.transaction_data.get('newsizecstr', ()): - rdefentity = cstr.reverse_constrained_by[0] - cstrrdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid) - if cstrrdef == rdef: - return - - # we found that the size constraint for this rdef is really gone, - # not just replaced by another - syssource.update_rdef_column(cnx, rdef) - self.size_cstr_changed = True - elif cstrtype == 'UniqueConstraint': - syssource.update_rdef_unique(cnx, rdef) - self.unique_changed = True - if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): - cstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype + - (self.oldcstr.serialize() or '')).encode('utf-8')).hexdigest() - cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % (SQL_PREFIX, 
rdef.subject.type, cstrname)) - - def revertprecommit_event(self): - # revert changes on in memory schema - if self.newcstr is not None: - self.rdef.constraints.remove(self.newcstr) - if self.oldcstr is not None: - self.rdef.constraints.append(self.oldcstr) - # revert changes on database - syssource = self.cnx.repo.system_source - if self.size_cstr_changed: - syssource.update_rdef_column(self.cnx, self.rdef) - if self.unique_changed: - syssource.update_rdef_unique(self.cnx, self.rdef) - - -class CWConstraintAddOp(CWConstraintDelOp): - """actually update constraint of a relation definition""" - entity = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - rdefentity = self.entity.reverse_constrained_by[0] - # when the relation is added in the same transaction, the constraint - # object is created by the operation adding the attribute or relation, - # so there is nothing to do here - if cnx.added_in_transaction(rdefentity.eid): - return - rdef = self.rdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid) - cstrtype = self.entity.type - if cstrtype in UNIQUE_CONSTRAINTS: - oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) - else: - oldcstr = None - newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - # in-place modification of in-memory schema first - _set_modifiable_constraints(rdef) - newcstr.eid = self.entity.eid - if oldcstr is not None: - rdef.constraints.remove(oldcstr) - rdef.constraints.append(newcstr) - # then update database: alter the physical schema on size/unique - # constraint changes - syssource = cnx.repo.system_source - if cstrtype == 'SizeConstraint' and (oldcstr is None or - oldcstr.max != newcstr.max): - syssource.update_rdef_column(cnx, rdef) - self.size_cstr_changed = True - elif cstrtype == 'UniqueConstraint' and oldcstr is None: - syssource.update_rdef_unique(cnx, rdef) - self.unique_changed = True - if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): - if oldcstr is not None: - oldcstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype + - (self.oldcstr.serialize() or '')).encode('ascii')).hexdigest() - cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % - (SQL_PREFIX, rdef.subject.type, oldcstrname)) - cstrname, check = y2sql.check_constraint(rdef.subject, rdef.object, rdef.rtype.type, - newcstr, syssource.dbhelper, prefix=SQL_PREFIX) - cnx.system_sql('ALTER TABLE %s%s ADD CONSTRAINT %s CHECK(%s)' % - (SQL_PREFIX, rdef.subject.type, cstrname, check)) - - -class CWUniqueTogetherConstraintAddOp(MemSchemaOperation): - entity = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - prefix = SQL_PREFIX - entity = self.entity - table = '%s%s' % (prefix, entity.constraint_of[0].name) - cols = ['%s%s' % (prefix, r.name) for r in entity.relations] - dbhelper = cnx.repo.system_source.dbhelper - sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, entity.name) - for sql in sqls: - cnx.system_sql(sql) - - def postcommit_event(self): - entity = self.entity - eschema = self.cnx.vreg.schema.schema_by_eid(entity.constraint_of[0].eid) - attrs = [r.name for r in entity.relations] - eschema._unique_together.append(attrs) - - -class CWUniqueTogetherConstraintDelOp(MemSchemaOperation): - entity = cstrname = None # for pylint - cols = () # for pylint - - def insert_index(self): - # We need to run before CWConstraintDelOp: if a size constraint is - # removed and the column is part of a unique_together constraint, we - # remove the 
unique_together index before changing the column's type. - # SQL Server does not support unique indices on unlimited text columns. - return 0 - - def precommit_event(self): - cnx = self.cnx - prefix = SQL_PREFIX - table = '%s%s' % (prefix, self.entity.type) - dbhelper = cnx.repo.system_source.dbhelper - cols = ['%s%s' % (prefix, c) for c in self.cols] - sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols, self.cstrname) - for sql in sqls: - cnx.system_sql(sql) - - def postcommit_event(self): - eschema = self.cnx.vreg.schema.schema_by_eid(self.entity.eid) - cols = set(self.cols) - unique_together = [ut for ut in eschema._unique_together - if set(ut) != cols] - eschema._unique_together = unique_together - - -# operations for in-memory schema synchronization ############################# - -class MemSchemaCWETypeDel(MemSchemaOperation): - """actually remove the entity type from the instance's schema""" - etype = None # make pylint happy - - def postcommit_event(self): - # del_entity_type also removes entity's relations - self.cnx.vreg.schema.del_entity_type(self.etype) - - -class MemSchemaCWRTypeAdd(MemSchemaOperation): - """actually add the relation type to the instance's schema""" - rtypedef = None # make pylint happy - - def precommit_event(self): - self.cnx.vreg.schema.add_relation_type(self.rtypedef) - - def revertprecommit_event(self): - self.cnx.vreg.schema.del_relation_type(self.rtypedef.name) - - -class MemSchemaCWRTypeDel(MemSchemaOperation): - """actually remove the relation type from the instance's schema""" - rtype = None # make pylint happy - - def postcommit_event(self): - try: - self.cnx.vreg.schema.del_relation_type(self.rtype) - except KeyError: - # s/o entity type have already been deleted - pass - - -class MemSchemaPermissionAdd(MemSchemaOperation): - """synchronize schema when a *_permission relation has been added on a group - """ - eid = action = group_eid = expr = None # make pylint happy - - def precommit_event(self): - """the observed connections.cnxset has been commited""" - try: - erschema = self.cnx.vreg.schema.schema_by_eid(self.eid) - except KeyError: - # duh, schema not found, log error and skip operation - self.warning('no schema for %s', self.eid) - return - perms = list(erschema.action_permissions(self.action)) - if self.group_eid is not None: - perm = self.cnx.entity_from_eid(self.group_eid).name - else: - perm = erschema.rql_expression(self.expr) - try: - perms.index(perm) - self.warning('%s already in permissions for %s on %s', - perm, self.action, erschema) - except ValueError: - perms.append(perm) - erschema.set_action_permissions(self.action, perms) - - # XXX revertprecommit_event - - -class MemSchemaPermissionDel(MemSchemaPermissionAdd): - """synchronize schema when a *_permission relation has been deleted from a - group - """ - - def precommit_event(self): - """the observed connections set has been commited""" - try: - erschema = self.cnx.vreg.schema.schema_by_eid(self.eid) - except KeyError: - # duh, schema not found, log error and skip operation - self.warning('no schema for %s', self.eid) - return - perms = list(erschema.action_permissions(self.action)) - if self.group_eid is not None: - perm = self.cnx.entity_from_eid(self.group_eid).name - else: - perm = erschema.rql_expression(self.expr) - try: - perms.remove(perm) - erschema.set_action_permissions(self.action, perms) - except ValueError: - self.error('can\'t remove permission %s for %s on %s', - perm, self.action, erschema) - - # XXX revertprecommit_event - - -class 
MemSchemaSpecializesAdd(MemSchemaOperation): - etypeeid = parentetypeeid = None # make pylint happy - - def precommit_event(self): - eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid) - parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid) - eschema._specialized_type = parenteschema.type - parenteschema._specialized_by.append(eschema.type) - - # XXX revertprecommit_event - - -class MemSchemaSpecializesDel(MemSchemaOperation): - etypeeid = parentetypeeid = None # make pylint happy - - def precommit_event(self): - try: - eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid) - parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid) - except KeyError: - # etype removed, nothing to do - return - eschema._specialized_type = None - parenteschema._specialized_by.remove(eschema.type) - - # XXX revertprecommit_event - - -# CWEType hooks ################################################################ - -class DelCWETypeHook(SyncSchemaHook): - """before deleting a CWEType entity: - * check that we don't remove a core entity type - * cascade to delete related CWAttribute and CWRelation entities - * instantiate an operation to delete the entity type on commit - """ - __regid__ = 'syncdelcwetype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWEType') - events = ('before_delete_entity',) - - def __call__(self): - # final entities can't be deleted, don't care about that - name = self.entity.name - if name in CORE_TYPES: - raise validation_error(self.entity, {None: _("can't be deleted")}) - # delete every entities of this type - if name not in ETYPE_NAME_MAP: - MemSchemaCWETypeDel(self._cw, etype=name) - DropTable(self._cw, table=SQL_PREFIX + name) - - -class AfterDelCWETypeHook(DelCWETypeHook): - __regid__ = 'wfcleanup' - events = ('after_delete_entity',) - - def __call__(self): - # workflow cleanup - self._cw.execute('DELETE Workflow X WHERE NOT X workflow_of Y') - - -class AfterAddCWETypeHook(DelCWETypeHook): - """after adding a CWEType entity: - * create the necessary table - * set creation_date and modification_date by creating the necessary - CWAttribute entities - * add owned_by relation by creating the necessary CWRelation entity - * register an operation to add the entity type to the instance's - schema on commit - """ - __regid__ = 'syncaddcwetype' - events = ('after_add_entity',) - - def __call__(self): - entity = self.entity - if entity.cw_edited.get('final'): - # final entity types don't need a table in the database and are - # systematically added by yams at schema initialization time so - # there is no need to do further processing. Simply assign its eid. 
- self._cw.vreg.schema[entity.name].eid = entity.eid - return - CWETypeAddOp(self._cw, entity=entity) - - -class BeforeUpdateCWETypeHook(DelCWETypeHook): - """check name change, handle final""" - __regid__ = 'syncupdatecwetype' - events = ('before_update_entity',) - - def __call__(self): - entity = self.entity - check_valid_changes(self._cw, entity, ro_attrs=('final',)) - # don't use getattr(entity, attr), we would get the modified value if any - if 'name' in entity.cw_edited: - oldname, newname = entity.cw_edited.oldnewvalue('name') - if newname.lower() != oldname.lower(): - CWETypeRenameOp(self._cw, oldname=oldname, newname=newname) - - -# CWRType hooks ################################################################ - -class DelCWRTypeHook(SyncSchemaHook): - """before deleting a CWRType entity: - * check that we don't remove a core relation type - * cascade to delete related CWAttribute and CWRelation entities - * instantiate an operation to delete the relation type on commit - """ - __regid__ = 'syncdelcwrtype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') - events = ('before_delete_entity',) - - def __call__(self): - name = self.entity.name - if name in CORE_TYPES: - raise validation_error(self.entity, {None: _("can't be deleted")}) - # delete relation definitions using this relation type - self._cw.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s', - {'x': self.entity.eid}) - self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s', - {'x': self.entity.eid}) - MemSchemaCWRTypeDel(self._cw, rtype=name) - - -class AfterAddCWComputedRTypeHook(SyncSchemaHook): - """after a CWComputedRType entity has been added: - * register an operation to add the relation type to the instance's - schema on commit - - We don't know yet this point if a table is necessary - """ - __regid__ = 'syncaddcwcomputedrtype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType') - events = ('after_add_entity',) - - def __call__(self): - entity = self.entity - rtypedef = ybo.ComputedRelation(name=entity.name, - eid=entity.eid, - rule=entity.rule) - MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef) - - -class AfterAddCWRTypeHook(SyncSchemaHook): - """after a CWRType entity has been added: - * register an operation to add the relation type to the instance's - schema on commit - - We don't know yet this point if a table is necessary - """ - __regid__ = 'syncaddcwrtype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') - events = ('after_add_entity',) - - def __call__(self): - entity = self.entity - rtypedef = ybo.RelationType(name=entity.name, - description=entity.description, - inlined=entity.cw_edited.get('inlined', False), - symmetric=entity.cw_edited.get('symmetric', False), - eid=entity.eid) - MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef) - - -class BeforeUpdateCWRTypeHook(SyncSchemaHook): - """check name change, handle final""" - __regid__ = 'syncupdatecwrtype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') - events = ('before_update_entity',) - - def __call__(self): - entity = self.entity - check_valid_changes(self._cw, entity) - newvalues = {} - for prop in ('symmetric', 'inlined', 'fulltext_container'): - if prop in entity.cw_edited: - old, new = entity.cw_edited.oldnewvalue(prop) - if old != new: - newvalues[prop] = new - if newvalues: - rschema = self._cw.vreg.schema.rschema(entity.name) - CWRTypeUpdateOp(self._cw, rschema=rschema, entity=entity, - values=newvalues) - - -class 
BeforeUpdateCWComputedRTypeHook(SyncSchemaHook): - """check name change, handle final""" - __regid__ = 'syncupdatecwcomputedrtype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType') - events = ('before_update_entity',) - - def __call__(self): - entity = self.entity - check_valid_changes(self._cw, entity) - if 'rule' in entity.cw_edited: - old, new = entity.cw_edited.oldnewvalue('rule') - if old != new: - rschema = self._cw.vreg.schema.rschema(entity.name) - CWComputedRTypeUpdateOp(self._cw, rschema=rschema, - entity=entity, rule=new) - - -class AfterDelRelationTypeHook(SyncSchemaHook): - """before deleting a CWAttribute or CWRelation entity: - * if this is a final or inlined relation definition, instantiate an - operation to drop necessary column, else if this is the last instance - of a non final relation, instantiate an operation to drop necessary - table - * instantiate an operation to delete the relation definition on commit - * delete the associated relation type when necessary - """ - __regid__ = 'syncdelrelationtype' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype('relation_type') - events = ('after_delete_relation',) - - def __call__(self): - cnx = self._cw - try: - rdef = cnx.vreg.schema.schema_by_eid(self.eidfrom) - except KeyError: - self.critical('cant get schema rdef associated to %s', self.eidfrom) - return - subjschema, rschema, objschema = rdef.as_triple() - pendingrdefs = cnx.transaction_data.setdefault('pendingrdefs', set()) - # first delete existing relation if necessary - if rschema.final: - rdeftype = 'CWAttribute' - pendingrdefs.add((subjschema, rschema)) - else: - rdeftype = 'CWRelation' - pendingrdefs.add((subjschema, rschema, objschema)) - RDefDelOp(cnx, rdef=rdef) - - -# CWComputedRType hooks ####################################################### - -class DelCWComputedRTypeHook(SyncSchemaHook): - """before deleting a CWComputedRType entity: - * check that we don't remove a core relation type - * instantiate an operation to delete the relation type on commit - """ - __regid__ = 'syncdelcwcomputedrtype' - __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType') - events = ('before_delete_entity',) - - def __call__(self): - name = self.entity.name - if name in CORE_TYPES: - raise validation_error(self.entity, {None: _("can't be deleted")}) - MemSchemaCWRTypeDel(self._cw, rtype=name) - - -# CWAttribute / CWRelation hooks ############################################### - -class AfterAddCWAttributeHook(SyncSchemaHook): - __regid__ = 'syncaddcwattribute' - __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute') - events = ('after_add_entity',) - - def __call__(self): - CWAttributeAddOp(self._cw, entity=self.entity) - - -class AfterAddCWRelationHook(AfterAddCWAttributeHook): - __regid__ = 'syncaddcwrelation' - __select__ = SyncSchemaHook.__select__ & is_instance('CWRelation') - - def __call__(self): - CWRelationAddOp(self._cw, entity=self.entity) - - -class AfterUpdateCWRDefHook(SyncSchemaHook): - __regid__ = 'syncaddcwattribute' - __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute', - 'CWRelation') - events = ('before_update_entity',) - - def __call__(self): - entity = self.entity - if self._cw.deleted_in_transaction(entity.eid): - return - subjtype = entity.stype.name - objtype = entity.otype.name - if subjtype in ETYPE_NAME_MAP or objtype in ETYPE_NAME_MAP: - return - rschema = self._cw.vreg.schema[entity.rtype.name] - # note: do not access schema rdef here, it may be added later by an - # 
operation - newvalues = {} - for prop in RelationDefinitionSchema.rproperty_defs(objtype): - if prop == 'constraints': - continue - if prop == 'order': - attr = 'ordernum' - else: - attr = prop - if attr in entity.cw_edited: - old, new = entity.cw_edited.oldnewvalue(attr) - if old != new: - newvalues[prop] = new - if newvalues: - RDefUpdateOp(self._cw, rschema=rschema, rdefkey=(subjtype, objtype), - values=newvalues) - - -# constraints synchronization hooks ############################################ - -class AfterAddCWConstraintHook(SyncSchemaHook): - __regid__ = 'syncaddcwconstraint' - __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') - events = ('after_add_entity', 'after_update_entity') - - def __call__(self): - if self.entity.cstrtype[0].name == 'SizeConstraint': - txdata = self._cw.transaction_data - if 'newsizecstr' not in txdata: - txdata['newsizecstr'] = set() - txdata['newsizecstr'].add(self.entity) - CWConstraintAddOp(self._cw, entity=self.entity) - - -class AfterAddConstrainedByHook(SyncSchemaHook): - __regid__ = 'syncaddconstrainedby' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by') - events = ('after_add_relation',) - - def __call__(self): - if self._cw.added_in_transaction(self.eidfrom): - # used by get_constraints() which is called in CWAttributeAddOp - self._cw.transaction_data.setdefault(self.eidfrom, []).append(self.eidto) - - -class BeforeDeleteCWConstraintHook(SyncSchemaHook): - __regid__ = 'syncdelcwconstraint' - __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') - events = ('before_delete_entity',) - - def __call__(self): - entity = self.entity - schema = self._cw.vreg.schema - try: - # KeyError, e.g. composite chain deletion - rdef = schema.schema_by_eid(entity.reverse_constrained_by[0].eid) - # IndexError - cstr = rdef.constraint_by_eid(entity.eid) - except (KeyError, IndexError): - self._cw.critical('constraint type no more accessible') - else: - CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr) - -# unique_together constraints -# XXX: use setoperations and before_add_relation here (on constraint_of and relations) -class AfterAddCWUniqueTogetherConstraintHook(SyncSchemaHook): - __regid__ = 'syncadd_cwuniquetogether_constraint' - __select__ = SyncSchemaHook.__select__ & is_instance('CWUniqueTogetherConstraint') - events = ('after_add_entity',) - - def __call__(self): - CWUniqueTogetherConstraintAddOp(self._cw, entity=self.entity) - - -class BeforeDeleteConstraintOfHook(SyncSchemaHook): - __regid__ = 'syncdelconstraintof' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constraint_of') - events = ('before_delete_relation',) - - def __call__(self): - if self._cw.deleted_in_transaction(self.eidto): - return - schema = self._cw.vreg.schema - cstr = self._cw.entity_from_eid(self.eidfrom) - entity = schema.schema_by_eid(self.eidto) - cols = tuple(r.name for r in cstr.relations) - CWUniqueTogetherConstraintDelOp(self._cw, entity=entity, - cstrname=cstr.name, cols=cols) - - -# permissions synchronization hooks ############################################ - -class AfterAddPermissionHook(SyncSchemaHook): - """added entity/relation *_permission, need to update schema""" - __regid__ = 'syncaddperm' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype( - 'read_permission', 'add_permission', 'delete_permission', - 'update_permission') - events = ('after_add_relation',) - - def __call__(self): - action = self.rtype.split('_', 1)[0] - if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup': 
- MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, - group_eid=self.eidto) - else: # RQLExpression - expr = self._cw.entity_from_eid(self.eidto).expression - MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, - expr=expr) - - -class BeforeDelPermissionHook(AfterAddPermissionHook): - """delete entity/relation *_permission, need to update schema - - skip the operation if the related type is being deleted - """ - __regid__ = 'syncdelperm' - events = ('before_delete_relation',) - - def __call__(self): - if self._cw.deleted_in_transaction(self.eidfrom): - return - action = self.rtype.split('_', 1)[0] - if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup': - MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, - group_eid=self.eidto) - else: # RQLExpression - expr = self._cw.entity_from_eid(self.eidto).expression - MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, - expr=expr) - - - -class UpdateFTIndexOp(hook.DataOperationMixIn, hook.SingleLastOperation): - """operation to update full text indexation of entity whose schema change - - We wait after the commit to as the schema in memory is only updated after - the commit. - """ - containercls = list - - def postcommit_event(self): - cnx = self.cnx - source = cnx.repo.system_source - schema = cnx.repo.vreg.schema - to_reindex = self.get_data() - self.info('%i etypes need full text indexed reindexation', - len(to_reindex)) - for etype in to_reindex: - rset = cnx.execute('Any X WHERE X is %s' % etype) - self.info('Reindexing full text index for %i entity of type %s', - len(rset), etype) - still_fti = list(schema[etype].indexable_attributes()) - for entity in rset.entities(): - source.fti_unindex_entities(cnx, [entity]) - for container in entity.cw_adapt_to('IFTIndexable').fti_containers(): - if still_fti or container is not entity: - source.fti_unindex_entities(cnx, [container]) - source.fti_index_entities(cnx, [container]) - if to_reindex: - # Transaction has already been committed - cnx.cnxset.commit() - - - - -# specializes synchronization hooks ############################################ - - -class AfterAddSpecializesHook(SyncSchemaHook): - __regid__ = 'syncaddspecializes' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes') - events = ('after_add_relation',) - - def __call__(self): - MemSchemaSpecializesAdd(self._cw, etypeeid=self.eidfrom, - parentetypeeid=self.eidto) - - -class AfterDelSpecializesHook(SyncSchemaHook): - __regid__ = 'syncdelspecializes' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes') - events = ('after_delete_relation',) - - def __call__(self): - MemSchemaSpecializesDel(self._cw, etypeeid=self.eidfrom, - parentetypeeid=self.eidto) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/syncsession.py --- a/hooks/syncsession.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,255 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Core hooks: synchronize living session on persistent data changes""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from cubicweb import UnknownProperty, BadConnectionId, validation_error -from cubicweb.predicates import is_instance -from cubicweb.server import hook - - -def get_user_sessions(repo, ueid): - for session in repo._sessions.values(): - if ueid == session.user.eid: - yield session - - -class SyncSessionHook(hook.Hook): - __abstract__ = True - category = 'syncsession' - - -# user/groups synchronisation ################################################# - -class _GroupOperation(hook.Operation): - """base class for group operation""" - cnxuser = None # make pylint happy - - def __init__(self, cnx, *args, **kwargs): - """override to get the group name before actual groups manipulation: - - we may temporarily loose right access during a commit event, so - no query should be emitted while comitting - """ - rql = 'Any N WHERE G eid %(x)s, G name N' - result = cnx.execute(rql, {'x': kwargs['geid']}, build_descr=False) - hook.Operation.__init__(self, cnx, *args, **kwargs) - self.group = result[0][0] - - -class _DeleteGroupOp(_GroupOperation): - """synchronize user when a in_group relation has been deleted""" - - def postcommit_event(self): - """the observed connections set has been commited""" - groups = self.cnxuser.groups - try: - groups.remove(self.group) - except KeyError: - self.error('user %s not in group %s', self.cnxuser, self.group) - - -class _AddGroupOp(_GroupOperation): - """synchronize user when a in_group relation has been added""" - def postcommit_event(self): - """the observed connections set has been commited""" - groups = self.cnxuser.groups - if self.group in groups: - self.warning('user %s already in group %s', self.cnxuser, - self.group) - else: - groups.add(self.group) - - -class SyncInGroupHook(SyncSessionHook): - __regid__ = 'syncingroup' - __select__ = SyncSessionHook.__select__ & hook.match_rtype('in_group') - events = ('after_delete_relation', 'after_add_relation') - - def __call__(self): - if self.event == 'after_delete_relation': - opcls = _DeleteGroupOp - else: - opcls = _AddGroupOp - for session in get_user_sessions(self._cw.repo, self.eidfrom): - opcls(self._cw, cnxuser=session.user, geid=self.eidto) - - -class _DelUserOp(hook.Operation): - """close associated user's session when it is deleted""" - def __init__(self, cnx, sessionid): - self.sessionid = sessionid - hook.Operation.__init__(self, cnx) - - def postcommit_event(self): - """the observed connections set has been commited""" - try: - self.cnx.repo.close(self.sessionid) - except BadConnectionId: - pass # already closed - - -class CloseDeletedUserSessionsHook(SyncSessionHook): - __regid__ = 'closession' - __select__ = SyncSessionHook.__select__ & is_instance('CWUser') - events = ('after_delete_entity',) - - def __call__(self): - """modify user permission, need to update users""" - for session in get_user_sessions(self._cw.repo, self.entity.eid): - _DelUserOp(self._cw, session.sessionid) - - -# CWProperty hooks ############################################################# - -class _DelCWPropertyOp(hook.Operation): 
- """a user's custom properties has been deleted""" - cwpropdict = key = None # make pylint happy - - def postcommit_event(self): - """the observed connections set has been commited""" - try: - del self.cwpropdict[self.key] - except KeyError: - self.error('%s has no associated value', self.key) - - -class _ChangeCWPropertyOp(hook.Operation): - """a user's custom properties has been added/changed""" - cwpropdict = key = value = None # make pylint happy - - def postcommit_event(self): - """the observed connections set has been commited""" - self.cwpropdict[self.key] = self.value - - -class _AddCWPropertyOp(hook.Operation): - """a user's custom properties has been added/changed""" - cwprop = None # make pylint happy - - def postcommit_event(self): - """the observed connections set has been commited""" - cwprop = self.cwprop - if not cwprop.for_user: - self.cnx.vreg['propertyvalues'][cwprop.pkey] = cwprop.value - # if for_user is set, update is handled by a ChangeCWPropertyOp operation - - -class AddCWPropertyHook(SyncSessionHook): - __regid__ = 'addcwprop' - __select__ = SyncSessionHook.__select__ & is_instance('CWProperty') - events = ('after_add_entity',) - - def __call__(self): - key, value = self.entity.pkey, self.entity.value - if key.startswith('sources.'): - return - cnx = self._cw - try: - value = cnx.vreg.typed_value(key, value) - except UnknownProperty: - msg = _('unknown property key %s') - raise validation_error(self.entity, {('pkey', 'subject'): msg}, (key,)) - except ValueError as ex: - raise validation_error(self.entity, - {('value', 'subject'): str(ex)}) - if not cnx.user.matching_groups('managers'): - cnx.add_relation(self.entity.eid, 'for_user', cnx.user.eid) - else: - _AddCWPropertyOp(cnx, cwprop=self.entity) - - -class UpdateCWPropertyHook(AddCWPropertyHook): - __regid__ = 'updatecwprop' - events = ('after_update_entity',) - - def __call__(self): - entity = self.entity - if not ('pkey' in entity.cw_edited or - 'value' in entity.cw_edited): - return - key, value = entity.pkey, entity.value - if key.startswith('sources.'): - return - cnx = self._cw - try: - value = cnx.vreg.typed_value(key, value) - except UnknownProperty: - return - except ValueError as ex: - raise validation_error(entity, {('value', 'subject'): str(ex)}) - if entity.for_user: - for session in get_user_sessions(cnx.repo, entity.for_user[0].eid): - _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties, - key=key, value=value) - else: - # site wide properties - _ChangeCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'], - key=key, value=value) - - -class DeleteCWPropertyHook(AddCWPropertyHook): - __regid__ = 'delcwprop' - events = ('before_delete_entity',) - - def __call__(self): - eid = self.entity.eid - cnx = self._cw - for eidfrom, rtype, eidto in cnx.transaction_data.get('pendingrelations', ()): - if rtype == 'for_user' and eidfrom == self.entity.eid: - # if for_user was set, delete has already been handled - break - else: - _DelCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'], - key=self.entity.pkey) - - -class AddForUserRelationHook(SyncSessionHook): - __regid__ = 'addcwpropforuser' - __select__ = SyncSessionHook.__select__ & hook.match_rtype('for_user') - events = ('after_add_relation',) - - def __call__(self): - cnx = self._cw - eidfrom = self.eidfrom - if not cnx.entity_metas(eidfrom)['type'] == 'CWProperty': - return - key, value = cnx.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', - {'x': eidfrom})[0] - if cnx.vreg.property_info(key)['sitewide']: - msg = _("site-wide 
property can't be set for user") - raise validation_error(eidfrom, {('for_user', 'subject'): msg}) - for session in get_user_sessions(cnx.repo, self.eidto): - _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties, - key=key, value=value) - - -class DelForUserRelationHook(AddForUserRelationHook): - __regid__ = 'delcwpropforuser' - events = ('after_delete_relation',) - - def __call__(self): - cnx = self._cw - key = cnx.execute('Any K WHERE P eid %(x)s, P pkey K', - {'x': self.eidfrom})[0][0] - cnx.transaction_data.setdefault('pendingrelations', []).append( - (self.eidfrom, self.rtype, self.eidto)) - for session in get_user_sessions(cnx.repo, self.eidto): - _DelCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/syncsources.py --- a/hooks/syncsources.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,208 +0,0 @@ -# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""hooks for repository sources synchronization""" - -from cubicweb import _ - -from socket import gethostname - -from logilab.common.decorators import clear_cache - -from cubicweb import validation_error -from cubicweb.predicates import is_instance -from cubicweb.server import SOURCE_TYPES, hook - -class SourceHook(hook.Hook): - __abstract__ = True - category = 'cw.sources' - - -# repo sources synchronization ################################################# - -class SourceAddedOp(hook.Operation): - entity = None # make pylint happy - def postcommit_event(self): - self.cnx.repo.add_source(self.entity) - -class SourceAddedHook(SourceHook): - __regid__ = 'cw.sources.added' - __select__ = SourceHook.__select__ & is_instance('CWSource') - events = ('after_add_entity',) - def __call__(self): - try: - sourcecls = SOURCE_TYPES[self.entity.type] - except KeyError: - msg = _('Unknown source type') - raise validation_error(self.entity, {('type', 'subject'): msg}) - # ignore creation of the system source done during database - # initialisation, as config for this source is in a file and handling - # is done separatly (no need for the operation either) - if self.entity.name != 'system': - sourcecls.check_conf_dict(self.entity.eid, self.entity.host_config, - fail_if_unknown=not self._cw.vreg.config.repairing) - SourceAddedOp(self._cw, entity=self.entity) - - -class SourceRemovedOp(hook.Operation): - uri = None # make pylint happy - def postcommit_event(self): - self.cnx.repo.remove_source(self.uri) - -class SourceRemovedHook(SourceHook): - __regid__ = 'cw.sources.removed' - __select__ = SourceHook.__select__ & is_instance('CWSource') - events = ('before_delete_entity',) - def __call__(self): - if self.entity.name == 'system': - msg = _("You cannot remove the system source") - raise validation_error(self.entity, 
{None: msg}) - SourceRemovedOp(self._cw, uri=self.entity.name) - - -class SourceConfigUpdatedOp(hook.DataOperationMixIn, hook.Operation): - - def precommit_event(self): - self.__processed = [] - for source in self.get_data(): - if not self.cnx.deleted_in_transaction(source.eid): - conf = source.repo_source.check_config(source) - self.__processed.append( (source, conf) ) - - def postcommit_event(self): - for source, conf in self.__processed: - source.repo_source.update_config(source, conf) - - -class SourceRenamedOp(hook.LateOperation): - oldname = newname = None # make pylint happy - - def precommit_event(self): - source = self.cnx.repo.sources_by_uri[self.oldname] - sql = 'UPDATE entities SET asource=%(newname)s WHERE asource=%(oldname)s' - self.cnx.system_sql(sql, {'oldname': self.oldname, - 'newname': self.newname}) - - def postcommit_event(self): - repo = self.cnx.repo - # XXX race condition - source = repo.sources_by_uri.pop(self.oldname) - source.uri = self.newname - source.public_config['uri'] = self.newname - repo.sources_by_uri[self.newname] = source - repo._type_source_cache.clear() - clear_cache(repo, 'source_defs') - - -class SourceUpdatedHook(SourceHook): - __regid__ = 'cw.sources.configupdate' - __select__ = SourceHook.__select__ & is_instance('CWSource') - events = ('before_update_entity',) - def __call__(self): - if 'name' in self.entity.cw_edited: - oldname, newname = self.entity.cw_edited.oldnewvalue('name') - if oldname == 'system': - msg = _("You cannot rename the system source") - raise validation_error(self.entity, {('name', 'subject'): msg}) - SourceRenamedOp(self._cw, oldname=oldname, newname=newname) - if 'config' in self.entity.cw_edited or 'url' in self.entity.cw_edited: - if self.entity.name == 'system' and self.entity.config: - msg = _("Configuration of the system source goes to " - "the 'sources' file, not in the database") - raise validation_error(self.entity, {('config', 'subject'): msg}) - SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity) - - -class SourceHostConfigUpdatedHook(SourceHook): - __regid__ = 'cw.sources.hostconfigupdate' - __select__ = SourceHook.__select__ & is_instance('CWSourceHostConfig') - events = ('after_add_entity', 'after_update_entity', 'before_delete_entity',) - def __call__(self): - if self.entity.match(gethostname()): - if self.event == 'after_update_entity' and \ - not 'config' in self.entity.cw_edited: - return - try: - SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource) - except IndexError: - # XXX no source linked to the host config yet - pass - - -# source mapping synchronization ############################################### -# -# Expect cw_for_source/cw_schema are immutable relations (i.e. can't change from -# a source or schema to another). - -class SourceMappingImmutableHook(SourceHook): - """check cw_for_source and cw_schema are immutable relations - - XXX empty delete perms would be enough? 
- """ - __regid__ = 'cw.sources.mapping.immutable' - __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source', 'cw_schema') - events = ('before_add_relation',) - def __call__(self): - if not self._cw.added_in_transaction(self.eidfrom): - msg = _("You can't change this relation") - raise validation_error(self.eidfrom, {self.rtype: msg}) - - -class SourceMappingChangedOp(hook.DataOperationMixIn, hook.Operation): - def check_or_update(self, checkonly): - cnx = self.cnx - # take care, can't call get_data() twice - try: - data = self.__data - except AttributeError: - data = self.__data = self.get_data() - for schemacfg, source in data: - if source is None: - source = schemacfg.cwsource.repo_source - if cnx.added_in_transaction(schemacfg.eid): - if not cnx.deleted_in_transaction(schemacfg.eid): - source.add_schema_config(schemacfg, checkonly=checkonly) - elif cnx.deleted_in_transaction(schemacfg.eid): - source.del_schema_config(schemacfg, checkonly=checkonly) - else: - source.update_schema_config(schemacfg, checkonly=checkonly) - - def precommit_event(self): - self.check_or_update(True) - - def postcommit_event(self): - self.check_or_update(False) - - -class SourceMappingChangedHook(SourceHook): - __regid__ = 'cw.sources.schemaconfig' - __select__ = SourceHook.__select__ & is_instance('CWSourceSchemaConfig') - events = ('after_add_entity', 'after_update_entity') - def __call__(self): - if self.event == 'after_add_entity' or ( - self.event == 'after_update_entity' and 'options' in self.entity.cw_edited): - SourceMappingChangedOp.get_instance(self._cw).add_data( - (self.entity, None) ) - -class SourceMappingDeleteHook(SourceHook): - __regid__ = 'cw.sources.delschemaconfig' - __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source') - events = ('before_delete_relation',) - def __call__(self): - SourceMappingChangedOp.get_instance(self._cw).add_data( - (self._cw.entity_from_eid(self.eidfrom), - self._cw.entity_from_eid(self.eidto).repo_source) ) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/data-computed/schema.py --- a/hooks/test/data-computed/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-from yams.buildobjs import EntityType, String, Int, SubjectRelation, RelationDefinition - -THISYEAR = 2014 - -class Person(EntityType): - name = String() - salaire = Int() - birth_year = Int(required=True) - travaille = SubjectRelation('Societe') - age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR) - -class Societe(EntityType): - nom = String() - salaire_total = Int(formula='Any SUM(SA) GROUPBY X WHERE P travaille X, P salaire SA') - - -class Agent(EntityType): - asalae_id = String(formula='Any E WHERE M mirror_of X, M extid E') - -class MirrorEntity(EntityType): - extid = String(required=True, unique=True, - description=_('external identifier of the object')) - - -class mirror_of(RelationDefinition): - subject = 'MirrorEntity' - object = ('Agent', 'Societe') - cardinality = '?*' - inlined = True diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/data/schema.py --- a/hooks/test/data/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,85 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
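In the data-computed test schema above, age and salaire_total are derived from the entity's own attributes, while Agent.asalae_id is computed across the inlined mirror_of relation. As a reading aid only (not part of the patch), and assuming a repository connection cnx on an instance using that schema, the last formula behaves roughly like this::

    agent = cnx.create_entity('Agent')
    cnx.create_entity('MirrorEntity', extid=u'ext-0042', mirror_of=agent)
    cnx.commit()
    # asalae_id is never set by the caller: the repository fills it in from
    # the formula 'Any E WHERE M mirror_of X, M extid E', so it should end up
    # mirroring the extid of the MirrorEntity pointing at the agent
    rset = cnx.execute('Any I WHERE X is Agent, X asalae_id I')
    assert rset[0][0] == u'ext-0042'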
- -from yams.buildobjs import (RelationDefinition, RelationType, EntityType, - String, Datetime, Int) -from yams.reader import context - -from cubicweb.schema import ERQLExpression - -from cubicweb import _ - -class friend(RelationDefinition): - subject = ('CWUser', 'CWGroup') - object = ('CWUser', 'CWGroup') - symmetric = True - -class Folder(EntityType): - name = String() - -class parent(RelationDefinition): - subject = 'Folder' - object = 'Folder' - composite = 'object' - cardinality = '?*' - -class children(RelationDefinition): - subject = 'Folder' - object = 'Folder' - composite = 'subject' - - -class Email(EntityType): - """electronic mail""" - subject = String(fulltextindexed=True) - date = Datetime(description=_('UTC time on which the mail was sent')) - messageid = String(required=True, indexed=True) - headers = String(description=_('raw headers')) - - - -class EmailPart(EntityType): - """an email attachment""" - __permissions__ = { - 'read': ('managers', 'users', 'guests',), # XXX if E parts X, U has_read_permission E - 'add': ('managers', ERQLExpression('E parts X, U has_update_permission E'),), - 'delete': ('managers', ERQLExpression('E parts X, U has_update_permission E')), - 'update': ('managers', 'owners',), - } - - content = String(fulltextindexed=True) - content_format = String(required=True, maxsize=50) - ordernum = Int(required=True) - - -class parts(RelationType): - subject = 'Email' - object = 'EmailPart' - cardinality = '*1' - composite = 'subject' - fulltext_container = 'subject' - -class sender(RelationDefinition): - subject = 'Email' - object = 'EmailAddress' - cardinality = '?*' - inlined = True - -class recipients(RelationDefinition): - subject = 'Email' - object = 'EmailAddress' diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/requirements.txt --- a/hooks/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -psycopg2 diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_bookmarks.py --- a/hooks/test/unittest_bookmarks.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
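The parts relation above is declared composite='subject' with fulltext_container='subject': an EmailPart belongs to its Email, is deleted along with it, and its text is indexed as part of the email. A minimal sketch of the cascade, reusing the Email/EmailPart types defined above and assuming a repository connection cnx (the same behaviour is exercised for real by unittest_integrity.py further down in this patch)::

    eid = cnx.execute('INSERT Email X: X messageid "<42>", X subject "demo"')[0][0]
    cnx.execute('INSERT EmailPart P: P content "body", P content_format "text/plain", '
                'P ordernum 1, X parts P WHERE X eid %(x)s', {'x': eid})
    cnx.commit()
    cnx.execute('DELETE Email X WHERE X eid %(x)s', {'x': eid})
    cnx.commit()
    # parts is composite='subject', so deleting the email cascades to its parts
    assert not cnx.execute('Any P WHERE P is EmailPart')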
-from logilab.common.testlib import unittest_main -from cubicweb.devtools.testlib import CubicWebTC - -class BookmarkHooksTC(CubicWebTC): - - - def test_auto_delete_bookmarks(self): - with self.admin_access.repo_cnx() as cnx: - beid = cnx.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U ' - 'WHERE U login "admin"')[0][0] - cnx.execute('SET X bookmarked_by U WHERE U login "anon"') - cnx.commit() - cnx.execute('DELETE X bookmarked_by U WHERE U login "admin"') - cnx.commit() - self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid})) - cnx.execute('DELETE X bookmarked_by U WHERE U login "anon"') - cnx.commit() - self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid})) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_hooks.py --- a/hooks/test/unittest_hooks.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,221 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
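The test above relies on a cleanup hook that is defined elsewhere in the hooks package and is not shown in this excerpt. Purely as an illustration of how such behaviour is wired with the hook/operation API from the files removed earlier in this patch (this is not the real implementation)::

    from cubicweb.server import hook


    class _DelOrphanBookmarkOp(hook.Operation):
        bookmark = None  # make pylint happy

        def precommit_event(self):
            # delete the bookmark once its last bookmarked_by relation is gone
            if not self.bookmark.bookmarked_by:
                self.bookmark.cw_delete()


    class DelBookmarkedByHook(hook.Hook):
        __regid__ = 'sketch.autodelbookmark'  # hypothetical identifier
        __select__ = hook.Hook.__select__ & hook.match_rtype('bookmarked_by')
        events = ('after_delete_relation',)

        def __call__(self):
            _DelOrphanBookmarkOp(self._cw,
                                 bookmark=self._cw.entity_from_eid(self.eidfrom))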
-"""functional tests for core hooks - -Note: - syncschema.py hooks are mostly tested in server/test/unittest_migrations.py -""" - -from datetime import datetime - -from six import text_type - -from pytz import utc -from cubicweb import ValidationError, AuthenticationError, BadConnectionId -from cubicweb.devtools.testlib import CubicWebTC - - -class CoreHooksTC(CubicWebTC): - - def test_inlined(self): - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(self.repo.schema['sender'].inlined, True) - cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' - 'X content "this is a test"') - eeid = cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", ' - 'X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart')[0][0] - cnx.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') - rset = cnx.execute('Any S WHERE X sender S, X eid %s' % eeid) - self.assertEqual(len(rset), 1) - - def test_symmetric(self): - with self.admin_access.repo_cnx() as cnx: - u1 = self.create_user(cnx, u'1') - u2 = self.create_user(cnx, u'2') - u3 = self.create_user(cnx, u'3') - ga = cnx.create_entity('CWGroup', name=u'A') - gb = cnx.create_entity('CWGroup', name=u'B') - u1.cw_set(friend=u2) - u2.cw_set(friend=u3) - ga.cw_set(friend=gb) - ga.cw_set(friend=u1) - cnx.commit() - for l1, l2 in ((u'1', u'2'), - (u'2', u'3')): - self.assertTrue(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - self.assertTrue(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - self.assertTrue(cnx.execute('Any GA,GB WHERE GA friend GB, GA name "A", GB name "B"')) - self.assertTrue(cnx.execute('Any GA,GB WHERE GB friend GA, GA name "A", GB name "B"')) - self.assertTrue(cnx.execute('Any GA,U1 WHERE GA friend U1, GA name "A", U1 login "1"')) - self.assertTrue(cnx.execute('Any GA,U1 WHERE U1 friend GA, GA name "A", U1 login "1"')) - self.assertFalse(cnx.execute('Any GA,U WHERE GA friend U, GA name "A", U login "2"')) - for l1, l2 in ((u'1', u'3'), - (u'3', u'1')): - self.assertFalse(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - self.assertFalse(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - - def test_html_tidy_hook(self): - with self.admin_access.client_cnx() as cnx: - entity = cnx.create_entity('Workflow', name=u'wf1', - description_format=u'text/html', - description=u'yo') - self.assertEqual(u'yo', entity.description) - entity = cnx.create_entity('Workflow', name=u'wf2', - description_format=u'text/html', - description=u'yo') - self.assertEqual(u'yo', entity.description) - entity = cnx.create_entity('Workflow', name=u'wf3', - description_format=u'text/html', - description=u'yo') - self.assertEqual(u'yo', entity.description) - entity = cnx.create_entity('Workflow', name=u'wf4', - description_format=u'text/html', - description=u'R&D') - self.assertEqual(u'R&D', entity.description, ) - entity = cnx.create_entity('Workflow', name=u'wf5', - description_format=u'text/html', - description=u"
      c'est l'été") - self.assertEqual(u"
      c'est l'été
      ", entity.description) - - def test_nonregr_html_tidy_hook_no_update(self): - with self.admin_access.client_cnx() as cnx: - entity = cnx.create_entity('Workflow', name=u'wf1', - description_format=u'text/html', - description=u'yo') - entity.cw_set(name=u'wf2') - self.assertEqual(entity.description, u'yo') - entity.cw_set(description=u'R&D

      yo') - self.assertEqual(entity.description, u'R&D

      yo

      ') - - def test_metadata_cwuri(self): - with self.admin_access.repo_cnx() as cnx: - entity = cnx.create_entity('Workflow', name=u'wf1') - self.assertEqual(entity.cwuri, self.repo.config['base-url'] + str(entity.eid)) - - def test_metadata_creation_modification_date(self): - with self.admin_access.repo_cnx() as cnx: - _now = datetime.now(utc) - entity = cnx.create_entity('Workflow', name=u'wf1') - self.assertEqual((entity.creation_date - _now).seconds, 0) - self.assertEqual((entity.modification_date - _now).seconds, 0) - - def test_metadata_created_by(self): - with self.admin_access.repo_cnx() as cnx: - entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view') - cnx.commit() # fire operations - self.assertEqual(len(entity.created_by), 1) # make sure we have only one creator - self.assertEqual(entity.created_by[0].eid, cnx.user.eid) - - def test_metadata_owned_by(self): - with self.admin_access.repo_cnx() as cnx: - entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view') - cnx.commit() # fire operations - self.assertEqual(len(entity.owned_by), 1) # make sure we have only one owner - self.assertEqual(entity.owned_by[0].eid, cnx.user.eid) - - def test_user_login_stripped(self): - with self.admin_access.repo_cnx() as cnx: - u = self.create_user(cnx, ' joe ') - tname = cnx.execute('Any L WHERE E login L, E eid %(e)s', - {'e': u.eid})[0][0] - self.assertEqual(tname, 'joe') - cnx.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) - tname = cnx.execute('Any L WHERE E login L, E eid %(e)s', - {'e': u.eid})[0][0] - self.assertEqual(tname, 'jijoe') - - - -class UserGroupHooksTC(CubicWebTC): - - def test_user_group_synchronization(self): - with self.admin_access.repo_cnx() as cnx: - user = cnx.user - self.assertEqual(user.groups, set(('managers',))) - cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEqual(user.groups, set(('managers',))) - cnx.commit() - self.assertEqual(user.groups, set(('managers', 'guests'))) - cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEqual(user.groups, set(('managers', 'guests'))) - cnx.commit() - self.assertEqual(user.groups, set(('managers',))) - - def test_user_composite_owner(self): - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, 'toto').eid - # composite of euser should be owned by the euser regardless of who created it - cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' - 'WHERE U login "toto"') - cnx.commit() - self.assertEqual(cnx.execute('Any A WHERE X owned_by U, U use_email X,' - 'U login "toto", X address A')[0][0], - 'toto@logilab.fr') - - def test_user_composite_no_owner_on_deleted_entity(self): - with self.admin_access.repo_cnx() as cnx: - u = self.create_user(cnx, 'toto').eid - cnx.commit() - e = cnx.create_entity('EmailAddress', address=u'toto@logilab.fr', reverse_use_email=u) - e.cw_delete() - cnx.commit() - self.assertFalse(cnx.system_sql( - 'SELECT * FROM owned_by_relation ' - 'WHERE eid_from NOT IN (SELECT eid FROM entities)').fetchall()) - - def test_no_created_by_on_deleted_entity(self): - with self.admin_access.repo_cnx() as cnx: - eid = cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0] - cnx.execute('DELETE EmailAddress X WHERE X eid %s' % eid) - cnx.commit() - self.assertFalse(cnx.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid})) - - - -class SchemaHooksTC(CubicWebTC): - - def test_duplicate_etype_error(self): - with 
self.admin_access.repo_cnx() as cnx: - # check we can't add a CWEType or CWRType entity if it already exists one - # with the same name - self.assertRaises(ValidationError, - cnx.execute, 'INSERT CWEType X: X name "CWUser"') - cnx.rollback() - self.assertRaises(ValidationError, - cnx.execute, 'INSERT CWRType X: X name "in_group"') - - def test_validation_unique_constraint(self): - with self.admin_access.repo_cnx() as cnx: - with self.assertRaises(ValidationError) as cm: - cnx.execute('INSERT CWUser X: X login "admin"') - ex = cm.exception - ex.translate(text_type) - self.assertIsInstance(ex.entity, int) - self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_integrity.py --- a/hooks/test/unittest_integrity.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
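The ValidationError instances asserted in these tests are produced on the server side with the validation_error helper, following the same pattern as the hooks removed earlier in this patch: the error dict maps an (attribute, role) pair to a message, which shows up as 'attr-role' once the error is translated. A minimal, made-up example of a hook raising such an error (the 'sketch.checktitle' check is not part of this changeset)::

    from cubicweb import _, validation_error
    from cubicweb.predicates import is_instance
    from cubicweb.server import hook


    class CheckBookmarkTitleHook(hook.Hook):
        __regid__ = 'sketch.checktitle'  # hypothetical identifier
        __select__ = hook.Hook.__select__ & is_instance('Bookmark')
        events = ('before_add_entity', 'before_update_entity')

        def __call__(self):
            title = self.entity.cw_edited.get('title')
            if title is not None and not title.strip():
                msg = _('title must not be blank')
                # after translation this shows up as {'title-subject': ...},
                # like the 'login-subject' error asserted above
                raise validation_error(self.entity, {('title', 'subject'): msg})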
-"""functional tests for integrity hooks""" - -from cubicweb import ValidationError -from cubicweb.devtools.testlib import CubicWebTC - -class CoreHooksTC(CubicWebTC): - - def test_delete_internal_entities(self): - with self.admin_access.repo_cnx() as cnx: - self.assertRaises(ValidationError, cnx.execute, - 'DELETE CWEType X WHERE X name "CWEType"') - cnx.rollback() - self.assertRaises(ValidationError, cnx.execute, - 'DELETE CWRType X WHERE X name "relation_type"') - cnx.rollback() - self.assertRaises(ValidationError, cnx.execute, - 'DELETE CWGroup X WHERE X name "owners"') - - def test_delete_required_relations_subject(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y ' - 'WHERE Y name "users"') - cnx.commit() - cnx.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"') - self.assertRaises(ValidationError, cnx.commit) - cnx.rollback() - cnx.execute('DELETE X in_group Y WHERE X login "toto"') - cnx.execute('SET X in_group Y WHERE X login "toto", Y name "guests"') - cnx.commit() - - def test_static_vocabulary_check(self): - with self.admin_access.repo_cnx() as cnx: - self.assertRaises(ValidationError, - cnx.execute, - 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", ' - 'X relation_type RT, RT name "in_group"') - - def test_missing_required_relations_subject_inline(self): - with self.admin_access.repo_cnx() as cnx: - # missing in_group relation - cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop"') - self.assertRaises(ValidationError, cnx.commit) - - def test_composite_1(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' - 'X content "this is a test"') - cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' - 'X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.assertTrue(cnx.execute('Email X WHERE X sender Y')) - cnx.commit() - cnx.execute('DELETE Email X') - rset = cnx.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 0) - cnx.commit() - rset = cnx.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 0) - - def test_composite_2(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' - 'X content "this is a test"') - cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' - 'X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - cnx.commit() - cnx.execute('DELETE Email X') - cnx.execute('DELETE EmailPart X') - cnx.commit() - rset = cnx.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 0) - - def test_composite_redirection(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' - 'X content "this is a test"') - cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' - 'X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - cnx.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, ' - 'X recipients Y ' - 'WHERE Y is EmailAddress') - cnx.commit() - cnx.execute('DELETE X 
parts Y WHERE X messageid "<1234>"') - cnx.execute('SET X parts Y WHERE X messageid "<2345>"') - cnx.commit() - rset = cnx.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') - - def test_composite_object_relation_deletion(self): - with self.admin_access.repo_cnx() as cnx: - root = cnx.create_entity('Folder', name=u'root') - a = cnx.create_entity('Folder', name=u'a', parent=root) - cnx.create_entity('Folder', name=u'b', parent=a) - cnx.create_entity('Folder', name=u'c', parent=root) - cnx.commit() - cnx.execute('DELETE Folder F WHERE F name "a"') - cnx.execute('DELETE F parent R WHERE R name "root"') - cnx.commit() - self.assertEqual([['root'], ['c']], - cnx.execute('Any NF WHERE F is Folder, F name NF').rows) - self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows) - - def test_composite_subject_relation_deletion(self): - with self.admin_access.repo_cnx() as cnx: - root = cnx.create_entity('Folder', name=u'root') - a = cnx.create_entity('Folder', name=u'a') - b = cnx.create_entity('Folder', name=u'b') - c = cnx.create_entity('Folder', name=u'c') - root.cw_set(children=(a, c)) - a.cw_set(children=b) - cnx.commit() - cnx.execute('DELETE Folder F WHERE F name "a"') - cnx.execute('DELETE R children F WHERE R name "root"') - cnx.commit() - self.assertEqual([['root'], ['c']], - cnx.execute('Any NF WHERE F is Folder, F name NF').rows) - self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows) - - def test_unsatisfied_constraints(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0] - with self.assertRaises(ValidationError) as cm: - cnx.commit() - self.assertEqual(cm.exception.errors, - {'in_group-object': u'RQLConstraint NOT O name "owners" failed'}) - - def test_unique_constraint(self): - with self.admin_access.repo_cnx() as cnx: - entity = cnx.create_entity('CWGroup', name=u'trout') - cnx.commit() - self.assertRaises(ValidationError, cnx.create_entity, 'CWGroup', name=u'trout') - cnx.rollback() - cnx.execute('SET X name "trout" WHERE X eid %(x)s', {'x': entity.eid}) - cnx.commit() - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_security.py --- a/hooks/test/unittest_security.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,56 +0,0 @@ -# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server import hook -from cubicweb.predicates import is_instance - - -class SecurityHooksTC(CubicWebTC): - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.add_eid = cnx.create_entity('EmailAddress', - address=u'hop@perdu.com', - reverse_use_email=cnx.user.eid).eid - cnx.commit() - - def test_inlined_cw_edited_relation(self): - """modification of cw_edited to add an inlined relation shouldn't trigger a security error. - - Test for https://www.cubicweb.org/ticket/5477315 - """ - sender = self.repo.schema['Email'].rdef('sender') - with self.temporary_permissions((sender, {'add': ()})): - - class MyHook(hook.Hook): - __regid__ = 'test.pouet' - __select__ = hook.Hook.__select__ & is_instance('Email') - events = ('before_add_entity',) - - def __call__(self): - self.entity.cw_edited['sender'] = self._cw.user.primary_email[0].eid - - with self.temporary_appobjects(MyHook): - with self.admin_access.repo_cnx() as cnx: - email = cnx.create_entity('Email', messageid=u'1234') - cnx.commit() - self.assertEqual(email.sender[0].eid, self.add_eid) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_synccomputed.py --- a/hooks/test/unittest_synccomputed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,146 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit tests for computed attributes/relations hooks""" - -from unittest import TestCase - -from yams.buildobjs import EntityType, String, Int, SubjectRelation - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.schema import build_schema_from_namespace - - -class FormulaDependenciesMatrixTC(TestCase): - - def simple_schema(self): - THISYEAR = 2014 - - class Person(EntityType): - name = String() - salary = Int() - birth_year = Int(required=True) - works_for = SubjectRelation('Company') - age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR) - - class Company(EntityType): - name = String() - total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA') - - schema = build_schema_from_namespace(vars().items()) - return schema - - def setUp(self): - from cubicweb.hooks.synccomputed import _FormulaDependenciesMatrix - self.schema = self.simple_schema() - self.dependencies = _FormulaDependenciesMatrix(self.schema) - - def test_computed_attributes_by_etype(self): - comp_by_etype = self.dependencies.computed_attribute_by_etype - self.assertEqual(len(comp_by_etype), 2) - values = comp_by_etype['Person'] - self.assertEqual(len(values), 1) - self.assertEqual(values[0].rtype, 'age') - values = comp_by_etype['Company'] - self.assertEqual(len(values), 1) - self.assertEqual(values[0].rtype, 'total_salary') - - def test_computed_attribute_by_relation(self): - comp_by_rdef = self.dependencies.computed_attribute_by_relation - self.assertEqual(len(comp_by_rdef), 1) - key, values = next(iter(comp_by_rdef.items())) - self.assertEqual(key.rtype, 'works_for') - self.assertEqual(len(values), 1) - self.assertEqual(values[0].rtype, 'total_salary') - - def test_computed_attribute_by_etype_attrs(self): - comp_by_attr = self.dependencies.computed_attribute_by_etype_attrs - self.assertEqual(len(comp_by_attr), 1) - values = comp_by_attr['Person'] - self.assertEqual(len(values), 2) - values = set((rdef.formula, tuple(v)) - for rdef, v in values.items()) - self.assertEquals(values, - set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))), - ('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',))))) - ) - - -class ComputedAttributeTC(CubicWebTC): - appid = 'data-computed' - - def setup_entities(self, req): - self.societe = req.create_entity('Societe', nom=u'Foo') - req.create_entity('Person', name=u'Titi', salaire=1000, - travaille=self.societe, birth_year=2001) - self.tata = req.create_entity('Person', name=u'Tata', salaire=2000, - travaille=self.societe, birth_year=1990) - - - def test_update_on_add_remove_relation(self): - """check the rewriting of a computed attribute""" - with self.admin_access.web_request() as req: - self.setup_entities(req) - req.cnx.commit() - rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') - self.assertEqual(rset[0][0], 3000) - # Add relation. - toto = req.create_entity('Person', name=u'Toto', salaire=1500, - travaille=self.societe, birth_year=1988) - req.cnx.commit() - rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') - self.assertEqual(rset[0][0], 4500) - # Delete relation. 
- toto.cw_set(travaille=None) - req.cnx.commit() - rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') - self.assertEqual(rset[0][0], 3000) - - def test_recompute_on_attribute_update(self): - """check the modification of an attribute triggers the update of the - computed attributes that depend on it""" - with self.admin_access.web_request() as req: - self.setup_entities(req) - req.cnx.commit() - rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') - self.assertEqual(rset[0][0], 3000) - # Update attribute. - self.tata.cw_set(salaire=1000) - req.cnx.commit() - rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"') - self.assertEqual(rset[0][0], 2000) - - def test_init_on_entity_creation(self): - """check the computed attribute is initialized on entity creation""" - with self.admin_access.web_request() as req: - p = req.create_entity('Person', name=u'Tata', salaire=2000, - birth_year=1990) - req.cnx.commit() - rset = req.execute('Any A, X WHERE X age A, X name "Tata"') - self.assertEqual(rset[0][0], 2014 - 1990) - - - def test_recompute_on_ambiguous_relation(self): - # check we don't end up with TypeResolverException as in #4901163 - with self.admin_access.client_cnx() as cnx: - societe = cnx.create_entity('Societe', nom=u'Foo') - cnx.create_entity('MirrorEntity', mirror_of=societe, extid=u'1') - cnx.commit() - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,405 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""cubicweb.server.hooks.syncschema unit and functional tests""" - -from logilab.common.testlib import unittest_main - -from yams.constraints import BoundaryConstraint -from cubicweb import ValidationError, Binary -from cubicweb.schema import META_RTYPES -from cubicweb.devtools import startpgcluster, stoppgcluster, PostgresApptestConfiguration -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.devtools.repotest import schema_eids_idx - - -def setUpModule(): - startpgcluster(__file__) - - -def tearDownModule(*args): - stoppgcluster(__file__) - del SchemaModificationHooksTC.schema_eids - - -class SchemaModificationHooksTC(CubicWebTC): - configcls = PostgresApptestConfiguration - - def setUp(self): - super(SchemaModificationHooksTC, self).setUp() - self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) - self.__class__.schema_eids = schema_eids_idx(self.repo.schema) - - def index_exists(self, cnx, etype, attr, unique=False): - dbhelper = self.repo.system_source.dbhelper - sqlcursor = cnx.cnxset.cu - return dbhelper.index_exists(sqlcursor, - SQL_PREFIX + etype, - SQL_PREFIX + attr, - unique=unique) - - def _set_perms(self, cnx, eid): - cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}) - cnx.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, ' - 'G name "managers"', {'x': eid}) - cnx.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, ' - 'G name "owners"', {'x': eid}) - - def _set_attr_perms(self, cnx, eid): - cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}) - cnx.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}) - - def test_base(self): - with self.admin_access.repo_cnx() as cnx: - schema = self.repo.schema - self.assertFalse(schema.has_entity('Societe2')) - self.assertFalse(schema.has_entity('concerne2')) - # schema should be update on insertion (after commit) - eeid = cnx.execute('INSERT CWEType X: X name "Societe2", ' - 'X description "", X final FALSE')[0][0] - self._set_perms(cnx, eeid) - cnx.execute('INSERT CWRType X: X name "concerne2", X description "", ' - 'X final FALSE, X symmetric FALSE') - self.assertFalse(schema.has_entity('Societe2')) - self.assertFalse(schema.has_entity('concerne2')) - # have to commit before adding definition relations - cnx.commit() - self.assertTrue(schema.has_entity('Societe2')) - self.assertTrue(schema.has_relation('concerne2')) - attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", ' - 'X defaultval %(default)s, X indexed TRUE, ' - 'X relation_type RT, X from_entity E, X to_entity F ' - 'WHERE RT name "name", E name "Societe2", ' - 'F name "String"', - {'default': Binary.zpickle('noname')})[0][0] - self._set_attr_perms(cnx, attreid) - concerne2_rdef_eid = cnx.execute( - 'INSERT CWRelation X: X cardinality "**", X relation_type RT, ' - 'X from_entity E, X to_entity E ' - 'WHERE RT name "concerne2", E name "Societe2"')[0][0] - self._set_perms(cnx, concerne2_rdef_eid) - self.assertNotIn('name', schema['Societe2'].subject_relations()) - self.assertNotIn('concerne2', schema['Societe2'].subject_relations()) - self.assertFalse(self.index_exists(cnx, 'Societe2', 'name')) - cnx.commit() - self.assertIn('name', schema['Societe2'].subject_relations()) - self.assertIn('concerne2', schema['Societe2'].subject_relations()) - self.assertTrue(self.index_exists(cnx, 'Societe2', 'name')) - # now we should be able to insert and query 
Societe2 - s2eid = cnx.execute('INSERT Societe2 X: X name "logilab"')[0][0] - cnx.execute('Societe2 X WHERE X name "logilab"') - cnx.execute('SET X concerne2 X WHERE X name "logilab"') - rset = cnx.execute('Any X WHERE X concerne2 Y') - self.assertEqual(rset.rows, [[s2eid]]) - # check that when a relation definition is deleted, existing relations are deleted - rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' - ' X from_entity E, X to_entity E ' - 'WHERE RT name "concerne2", E name "CWUser"')[0][0] - self._set_perms(cnx, rdefeid) - cnx.commit() - cnx.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}) - cnx.commit() - self.assertIn('concerne2', schema['CWUser'].subject_relations()) - self.assertNotIn('concerne2', schema['Societe2'].subject_relations()) - self.assertFalse(cnx.execute('Any X WHERE X concerne2 Y')) - # schema should be cleaned on delete (after commit) - cnx.execute('DELETE CWEType X WHERE X name "Societe2"') - cnx.execute('DELETE CWRType X WHERE X name "concerne2"') - self.assertTrue(self.index_exists(cnx, 'Societe2', 'name')) - self.assertTrue(schema.has_entity('Societe2')) - self.assertTrue(schema.has_relation('concerne2')) - cnx.commit() - self.assertFalse(self.index_exists(cnx, 'Societe2', 'name')) - self.assertFalse(schema.has_entity('Societe2')) - self.assertFalse(schema.has_entity('concerne2')) - self.assertNotIn('concerne2', schema['CWUser'].subject_relations()) - - def test_metartype_with_nordefs(self): - with self.admin_access.repo_cnx() as cnx: - META_RTYPES.add('custom_meta') - cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", ' - 'X final FALSE, X symmetric FALSE') - cnx.commit() - eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", ' - 'X description "", X final FALSE')[0][0] - self._set_perms(cnx, eeid) - cnx.commit() - META_RTYPES.remove('custom_meta') - - def test_metartype_with_somerdefs(self): - with self.admin_access.repo_cnx() as cnx: - META_RTYPES.add('custom_meta') - cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", ' - 'X final FALSE, X symmetric FALSE') - cnx.commit() - rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' - ' X from_entity E, X to_entity E ' - 'WHERE RT name "custom_meta", E name "CWUser"')[0][0] - self._set_perms(cnx, rdefeid) - cnx.commit() - eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", ' - 'X description "", X final FALSE')[0][0] - self._set_perms(cnx, eeid) - cnx.commit() - META_RTYPES.remove('custom_meta') - - def test_is_instance_of_insertions(self): - with self.admin_access.repo_cnx() as cnx: - seid = cnx.execute('INSERT Transition T: T name "subdiv"')[0][0] - is_etypes = [etype for etype, in cnx.execute('Any ETN WHERE X eid %s, ' - 'X is ET, ET name ETN' % seid)] - self.assertEqual(is_etypes, ['Transition']) - instanceof_etypes = [etype - for etype, in cnx.execute('Any ETN WHERE X eid %s, ' - 'X is_instance_of ET, ET name ETN' - % seid)] - self.assertEqual(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) - snames = [name for name, in cnx.execute('Any N WHERE S is BaseTransition, S name N')] - self.assertNotIn('subdiv', snames) - snames = [name for name, in cnx.execute('Any N WHERE S is_instance_of BaseTransition, ' - 'S name N')] - self.assertIn('subdiv', snames) - - - def test_perms_synchronization_1(self): - with self.admin_access.repo_cnx() as cnx: - schema = self.repo.schema - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) - 
self.assertTrue(cnx.execute('Any X, Y WHERE X is CWEType, X name "CWUser", ' - 'Y is CWGroup, Y name "users"')[0]) - cnx.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) - cnx.commit() - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',))) - cnx.execute('SET X read_permission Y WHERE X is CWEType, ' - 'X name "CWUser", Y name "users"') - cnx.commit() - self.assertEqual(schema['CWUser'].get_groups('read'), - set(('managers', 'users',))) - - def test_perms_synchronization_2(self): - with self.admin_access.repo_cnx() as cnx: - schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')] - self.assertEqual(schema.get_groups('read'), - set(('managers', 'users', 'guests'))) - cnx.execute('DELETE X read_permission Y WHERE X relation_type RT, ' - 'RT name "in_group", Y name "guests"') - self.assertEqual(schema.get_groups('read'), - set(('managers', 'users', 'guests'))) - cnx.commit() - self.assertEqual(schema.get_groups('read'), - set(('managers', 'users'))) - cnx.execute('SET X read_permission Y WHERE X relation_type RT, ' - 'RT name "in_group", Y name "guests"') - self.assertEqual(schema.get_groups('read'), - set(('managers', 'users'))) - cnx.commit() - self.assertEqual(schema.get_groups('read'), - set(('managers', 'users', 'guests'))) - - def test_nonregr_user_edit_itself(self): - with self.admin_access.repo_cnx() as cnx: - ueid = cnx.user.eid - groupeids = [eid for eid, in cnx.execute('CWGroup G WHERE G name ' - 'in ("managers", "users")')] - cnx.execute('DELETE X in_group Y WHERE X eid %s' % ueid) - cnx.execute('SET X surname "toto" WHERE X eid %s' % ueid) - cnx.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid) - cnx.commit() - eeid = cnx.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0] - cnx.execute('DELETE X read_permission Y WHERE X eid %s' % eeid) - cnx.execute('SET X final FALSE WHERE X eid %s' % eeid) - cnx.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)' - % (eeid, groupeids[0], groupeids[1])) - cnx.commit() - cnx.execute('Any X WHERE X is CWEType, X name "CWEType"') - - # schema modification hooks tests ######################################### - - def test_uninline_relation(self): - with self.admin_access.repo_cnx() as cnx: - try: - self.assertTrue(self.schema['state_of'].inlined) - cnx.execute('SET X inlined FALSE WHERE X name "state_of"') - self.assertTrue(self.schema['state_of'].inlined) - cnx.commit() - self.assertFalse(self.schema['state_of'].inlined) - self.assertFalse(self.index_exists(cnx, 'State', 'state_of')) - rset = cnx.execute('Any X, Y WHERE X state_of Y') - self.assertEqual(len(rset), 2) # user states - finally: - cnx.execute('SET X inlined TRUE WHERE X name "state_of"') - self.assertFalse(self.schema['state_of'].inlined) - cnx.commit() - self.assertTrue(self.schema['state_of'].inlined) - self.assertTrue(self.index_exists(cnx, 'State', 'state_of')) - rset = cnx.execute('Any X, Y WHERE X state_of Y') - self.assertEqual(len(rset), 2) - - def test_indexed_change(self): - with self.admin_access.repo_cnx() as cnx: - try: - cnx.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"') - self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertTrue(self.index_exists(cnx, 'Workflow', 'name')) - cnx.commit() - self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertFalse(self.index_exists(cnx, 'Workflow', 
'name')) - finally: - cnx.execute('SET X indexed TRUE WHERE X relation_type R, R name "name"') - self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertFalse(self.index_exists(cnx, 'Workflow', 'name')) - cnx.commit() - self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertTrue(self.index_exists(cnx, 'Workflow', 'name')) - - def test_unique_change(self): - with self.admin_access.repo_cnx() as cnx: - try: - eid = cnx.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X ' - 'WHERE CT name "UniqueConstraint", DEF relation_type RT, ' - 'DEF from_entity E, RT name "name", ' - 'E name "Workflow"').rows[0][0] - self.assertFalse(self.schema['Workflow'].has_unique_values('name')) - self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True)) - cnx.commit() - self.assertTrue(self.schema['Workflow'].has_unique_values('name')) - self.assertTrue(self.index_exists(cnx, 'Workflow', 'name', unique=True)) - finally: - cnx.execute('DELETE CWConstraint C WHERE C eid %(eid)s', {'eid': eid}) - cnx.commit() - self.assertFalse(self.schema['Workflow'].has_unique_values('name')) - self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True)) - - def test_required_change_1(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('SET DEF cardinality "?1" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "title", E name "Bookmark"') - cnx.commit() - # should now be able to add bookmark without title - cnx.execute('INSERT Bookmark X: X path "/view"') - cnx.commit() - - def test_required_change_2(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('SET DEF cardinality "11" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "surname", E name "CWUser"') - cnx.execute('SET U surname "Doe" WHERE U surname NULL') - cnx.commit() - # should not be able anymore to add cwuser without surname - self.assertRaises(ValidationError, self.create_user, cnx, "toto") - cnx.rollback() - cnx.execute('SET DEF cardinality "?1" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "surname", E name "CWUser"') - cnx.commit() - - def test_add_attribute_to_base_class(self): - with self.admin_access.repo_cnx() as cnx: - attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, ' - 'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' - 'WHERE RT name "messageid", E name "BaseTransition", F name "String"', - {'default': Binary.zpickle('noname')})[0][0] - assert cnx.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', - {'x': attreid}) - cnx.commit() - self.schema.rebuild_infered_relations() - self.assertIn('Transition', self.schema['messageid'].subjects()) - self.assertIn('WorkflowTransition', self.schema['messageid'].subjects()) - cnx.execute('Any X WHERE X is_instance_of BaseTransition, X messageid "hop"') - - def test_change_fulltextindexed(self): - with self.admin_access.repo_cnx() as cnx: - target = cnx.create_entity(u'Email', messageid=u'1234', - subject=u'rick.roll@dance.com') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(target.eid, [item[0] for item in rset]) - assert cnx.execute('SET A fulltextindexed FALSE ' - 'WHERE E is CWEType, E name "Email", A is CWAttribute,' - 'A from_entity E, A relation_type R, R name "subject"') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text "rick.roll"') - self.assertFalse(rset) - assert cnx.execute('SET A fulltextindexed TRUE ' 
- 'WHERE A from_entity E, A relation_type R, ' - 'E name "Email", R name "subject"') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(target.eid, [item[0] for item in rset]) - - def test_change_fulltext_container(self): - with self.admin_access.repo_cnx() as cnx: - target = cnx.create_entity(u'EmailAddress', address=u'rick.roll@dance.com') - target.cw_set(reverse_use_email=cnx.user) - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(cnx.user.eid, [item[0] for item in rset]) - assert cnx.execute('SET R fulltext_container NULL ' - 'WHERE R name "use_email"') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(target.eid, [item[0] for item in rset]) - assert cnx.execute('SET R fulltext_container "subject" ' - 'WHERE R name "use_email"') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(cnx.user.eid, [item[0] for item in rset]) - - def test_update_constraint(self): - with self.admin_access.repo_cnx() as cnx: - rdef = self.schema['Transition'].rdef('type') - cstr = rdef.constraint_by_type('StaticVocabularyConstraint') - if not getattr(cstr, 'eid', None): - # bug in schema reloading, constraint's eid not restored - self.skipTest('start me alone') - cnx.execute('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) - cnx.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, ' - 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', - {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) - cnx.commit() - cstr = rdef.constraint_by_type('StaticVocabularyConstraint') - self.assertEqual(cstr.values, (u'normal', u'auto', u'new')) - cnx.execute('INSERT Transition T: T name "hop", T type "new"') - - def test_add_constraint(self): - with self.admin_access.repo_cnx() as cnx: - rdef = self.schema['EmailPart'].rdef('ordernum') - cstr = BoundaryConstraint('>=', 0) - cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, ' - 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', - {'ct': cstr.__class__.__name__, 'v': cstr.serialize(), 'x': rdef.eid}) - cnx.commit() - cstr2 = rdef.constraint_by_type('BoundaryConstraint') - self.assertEqual(cstr, cstr2) - cstr3 = BoundaryConstraint('<=', 1000) - cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, ' - 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', - {'ct': cstr3.__class__.__name__, 'v': cstr3.serialize(), 'x': rdef.eid}) - cnx.commit() - self.assertCountEqual(rdef.constraints, [cstr, cstr3]) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/test/unittest_syncsession.py --- a/hooks/test/unittest_syncsession.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""functional tests for core hooks - -Note: - syncschema.py hooks are mostly tested in server/test/unittest_migrations.py -""" - -from six import text_type - -from cubicweb import ValidationError -from cubicweb.devtools.testlib import CubicWebTC - -class CWPropertyHooksTC(CubicWebTC): - - def test_unexistant_cwproperty(self): - with self.admin_access.web_request() as req: - with self.assertRaises(ValidationError) as cm: - req.execute('INSERT CWProperty X: X pkey "bla.bla", ' - 'X value "hop", X for_user U') - cm.exception.translate(text_type) - self.assertEqual(cm.exception.errors, - {'pkey-subject': 'unknown property key bla.bla'}) - - with self.assertRaises(ValidationError) as cm: - req.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"') - cm.exception.translate(text_type) - self.assertEqual(cm.exception.errors, - {'pkey-subject': 'unknown property key bla.bla'}) - - def test_site_wide_cwproperty(self): - with self.admin_access.web_request() as req: - with self.assertRaises(ValidationError) as cm: - req.execute('INSERT CWProperty X: X pkey "ui.site-title", ' - 'X value "hop", X for_user U') - self.assertEqual(cm.exception.errors, - {'for_user-subject': "site-wide property can't be set for user"}) - - def test_system_cwproperty(self): - with self.admin_access.web_request() as req: - with self.assertRaises(ValidationError) as cm: - req.execute('INSERT CWProperty X: X pkey "system.version.cubicweb", ' - 'X value "hop", X for_user U') - self.assertEqual(cm.exception.errors, - {'for_user-subject': "site-wide property can't be set for user"}) - - def test_bad_type_cwproperty(self): - with self.admin_access.web_request() as req: - with self.assertRaises(ValidationError) as cm: - req.execute('INSERT CWProperty X: X pkey "ui.language", ' - 'X value "hop", X for_user U') - self.assertEqual(cm.exception.errors, - {'value-subject': u'unauthorized value'}) - with self.assertRaises(ValidationError) as cm: - req.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"') - self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'}) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/workflow.py --- a/hooks/workflow.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,357 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Core hooks: workflow related hooks""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from datetime import datetime - - -from cubicweb import RepositoryError, validation_error -from cubicweb.predicates import is_instance, adaptable -from cubicweb.server import hook - - -def _change_state(cnx, x, oldstate, newstate): - nocheck = cnx.transaction_data.setdefault('skip-security', set()) - nocheck.add((x, 'in_state', oldstate)) - nocheck.add((x, 'in_state', newstate)) - # delete previous state first - cnx.delete_relation(x, 'in_state', oldstate) - cnx.add_relation(x, 'in_state', newstate) - - -# operations ################################################################### - -class _SetInitialStateOp(hook.Operation): - """make initial state be a default state""" - entity = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - entity = self.entity - iworkflowable = entity.cw_adapt_to('IWorkflowable') - # if there is an initial state and the entity's state is not set, - # use the initial state as a default state - if not (cnx.deleted_in_transaction(entity.eid) or entity.in_state) \ - and iworkflowable.current_workflow: - state = iworkflowable.current_workflow.initial - if state: - cnx.add_relation(entity.eid, 'in_state', state.eid) - _FireAutotransitionOp(cnx, entity=entity) - -class _FireAutotransitionOp(hook.Operation): - """try to fire auto transition after state changes""" - entity = None # make pylint happy - - def precommit_event(self): - entity = self.entity - iworkflowable = entity.cw_adapt_to('IWorkflowable') - autotrs = list(iworkflowable.possible_transitions('auto')) - if autotrs: - assert len(autotrs) == 1 - iworkflowable.fire_transition(autotrs[0]) - - -class _WorkflowChangedOp(hook.Operation): - """fix entity current state when changing its workflow""" - eid = wfeid = None # make pylint happy - - def precommit_event(self): - # notice that enforcement that new workflow apply to the entity's type is - # done by schema rule, no need to check it here - cnx = self.cnx - pendingeids = cnx.transaction_data.get('pendingeids', ()) - if self.eid in pendingeids: - return - entity = cnx.entity_from_eid(self.eid) - iworkflowable = entity.cw_adapt_to('IWorkflowable') - # check custom workflow has not been rechanged to another one in the same - # transaction - mainwf = iworkflowable.main_workflow - if mainwf.eid == self.wfeid: - deststate = mainwf.initial - if not deststate: - msg = _('workflow has no initial state') - raise validation_error(entity, {('custom_workflow', 'subject'): msg}) - if mainwf.state_by_eid(iworkflowable.current_state.eid): - # nothing to do - return - # if there are no history, simply go to new workflow's initial state - if not iworkflowable.workflow_history: - if iworkflowable.current_state.eid != deststate.eid: - _change_state(cnx, entity.eid, - iworkflowable.current_state.eid, deststate.eid) - _FireAutotransitionOp(cnx, entity=entity) - return - msg = cnx._('workflow changed to "%s"') - msg %= cnx._(mainwf.name) - cnx.transaction_data[(entity.eid, 'customwf')] = self.wfeid - iworkflowable.change_state(deststate, msg, u'text/plain') - - -class _CheckTrExitPoint(hook.Operation): - treid = None # make pylint happy - - def precommit_event(self): - tr = self.cnx.entity_from_eid(self.treid) - outputs = set() - for ep in tr.subworkflow_exit: - if ep.subwf_state.eid in outputs: - msg = _("can't have multiple exits on the same state") - raise validation_error(self.treid, {('subworkflow_exit', 'subject'): msg}) - 
outputs.add(ep.subwf_state.eid) - - -class _SubWorkflowExitOp(hook.Operation): - forentity = trinfo = None # make pylint happy - - def precommit_event(self): - cnx = self.cnx - forentity = self.forentity - iworkflowable = forentity.cw_adapt_to('IWorkflowable') - trinfo = self.trinfo - # we're in a subworkflow, check if we've reached an exit point - wftr = iworkflowable.subworkflow_input_transition() - if wftr is None: - # inconsistency detected - msg = _("state doesn't belong to entity's current workflow") - raise validation_error(self.trinfo, {('to_state', 'subject'): msg}) - tostate = wftr.get_exit_point(forentity, trinfo.cw_attr_cache['to_state']) - if tostate is not None: - # reached an exit point - msg = _('exiting from subworkflow %s') - msg %= cnx._(iworkflowable.current_workflow.name) - cnx.transaction_data[(forentity.eid, 'subwfentrytr')] = True - iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr) - - -# hooks ######################################################################## - -class WorkflowHook(hook.Hook): - __abstract__ = True - category = 'metadata' - - -class SetInitialStateHook(WorkflowHook): - __regid__ = 'wfsetinitial' - __select__ = WorkflowHook.__select__ & adaptable('IWorkflowable') - events = ('after_add_entity',) - - def __call__(self): - _SetInitialStateOp(self._cw, entity=self.entity) - - -class FireTransitionHook(WorkflowHook): - """check the transition is allowed and add missing information into the - TrInfo entity. - - Expect that: - * wf_info_for inlined relation is set - * by_transition or to_state (managers only) inlined relation is set - - Check for automatic transition to be fired at the end - """ - __regid__ = 'wffiretransition' - __select__ = WorkflowHook.__select__ & is_instance('TrInfo') - events = ('before_add_entity',) - - def __call__(self): - cnx = self._cw - entity = self.entity - # first retreive entity to which the state change apply - try: - foreid = entity.cw_attr_cache['wf_info_for'] - except KeyError: - msg = _('mandatory relation') - raise validation_error(entity, {('wf_info_for', 'subject'): msg}) - forentity = cnx.entity_from_eid(foreid) - # see comment in the TrInfo entity definition - entity.cw_edited['tr_count']=len(forentity.reverse_wf_info_for) - iworkflowable = forentity.cw_adapt_to('IWorkflowable') - # then check it has a workflow set, unless we're in the process of changing - # entity's workflow - if cnx.transaction_data.get((forentity.eid, 'customwf')): - wfeid = cnx.transaction_data[(forentity.eid, 'customwf')] - wf = cnx.entity_from_eid(wfeid) - else: - wf = iworkflowable.current_workflow - if wf is None: - msg = _('related entity has no workflow set') - raise validation_error(entity, {None: msg}) - # then check it has a state set - fromstate = iworkflowable.current_state - if fromstate is None: - msg = _('related entity has no state') - raise validation_error(entity, {None: msg}) - # True if we are coming back from subworkflow - swtr = cnx.transaction_data.pop((forentity.eid, 'subwfentrytr'), None) - cowpowers = (cnx.user.is_in_group('managers') - or not cnx.write_security) - # no investigate the requested state change... 
- try: - treid = entity.cw_attr_cache['by_transition'] - except KeyError: - # no transition set, check user is a manager and destination state - # is specified (and valid) - if not cowpowers: - msg = _('mandatory relation') - raise validation_error(entity, {('by_transition', 'subject'): msg}) - deststateeid = entity.cw_attr_cache.get('to_state') - if not deststateeid: - msg = _('mandatory relation') - raise validation_error(entity, {('by_transition', 'subject'): msg}) - deststate = wf.state_by_eid(deststateeid) - if deststate is None: - msg = _("state doesn't belong to entity's workflow") - raise validation_error(entity, {('to_state', 'subject'): msg}) - else: - # check transition is valid and allowed, unless we're coming back - # from subworkflow - tr = cnx.entity_from_eid(treid) - if swtr is None: - qname = ('by_transition', 'subject') - if tr is None: - msg = _("transition doesn't belong to entity's workflow") - raise validation_error(entity, {qname: msg}) - if not tr.has_input_state(fromstate): - msg = _("transition %(tr)s isn't allowed from %(st)s") - raise validation_error(entity, {qname: msg}, { - 'tr': tr.name, 'st': fromstate.name}, ['tr', 'st']) - if not tr.may_be_fired(foreid): - msg = _("transition may not be fired") - raise validation_error(entity, {qname: msg}) - deststateeid = entity.cw_attr_cache.get('to_state') - if deststateeid is not None: - if not cowpowers and deststateeid != tr.destination(forentity).eid: - msg = _("transition isn't allowed") - raise validation_error(entity, {('by_transition', 'subject'): msg}) - if swtr is None: - deststate = cnx.entity_from_eid(deststateeid) - if not cowpowers and deststate is None: - msg = _("state doesn't belong to entity's workflow") - raise validation_error(entity, {('to_state', 'subject'): msg}) - else: - deststateeid = tr.destination(forentity).eid - # everything is ok, add missing information on the trinfo entity - entity.cw_edited['from_state'] = fromstate.eid - entity.cw_edited['to_state'] = deststateeid - nocheck = cnx.transaction_data.setdefault('skip-security', set()) - nocheck.add((entity.eid, 'from_state', fromstate.eid)) - nocheck.add((entity.eid, 'to_state', deststateeid)) - _FireAutotransitionOp(cnx, entity=forentity) - - -class FiredTransitionHook(WorkflowHook): - """change related entity state and handle exit of subworkflow""" - __regid__ = 'wffiretransition' - __select__ = WorkflowHook.__select__ & is_instance('TrInfo') - events = ('after_add_entity',) - - def __call__(self): - trinfo = self.entity - rcache = trinfo.cw_attr_cache - _change_state(self._cw, rcache['wf_info_for'], rcache['from_state'], - rcache['to_state']) - forentity = self._cw.entity_from_eid(rcache['wf_info_for']) - iworkflowable = forentity.cw_adapt_to('IWorkflowable') - assert iworkflowable.current_state.eid == rcache['to_state'] - if iworkflowable.main_workflow.eid != iworkflowable.current_workflow.eid: - _SubWorkflowExitOp(self._cw, forentity=forentity, trinfo=trinfo) - - -class CheckInStateChangeAllowed(WorkflowHook): - """check state apply, in case of direct in_state change using unsafe execute - """ - __regid__ = 'wfcheckinstate' - __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') - events = ('before_add_relation',) - category = 'integrity' - - def __call__(self): - cnx = self._cw - nocheck = cnx.transaction_data.get('skip-security', ()) - if (self.eidfrom, 'in_state', self.eidto) in nocheck: - # state changed through TrInfo insertion, so we already know it's ok - return - entity = cnx.entity_from_eid(self.eidfrom) - 
iworkflowable = entity.cw_adapt_to('IWorkflowable') - mainwf = iworkflowable.main_workflow - if mainwf is None: - msg = _('entity has no workflow set') - raise validation_error(entity, {None: msg}) - for wf in mainwf.iter_workflows(): - if wf.state_by_eid(self.eidto): - break - else: - msg = _("state doesn't belong to entity's workflow. You may " - "want to set a custom workflow for this entity first.") - raise validation_error(self.eidfrom, {('in_state', 'subject'): msg}) - if iworkflowable.current_workflow and wf.eid != iworkflowable.current_workflow.eid: - msg = _("state doesn't belong to entity's current workflow") - raise validation_error(self.eidfrom, {('in_state', 'subject'): msg}) - - -class SetModificationDateOnStateChange(WorkflowHook): - """update entity's modification date after changing its state""" - __regid__ = 'wfsyncmdate' - __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') - events = ('after_add_relation',) - - def __call__(self): - if self._cw.added_in_transaction(self.eidfrom): - # new entity, not needed - return - entity = self._cw.entity_from_eid(self.eidfrom) - try: - entity.cw_set(modification_date=datetime.utcnow()) - except RepositoryError as ex: - # usually occurs if entity is coming from a read-only source - # (eg ldap user) - self.warning('cant change modification date for %s: %s', entity, ex) - - -class CheckWorkflowTransitionExitPoint(WorkflowHook): - """check that there is no multiple exits from the same state""" - __regid__ = 'wfcheckwftrexit' - __select__ = WorkflowHook.__select__ & hook.match_rtype('subworkflow_exit') - events = ('after_add_relation',) - - def __call__(self): - _CheckTrExitPoint(self._cw, treid=self.eidfrom) - - -class SetCustomWorkflow(WorkflowHook): - __regid__ = 'wfsetcustom' - __select__ = WorkflowHook.__select__ & hook.match_rtype('custom_workflow') - events = ('after_add_relation',) - - def __call__(self): - _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=self.eidto) - - -class DelCustomWorkflow(SetCustomWorkflow): - __regid__ = 'wfdelcustom' - events = ('after_delete_relation',) - - def __call__(self): - entity = self._cw.entity_from_eid(self.eidfrom) - typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow() - if typewf is not None: - _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) diff -r 058bb3dc685f -r 0b59724cb3f2 hooks/zmq.py --- a/hooks/zmq.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from cubicweb.server import hook - -class ZMQStopHook(hook.Hook): - __regid__ = 'zmqstop' - events = ('server_shutdown',) - - def __call__(self): - self.repo.app_instances_bus.stop() - -class ZMQStartHook(hook.Hook): - __regid__ = 'zmqstart' - events = ('server_startup',) - order = -1 - - def __call__(self): - config = self.repo.config - address_pub = config.get('zmq-address-pub') - address_sub = config.get('zmq-address-sub') - if not address_pub and not address_sub: - return - from cubicweb.server import cwzmq - self.repo.app_instances_bus = cwzmq.ZMQComm() - if address_pub: - self.repo.app_instances_bus.add_publisher(address_pub) - def clear_cache_callback(msg): - self.debug('clear_caches: %s', ' '.join(msg)) - self.repo.clear_caches(msg[1:]) - self.repo.app_instances_bus.add_subscription('delete', clear_cache_callback) - for address in address_sub: - self.repo.app_instances_bus.add_subscriber(address) - self.repo.app_instances_bus.start() diff -r 058bb3dc685f -r 0b59724cb3f2 i18n.py --- a/i18n.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,117 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Some i18n/gettext utilities.""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import re -import os -from os.path import join, basename, splitext, exists -from glob import glob - -from six import PY2 - -from cubicweb.toolsutils import create_dir - -def extract_from_tal(files, output_file): - """extract i18n strings from tal and write them into the given output file - using standard python gettext marker (_) - """ - output = open(output_file, 'w') - for filepath in files: - for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()): - output.write('_("%s")' % match.group(2)) - output.close() - - -def add_msg(w, msgid, msgctx=None): - """write an empty pot msgid definition""" - if PY2 and isinstance(msgid, unicode): - msgid = msgid.encode('utf-8') - if msgctx: - if PY2 and isinstance(msgctx, unicode): - msgctx = msgctx.encode('utf-8') - w('msgctxt "%s"\n' % msgctx) - msgid = msgid.replace('"', r'\"').splitlines() - if len(msgid) > 1: - w('msgid ""\n') - for line in msgid: - w('"%s"' % line.replace('"', r'\"')) - else: - w('msgid "%s"\n' % msgid[0]) - w('msgstr ""\n\n') - -def execute2(args): - # XXX replace this with check_output in Python 2.7 - from subprocess import Popen, PIPE, CalledProcessError - p = Popen(args, stdout=PIPE, stderr=PIPE) - out, err = p.communicate() - if p.returncode != 0: - exc = CalledProcessError(p.returncode, args[0]) - exc.cmd = args - exc.data = (out, err) - raise exc - -def available_catalogs(i18ndir=None): - if i18ndir is None: - wildcard = '*.po' - else: - wildcard = join(i18ndir, '*.po') - for popath in glob(wildcard): - lang = splitext(basename(popath))[0] - yield lang, popath - - -def compile_i18n_catalogs(sourcedirs, destdir, langs): - """generate .mo files for a set of languages into the `destdir` i18n directory - """ - from subprocess import CalledProcessError - from logilab.common.fileutils import ensure_fs_mode - print('-> compiling message catalogs to %s' % destdir) - errors = [] - for lang in langs: - langdir = join(destdir, lang, 'LC_MESSAGES') - if not exists(langdir): - create_dir(langdir) - pofiles = [join(path, '%s.po' % lang) for path in sourcedirs] - pofiles = [pof for pof in pofiles if exists(pof)] - mergedpo = join(destdir, '%s_merged.po' % lang) - try: - # merge instance/cubes messages catalogs with the stdlib's one - cmd = ['msgcat', '--use-first', '--sort-output', '--strict', - '-o', mergedpo] + pofiles - execute2(cmd) - # make sure the .mo file is writeable and compiles with *msgfmt* - applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo') - try: - ensure_fs_mode(applmo) - except OSError: - pass # suppose not exists - execute2(['msgfmt', mergedpo, '-o', applmo]) - except CalledProcessError as exc: - errors.append(u'while handling language %s:\ncmd:\n%s\nstdout:\n%s\nstderr:\n%s\n' % - (lang, exc.cmd, repr(exc.data[0]), repr(exc.data[1]))) - except Exception as exc: - errors.append(u'while handling language %s: %s' % (lang, exc)) - try: - # clean everything - os.unlink(mergedpo) - except Exception: - continue - return errors diff -r 058bb3dc685f -r 0b59724cb3f2 i18n/de.po --- a/i18n/de.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4690 +0,0 @@ -# cubicweb i18n catalog -# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# Logilab -msgid "" -msgstr "" -"Project-Id-Version: 2.0\n" -"POT-Creation-Date: 2006-01-12 17:35+CET\n" -"PO-Revision-Date: 2010-09-15 14:55+0200\n" -"Last-Translator: Dr. 
Leo \n" -"Language-Team: English \n" -"Language: de\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" - -#, python-format -msgid "" -"\n" -"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " -"entity\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" -msgstr "" -"\n" -"%(user)s hat den Zustand geändert von <%(previous_state)s> in <" -"%(current_state)s> für die Entität\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" - -#, python-format -msgid " from state %(fromstate)s to state %(tostate)s\n" -msgstr " aus dem Zustand %(fromstate)s in den Zustand %(tostate)s\n" - -msgid " :" -msgstr " :" - -#, python-format -msgid "\"action\" must be specified in options; allowed values are %s" -msgstr "" - -msgid "\"role=subject\" or \"role=object\" must be specified in options" -msgstr "" - -#, python-format -msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" -msgstr "" - -#, python-format -msgid "%(KEY-rtype)s is part of violated unicity constraint" -msgstr "" - -#, python-format -msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" -msgstr "" - -#, python-format -msgid "%(attr)s set to %(newvalue)s" -msgstr "%(attr)s geändert in %(newvalue)s" - -#, python-format -msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" -msgstr "%(attr)s geändert von %(oldvalue)s in %(newvalue)s" - -#, python-format -msgid "%(etype)s by %(author)s" -msgstr "" - -#, python-format -msgid "%(firstname)s %(surname)s" -msgstr "%(firstname)s %(surname)s" - -#, python-format -msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" -msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" - -#, python-format -msgid "%d days" -msgstr "%d Tage" - -#, python-format -msgid "%d hours" -msgstr "%d Stunden" - -#, python-format -msgid "%d minutes" -msgstr "%d Minuten" - -#, python-format -msgid "%d months" -msgstr "%d Monate" - -#, python-format -msgid "%d seconds" -msgstr "%d Sekunden" - -#, python-format -msgid "%d weeks" -msgstr "%d Wochen" - -#, python-format -msgid "%d years" -msgstr "%d Jahre" - -#, python-format -msgid "%s could be supported" -msgstr "" - -#, python-format -msgid "%s error report" -msgstr "%s Fehlerbericht" - -#, python-format -msgid "%s software version of the database" -msgstr "Software-Version der Datenbank %s" - -#, python-format -msgid "%s updated" -msgstr "%s aktualisiert" - -#, python-format -msgid "'%s' action doesn't take any options" -msgstr "" - -#, python-format -msgid "" -"'%s' action for in_state relation should at least have 'linkattr=name' option" -msgstr "" - -#, python-format -msgid "'%s' action requires 'linkattr' option" -msgstr "" - -msgid "(UNEXISTANT EID)" -msgstr "(EID nicht gefunden)" - -#, python-format -msgid "(suppressed) entity #%d" -msgstr "" - -msgid "**" -msgstr "0..n 0..n" - -msgid "*+" -msgstr "0..n 1..n" - -msgid "*1" -msgstr "0..n 1" - -msgid "*?" -msgstr "0..n 0..1" - -msgid "+*" -msgstr "1..n 0..n" - -msgid "++" -msgstr "1..n 1..n" - -msgid "+1" -msgstr "1..n 1" - -msgid "+?" -msgstr "1..n 0..1" - -msgid "1*" -msgstr "1 0..n" - -msgid "1+" -msgstr "1 1..n" - -msgid "11" -msgstr "1 1" - -msgid "1?" -msgstr "1 0..1" - -#, python-format -msgid "<%s not specified>" -msgstr "<%s nicht spezifiziert>" - -#, python-format -msgid "" -"
      This schema of the data model excludes the meta-data, but you " -"can also display a complete schema with meta-data.
      " -msgstr "" -"
      Dieses Schema des Datenmodells enthält<>keine Meta-Daten, aber Sie " -"können ein vollständiges Schema mit Meta-Daten anzeigen." - -msgid "" -msgstr "" - -msgid "" -msgstr "" - -msgid "?*" -msgstr "0..1 0..n" - -msgid "?+" -msgstr "0..1 1..n" - -msgid "?1" -msgstr "0..1 1" - -msgid "??" -msgstr "0..1 0..1" - -msgid "AND" -msgstr "UND" - -msgid "About this site" -msgstr "Über diese Seite" - -#, python-format -msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "" - -msgid "Attributes permissions:" -msgstr "Rechte der Attribute" - -# schema pot file, generated on 2009-09-16 16:46:55 -# -# singular and plural forms for each entity type -msgid "BaseTransition" -msgstr "Übergang (abstrakt)" - -msgid "BaseTransition_plural" -msgstr "Übergänge (abstrakt)" - -msgid "BigInt" -msgstr "" - -msgid "BigInt_plural" -msgstr "" - -msgid "Bookmark" -msgstr "Lesezeichen" - -msgid "Bookmark_plural" -msgstr "Lesezeichen" - -msgid "Boolean" -msgstr "Boolean" - -msgid "Boolean_plural" -msgstr "Booleans" - -msgid "BoundConstraint" -msgstr "gebundene Einschränkung" - -msgid "BoundaryConstraint" -msgstr "Rand-einschränkung" - -msgid "Browse by entity type" -msgstr "nach Identitätstyp navigieren" - -#, python-format -msgid "By %(user)s on %(dt)s [%(undo_link)s]" -msgstr "" - -msgid "Bytes" -msgstr "Bytes" - -msgid "Bytes_plural" -msgstr "Bytes" - -msgid "CWAttribute" -msgstr "Attribut" - -msgid "CWAttribute_plural" -msgstr "Attribute" - -msgid "CWCache" -msgstr "Cache" - -msgid "CWCache_plural" -msgstr "Caches" - -msgid "CWComputedRType" -msgstr "" - -msgid "CWComputedRType_plural" -msgstr "" - -msgid "CWConstraint" -msgstr "Einschränkung" - -msgid "CWConstraintType" -msgstr "Einschränkungstyp" - -msgid "CWConstraintType_plural" -msgstr "Einschränkungstypen" - -msgid "CWConstraint_plural" -msgstr "Einschränkungen" - -msgid "CWDataImport" -msgstr "" - -msgid "CWDataImport_plural" -msgstr "" - -msgid "CWEType" -msgstr "Entitätstyp" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "CWEType" -msgstr "Entitätstyp" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "CWEType" -msgstr "Entitätstyp" - -msgid "CWEType_plural" -msgstr "Entitätstypen" - -msgid "CWGroup" -msgstr "Gruppe" - -msgid "CWGroup_plural" -msgstr "Gruppen" - -msgid "CWProperty" -msgstr "Eigenschaft" - -msgid "CWProperty_plural" -msgstr "Eigenschaften" - -msgid "CWRType" -msgstr "Relationstyp" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "CWRType" -msgstr "Relationstyp" - -msgid "CWRType_plural" -msgstr "Relationstypen" - -msgid "CWRelation" -msgstr "Relation" - -msgid "CWRelation_plural" -msgstr "Relationen" - -msgid "CWSource" -msgstr "" - -msgid "CWSourceHostConfig" -msgstr "" - -msgid "CWSourceHostConfig_plural" -msgstr "" - -msgid "CWSourceSchemaConfig" -msgstr "" - -msgid "CWSourceSchemaConfig_plural" -msgstr "" - -msgid "CWSource_plural" -msgstr "" - -msgid "CWUniqueTogetherConstraint" -msgstr "unique-together-Einschränkung" - -msgid "CWUniqueTogetherConstraint_plural" -msgstr "unique-together-Einschränkungen" - -msgid "CWUser" -msgstr "Nutzer" - -msgid "CWUser_plural" -msgstr "Nutzer" - -#, python-format -msgid "" -"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " -"linked using this relation." -msgstr "" -"Kann die Relation %(role)s %(rtype)s zu einer Entität %(eid)s nicht wieder " -"herstellen, die durch diese Relation bereits mit einer anderen Entität " -"verbunden ist." 
- -#, python-format -msgid "" -"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " -"does not exists anymore in the schema." -msgstr "" -"Kann die Relation %(rtype)s zwischen %(subj)s und %(obj)s nicht wieder " -"herstellen, diese Relation existiert nicht mehr in dem Schema." - -#, python-format -msgid "" -"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " -"anymore." -msgstr "" -"Kann die Relation %(rtype)s nicht wieder herstellen, die Entität %(role)s " -"%(eid)s existiert nicht mehr." - -#, python-format -msgid "" -"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " -"exist anymore" -msgstr "" -"Kann das Hinzufügen der Relation %(rtype)s von %(subj)s zu %(obj)s nicht " -"rückgängig machen, diese Relation existiert nicht mehr." - -#, python-format -msgid "" -"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " -"supported" -msgstr "" -"Kann die Erstellung der Entität %(eid)s vom Typ %(etype)s nicht rückgängig " -"machen, dieser Typ existiert nicht mehr." - -msgid "Click to sort on this column" -msgstr "" - -msgid "" -"Configuration of the system source goes to the 'sources' file, not in the " -"database" -msgstr "" - -#, python-format -msgid "Created %(etype)s : %(entity)s" -msgstr "" - -msgid "DEBUG" -msgstr "" - -msgid "Date" -msgstr "Datum" - -msgid "Date_plural" -msgstr "Daten" - -msgid "Datetime" -msgstr "Datum und Uhrzeit" - -msgid "Datetime_plural" -msgstr "Daten und Uhrzeiten" - -msgid "Decimal" -msgstr "Dezimalzahl" - -msgid "Decimal_plural" -msgstr "Dezimalzahlen" - -#, python-format -msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "" - -#, python-format -msgid "Deleted %(etype)s : %(entity)s" -msgstr "" - -msgid "Detected problems" -msgstr "" - -msgid "Do you want to delete the following element(s)?" -msgstr "Wollen Sie das/die folgende(n) Element(e) löschen?" - -msgid "Download schema as OWL" -msgstr "Herunterladen des Schemas im OWL-Format" - -msgid "ERROR" -msgstr "" - -msgid "EmailAddress" -msgstr "Email-Adresse" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "EmailAddress" -msgstr "Email-Adresse" - -msgid "EmailAddress_plural" -msgstr "Email-Adressen" - -msgid "Entities" -msgstr "Entitäten" - -#, python-format -msgid "" -"Entity %(eid)s has changed since you started to edit it. Reload the page and " -"reapply your changes."
-msgstr "" - -msgid "Entity and relation supported by this source" -msgstr "" - -msgid "ExternalUri" -msgstr "Externer Uri" - -msgid "ExternalUri_plural" -msgstr "Externe Uris" - -msgid "FATAL" -msgstr "" - -msgid "Float" -msgstr "Gleitkommazahl" - -msgid "Float_plural" -msgstr "Gleitkommazahlen" - -# schema pot file, generated on 2009-12-03 09:22:35 -# -# singular and plural forms for each entity type -msgid "FormatConstraint" -msgstr "Format-Einschränkung" - -msgid "Garbage collection information" -msgstr "Information zur Speicherbereinigung" - -msgid "Help" -msgstr "Hilfe" - -msgid "INFO" -msgstr "" - -msgid "Instance" -msgstr "Instanz" - -msgid "Int" -msgstr "Ganzzahl" - -msgid "Int_plural" -msgstr "Ganzzahlen" - -msgid "Interval" -msgstr "Zeitraum" - -msgid "IntervalBoundConstraint" -msgstr "interval-Einschränkung" - -msgid "Interval_plural" -msgstr "Intervalle" - -msgid "Link:" -msgstr "" - -msgid "Looked up classes" -msgstr "gesuchte Klassen" - -msgid "Manage" -msgstr "" - -msgid "Manage security" -msgstr "Sicherheitsverwaltung" - -msgid "Message threshold" -msgstr "" - -msgid "Most referenced classes" -msgstr "meist-referenzierte Klassen" - -msgid "New BaseTransition" -msgstr "neuer Übergang (abstrakt)" - -msgid "New Bookmark" -msgstr "Neues Lesezeichen" - -msgid "New CWAttribute" -msgstr "Neue finale Relationsdefinition" - -msgid "New CWCache" -msgstr "Neuer Anwendungs-Cache" - -msgid "New CWComputedRType" -msgstr "" - -msgid "New CWConstraint" -msgstr "Neue Einschränkung" - -msgid "New CWConstraintType" -msgstr "Neuer Einschränkungstyp" - -msgid "New CWDataImport" -msgstr "" - -msgid "New CWEType" -msgstr "Neuer Entitätstyp" - -msgid "New CWGroup" -msgstr "Neue Gruppe" - -msgid "New CWProperty" -msgstr "Neue Eigenschaft" - -msgid "New CWRType" -msgstr "Neuer Relationstyp" - -msgid "New CWRelation" -msgstr "Neue Relation" - -msgid "New CWSource" -msgstr "" - -msgid "New CWSourceHostConfig" -msgstr "" - -msgid "New CWSourceSchemaConfig" -msgstr "" - -msgid "New CWUniqueTogetherConstraint" -msgstr "Neue unique-together-Einschränkung" - -msgid "New CWUser" -msgstr "Neuer Nutzer" - -msgid "New EmailAddress" -msgstr "Neue Email-Adresse" - -msgid "New ExternalUri" -msgstr "Neuer externer URI" - -msgid "New RQLExpression" -msgstr "Neuer RQL Ausdruck" - -msgid "New State" -msgstr "Neuer Zustand" - -msgid "New SubWorkflowExitPoint" -msgstr "Neuer subworkflow-Endpunkt" - -msgid "New TrInfo" -msgstr "Neue Übergangsinformation" - -msgid "New Transition" -msgstr "Neuer Übergang" - -msgid "New Workflow" -msgstr "Neuer workflow" - -msgid "New WorkflowTransition" -msgstr "Neuer workflow-Übergang" - -msgid "No result matching query" -msgstr "Ihre Suche ergab keine Treffer." - -msgid "Non exhaustive list of views that may apply to entities of this type" -msgstr "" -"nicht abschließende Liste von Ansichten, die auf Entitäten dieses Typs " -"Anwendung finden" - -msgid "OR" -msgstr "oder" - -msgid "Ownership" -msgstr "Eigentum" - -msgid "Parent class:" -msgstr "Elternklasse" - -msgid "Password" -msgstr "Passwort" - -msgid "Password_plural" -msgstr "Passwörter" - -msgid "Please note that this is only a shallow copy" -msgstr "Achtung: dies ist nur eine flache Kopie!" - -msgid "Powered by CubicWeb" -msgstr "Powered by CubicWeb" - -msgid "RQLConstraint" -msgstr "RQL-Einschränkung" - -msgid "RQLExpression" -msgstr "RQL-Ausdruck" - -msgid "RQLExpression_plural" -msgstr "RQL-Ausdrücke" - -msgid "RQLUniqueConstraint" -msgstr "RQL Einschränkung bzgl. 
Eindeutigkeit" - -msgid "RQLVocabularyConstraint" -msgstr "RQL Wortschatz-Einschränkung" - -msgid "RegexpConstraint" -msgstr "regulärer Ausdruck Einschränkung" - -msgid "Registry's content" -msgstr "Inhalt der Registry" - -msgid "Relations" -msgstr "Relationen" - -msgid "Repository" -msgstr "Ablage" - -#, python-format -msgid "Schema %s" -msgstr "Schema %s" - -msgid "Schema's permissions definitions" -msgstr "Im Schema definierte Rechte" - -msgid "Search for" -msgstr "Suchen" - -msgid "Site information" -msgstr "" - -msgid "SizeConstraint" -msgstr "Größeneinschränkung" - -msgid "" -"Source's configuration for a particular host. One key=value per line, " -"authorized keys depending on the source's type, overriding values defined on " -"the source." -msgstr "" - -msgid "Startup views" -msgstr "Startansichten" - -msgid "State" -msgstr "Zustand" - -msgid "State_plural" -msgstr "Zustände" - -msgid "StaticVocabularyConstraint" -msgstr "Wortschatz-Einschränkung" - -msgid "String" -msgstr "String" - -msgid "String_plural" -msgstr "Strings" - -msgid "Sub-classes:" -msgstr "Unterklassen" - -msgid "SubWorkflowExitPoint" -msgstr "Subworkflow Endpunkt" - -msgid "SubWorkflowExitPoint_plural" -msgstr "subworkflow Endpunkte" - -msgid "Submit bug report" -msgstr "Fehlerbericht senden" - -msgid "Submit bug report by mail" -msgstr "Diesen Bericht als E-Mail senden" - -msgid "TZDatetime" -msgstr "" - -msgid "TZDatetime_plural" -msgstr "" - -msgid "TZTime" -msgstr "" - -msgid "TZTime_plural" -msgstr "" - -#, python-format -msgid "The view %s can not be applied to this query" -msgstr "Die Ansicht %s ist auf diese Anfrage nicht anwendbar." - -#, python-format -msgid "The view %s could not be found" -msgstr "Die Ansicht %s konnte nicht gefunden werden." - -msgid "There is no default workflow" -msgstr "Dieser Entitätstyp hat standardmäßig keinen Workflow." - -msgid "This BaseTransition:" -msgstr "Diese abstracte Transition:" - -msgid "This Bookmark:" -msgstr "Dieses Lesezeichen:" - -msgid "This CWAttribute:" -msgstr "diese finale Relationsdefinition:" - -msgid "This CWCache:" -msgstr "Dieser Anwendungs-Cache:" - -msgid "This CWComputedRType:" -msgstr "" - -msgid "This CWConstraint:" -msgstr "diese Einschränkung:" - -msgid "This CWConstraintType:" -msgstr "Dieser Einschränkungstyp:" - -msgid "This CWDataImport:" -msgstr "" - -msgid "This CWEType:" -msgstr "Dieser Entitätstyp:" - -msgid "This CWGroup:" -msgstr "Diese Gruppe:" - -msgid "This CWProperty:" -msgstr "Diese Eigenschaft:" - -msgid "This CWRType:" -msgstr "Dieser Relationstyp:" - -msgid "This CWRelation:" -msgstr "Diese Relation:" - -msgid "This CWSource:" -msgstr "" - -msgid "This CWSourceHostConfig:" -msgstr "" - -msgid "This CWSourceSchemaConfig:" -msgstr "" - -msgid "This CWUniqueTogetherConstraint:" -msgstr "Diese unique-together-Einschränkung:" - -msgid "This CWUser:" -msgstr "Dieser Nutzer:" - -msgid "This EmailAddress:" -msgstr "Diese E-Mail-Adresse:" - -msgid "This ExternalUri:" -msgstr "dieser externe URI:" - -msgid "This RQLExpression:" -msgstr "Dieser RQL-Ausdruck:" - -msgid "This State:" -msgstr "Dieser Zustand:" - -msgid "This SubWorkflowExitPoint:" -msgstr "Dieser Subworkflow Endpunkt:" - -msgid "This TrInfo:" -msgstr "Diese Übergangs-Information:" - -msgid "This Transition:" -msgstr "Dieser Übergang:" - -msgid "This Workflow:" -msgstr "Dieser Workflow:" - -msgid "This WorkflowTransition:" -msgstr "Dieser Workflow-Übergang:" - -msgid "" -"This action is forbidden. If you think it should be allowed, please contact " -"the site administrator." 
-msgstr "" - -msgid "This entity type permissions:" -msgstr "Berechtigungen für diesen Entitätstyp" - -msgid "Time" -msgstr "Zeit" - -msgid "Time_plural" -msgstr "Zeiten" - -msgid "TrInfo" -msgstr "Übergangs-Information" - -msgid "TrInfo_plural" -msgstr "Übergangs-Informationen" - -msgid "Transition" -msgstr "Übergang" - -msgid "Transition_plural" -msgstr "Übergänge" - -msgid "URLs from which content will be imported. You can put one url per line" -msgstr "" - -msgid "Undoable actions" -msgstr "" - -msgid "Undoing" -msgstr "" - -msgid "UniqueConstraint" -msgstr "eindeutige Einschränkung" - -msgid "Unknown source type" -msgstr "" - -msgid "Unreachable objects" -msgstr "unzugängliche Objekte" - -#, python-format -msgid "Updated %(etype)s : %(entity)s" -msgstr "" - -msgid "Used by:" -msgstr "benutzt von:" - -msgid "Users and groups management" -msgstr "" - -msgid "WARNING" -msgstr "" - -msgid "Web server" -msgstr "Web-Server" - -msgid "Workflow" -msgstr "Workflow" - -msgid "Workflow history" -msgstr "Workflow-Chronik" - -msgid "WorkflowTransition" -msgstr "Workflow-Übergang" - -msgid "WorkflowTransition_plural" -msgstr "Workflow-Übergänge" - -msgid "Workflow_plural" -msgstr "Workflows" - -msgid "" -"You can either submit a new file using the browse button above, or choose to " -"remove already uploaded file by checking the \"detach attached file\" check-" -"box, or edit file content online with the widget below." -msgstr "" -"Sie können entweder mit dem bouton\n" -"\"Durchsuchen\" oberhalb eine neue Datei hochladen, eine bereits " -"hochgeladene Datei durch anklicken des Kästchens \"angehängte Datei abtrennen" -"\" entfernen, oder den Datei-Inhalt mit dem Widget unterhalb editieren." - -msgid "" -"You can either submit a new file using the browse button above, or edit file " -"content online with the widget below." -msgstr "" -"Sie können entweder mit dem bouton\n" -"\"Durchsuchen\" oberhalb eine neue Datei hochladen, oder den Datei-Inhalt " -"mit dem Widget unterhalb editieren." - -msgid "You can't change this relation" -msgstr "" - -msgid "You cannot remove the system source" -msgstr "" - -msgid "You cannot rename the system source" -msgstr "" - -msgid "" -"You have no access to this view or it can not be used to display the current " -"data." -msgstr "" -"Sie haben entweder keinen Zugriff auf diese Ansicht, oder die Ansicht kann " -"nicht zur Anzeige dieser Daten verwendet werden." - -msgid "" -"You're not authorized to access this page. If you think you should, please " -"contact the site administrator." -msgstr "" -"Sie haben keinen Zugriff auf diese Seite. Bitte wenden Sie sich ggfs. an " -"Ihren Administrator." - -#, python-format -msgid "[%s supervision] changes summary" -msgstr "[%s supervision] Beschreibung der Änderungen" - -msgid "" -"a RQL expression which should return some results, else the transition won't " -"be available. This query may use X and U variables that will respectivly " -"represents the current entity and the current user." -msgstr "" -"ein RQL-Ausdruck, der einige Treffer liefern sollte, sonst wird der Übergang " -"nicht verfügbar sein. Diese Abfrage kann X und U Variable benutzen, die " -"jeweils die aktuelle Entität und den aktuellen Nutzer repräsentieren." - -msgid "a URI representing an object in external data store" -msgstr "ein URI, der ein Objekt in einem externen Data-Store repräsentiert" - -msgid "a float is expected" -msgstr "Eine Dezimalzahl (float) wird erwartet."
- -msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" -msgstr "" - -msgid "" -"a simple cache entity characterized by a name and a validity date. The " -"target application is responsible for updating timestamp when necessary to " -"invalidate the cache (typically in hooks). Also, checkout the AppObject." -"get_cache() method." -msgstr "" - -msgid "abstract base class for transitions" -msgstr "abstrakte Basisklasse für Übergänge" - -msgid "action menu" -msgstr "" - -msgid "action(s) on this selection" -msgstr "Aktionen(en) bei dieser Auswahl" - -msgid "actions" -msgstr "Aktionen" - -msgid "activate" -msgstr "aktivieren" - -msgid "activated" -msgstr "aktiviert" - -msgid "add" -msgstr "hinzufügen" - -msgid "add Bookmark bookmarked_by CWUser object" -msgstr "Lesezeichen" - -msgid "add CWAttribute add_permission RQLExpression subject" -msgstr "" - -msgid "add CWAttribute constrained_by CWConstraint subject" -msgstr "Einschränkung" - -msgid "add CWAttribute read_permission RQLExpression subject" -msgstr "RQL-Ausdruck zum lesen" - -msgid "add CWAttribute relation_type CWRType object" -msgstr "Attributdefinition" - -msgid "add CWAttribute update_permission RQLExpression subject" -msgstr "RQL-Ausdruck für Berechtigung zum Aktualisieren" - -msgid "add CWEType add_permission RQLExpression subject" -msgstr "RQL-Ausdruck für Berechtigung zum Hinzufügen" - -msgid "add CWEType delete_permission RQLExpression subject" -msgstr "RQL-Ausdruck für Berechtigung zum Löschen" - -msgid "add CWEType read_permission RQLExpression subject" -msgstr "RQL-Ausdruck für Berechtigung zum Lesen" - -msgid "add CWEType update_permission RQLExpression subject" -msgstr "RQL-Ausdruck für Berechtigung zum Aktualisieren" - -msgid "add CWProperty for_user CWUser object" -msgstr "Eigenschaft" - -msgid "add CWRelation add_permission RQLExpression subject" -msgstr "RQL-Ausdruck hinzufügen" - -msgid "add CWRelation constrained_by CWConstraint subject" -msgstr "Einschränkung" - -msgid "add CWRelation delete_permission RQLExpression subject" -msgstr "RQL-Ausdruck löschen" - -msgid "add CWRelation read_permission RQLExpression subject" -msgstr "RQL-Ausdruck lesen" - -msgid "add CWRelation relation_type CWRType object" -msgstr "Relationsdefinition" - -msgid "add CWSourceHostConfig cw_host_config_of CWSource object" -msgstr "" - -msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" -msgstr "unique-together-Einschränkung hinzufügen" - -msgid "add CWUser in_group CWGroup object" -msgstr "Nutzer" - -msgid "add CWUser use_email EmailAddress subject" -msgstr "Email-Adresse" - -msgid "add State allowed_transition Transition object" -msgstr "Anfangszustand" - -msgid "add State allowed_transition Transition subject" -msgstr "erlaubter Übergang" - -msgid "add State allowed_transition WorkflowTransition subject" -msgstr "Workflow-Übergang" - -msgid "add State state_of Workflow object" -msgstr "Status" - -msgid "add Transition condition RQLExpression subject" -msgstr "Bedingung" - -msgid "add Transition destination_state State object" -msgstr "ausstehender Übergang" - -msgid "add Transition destination_state State subject" -msgstr "Zielstatus" - -msgid "add Transition transition_of Workflow object" -msgstr "Übergang" - -msgid "add WorkflowTransition condition RQLExpression subject" -msgstr "Workflow-Übergang" - -msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" -msgstr "Subworkflow Exit-Punkt" - -msgid "add WorkflowTransition transition_of Workflow object" -msgstr "Workflow-Übergang" - 
-msgid "add a BaseTransition" -msgstr "" - -msgid "add a Bookmark" -msgstr "" - -msgid "add a CWAttribute" -msgstr "" - -msgid "add a CWCache" -msgstr "" - -msgid "add a CWComputedRType" -msgstr "" - -msgid "add a CWConstraint" -msgstr "" - -msgid "add a CWConstraintType" -msgstr "" - -msgid "add a CWDataImport" -msgstr "" - -msgid "add a CWEType" -msgstr "" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "add a CWEType" -msgstr "einen Entitätstyp hinzufügen" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "add a CWEType" -msgstr "einen Entitätstyp hinzufügen" - -msgid "add a CWGroup" -msgstr "" - -msgid "add a CWProperty" -msgstr "" - -msgid "add a CWRType" -msgstr "" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "add a CWRType" -msgstr "einen Relationstyp hinzufügen" - -msgid "add a CWRelation" -msgstr "" - -msgid "add a CWSource" -msgstr "" - -msgid "add a CWSourceHostConfig" -msgstr "" - -msgid "add a CWSourceSchemaConfig" -msgstr "" - -msgid "add a CWUniqueTogetherConstraint" -msgstr "" - -msgid "add a CWUser" -msgstr "" - -msgid "add a EmailAddress" -msgstr "" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "add a EmailAddress" -msgstr "Email-Adresse hinzufügen" - -msgid "add a ExternalUri" -msgstr "" - -msgid "add a RQLExpression" -msgstr "" - -msgid "add a State" -msgstr "" - -msgid "add a SubWorkflowExitPoint" -msgstr "" - -msgid "add a TrInfo" -msgstr "" - -msgid "add a Transition" -msgstr "" - -msgid "add a Workflow" -msgstr "" - -msgid "add a WorkflowTransition" -msgstr "" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgid "add_permission" -msgstr "kann hinzugefügt werden durch" - -msgctxt "CWAttribute" -msgid "add_permission" -msgstr "" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgctxt "CWEType" -msgid "add_permission" -msgstr "Berechtigung hinzufügen" - -msgctxt "CWRelation" -msgid "add_permission" -msgstr "Berechtigung hinzufügen" - -msgid "add_permission_object" -msgstr "hat die Berechtigung zum Hinzufügen" - -msgctxt "CWGroup" -msgid "add_permission_object" -msgstr "kann hinzufügen" - -msgctxt "RQLExpression" -msgid "add_permission_object" -msgstr "benutzt, um die Hinzufüge-Berechtigung zu festzulegen für" - -msgid "add_relation" -msgstr "hinzufügen" - -#, python-format -msgid "added %(etype)s #%(eid)s (%(title)s)" -msgstr "Hinzufügen der Entität %(etype)s #%(eid)s (%(title)s)" - -#, python-format -msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" -"Die Relation %(rtype)s von %(frometype)s #%(eidfrom)s zu %(toetype)s #" -"%(eidto)s wurde hinzugefügt." 
- -msgid "additional type specific properties" -msgstr "" - -msgid "addrelated" -msgstr "hinzufügen" - -msgid "address" -msgstr "Adresse" - -msgctxt "EmailAddress" -msgid "address" -msgstr "Adresse" - -msgid "alias" -msgstr "Alias" - -msgctxt "EmailAddress" -msgid "alias" -msgstr "Alias" - -msgid "allow to set a specific workflow for an entity" -msgstr "erlaube, einen bestimmten Workflow für eine Entität zu setzen" - -msgid "allowed options depends on the source type" -msgstr "" - -msgid "allowed transitions from this state" -msgstr "erlaubte Übergänge von diesem Zustand" - -#, python-format -msgid "allowed values for \"action\" are %s" -msgstr "" - -msgid "allowed_transition" -msgstr "erlaubter Übergang" - -msgctxt "State" -msgid "allowed_transition" -msgstr "erlaubter Übergang" - -msgid "allowed_transition_object" -msgstr "ausstehende Zustände" - -msgctxt "BaseTransition" -msgid "allowed_transition_object" -msgstr "ausstehende Zustände" - -msgctxt "Transition" -msgid "allowed_transition_object" -msgstr "ausstehende Zustände" - -msgctxt "WorkflowTransition" -msgid "allowed_transition_object" -msgstr "ausstehende Zustände" - -msgid "an electronic mail address associated to a short alias" -msgstr "Eine E-Mail-Adresse wurde mit einem Alias verknüpft." - -msgid "an error occurred" -msgstr "Es ist ein Fehler aufgetreten." - -msgid "an error occurred while processing your request" -msgstr "Während der Bearbeitung Ihrer Anfrage ist ein Fehler aufgetreten." - -msgid "an error occurred, the request cannot be fulfilled" -msgstr "" -"Es ist ein Fehler aufgetreten, Ihre Anfrage kann nicht bearbeitet werden." - -msgid "an integer is expected" -msgstr "Ganze Zahl (integer) erwartet." - -msgid "and linked" -msgstr "und verknüpft" - -msgid "and/or between different values" -msgstr "und/oder zwischen verschiedenen Werten" - -msgid "anyrsetview" -msgstr "" - -msgid "april" -msgstr "April" - -#, python-format -msgid "archive for %(author)s" -msgstr "" - -#, python-format -msgid "archive for %(month)s/%(year)s" -msgstr "" - -#, python-format -msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" -msgstr "" -"Die Entität %(eid)s ´vom Typ %(etype)s muss mindestens mit einer \n" -"anderen durch die Relation %(rtype)s verknüpft sein." - -msgid "attribute" -msgstr "Attribut" - -msgid "august" -msgstr "August" - -msgid "authentication failure" -msgstr "Nutzername oder Passwort falsch" - -msgid "auto" -msgstr "automatisch" - -msgid "autocomputed attribute used to ensure transition coherency" -msgstr "" - -msgid "automatic" -msgstr "automatisch" - -#, python-format -msgid "back to pagination (%s results)" -msgstr "" - -msgid "bad value" -msgstr "Unzulässiger Wert" - -msgid "badly formatted url" -msgstr "" - -msgid "base url" -msgstr "Basis-URL" - -msgid "bookmark has been removed" -msgstr "Das Lesezeichen wurde gelöscht." 
- -msgid "bookmark this page" -msgstr "diese Seite merken" - -msgid "bookmark this search" -msgstr "diese Suche merken" - -msgid "bookmarked_by" -msgstr "Lesezeichen angelegt durch" - -msgctxt "Bookmark" -msgid "bookmarked_by" -msgstr "Lesezeichen angelegt durch" - -msgid "bookmarked_by_object" -msgstr "hat Lesezeichen" - -msgctxt "CWUser" -msgid "bookmarked_by_object" -msgstr "verwendet Lesezeichen" - -msgid "bookmarks" -msgstr "Lesezeichen" - -msgid "bookmarks are used to have user's specific internal links" -msgstr "Lesezeichen werden für nutzer-spezifische interne Links verwendet" - -msgid "boxes" -msgstr "Boxen" - -msgid "bug report sent" -msgstr "Fehlerbericht gesendet" - -msgid "button_apply" -msgstr "Anwenden" - -msgid "button_cancel" -msgstr "Abbrechen" - -msgid "button_delete" -msgstr "Löschen" - -msgid "button_ok" -msgstr "OK" - -msgid "by" -msgstr "durch" - -msgid "by relation" -msgstr "durch die Relation" - -msgid "by_transition" -msgstr "je Übergang" - -msgctxt "TrInfo" -msgid "by_transition" -msgstr "je Übergang" - -msgid "by_transition_object" -msgstr "Übergangsinformation" - -msgctxt "BaseTransition" -msgid "by_transition_object" -msgstr "Übergangsinformation" - -msgctxt "Transition" -msgid "by_transition_object" -msgstr "Übergangsinformation" - -msgctxt "WorkflowTransition" -msgid "by_transition_object" -msgstr "Übergangsinformation" - -msgid "calendar" -msgstr "Kalender anzeigen" - -msgid "can not resolve entity types:" -msgstr "Die Typen konnten nicht ermittelt werden:" - -msgid "can only have one url" -msgstr "" - -msgid "can't be changed" -msgstr "kann nicht geändert werden" - -msgid "can't be deleted" -msgstr "kann nicht entfernt werden" - -msgid "can't change this attribute" -msgstr "" - -#, python-format -msgid "can't display data, unexpected error: %s" -msgstr "Kann die Daten aufgrund des folgenden Fehlers nicht anzeigen: %s" - -msgid "can't have multiple exits on the same state" -msgstr "Mehrere Ausgänge aus demselben Zustand nicht möglich." - -#, python-format -msgid "can't parse %(value)r (expected %(format)s)" -msgstr "" -"Kann den Wert %(value)r nicht analysieren (erwartetes Format: %(format)s)" - -#, python-format -msgid "" -"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " -"%(value)s) does not exist any longer" -msgstr "" - -#, python-format -msgid "" -"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " -"exist in the schema anymore." 
-msgstr "" - -#, python-format -msgid "can't restore state of entity %s, it has been deleted inbetween" -msgstr "" - -#, python-format -msgid "" -"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" -"%(card)s" -msgstr "" - -msgid "cancel" -msgstr "" - -msgid "cancel select" -msgstr "Auswahl aufheben" - -msgid "cancel this insert" -msgstr "diese Einfügung aufheben" - -msgid "cardinality" -msgstr "Kardinalität" - -msgctxt "CWAttribute" -msgid "cardinality" -msgstr "Kardinalität" - -msgctxt "CWRelation" -msgid "cardinality" -msgstr "Kardinalität" - -msgid "category" -msgstr "Kategorie" - -#, python-format -msgid "changed state of %(etype)s #%(eid)s (%(title)s)" -msgstr "Änderung des Zustands von %(etype)s #%(eid)s (%(title)s)" - -msgid "changes applied" -msgstr "Änderungen übernommen" - -msgid "click here to see created entity" -msgstr "Hier klicken, um die angelegte Entität anzusehen" - -msgid "click here to see edited entity" -msgstr "" - -msgid "click on the box to cancel the deletion" -msgstr "Klicken Sie die Box an, um das Löschen rückgängig zu machen." - -msgid "click to add a value" -msgstr "Klicken Sie, um einen Wert hinzuzufügen" - -msgid "click to delete this value" -msgstr "Klicken Sie, um diesen Wert zu löschen" - -msgid "click to edit this field" -msgstr "Klicken Sie, um dieses Feld zu editieren" - -msgid "close all" -msgstr "" - -msgid "comment" -msgstr "Kommentar" - -msgctxt "TrInfo" -msgid "comment" -msgstr "Kommentar" - -msgid "comment_format" -msgstr "Format" - -msgctxt "TrInfo" -msgid "comment_format" -msgstr "Format" - -msgid "components" -msgstr "Komponenten" - -msgid "components_navigation" -msgstr "Seitennavigation" - -msgid "components_navigation_description" -msgstr "Paginierungs-Komponente für große Ergebnismengen" - -msgid "components_rqlinput" -msgstr "rql Eingabefeld" - -msgid "components_rqlinput_description" -msgstr "das rql-Eingabefeld im Seitenkopf" - -msgid "composite" -msgstr "" - -msgctxt "CWRelation" -msgid "composite" -msgstr "composite" - -msgid "condition" -msgstr "Bedingung" - -msgctxt "BaseTransition" -msgid "condition" -msgstr "Bedingung" - -msgctxt "Transition" -msgid "condition" -msgstr "Bedingung" - -msgctxt "WorkflowTransition" -msgid "condition" -msgstr "Bedingung" - -msgid "condition_object" -msgstr "Bedingung von" - -msgctxt "RQLExpression" -msgid "condition_object" -msgstr "Bedingung von" - -msgid "conditions" -msgstr "Bedingungen" - -msgid "config" -msgstr "" - -msgctxt "CWSource" -msgid "config" -msgstr "" - -msgctxt "CWSourceHostConfig" -msgid "config" -msgstr "" - -msgid "config mode" -msgstr "Konfigurationsmodus" - -msgid "config type" -msgstr "Konfigurationstyp" - -msgid "confirm password" -msgstr "Passwort bestätigen" - -msgid "constrained_by" -msgstr "eingeschränkt durch" - -msgctxt "CWAttribute" -msgid "constrained_by" -msgstr "eingeschränkt durch" - -msgctxt "CWRelation" -msgid "constrained_by" -msgstr "eingeschränkt durch" - -msgid "constrained_by_object" -msgstr "Einschränkungen" - -msgctxt "CWConstraint" -msgid "constrained_by_object" -msgstr "Einschränkungen" - -msgid "constraint factory" -msgstr "Einschränkungs-Factory" - -msgid "constraint_of" -msgstr "" - -msgctxt "CWUniqueTogetherConstraint" -msgid "constraint_of" -msgstr "" - -msgid "constraint_of_object" -msgstr "" - -msgctxt "CWEType" -msgid "constraint_of_object" -msgstr "" - -msgid "constraints" -msgstr "Einschränkungen" - -msgid "constraints applying on this relation" -msgstr "auf diese Relation angewandte Einschränkung" - -msgid "content type" 
-msgstr "MIME-Typ" - -msgid "context" -msgstr "Kontext" - -msgid "context where this box should be displayed" -msgstr "Kontext, in dem diese Box angezeigt werden soll" - -msgid "context where this component should be displayed" -msgstr "Kontext, in dem diese Komponente angezeigt werden soll" - -msgid "context where this facet should be displayed, leave empty for both" -msgstr "" -"Kontext, wo diese Nachricht angezeigt werden soll; für beides: frei lassen." - -msgid "control subject entity's relations order" -msgstr "" - -msgid "copy" -msgstr "kopieren" - -msgid "core relation indicating a user's groups" -msgstr "Kernrelation für die Gruppen eines Nutzers" - -msgid "" -"core relation indicating owners of an entity. This relation implicitly put " -"the owner into the owners group for the entity" -msgstr "" - -msgid "core relation indicating the original creator of an entity" -msgstr "Kernrelation für den Urheber einer Entität" - -msgid "core relation indicating the type of an entity" -msgstr "Kernrelation für den Identitätstyp" - -msgid "" -"core relation indicating the types (including specialized types) of an entity" -msgstr "" - -msgid "could not connect to the SMTP server" -msgstr "Keine Verbindung mit dem SMTP-Server" - -msgid "create an index for quick search on this attribute" -msgstr "Erstelle einen Index zur schnellen Suche über dieses Attribut" - -msgid "created on" -msgstr "angelegt am" - -msgid "created_by" -msgstr "erstellt von" - -msgid "created_by_object" -msgstr "hat erstellt" - -msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" -msgstr "Erstelle Lesezeichen für %(linkto)s" - -msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" -msgstr "Erstelle Attribut %(linkto)s" - -msgid "" -"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" -msgstr "Erstelle Einschränkung für attribute %(linkto)s" - -msgid "" -"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" -msgstr "Erstelle Einschränkung für Relation %(linkto)s" - -msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" -msgstr "Erstelle Eigenschaft für Nutzer %(linkto)s" - -msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" -msgstr "Erstelle Relation %(linkto)s" - -msgid "" -"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " -"%(linkto)s)" -msgstr "" - -msgid "" -"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " -"constraint_of CWEType %(linkto)s)" -msgstr "" - -msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" -msgstr "Erstelle neuen Nutzer in Gruppe %(linkto)s" - -msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" -msgstr "Erstelle E-Mail-Adresse für Nutzer %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" -msgstr "" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" -msgstr "RQL-Ausdruck für Leseberechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s update_permission " -"RQLExpression)" -msgstr "RQL Ausdruck für Aktualisierungs-Berechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" -msgstr "Erstelle rql-Ausdruck für Hinzufüge-Berechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" -msgstr "Erstelle rql-Ausdruck für Lösch-Berechtigung für %(linkto)s" - -msgid "" -"creating 
RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" -msgstr "Erstelle rql-Ausdruck für Lese-Berechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" -msgstr "Erstelle rql-Ausdruck für Aktualisierungs-Berechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" -msgstr "RQL-Ausdruck zur Vergabe der Hinzufüge-Berechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s delete_permission " -"RQLExpression)" -msgstr "RQL-Ausdruck zur Vergabe der Lösch-Berechtigung für %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" -msgstr "RQL-Ausdruck zur Vergabe der Lese-Berechtigung für %(linkto)s" - -msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" -msgstr "Erstelle RQL-Ausdruck für Übergang %(linkto)s" - -msgid "" -"creating RQLExpression (WorkflowTransition %(linkto)s condition " -"RQLExpression)" -msgstr "Erstelle RQL-Ausdruck für Workflow-Übergang %(linkto)s" - -msgid "creating State (State allowed_transition Transition %(linkto)s)" -msgstr "Erstelle einen zustand, der den Übergang %(linkto)s auslösen kann." - -msgid "creating State (State state_of Workflow %(linkto)s)" -msgstr "Erstelle Zustand des Workflows %(linkto)s" - -msgid "creating State (Transition %(linkto)s destination_state State)" -msgstr "Erstelle Zielzustand für Übergang %(linkto)s" - -msgid "" -"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " -"subworkflow_exit SubWorkflowExitPoint)" -msgstr "Erstelle Subworkflow Exit-Punkt für Workflow-Übergang %(linkto)s" - -msgid "creating Transition (State %(linkto)s allowed_transition Transition)" -msgstr "Erstelle auslösbaren Übergang für Zustand %(linkto)s" - -msgid "creating Transition (Transition destination_state State %(linkto)s)" -msgstr "Erstelle Übergang, der zu Zustand %(linkto)s führt." - -msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" -msgstr "Erstelle Übergang des Workflows %(linkto)s" - -msgid "" -"creating WorkflowTransition (State %(linkto)s allowed_transition " -"WorkflowTransition)" -msgstr "Erstelle Workflow-Übergang, der zum Zustand %(linkto)s führt." 
- -msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow " -"%(linkto)s)" -msgstr "Erstelle Workflow-Übergang des Workflows %(linkto)s" - -msgid "creation" -msgstr "Erstellung" - -msgid "creation date" -msgstr "Erstellungsdatum" - -msgid "creation time of an entity" -msgstr "Erstellungszeitpunkt einer Entität" - -msgid "creation_date" -msgstr "Erstellungsdatum" - -msgid "cstrtype" -msgstr "Typ der Einschränkung" - -msgctxt "CWConstraint" -msgid "cstrtype" -msgstr "Einschränkungstyp" - -msgid "cstrtype_object" -msgstr "benutzt von" - -msgctxt "CWConstraintType" -msgid "cstrtype_object" -msgstr "Einschränkungstyp von" - -msgid "csv export" -msgstr "CSV-Export" - -msgid "csv export (entities)" -msgstr "" - -msgid "ctxcomponents" -msgstr "Kontext-Komponenten" - -msgid "ctxcomponents_anonuserlink" -msgstr "" - -msgid "ctxcomponents_anonuserlink_description" -msgstr "" - -msgid "ctxcomponents_appliname" -msgstr "" - -msgid "ctxcomponents_appliname_description" -msgstr "" - -msgid "ctxcomponents_bookmarks_box" -msgstr "Lesezeichen-Box" - -msgid "ctxcomponents_bookmarks_box_description" -msgstr "Box mit einer Liste der Lesezeichen des Nutzers" - -msgid "ctxcomponents_breadcrumbs" -msgstr "Brotkrumen" - -msgid "ctxcomponents_breadcrumbs_description" -msgstr "" -"Anzeigen eines Pfads zur Lokalisierung der aktuellen Seite innerhalb der Site" - -msgid "ctxcomponents_download_box" -msgstr "Download-Box" - -msgid "ctxcomponents_download_box_description" -msgstr "" - -msgid "ctxcomponents_edit_box" -msgstr "Aktionsbox" - -msgid "ctxcomponents_edit_box_description" -msgstr "Box mit verfügbaren Aktionen für die angezeigten Daten" - -msgid "ctxcomponents_facet.filterbox" -msgstr "Filter" - -msgid "ctxcomponents_facet.filterbox_description" -msgstr "Box mit Filter für aktuelle Suchergebnis-Funktionalität" - -msgid "ctxcomponents_logo" -msgstr "Icon" - -msgid "ctxcomponents_logo_description" -msgstr "Das Anwendungs-Ikon angezeigt im Bildschirmkopf" - -msgid "ctxcomponents_metadata" -msgstr "Metadaten für Entität metadata" - -msgid "ctxcomponents_metadata_description" -msgstr "" - -msgid "ctxcomponents_possible_views_box" -msgstr "Box mit möglichen Ansichten" - -msgid "ctxcomponents_possible_views_box_description" -msgstr "Box mit möglichen Ansichten für die angezeigten Daten" - -msgid "ctxcomponents_prevnext" -msgstr "vorherige/nächste Entität" - -msgid "ctxcomponents_prevnext_description" -msgstr "" -"display link to go from one entity to another on entities implementing the " -"\"previous/next\" interface." - -msgid "ctxcomponents_rss" -msgstr "RSS-Box" - -msgid "ctxcomponents_rss_description" -msgstr "RSS icon um die angezeigten Daten als RSS-Thread zu erhalten" - -msgid "ctxcomponents_search_box" -msgstr "Suchbox" - -msgid "ctxcomponents_search_box_description" -msgstr "Suchbox" - -msgid "ctxcomponents_startup_views_box" -msgstr "Box für Start-Ansicht" - -msgid "ctxcomponents_startup_views_box_description" -msgstr "Box mit möglichen Start-Ansichten" - -msgid "ctxcomponents_userstatus" -msgstr "" - -msgid "ctxcomponents_userstatus_description" -msgstr "" - -msgid "ctxcomponents_wfhistory" -msgstr "Workflow-Chronik" - -msgid "ctxcomponents_wfhistory_description" -msgstr "Zeite die Workflow-Chronik." 
- -msgid "ctxtoolbar" -msgstr "Werkzeugleiste" - -msgid "custom_workflow" -msgstr "angepasster Workflow" - -msgid "custom_workflow_object" -msgstr "angepasster Workflow von" - -msgid "cw.groups-management" -msgstr "" - -msgid "cw.users-management" -msgstr "" - -msgid "cw_for_source" -msgstr "" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_for_source" -msgstr "" - -msgid "cw_for_source_object" -msgstr "" - -msgctxt "CWSource" -msgid "cw_for_source_object" -msgstr "" - -msgid "cw_host_config_of" -msgstr "" - -msgctxt "CWSourceHostConfig" -msgid "cw_host_config_of" -msgstr "" - -msgid "cw_host_config_of_object" -msgstr "" - -msgctxt "CWSource" -msgid "cw_host_config_of_object" -msgstr "" - -msgid "cw_import_of" -msgstr "" - -msgctxt "CWDataImport" -msgid "cw_import_of" -msgstr "" - -msgid "cw_import_of_object" -msgstr "" - -msgctxt "CWSource" -msgid "cw_import_of_object" -msgstr "" - -msgid "cw_schema" -msgstr "" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_schema" -msgstr "" - -msgid "cw_schema_object" -msgstr "" - -msgctxt "CWEType" -msgid "cw_schema_object" -msgstr "" - -msgctxt "CWRType" -msgid "cw_schema_object" -msgstr "" - -msgctxt "CWRelation" -msgid "cw_schema_object" -msgstr "" - -msgid "cw_source" -msgstr "" - -msgid "cw_source_object" -msgstr "" - -msgid "cwetype-box" -msgstr "Box-Ansicht" - -msgid "cwetype-description" -msgstr "Beschreibung" - -msgid "cwetype-permissions" -msgstr "Berechtigungen" - -msgid "cwetype-views" -msgstr "Ansichten" - -msgid "cwetype-workflow" -msgstr "Workflow" - -msgid "cwgroup-main" -msgstr "Beschreibung" - -msgid "cwgroup-permissions" -msgstr "Berechtigungen" - -msgid "cwrtype-description" -msgstr "Beschreibung" - -msgid "cwrtype-permissions" -msgstr "Berechtigungen" - -msgid "cwsource-imports" -msgstr "" - -msgid "cwsource-main" -msgstr "" - -msgid "cwsource-mapping" -msgstr "" - -msgid "cwuri" -msgstr "interner URI" - -msgid "data directory url" -msgstr "URL des Daten-Pools" - -msgid "data model schema" -msgstr "Schema der Website" - -msgid "data sources" -msgstr "" - -msgid "data sources management" -msgstr "" - -msgid "date" -msgstr "Datum" - -msgid "deactivate" -msgstr "deaktivieren" - -msgid "deactivated" -msgstr "deaktiviert" - -msgid "december" -msgstr "Dezember" - -msgid "default" -msgstr "Standardwert" - -msgid "default text format for rich text fields." -msgstr "Standardformat für Textfelder" - -msgid "default user workflow" -msgstr "Standard-Workflow für Nutzer" - -msgid "default value" -msgstr "Standardwert" - -msgid "default value as gziped pickled python object" -msgstr "" - -msgid "default workflow for an entity type" -msgstr "Standard-Workflow eines Entitätstyps" - -msgid "default_workflow" -msgstr "Standard-Workflow" - -msgctxt "CWEType" -msgid "default_workflow" -msgstr "Standard-Workflow" - -msgid "default_workflow_object" -msgstr "Standard-Workflow von" - -msgctxt "Workflow" -msgid "default_workflow_object" -msgstr "Standard-Workflow von" - -msgid "defaultval" -msgstr "Standard-Wert" - -msgctxt "CWAttribute" -msgid "defaultval" -msgstr "Standard-Wert" - -msgid "define a CubicWeb user" -msgstr "Einen CubicWeb-Nutzer definieren" - -msgid "define a CubicWeb users group" -msgstr "Eine CubicWeb-Nutzergruppe definieren" - -msgid "" -"define a final relation: link a final relation type from a non final entity " -"to a final entity type. used to build the instance schema" -msgstr "" - -msgid "" -"define a non final relation: link a non final relation type from a non final " -"entity to a non final entity type. 
used to build the instance schema" -msgstr "" - -msgid "define a relation type, used to build the instance schema" -msgstr "" -"Definieren eines Relationstyps, der zur Erstellung des Instanz-Schemas " -"benutzt wird." - -msgid "define a rql expression used to define permissions" -msgstr "Definieren eines RQL-Ausdrucks zur Festlegung von Berechtigungen." - -msgid "define a schema constraint" -msgstr "Eine Schema-Einschränkung definieren" - -msgid "define a schema constraint type" -msgstr "den Typ einer Schema-Einschränkung definieren" - -msgid "define a virtual relation type, used to build the instance schema" -msgstr "" - -msgid "define an entity type, used to build the instance schema" -msgstr "definieren eines Entitätstyps zur Erstellung des Instanz-Schemas" - -msgid "define how we get out from a sub-workflow" -msgstr "Definieren, wie man aus einem Sub-Workflow herauskommt" - -msgid "defines a sql-level multicolumn unique index" -msgstr "definiert auf SQL-Ebene einen eindeutigen Index über mehrere Spalten" - -msgid "" -"defines what's the property is applied for. You must select this first to be " -"able to set value" -msgstr "" -"definiert, worauf die Eigenschaft angewendet wird. Sie müssen dies zunächst " -"markieren,um den Wert zuzuweisen." - -msgid "delete" -msgstr "löschen" - -msgid "delete this bookmark" -msgstr "dieses Lesezeichen löschen" - -msgid "delete this relation" -msgstr "diese Relation löschen" - -msgid "delete_permission" -msgstr "kann gelöscht werden durch" - -msgctxt "CWEType" -msgid "delete_permission" -msgstr "Lösch-Berechtigung" - -msgctxt "CWRelation" -msgid "delete_permission" -msgstr "Lösch-Berechtigung" - -msgid "delete_permission_object" -msgstr "hat Lösch-Berechtigung" - -msgctxt "CWGroup" -msgid "delete_permission_object" -msgstr "hat Lösch-Berechtigung für" - -msgctxt "RQLExpression" -msgid "delete_permission_object" -msgstr "hat die Berechtigung, zu löschen" - -#, python-format -msgid "deleted %(etype)s #%(eid)s (%(title)s)" -msgstr "Löschen der Entität %(etype)s #%(eid)s (%(title)s)" - -#, python-format -msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" -"Relation %(rtype)s von %(frometype)s #%(eidfrom)s zu %(toetype)s #%(eidto)s " -"gelöscht" - -msgid "depends on the constraint type" -msgstr "hängt vom Einschränkungsgyp ab" - -msgid "description" -msgstr "Beschreibung" - -msgctxt "BaseTransition" -msgid "description" -msgstr "Beschreibung" - -msgctxt "CWAttribute" -msgid "description" -msgstr "Beschreibung" - -msgctxt "CWComputedRType" -msgid "description" -msgstr "" - -msgctxt "CWEType" -msgid "description" -msgstr "Beschreibung" - -msgctxt "CWRType" -msgid "description" -msgstr "Beschreibung" - -msgctxt "CWRelation" -msgid "description" -msgstr "Beschreibung" - -msgctxt "State" -msgid "description" -msgstr "Beschreibung" - -msgctxt "Transition" -msgid "description" -msgstr "Beschreibung" - -msgctxt "Workflow" -msgid "description" -msgstr "Beschreibung" - -msgctxt "WorkflowTransition" -msgid "description" -msgstr "Beschreibung" - -msgid "description_format" -msgstr "Format" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "Format" - -msgctxt "CWAttribute" -msgid "description_format" -msgstr "Format" - -msgctxt "CWComputedRType" -msgid "description_format" -msgstr "" - -msgctxt "CWEType" -msgid "description_format" -msgstr "Format" - -msgctxt "CWRType" -msgid "description_format" -msgstr "Format" - -msgctxt "CWRelation" -msgid "description_format" -msgstr "Format" - -msgctxt 
"State" -msgid "description_format" -msgstr "Format" - -msgctxt "Transition" -msgid "description_format" -msgstr "Format" - -msgctxt "Workflow" -msgid "description_format" -msgstr "Format" - -msgctxt "WorkflowTransition" -msgid "description_format" -msgstr "Format" - -msgid "destination state for this transition" -msgstr "Zielzustand dieses Übergangs" - -msgid "destination state must be in the same workflow as our parent transition" -msgstr "Zielzustand muss im selben Workflow sein wie unser Parent-Übergang" - -msgid "destination state of a transition" -msgstr "Zielzustand eines Übergangs" - -msgid "" -"destination state. No destination state means that transition should go back " -"to the state from which we've entered the subworkflow." -msgstr "" -"Zielzustand. Kein Zielzustand bedeutet, dass der Übergang in den Zustand " -"zurückführen soll, von dem aus der Subworkflow erreicht wurde." - -msgid "destination_state" -msgstr "Zielzustand" - -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "Zielzustand" - -msgctxt "Transition" -msgid "destination_state" -msgstr "Zielzustand" - -msgid "destination_state_object" -msgstr "Ziel von" - -msgctxt "State" -msgid "destination_state_object" -msgstr "Ziel von" - -msgid "detach attached file" -msgstr "angehängte Datei abtrennen" - -msgid "display order of the box" -msgstr "angezeigte Reihenfolge der Boxen" - -msgid "display order of the component" -msgstr "angezeigte Reihenfolge der Komponenten" - -msgid "display order of the facet" -msgstr "angezeigte Reihenfolge der Facetten" - -msgid "display the box or not" -msgstr "die Box anzeigen oder nicht" - -msgid "display the component or not" -msgstr "die Komponente anzeigen oder nicht" - -msgid "display the facet or not" -msgstr "die Facette anzeigen oder nicht" - -msgid "download" -msgstr "Herunterladen" - -#, python-format -msgid "download %s" -msgstr "Herunterladen von %s" - -msgid "download icon" -msgstr "Ikone 'herunterladen'" - -msgid "download schema as owl" -msgstr "Schema als OWL herunterladen" - -msgid "edit bookmarks" -msgstr "Lesezeichen bearbeiten" - -msgid "edit canceled" -msgstr "Änderungen verwerfen" - -msgid "editable-table" -msgstr "bearbeitbare Tabelle" - -msgid "eid" -msgstr "" - -msgid "embedded html" -msgstr "HTML-Inhalt" - -msgid "end_timestamp" -msgstr "" - -msgctxt "CWDataImport" -msgid "end_timestamp" -msgstr "" - -msgid "entities deleted" -msgstr "Entitäten gelöscht" - -msgid "entity and relation types can't be mapped, only attributes or relations" -msgstr "" - -msgid "entity copied" -msgstr "Entität kopiert" - -msgid "entity created" -msgstr "Entität erstellt" - -msgid "entity creation" -msgstr "Erstellung der Entität" - -msgid "entity deleted" -msgstr "Entität gelöscht" - -msgid "entity deletion" -msgstr "Löschen der Entität" - -msgid "entity edited" -msgstr "Entität bearbeitet" - -msgid "entity has no workflow set" -msgstr "Entität hat keinen Workflow" - -msgid "entity linked" -msgstr "Entität verknüpft" - -msgid "entity type" -msgstr "Entitätstyp" - -msgid "entity types which may use this workflow" -msgstr "Entitätstypen, die diesen Workflow benutzen können." - -msgid "entity update" -msgstr "Aktualisierung der Entität" - -msgid "entityview" -msgstr "" - -msgid "error" -msgstr "" - -msgid "error while publishing ReST text" -msgstr "Fehler beim Übersetzen von reST" - -msgid "exit state must be a subworkflow state" -msgstr "Exit-Zustand muss ein Subworkflow-Zustand sein." 
- -msgid "exit_point" -msgstr "Exit-Punkt " - -msgid "exit_point_object" -msgstr "Exit-Punkt für" - -#, python-format -msgid "exiting from subworkflow %s" -msgstr "verlasse Subworkflow %s" - -msgid "expression" -msgstr "Ausdruck" - -msgctxt "RQLExpression" -msgid "expression" -msgstr "Ausdruck" - -msgid "exprtype" -msgstr "Typ des Ausdrucks" - -msgctxt "RQLExpression" -msgid "exprtype" -msgstr "Typ des Ausdrucks" - -msgid "extra_props" -msgstr "" - -msgctxt "CWAttribute" -msgid "extra_props" -msgstr "" - -msgid "facet-loading-msg" -msgstr "" - -msgid "facet.filters" -msgstr "" - -msgid "facetbox" -msgstr "Facetten-Box" - -msgid "facets_created_by-facet" -msgstr "\"erstellt durch\" facet" - -msgid "facets_created_by-facet_description" -msgstr "" - -msgid "facets_cw_source-facet" -msgstr "" - -msgid "facets_cw_source-facet_description" -msgstr "" - -msgid "facets_cwfinal-facet" -msgstr "\"finaler Entitäts- oder Relationstyp\" facet" - -msgid "facets_cwfinal-facet_description" -msgstr "" - -msgid "facets_datafeed.dataimport.status" -msgstr "" - -msgid "facets_datafeed.dataimport.status_description" -msgstr "" - -msgid "facets_etype-facet" -msgstr "\"Entitätstyp\" facet" - -msgid "facets_etype-facet_description" -msgstr "" - -msgid "facets_has_text-facet" -msgstr "\"hat Text\" facet" - -msgid "facets_has_text-facet_description" -msgstr "" - -msgid "facets_in_group-facet" -msgstr "\"in Gruppe\" facet" - -msgid "facets_in_group-facet_description" -msgstr "" - -msgid "facets_in_state-facet" -msgstr "\"in Zustand\" facet" - -msgid "facets_in_state-facet_description" -msgstr "" - -msgid "failed" -msgstr "" - -#, python-format -msgid "failed to uniquify path (%s, %s)" -msgstr "Konnte keinen eindeutigen Dateinamen erzeugen (%s, %s)" - -msgid "february" -msgstr "Februar" - -msgid "file tree view" -msgstr "Baumansicht (Dateien)" - -msgid "final" -msgstr "" - -msgctxt "CWEType" -msgid "final" -msgstr "" - -msgctxt "CWRType" -msgid "final" -msgstr "" - -msgid "first name" -msgstr "Vorname" - -msgid "firstname" -msgstr "Vorname" - -msgctxt "CWUser" -msgid "firstname" -msgstr "Vorname" - -msgid "foaf" -msgstr "FOAF" - -msgid "focus on this selection" -msgstr "" - -msgid "follow" -msgstr "dem Link folgen" - -#, python-format -msgid "follow this link for more information on this %s" -msgstr "Folgend Sie dem Link für mehr Informationen über %s" - -msgid "for_user" -msgstr "für den Nutzer" - -msgctxt "CWProperty" -msgid "for_user" -msgstr "für Nutzer" - -msgid "for_user_object" -msgstr "hat als Eigenschaft" - -msgctxt "CWUser" -msgid "for_user_object" -msgstr "verwendet die Eigenschaften" - -msgid "formula" -msgstr "" - -msgctxt "CWAttribute" -msgid "formula" -msgstr "" - -msgid "friday" -msgstr "Freitag" - -msgid "from" -msgstr "von" - -#, python-format -msgid "from %(date)s" -msgstr "vom %(date)s" - -msgid "from_entity" -msgstr "der Entität" - -msgctxt "CWAttribute" -msgid "from_entity" -msgstr "Attribut der Entität" - -msgctxt "CWRelation" -msgid "from_entity" -msgstr "Relation der Entität" - -msgid "from_entity_object" -msgstr "der Entität" - -msgctxt "CWEType" -msgid "from_entity_object" -msgstr "Subjektrelation" - -msgid "from_interval_start" -msgstr "Von" - -msgid "from_state" -msgstr "des Zustands" - -msgctxt "TrInfo" -msgid "from_state" -msgstr "Anfangszustand" - -msgid "from_state_object" -msgstr "Übergänge aus diesem Zustand" - -msgctxt "State" -msgid "from_state_object" -msgstr "Anfangszustand von" - -msgid "full text or RQL query" -msgstr "Volltextsuche oder RQL-Anfrage" - -msgid 
"fulltext_container" -msgstr "Container des indizierten Textes" - -msgctxt "CWRType" -msgid "fulltext_container" -msgstr "zu indizierendes Objekt" - -msgid "fulltextindexed" -msgstr "Indizierung des Textes" - -msgctxt "CWAttribute" -msgid "fulltextindexed" -msgstr "indizierter Text" - -msgid "gc" -msgstr "" - -msgid "generic plot" -msgstr "generischer Plot" - -msgid "generic relation to link one entity to another" -msgstr "generische Relation zur Verbindung einer Entität mit einer anderen" - -msgid "" -"generic relation to specify that an external entity represent the same " -"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" -msgstr "" -"generische Relation, die anzeigt, dass eine Entität mit einer anderen Web-" -"Ressource identisch ist (siehe http://www.w3.org/TR/owl-ref/#sameAs-def)." - -msgid "granted to groups" -msgstr "an Gruppen gewährt" - -#, python-format -msgid "graphical representation of %(appid)s data model" -msgstr "graphische Darstellung des Datenmodells von %(appid)s" - -#, fuzzy, python-format -msgid "" -"graphical representation of the %(etype)s entity type from %(appid)s data " -"model" -msgstr "" -"graphische Darstellung des Datenmodells des Entitätstyps (etype)s von " -"%(appid)s" - -#, python-format -msgid "" -"graphical representation of the %(rtype)s relation type from %(appid)s data " -"model" -msgstr "" -"graphische Darstellung des Datenmodells des Relationstyps %(rtype)s von " -"%(appid)s" - -msgid "group in which a user should be to be allowed to pass this transition" -msgstr "" -"Gruppe, zu welcher der Nutzer gehören muss, um die Transaktion durchzuführen" - -msgid "groups" -msgstr "Gruppen" - -msgid "groups allowed to add entities/relations of this type" -msgstr "" - -msgid "groups allowed to delete entities/relations of this type" -msgstr "" - -msgid "groups allowed to read entities/relations of this type" -msgstr "" - -msgid "groups allowed to update entities/relations of this type" -msgstr "" - -msgid "groups grant permissions to the user" -msgstr "die Gruppen geben dem Nutzer Rechte" - -msgid "guests" -msgstr "Gäste" - -msgid "hCalendar" -msgstr "hCalendar" - -msgid "has_text" -msgstr "enthält Text" - -msgid "header-center" -msgstr "" - -msgid "header-left" -msgstr "" - -msgid "header-right" -msgstr "" - -msgid "hide filter form" -msgstr "Filter verbergen" - -msgid "" -"how to format date and time in the ui (see this page for format " -"description)" -msgstr "" -"Wie formatiert man das Datum im Interface (Beschreibung des Formats)" - -msgid "" -"how to format date in the ui (see this page for format " -"description)" -msgstr "" -"Wie formatiert man das Datum im Interface (Beschreibung des Formats)" - -msgid "how to format float numbers in the ui" -msgstr "Wie man Dezimalzahlen (float) im Interface formatiert" - -msgid "" -"how to format time in the ui (see this page for format " -"description)" -msgstr "" -"Wie formatiert man die Uhrzeit im Interface (Beschreibung des " -"Formats)" - -msgid "i18n_bookmark_url_fqs" -msgstr "Parameter" - -msgid "i18n_bookmark_url_path" -msgstr "Pfad" - -msgid "i18n_login_popup" -msgstr "Anmelden" - -msgid "i18ncard_*" -msgstr "0..n" - -msgid "i18ncard_+" -msgstr "1..n" - -msgid "i18ncard_1" -msgstr "1" - -msgid "i18ncard_?" 
-msgstr "0..1" - -msgid "i18nprevnext_next" -msgstr "Weiter" - -msgid "i18nprevnext_previous" -msgstr "Zurück" - -msgid "i18nprevnext_up" -msgstr "eine Ebene auf" - -msgid "iCalendar" -msgstr "iCalendar" - -msgid "id of main template used to render pages" -msgstr "id der Hauptvorlage" - -msgid "identical to" -msgstr "identisch mit" - -msgid "identical_to" -msgstr "identisch mit" - -msgid "identity" -msgstr "ist identisch mit" - -msgid "identity_object" -msgstr "ist identisch mit" - -msgid "" -"if full text content of subject/object entity should be added to other side " -"entity (the container)." -msgstr "" -"falls der indizierte Text der Subjekt/Objekt-Entität der anderen Seite der " -"Relation (dem Container) hinzugefügt werden muss" - -msgid "image" -msgstr "Bild" - -msgid "in progress" -msgstr "" - -msgid "in_group" -msgstr "in der Gruppe" - -msgctxt "CWUser" -msgid "in_group" -msgstr "gehört zu der Gruppe" - -msgid "in_group_object" -msgstr "Mitglieder" - -msgctxt "CWGroup" -msgid "in_group_object" -msgstr "enthält die Nutzer" - -msgid "in_state" -msgstr "Zustand" - -msgid "in_state_object" -msgstr "Zustand von" - -msgid "in_synchronization" -msgstr "" - -msgctxt "CWSource" -msgid "in_synchronization" -msgstr "" - -msgid "incontext" -msgstr "im Kontext" - -msgid "incorrect captcha value" -msgstr "Unzulässiger Wert für Überschrift" - -#, python-format -msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" -msgstr "" - -msgid "index this attribute's value in the plain text index" -msgstr "indizieren des Wertes dieses Attributs im Volltext-Index" - -msgid "indexed" -msgstr "Index" - -msgctxt "CWAttribute" -msgid "indexed" -msgstr "indiziert" - -msgid "indicate the current state of an entity" -msgstr "zeigt den aktuellen Zustand einer Entität an" - -msgid "" -"indicate which state should be used by default when an entity using states " -"is created" -msgstr "" -"zeigt an, welcher Zustand standardmäßig benutzt werden soll, wenn eine " -"Entität erstellt wird" - -msgid "indifferent" -msgstr "gleichgültig" - -msgid "info" -msgstr "Information" - -msgid "initial state for this workflow" -msgstr "Anfangszustand für diesen Workflow" - -msgid "initial_state" -msgstr "Anfangszustand" - -msgctxt "Workflow" -msgid "initial_state" -msgstr "Anfangszustand" - -msgid "initial_state_object" -msgstr "Anfangszustand von" - -msgctxt "State" -msgid "initial_state_object" -msgstr "Anfangszustand von" - -msgid "inlined" -msgstr "eingereiht" - -msgctxt "CWRType" -msgid "inlined" -msgstr "eingereiht" - -msgid "instance home" -msgstr "Startseite der Instanz" - -msgid "internal entity uri" -msgstr "interner URI" - -msgid "internationalizable" -msgstr "internationalisierbar" - -msgctxt "CWAttribute" -msgid "internationalizable" -msgstr "internationalisierbar" - -#, python-format -msgid "invalid action %r" -msgstr "Ungültige Aktion %r" - -#, python-format -msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" -msgstr "" - -msgid "is" -msgstr "vom Typ" - -msgid "is object of:" -msgstr "ist Objekt von" - -msgid "is subject of:" -msgstr "ist Subjekt von" - -msgid "" -"is the subject/object entity of the relation composed of the other ? This " -"implies that when the composite is deleted, composants are also deleted." -msgstr "" -"Ist die Subjekt/Objekt-Entität der Relation aus der anderen Seite " -"zusammengesetzt?Falls ja, werden beim Löschen der Entität auch deren " -"Bausteine gelöscht." 
- -msgid "is this attribute's value translatable" -msgstr "Ist der Wert dieses Attributs übersetzbar?" - -msgid "is this relation equivalent in both direction ?" -msgstr "Ist diese Relation in beiden Richtungen äquivalent?" - -msgid "" -"is this relation physically inlined? you should know what you're doing if " -"you are changing this!" -msgstr "" -"Ist diese Relation in die Datenbank eingereiht? Sie sollten wissen, was Sie " -"tun, wenn Sie dies ändern." - -msgid "is_instance_of" -msgstr "ist eine Instanz von" - -msgid "is_instance_of_object" -msgstr "Typ von" - -msgid "is_object" -msgstr "hat als Instanz" - -msgid "january" -msgstr "Januar" - -msgid "json-entities-export-view" -msgstr "" - -msgid "json-export-view" -msgstr "" - -msgid "july" -msgstr "Juli" - -msgid "june" -msgstr "Juni" - -msgid "language of the user interface" -msgstr "Sprache der Nutzer-Schnittstelle" - -msgid "last connection date" -msgstr "Datum der letzten Verbindung" - -msgid "last login time" -msgstr "Datum der letzten Verbindung" - -msgid "last name" -msgstr "Name" - -msgid "last usage" -msgstr "letzte Benutzung" - -msgid "last_login_time" -msgstr "Datum der letzten Verbindung" - -msgctxt "CWUser" -msgid "last_login_time" -msgstr "Datum der letzten Verbindung" - -msgid "latest import" -msgstr "" - -msgid "latest modification time of an entity" -msgstr "Datum der letzten Änderung einer Entität" - -msgid "latest synchronization time" -msgstr "" - -msgid "latest update on" -msgstr "letzte Änderung am" - -msgid "latest_retrieval" -msgstr "" - -msgctxt "CWSource" -msgid "latest_retrieval" -msgstr "" - -msgid "left" -msgstr "links" - -msgid "line" -msgstr "" - -msgid "" -"link a property to the user which want this property customization. Unless " -"you're a site manager, this relation will be handled automatically." -msgstr "" -"verknüpft eine Eigenschaft mit einem Nutzer, der diese Personalisierung " -"wünscht. Sofern Sie nicht Site-Manager sind, wird diese Relation automatisch " -"behandelt." 
- -msgid "link a relation definition to its object entity type" -msgstr "verknüpft eine Relationsdefinition mit dem Entitätstyp ihres Objekts" - -msgid "link a relation definition to its relation type" -msgstr "verknüpft eine Relationsdefinition mit ihrem Relationstyp" - -msgid "link a relation definition to its subject entity type" -msgstr "verknüpft eine Relationsdefinition mit dem Entitätstyp ihres Subjekts" - -msgid "link a state to one or more workflow" -msgstr "verknüpft einen Zustand mit einem oder mehreren Workflows" - -msgid "link a transition information to its object" -msgstr "verknüpft eine Übergangsinformation mit ihrem Objekt" - -msgid "link a transition to one or more workflow" -msgstr "verknüpft einen Übergang mit einem oder mehreren Workflows" - -msgid "link a workflow to one or more entity type" -msgstr "verknüpft einen Workflow mit einem oder mehreren Entitätstypen" - -msgid "list" -msgstr "Liste" - -msgid "log" -msgstr "" - -msgctxt "CWDataImport" -msgid "log" -msgstr "" - -msgid "log in" -msgstr "anmelden" - -msgid "login" -msgstr "Anmeldung" - -msgctxt "CWUser" -msgid "login" -msgstr "Anmeldung" - -msgid "login / password" -msgstr "" - -msgid "login or email" -msgstr "Nutzername oder E-Mail-Adresse" - -msgid "login_action" -msgstr "Melden Sie sich an" - -msgid "logout" -msgstr "Abmelden" - -#, python-format -msgid "loop in %(rel)s relation (%(eid)s)" -msgstr "" -"Endlosschleife gefunden in der Relation %(rel)s von der Entität #%(eid)s" - -msgid "main informations" -msgstr "Allgemeine Informationen" - -msgid "main_tab" -msgstr "" - -msgid "mainvars" -msgstr "Hauptvariablen" - -msgctxt "RQLExpression" -msgid "mainvars" -msgstr "Hauptvariablen" - -msgid "manage" -msgstr "Verwalten" - -msgid "manage bookmarks" -msgstr "Lesezeichen verwalten" - -msgid "manage permissions" -msgstr "Rechte verwalten" - -msgid "managers" -msgstr "Administratoren" - -msgid "mandatory relation" -msgstr "obligatorische Relation" - -msgid "march" -msgstr "März" - -msgid "match_host" -msgstr "" - -msgctxt "CWSourceHostConfig" -msgid "match_host" -msgstr "" - -msgid "maximum number of characters in short description" -msgstr "Maximale Anzahl von Zeichen in der Kurzbeschreibung" - -msgid "maximum number of entities to display in related combo box" -msgstr "maximale Anzahl Entitäten zur Anzeige im Listenfeld" - -msgid "maximum number of objects displayed by page of results" -msgstr "maximale Anzahl pro Seite angezeigter Objekte" - -msgid "maximum number of related entities to display in the primary view" -msgstr "maximale anzahl verknüpfter Entitäten zur Anzeige auf der hauptseite" - -msgid "may" -msgstr "Mai" - -msgid "memory leak debugging" -msgstr "Fehlersuche bei Speicherlöschern" - -msgid "message" -msgstr "" - -#, python-format -msgid "missing parameters for entity %s" -msgstr "Fehlende Parameter für Entität %s" - -msgid "modification" -msgstr "Änderung" - -msgid "modification_date" -msgstr "Datum der Änderung" - -msgid "modify" -msgstr "ändern" - -msgid "monday" -msgstr "Montag" - -msgid "more actions" -msgstr "weitere Aktionen" - -msgid "more info about this workflow" -msgstr "mehr Informationen zu diesem Workflow" - -msgid "multiple edit" -msgstr "mehrfache Bearbeitung" - -msgid "my custom search" -msgstr "meine personalisierte Suche" - -msgid "name" -msgstr "Name" - -msgctxt "BaseTransition" -msgid "name" -msgstr "Name" - -msgctxt "CWCache" -msgid "name" -msgstr "Name" - -msgctxt "CWComputedRType" -msgid "name" -msgstr "" - -msgctxt "CWConstraintType" -msgid "name" -msgstr "Name" - -msgctxt 
"CWEType" -msgid "name" -msgstr "Name" - -msgctxt "CWGroup" -msgid "name" -msgstr "Name" - -msgctxt "CWRType" -msgid "name" -msgstr "Name" - -msgctxt "CWSource" -msgid "name" -msgstr "" - -msgctxt "CWUniqueTogetherConstraint" -msgid "name" -msgstr "" - -msgctxt "State" -msgid "name" -msgstr "Name" - -msgctxt "Transition" -msgid "name" -msgstr "Name" - -msgctxt "Workflow" -msgid "name" -msgstr "Name" - -msgctxt "WorkflowTransition" -msgid "name" -msgstr "Name" - -msgid "name of the cache" -msgstr "Name des Caches" - -msgid "" -"name of the main variables which should be used in the selection if " -"necessary (comma separated)" -msgstr "" -"Name der Hauptvariablen, die in der Auswahl benutzt werden sollten (mehrere " -"Variablen durch ',' trennen!)" - -msgid "name of the source" -msgstr "" - -msgid "navbottom" -msgstr "zum Seitenende" - -msgid "navcontentbottom" -msgstr "zum Hauptinhalt" - -msgid "navcontenttop" -msgstr "zum Seitenanfang" - -msgid "navigation" -msgstr "Navigation" - -msgid "navigation.combobox-limit" -msgstr "Anzahl Entitäten pro Listenfeld" - -msgid "navigation.page-size" -msgstr "Anzahl Suchergebnisse" - -msgid "navigation.related-limit" -msgstr "Anzahl Entitäten in der Hauptansicht" - -msgid "navigation.short-line-size" -msgstr "Kurzbeschreibung" - -msgid "navtop" -msgstr "zum Hauptinhalt springen" - -msgid "new" -msgstr "neu" - -msgid "next page" -msgstr "" - -msgid "next_results" -msgstr "weitere Ergebnisse" - -msgid "no" -msgstr "Nein" - -msgid "no content next link" -msgstr "" - -msgid "no content prev link" -msgstr "" - -msgid "no edited fields specified" -msgstr "" - -msgid "no log to display" -msgstr "" - -msgid "no related entity" -msgstr "keine verknüpfte Entität" - -msgid "no repository sessions found" -msgstr "keine Datenbank-Sitzung gefunden" - -msgid "no selected entities" -msgstr "keine Entitäten ausgewählt" - -#, python-format -msgid "no such entity type %s" -msgstr "Der Entitätstyp '%s' existiert nicht." - -msgid "no version information" -msgstr "Keine Versionsangaben." - -msgid "no web sessions found" -msgstr "Keine Sitzung gefunden." - -msgid "normal" -msgstr "normal" - -msgid "not authorized" -msgstr "nicht authrisiert" - -msgid "not selected" -msgstr "nicht ausgewählt" - -msgid "november" -msgstr "November" - -msgid "num. users" -msgstr "" - -msgid "object" -msgstr "Objekt" - -msgid "object type" -msgstr "Objekttyp" - -msgid "october" -msgstr "Oktober" - -msgid "one month" -msgstr "ein Monat" - -msgid "one week" -msgstr "eine Woche" - -msgid "oneline" -msgstr "eine Zeile" - -msgid "only select queries are authorized" -msgstr "Nur Auswahl-Anfragen sind erlaubt." - -msgid "open all" -msgstr "alle öffnen" - -msgid "opened sessions" -msgstr "offene Sitzungen" - -msgid "opened web sessions" -msgstr "offene Web-Sitzungen" - -msgid "options" -msgstr "Optionen" - -msgctxt "CWSourceSchemaConfig" -msgid "options" -msgstr "" - -msgid "order" -msgstr "Reihenfolge" - -msgid "ordernum" -msgstr "Reihenfolge" - -msgctxt "CWAttribute" -msgid "ordernum" -msgstr "Ordnungszahl" - -msgctxt "CWRelation" -msgid "ordernum" -msgstr "Ordnungszahl" - -msgid "owl" -msgstr "OWL" - -msgid "owlabox" -msgstr "OWL ABox" - -msgid "owned_by" -msgstr "gehört zu" - -msgid "owned_by_object" -msgstr "besitzt" - -msgid "owners" -msgstr "Besitzer" - -msgid "ownerships have been changed" -msgstr "Die Eigentumsrechte sind geändert worden." - -msgid "pageid-not-found" -msgstr "" -"Notwendige Daten scheinen nicht mehr gültig zu sein. Bitte laden Sie die " -"Seite neu und beginnen Sie von vorn." 
- -msgid "parser" -msgstr "" - -msgctxt "CWSource" -msgid "parser" -msgstr "" - -msgid "parser to use to extract entities from content retrieved at given URLs." -msgstr "" - -msgid "password" -msgstr "Passwort" - -msgid "password and confirmation don't match" -msgstr "Das Passwort stimmt nicht mit der Bestätigung überein." - -msgid "path" -msgstr "Pfad" - -msgctxt "Bookmark" -msgid "path" -msgstr "Pfad" - -msgid "permalink to this message" -msgstr "" - -msgid "permission" -msgstr "Recht" - -msgid "permissions" -msgstr "Rechte" - -msgid "pick existing bookmarks" -msgstr "Wählen Sie aus den bestehenden lesezeichen aus" - -msgid "pkey" -msgstr "Schlüssel" - -msgctxt "CWProperty" -msgid "pkey" -msgstr "code der Eigenschaft" - -msgid "please correct errors below" -msgstr "Bitte die nachstehenden Fehler korrigieren" - -msgid "please correct the following errors:" -msgstr "Bitte korrigieren Sie die folgenden Fehler:" - -msgid "possible views" -msgstr "Mögliche Ansichten" - -msgid "prefered_form" -msgstr "bevorzugte form" - -msgctxt "EmailAddress" -msgid "prefered_form" -msgstr "bevorzugte form" - -msgid "prefered_form_object" -msgstr "bevorzugte form vor" - -msgctxt "EmailAddress" -msgid "prefered_form_object" -msgstr "bevorzugte form von" - -msgid "preferences" -msgstr "Einstellungen" - -msgid "previous page" -msgstr "" - -msgid "previous_results" -msgstr "vorige Ergebnisse" - -msgid "primary" -msgstr "primär" - -msgid "primary_email" -msgstr "primäre E-Mail-Adresse" - -msgctxt "CWUser" -msgid "primary_email" -msgstr "primäre E-Mail-Adresse" - -msgid "primary_email_object" -msgstr "Objekt der primären E-Mail-Adresse" - -msgctxt "EmailAddress" -msgid "primary_email_object" -msgstr "primäre E-Mail-Adresse von" - -msgid "profile" -msgstr "Profil" - -msgid "rdef-description" -msgstr "Beschreibung" - -msgid "rdef-permissions" -msgstr "Rechte" - -msgid "rdf export" -msgstr "" - -msgid "read" -msgstr "Lesen" - -msgid "read_permission" -msgstr "Leseberechtigung" - -msgctxt "CWAttribute" -msgid "read_permission" -msgstr "Leseberechtigung" - -msgctxt "CWEType" -msgid "read_permission" -msgstr "Leseberechtigung" - -msgctxt "CWRelation" -msgid "read_permission" -msgstr "Leseberechtigung" - -msgid "read_permission_object" -msgstr "hat eine Leseberechtigung" - -msgctxt "CWGroup" -msgid "read_permission_object" -msgstr "kann lesen" - -msgctxt "RQLExpression" -msgid "read_permission_object" -msgstr "kann lesen" - -msgid "regexp matching host(s) to which this config applies" -msgstr "" - -msgid "registry" -msgstr "Registratur" - -msgid "related entity has no state" -msgstr "Verknüpfte Entität hat keinen Zustand" - -msgid "related entity has no workflow set" -msgstr "Verknüpfte Entität hat keinen Workflow" - -msgid "relation" -msgstr "Relation" - -#, python-format -msgid "relation %(relname)s of %(ent)s" -msgstr "Relation %(relname)s von %(ent)s" - -#, python-format -msgid "" -"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " -"type supported" -msgstr "" - -#, python-format -msgid "" -"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " -"mandatory but not supported" -msgstr "" - -#, python-format -msgid "" -"relation %s is supported but none of its definitions matches supported " -"entities" -msgstr "" - -msgid "relation add" -msgstr "Relation hinzufügen" - -msgid "relation removal" -msgstr "Relation entfernen" - -msgid "relation_type" -msgstr "Relationstyp" - -msgctxt "CWAttribute" -msgid "relation_type" -msgstr "Relationstyp" - -msgctxt "CWRelation" -msgid 
"relation_type" -msgstr "Relationstyp" - -msgid "relation_type_object" -msgstr "Definition" - -msgctxt "CWRType" -msgid "relation_type_object" -msgstr "definition" - -msgid "relations" -msgstr "Relationen" - -msgctxt "CWUniqueTogetherConstraint" -msgid "relations" -msgstr "Relationen" - -msgid "relations deleted" -msgstr "Relationen entfernt" - -msgid "relations_object" -msgstr "Relationen von" - -msgctxt "CWRType" -msgid "relations_object" -msgstr "Relationen von" - -msgid "relative url of the bookmarked page" -msgstr "URL relativ zu der Seite" - -msgid "remove-inlined-entity-form" -msgstr "Entfernen" - -msgid "require_group" -msgstr "benötigt die Gruppe" - -msgctxt "BaseTransition" -msgid "require_group" -msgstr "auf Gruppe beschränkt" - -msgctxt "Transition" -msgid "require_group" -msgstr "auf Gruppe beschränkt" - -msgctxt "WorkflowTransition" -msgid "require_group" -msgstr "auf Gruppe beschränkt" - -msgid "require_group_object" -msgstr "hat die Rechte" - -msgctxt "CWGroup" -msgid "require_group_object" -msgstr "hat die Rechte" - -msgid "required" -msgstr "erforderlich" - -msgid "required attribute" -msgstr "erforderliches Attribut" - -msgid "required field" -msgstr "Pflichtfeld" - -msgid "resources usage" -msgstr "genutzte Ressourcen" - -msgid "" -"restriction part of a rql query. For entity rql expression, X and U are " -"predefined respectivly to the current object and to the request user. For " -"relation rql expression, S, O and U are predefined respectivly to the " -"current relation'subject, object and to the request user. " -msgstr "" -"Restriktionsteil einer RQL-Abfrage. Für einen Ausdruck, der für eine Entität " -"gilt,X und U sind jeweils für die Entität und den Nutzer vordefiniert." -"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur " - -msgid "revert changes" -msgstr "Änderungen rückgängig machen" - -msgid "right" -msgstr "rechts" - -msgid "rql expression allowing to add entities/relations of this type" -msgstr "" - -msgid "rql expression allowing to delete entities/relations of this type" -msgstr "" - -msgid "rql expression allowing to read entities/relations of this type" -msgstr "" - -msgid "rql expression allowing to update entities/relations of this type" -msgstr "" - -msgid "rql expressions" -msgstr "RQL-Ausdrücke" - -msgid "rss export" -msgstr "" - -msgid "rule" -msgstr "" - -msgctxt "CWComputedRType" -msgid "rule" -msgstr "" - -msgid "same_as" -msgstr "identisch mit" - -msgid "sample format" -msgstr "Beispiel" - -msgid "saturday" -msgstr "Samstag" - -msgid "schema-diagram" -msgstr "Diagramm" - -msgid "schema-entity-types" -msgstr "Entitätstypen" - -msgid "schema-relation-types" -msgstr "Relationstypen" - -msgid "search" -msgstr "suchen" - -msgid "search for association" -msgstr "nach verwandten Ergebnissen suchen" - -msgid "searching for" -msgstr "Suche nach" - -msgid "security" -msgstr "Sicherheit" - -msgid "see more" -msgstr "" - -msgid "see them all" -msgstr "Alle ansehen" - -msgid "see_also" -msgstr "Siehe auch" - -msgid "select" -msgstr "auswählen" - -msgid "select a" -msgstr "wählen Sie einen" - -msgid "select a key first" -msgstr "Wählen Sie zuerst einen Schlüssel." - -msgid "select a relation" -msgstr "Wählen Sie eine Relation." 
- -msgid "select this entity" -msgstr "Wählen Sie diese Entität" - -msgid "selected" -msgstr "ausgewählt" - -msgid "semantic description of this attribute" -msgstr "Semantische Beschreibung dieses Attributs" - -msgid "semantic description of this entity type" -msgstr "Semantische Beschreibung dieses Entitätstyps" - -msgid "semantic description of this relation" -msgstr "Semantische Beschreibung dieser Relation" - -msgid "semantic description of this relation type" -msgstr "Semantische Beschreibung dieses Relationstyps" - -msgid "semantic description of this state" -msgstr "Semantische Beschreibung dieses Zustands" - -msgid "semantic description of this transition" -msgstr "Semantische Beschreibung dieses Übergangs" - -msgid "semantic description of this workflow" -msgstr "Semantische Beschreibung dieses Workflows" - -msgid "september" -msgstr "September" - -msgid "server information" -msgstr "Server-Informationen" - -msgid "severity" -msgstr "" - -msgid "" -"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " -"You should also select text/html as default text format to actually get " -"fckeditor." -msgstr "" -"Bestimmt, ob HTML-Felder mit fckeditor (ein WYSIWYG-HTML-Editor)\n" -"bearbeitet werden müssen. Es wird auch empfohlen, Text/HTML\n" -"als Standard-Textformat festzulegen, um Text mit fckeditor zu bearbeiten." - -#, python-format -msgid "show %s results" -msgstr "Zeige %s Ergebnisse" - -msgid "show advanced fields" -msgstr "Zeige detaillierte Felder" - -msgid "show filter form" -msgstr "Filter zeigen" - -msgid "site configuration" -msgstr "Konfiguration der Website" - -msgid "site documentation" -msgstr "Dokumentation der Website" - -msgid "site title" -msgstr "Titel der Website" - -msgid "site-wide property can't be set for user" -msgstr "" -"Eine Eigenschaft für die gesamte Website kann nicht für einen Nutzer gesetzt " -"werden." - -msgid "some later transaction(s) touch entity, undo them first" -msgstr "" -"Eine oder mehrere frühere Transaktion(en) betreffen die Tntität. Machen Sie " -"sie zuerst rückgängig." - -msgid "some relations violate a unicity constraint" -msgstr "" - -msgid "sorry, the server is unable to handle this query" -msgstr "Der Server kann diese Anfrage leider nicht bearbeiten." - -msgid "" -"source's configuration. One key=value per line, authorized keys depending on " -"the source's type" -msgstr "" - -msgid "sparql xml" -msgstr "Sparql XML" - -msgid "special transition allowing to go through a sub-workflow" -msgstr "Spezieller Übergang, um in einen Subworkflow hineinzugehen" - -msgid "specializes" -msgstr "leitet sich ab von" - -msgctxt "CWEType" -msgid "specializes" -msgstr "spezialisiert" - -msgid "specializes_object" -msgstr "Vorgänger von" - -msgctxt "CWEType" -msgid "specializes_object" -msgstr "Vorgänger von" - -#, python-format -msgid "specifying %s is mandatory" -msgstr "" - -msgid "" -"start timestamp of the currently in synchronization, or NULL when no " -"synchronization in progress." -msgstr "" - -msgid "start_timestamp" -msgstr "" - -msgctxt "CWDataImport" -msgid "start_timestamp" -msgstr "" - -msgid "startup views" -msgstr "Start-Ansichten" - -msgid "startupview" -msgstr "" - -msgid "state" -msgstr "Zustand" - -msgid "state and transition don't belong the the same workflow" -msgstr "Zustand und Übergang gehören nicht zum selben Workflow." - -msgid "state doesn't apply to this entity's type" -msgstr "Zustand gilt nicht für diesen Entitätstyp." 
- -msgid "state doesn't belong to entity's current workflow" -msgstr "Der Zustand gehört nicht zum aktuellen Workflow der Entität." - -msgid "state doesn't belong to entity's workflow" -msgstr "Der Zustand gehört nicht zum Workflow der Entität." - -msgid "" -"state doesn't belong to entity's workflow. You may want to set a custom " -"workflow for this entity first." -msgstr "" -"Der Zustand gehört nicht zum Workflow der Entität.Bitte bestimmen Sie zuerst " -"einen Workflow für diese Entität." - -msgid "state doesn't belong to this workflow" -msgstr "Zustand gehört nicht zu diesem Workflow." - -msgid "state_of" -msgstr "Zustand von" - -msgctxt "State" -msgid "state_of" -msgstr "Zustand von" - -msgid "state_of_object" -msgstr "hat als Zustand" - -msgctxt "Workflow" -msgid "state_of_object" -msgstr "enthält die Zustände" - -msgid "status" -msgstr "" - -msgctxt "CWDataImport" -msgid "status" -msgstr "" - -msgid "status change" -msgstr "Zustand ändern" - -msgid "status changed" -msgstr "Zustand geändert" - -#, python-format -msgid "status will change from %(st1)s to %(st2)s" -msgstr "Entität wird vom Zustand %(st1)s in zustand %(st2)s übergehen." - -msgid "subject" -msgstr "Subjekt" - -msgid "subject type" -msgstr "Subjekttyp" - -msgid "subject/object cardinality" -msgstr "Subjekt/Objekt Kardinalität" - -msgid "subworkflow" -msgstr "Subworkflow" - -msgctxt "WorkflowTransition" -msgid "subworkflow" -msgstr "Subworkflow" - -msgid "" -"subworkflow isn't a workflow for the same types as the transition's workflow" -msgstr "" -"Dieser Subworkflow gilt nicht für dieselben Typen wie der Workflow dieses " -"Übergangs." - -msgid "subworkflow state" -msgstr "Zustand des Subworkflows" - -msgid "subworkflow_exit" -msgstr "Ende des Subworkflows" - -msgctxt "WorkflowTransition" -msgid "subworkflow_exit" -msgstr "Ende des Subworkflows" - -msgid "subworkflow_exit_object" -msgstr "Endzustand" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_exit_object" -msgstr "Endzustände" - -msgid "subworkflow_object" -msgstr "verwendet vom Übergang" - -msgctxt "Workflow" -msgid "subworkflow_object" -msgstr "Subworkflow von" - -msgid "subworkflow_state" -msgstr "Zustand des Subworkflows" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_state" -msgstr "Zustand" - -msgid "subworkflow_state_object" -msgstr "Endzustand von" - -msgctxt "State" -msgid "subworkflow_state_object" -msgstr "Endzustand von" - -msgid "success" -msgstr "" - -msgid "sunday" -msgstr "Sonntag" - -msgid "surname" -msgstr "Name" - -msgctxt "CWUser" -msgid "surname" -msgstr "Nachname" - -msgid "symmetric" -msgstr "symmetrisch" - -msgctxt "CWRType" -msgid "symmetric" -msgstr "symmetrisch" - -msgid "synchronization-interval must be greater than 1 minute" -msgstr "" - -msgid "table" -msgstr "Tabelle" - -msgid "tablefilter" -msgstr "Tabellenfilter" - -msgid "text" -msgstr "Text" - -msgid "text/cubicweb-page-template" -msgstr "dynamischer Inhalt" - -msgid "text/html" -msgstr "html" - -msgid "text/markdown" -msgstr "" - -msgid "text/plain" -msgstr "Nur Text" - -msgid "text/rest" -msgstr "reST" - -msgid "the URI of the object" -msgstr "der URI des Objekts" - -msgid "the prefered email" -msgstr "primäre E-Mail-Adresse" - -msgid "the system source has its configuration stored on the file-system" -msgstr "" - -#, python-format -msgid "the value \"%s\" is already used, use another one" -msgstr "" -"Der Wert \"%s\" wird bereits benutzt, bitte verwenden Sie einen anderen Wert" - -msgid "there is no next page" -msgstr "" - -msgid "there is no previous page" -msgstr 
"" - -#, python-format -msgid "there is no transaction #%s" -msgstr "" - -msgid "this action is not reversible!" -msgstr "Achtung! Diese Aktion ist unumkehrbar." - -msgid "this entity is currently owned by" -msgstr "Diese Entität gehört:" - -msgid "this parser doesn't use a mapping" -msgstr "" - -msgid "this resource does not exist" -msgstr "cette ressource est introuvable" - -msgid "this source doesn't use a mapping" -msgstr "" - -msgid "thursday" -msgstr "Donnerstag" - -msgid "timestamp" -msgstr "Datum" - -msgctxt "CWCache" -msgid "timestamp" -msgstr "gültig seit" - -msgid "timetable" -msgstr "Zeitplan" - -msgid "title" -msgstr "titel" - -msgctxt "Bookmark" -msgid "title" -msgstr "bezeichnet" - -msgid "to" -msgstr "zu" - -#, python-format -msgid "to %(date)s" -msgstr "bis zum %(date)s" - -msgid "to associate with" -msgstr "zu verknüpfen mit" - -msgid "to_entity" -msgstr "zu der Entität" - -msgctxt "CWAttribute" -msgid "to_entity" -msgstr "für die Entität" - -msgctxt "CWRelation" -msgid "to_entity" -msgstr "für die Entität" - -msgid "to_entity_object" -msgstr "Objekt der Relation" - -msgctxt "CWEType" -msgid "to_entity_object" -msgstr "Objekt der Relation" - -msgid "to_interval_end" -msgstr "bis" - -msgid "to_state" -msgstr "zum Zustand" - -msgctxt "TrInfo" -msgid "to_state" -msgstr "Zielstatus" - -msgid "to_state_object" -msgstr "Übergänge zu dem Zustand" - -msgctxt "State" -msgid "to_state_object" -msgstr "Übergang zu diesem Zustand" - -msgid "toggle check boxes" -msgstr "Kontrollkästchen umkehren" - -msgid "tr_count" -msgstr "" - -msgctxt "TrInfo" -msgid "tr_count" -msgstr "" - -msgid "transaction undone" -msgstr "" - -#, python-format -msgid "transition %(tr)s isn't allowed from %(st)s" -msgstr "Der Übergang %(tr)s ist aus dem Zustand %(st)s nicht erlaubt." - -msgid "transition doesn't belong to entity's workflow" -msgstr "Übergang gehört nicht zum Workflow der Entität." - -msgid "transition isn't allowed" -msgstr "Der Übergang ist nicht erleubt." - -msgid "transition may not be fired" -msgstr "Der Übergang kann nicht ausgelöst werden." 
- -msgid "transition_of" -msgstr "Übergang des/der" - -msgctxt "BaseTransition" -msgid "transition_of" -msgstr "Übergang des/der" - -msgctxt "Transition" -msgid "transition_of" -msgstr "Übergang des/der" - -msgctxt "WorkflowTransition" -msgid "transition_of" -msgstr "Übergang des/der" - -msgid "transition_of_object" -msgstr "hat als Übergang" - -msgctxt "Workflow" -msgid "transition_of_object" -msgstr "hat als Übergang" - -msgid "tree view" -msgstr "Baumansicht" - -msgid "tuesday" -msgstr "Dienstag" - -msgid "type" -msgstr "Typ" - -msgctxt "BaseTransition" -msgid "type" -msgstr "Typ" - -msgctxt "CWSource" -msgid "type" -msgstr "" - -msgctxt "Transition" -msgid "type" -msgstr "Typ" - -msgctxt "WorkflowTransition" -msgid "type" -msgstr "Typ" - -msgid "type here a sparql query" -msgstr "Geben sie eine sparql-Anfrage ein" - -msgid "type of the source" -msgstr "" - -msgid "ui" -msgstr "Allgemeinen Eigenschaften der Nutzerschnittstelle" - -msgid "ui.date-format" -msgstr "Datumsformat" - -msgid "ui.datetime-format" -msgstr "Format von Datum und Zeit" - -msgid "ui.default-text-format" -msgstr "Textformat" - -msgid "ui.encoding" -msgstr "Kodierung" - -msgid "ui.fckeditor" -msgstr "Editor" - -msgid "ui.float-format" -msgstr "Format von Dezimalzahlen (float)" - -msgid "ui.language" -msgstr "Sprache" - -msgid "ui.main-template" -msgstr "Hauptvorlage" - -msgid "ui.site-title" -msgstr "Titel der Website" - -msgid "ui.time-format" -msgstr "Zeitformat" - -msgid "unable to check captcha, please try again" -msgstr "Kann capcha nicht bestätigen. Bitte noch einmal versuchen." - -msgid "unaccessible" -msgstr "nicnt zugänglich" - -msgid "unauthorized value" -msgstr "ungültiger Wert" - -msgid "undefined user" -msgstr "" - -msgid "undo" -msgstr "rückgängig machen" - -msgid "unique identifier used to connect to the application" -msgstr "eindeutiger Bezeichner zur Verbindung mit der Anwendung" - -msgid "unknown external entity" -msgstr "(Externe) Entität nicht gefunden" - -#, python-format -msgid "unknown options %s" -msgstr "" - -#, python-format -msgid "unknown property key %s" -msgstr "Unbekannter Eigentumsschlüssel %s" - -msgid "unknown vocabulary:" -msgstr "Unbekanntes Wörterbuch : " - -msgid "unsupported protocol" -msgstr "" - -msgid "upassword" -msgstr "Passwort" - -msgctxt "CWUser" -msgid "upassword" -msgstr "Passwort" - -msgid "update" -msgstr "Aktualisierung" - -msgid "update_permission" -msgstr "Änderungsrecht" - -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "Änderungsrecht" - -msgctxt "CWEType" -msgid "update_permission" -msgstr "Änderungsrecht" - -msgid "update_permission_object" -msgstr "hat die Änderungsberechtigung" - -msgctxt "CWGroup" -msgid "update_permission_object" -msgstr "kann ändern" - -msgctxt "RQLExpression" -msgid "update_permission_object" -msgstr "kann ändern" - -msgid "update_relation" -msgstr "aktualisieren" - -msgid "updated" -msgstr "aktualisiert" - -#, python-format -msgid "updated %(etype)s #%(eid)s (%(title)s)" -msgstr "Entität %(etype)s #%(eid)s (%(title)s) aktualisiert" - -msgid "uri" -msgstr "URI" - -msgctxt "ExternalUri" -msgid "uri" -msgstr "URI" - -msgid "url" -msgstr "" - -msgctxt "CWSource" -msgid "url" -msgstr "" - -msgid "" -"use to define a transition from one or multiple states to a destination " -"states in workflow's definitions. Transition without destination state will " -"go back to the state from which we arrived to the current state." 
-msgstr "" -"verwendet, um einen Übergang von einem oder mehreren Zuständenin einen " -"Zielzustand eines Workflows zu definieren.Ein Übergang ohne Zielzustand " -"führt in den Zustand zurück, der dem aktuellen zustand vorausgeht." - -msgid "use_email" -msgstr "E-Mail-Adresse" - -msgctxt "CWUser" -msgid "use_email" -msgstr "verwendet die E-Mail-Adresse" - -msgid "use_email_object" -msgstr "Adresse verwendet von" - -msgctxt "EmailAddress" -msgid "use_email_object" -msgstr "verwendet von" - -msgid "" -"used for cubicweb configuration. Once a property has been created you can't " -"change the key." -msgstr "" -"konfiguriert CubicWeb. Nachdem eine Eigenschafterstellt wurde, können Sie " -"den Schlüssel nicht mehr ändern." - -msgid "" -"used to associate simple states to an entity type and/or to define workflows" -msgstr "" -"assoziiert einfache Zustände mit einem Entitätstyp und/oder definiert " -"Workflows" - -msgid "user" -msgstr "Nutzer" - -#, python-format -msgid "" -"user %s has made the following change(s):\n" -"\n" -msgstr "" -"Nutzer %s hat die folgende(n) Änderung(en) vorgenommen:\n" -"\n" - -msgid "user interface encoding" -msgstr "Kodierung für die Nutzerschnittstelle" - -msgid "user preferences" -msgstr "Nutzereinstellungen" - -msgid "user's email account" -msgstr "" - -msgid "users" -msgstr "Nutzer" - -msgid "users and groups" -msgstr "" - -msgid "users using this bookmark" -msgstr "Nutzer, die dieses Lesezeichen verwenden" - -msgid "validate modifications on selected items" -msgstr "Überprüfen der Änderungen an den ausgewählten Elementen" - -msgid "validating..." -msgstr "Überprüfung läuft..." - -msgid "value" -msgstr "Wert" - -msgctxt "CWConstraint" -msgid "value" -msgstr "Einschränkung" - -msgctxt "CWProperty" -msgid "value" -msgstr "Wert" - -#, python-format -msgid "value %(KEY-value)s must be < %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be > %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" -msgstr "" - -msgid "value associated to this key is not editable manually" -msgstr "" -"Der mit diesem Schlüssele verbundene Wert kann n icht manuell geändert " -"werden." - -#, python-format -msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" -msgstr "" - -#, python-format -msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" -msgstr "" - -msgid "vcard" -msgstr "VCard" - -msgid "versions configuration" -msgstr "Versionskonfiguration" - -msgid "view" -msgstr "ansehen" - -msgid "view all" -msgstr "alle ansehen" - -msgid "view detail for this entity" -msgstr "Details für diese Entität ansehen" - -msgid "view history" -msgstr "Chronik ansehen" - -msgid "view identifier" -msgstr "Nutzername" - -msgid "view title" -msgstr "Titel" - -msgid "view workflow" -msgstr "mögliche Zustände ansehen" - -msgid "view_index" -msgstr "Index-Seite" - -msgid "visible" -msgstr "sichtbar" - -msgid "warning" -msgstr "" - -msgid "we are not yet ready to handle this query" -msgstr "Momentan können wir diese sparql-Anfrage noch nicht ausführen." - -msgid "wednesday" -msgstr "Mittwoch" - -#, python-format -msgid "welcome %s!" -msgstr "Willkommen %s!" 
- -msgid "wf_info_for" -msgstr "Chronik von" - -msgid "wf_info_for_object" -msgstr "Chronik der Übergänge" - -msgid "wf_tab_info" -msgstr "Beschreibung" - -msgid "wfgraph" -msgstr "Grafik des Workflows" - -msgid "" -"when multiple addresses are equivalent (such as python-projects@logilab.org " -"and python-projects@lists.logilab.org), set this to indicate which is the " -"preferred form." -msgstr "" -"Wenn mehrere Adressen ähnlich sind (comme python-projects@logilab.org und " -"python-projects@lists.logilab.org), bestimmen Sie die bevorzugte Form." - -msgid "workflow" -msgstr "Workflow" - -#, python-format -msgid "workflow changed to \"%s\"" -msgstr "Workflow geändert in \"%s\"" - -msgid "workflow has no initial state" -msgstr "Workflow hat keinen Anfangszustand" - -msgid "workflow history item" -msgstr "Beginn der Chronik des Workflows" - -msgid "workflow isn't a workflow for this type" -msgstr "Der Workflow gilt nicht für diesen Entitätstyp." - -msgid "workflow to which this state belongs" -msgstr "Workflow, zu dem dieser Zustand gehört" - -msgid "workflow to which this transition belongs" -msgstr "Workflow, zu dem dieser Übergang gehört" - -msgid "workflow_of" -msgstr "Workflow von" - -msgctxt "Workflow" -msgid "workflow_of" -msgstr "Workflow von" - -msgid "workflow_of_object" -msgstr "hat als Workflow" - -msgctxt "CWEType" -msgid "workflow_of_object" -msgstr "hat als Workflow" - -#, python-format -msgid "wrong query parameter line %s" -msgstr "Falscher Anfrage-Parameter Zeile %s" - -msgid "xbel export" -msgstr "" - -msgid "xml export" -msgstr "XML-Export" - -msgid "xml export (entities)" -msgstr "" - -msgid "yes" -msgstr "Ja" - -msgid "you have been logged out" -msgstr "Sie sind jetzt abgemeldet." - -msgid "you should probably delete that property" -msgstr "Sie sollten diese Eigenschaft wahrscheinlich löschen." - -#~ msgid "Any" -#~ msgstr "irgendein" - -#~ msgid "Browse by category" -#~ msgstr "nach Kategorien navigieren" - -#~ msgid "No account? Try public access at %s" -#~ msgstr "Kein Konto? Zur öffentlichen Website: %s" - -#~ msgid "anonymous" -#~ msgstr "anonym" - -#~ msgid "can't connect to source %s, some data may be missing" -#~ msgstr "Keine Verbindung zu der Quelle %s, einige Daten könnten fehlen" - -#~ msgid "components_etypenavigation" -#~ msgstr "nach Typ filtern" - -#~ msgid "components_etypenavigation_description" -#~ msgstr "Erlaubt die Sortierung von Suchergebnissen nach Entitätstyp" - -#~ msgid "error while querying source %s, some data may be missing" -#~ msgstr "" -#~ "Fehler beim Zugriff auf Quelle %s, möglicherweise sind die Daten " -#~ "unvollständig." - -#~ msgid "no edited fields specified for entity %s" -#~ msgstr "kein Eingabefeld spezifiziert Für Entität %s" - -#~ msgid "timeline" -#~ msgstr "Zeitleiste" diff -r 058bb3dc685f -r 0b59724cb3f2 i18n/en.po --- a/i18n/en.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4553 +0,0 @@ -# cubicweb i18n catalog -# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# Logilab -msgid "" -msgstr "" -"Project-Id-Version: 2.0\n" -"POT-Creation-Date: 2006-01-12 17:35+CET\n" -"PO-Revision-Date: 2011-04-29 12:57+0200\n" -"Last-Translator: Sylvain Thenault \n" -"Language-Team: English \n" -"Language: en\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" - -#, python-format -msgid "" -"\n" -"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " -"entity\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" -msgstr "" - -#, python-format -msgid " from state %(fromstate)s to state %(tostate)s\n" -msgstr "" - -msgid " :" -msgstr ":" - -#, python-format -msgid "\"action\" must be specified in options; allowed values are %s" -msgstr "" - -msgid "\"role=subject\" or \"role=object\" must be specified in options" -msgstr "" - -#, python-format -msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" -msgstr "" - -#, python-format -msgid "%(KEY-rtype)s is part of violated unicity constraint" -msgstr "" - -#, python-format -msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" -msgstr "" - -#, python-format -msgid "%(attr)s set to %(newvalue)s" -msgstr "" - -#, python-format -msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" -msgstr "" - -#, python-format -msgid "%(etype)s by %(author)s" -msgstr "" - -#, python-format -msgid "%(firstname)s %(surname)s" -msgstr "" - -#, python-format -msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" -msgstr "" - -#, python-format -msgid "%d days" -msgstr "" - -#, python-format -msgid "%d hours" -msgstr "" - -#, python-format -msgid "%d minutes" -msgstr "" - -#, python-format -msgid "%d months" -msgstr "" - -#, python-format -msgid "%d seconds" -msgstr "" - -#, python-format -msgid "%d weeks" -msgstr "" - -#, python-format -msgid "%d years" -msgstr "" - -#, python-format -msgid "%s could be supported" -msgstr "" - -#, python-format -msgid "%s error report" -msgstr "" - -#, python-format -msgid "%s software version of the database" -msgstr "" - -#, python-format -msgid "%s updated" -msgstr "" - -#, python-format -msgid "'%s' action doesn't take any options" -msgstr "" - -#, python-format -msgid "" -"'%s' action for in_state relation should at least have 'linkattr=name' option" -msgstr "" - -#, python-format -msgid "'%s' action requires 'linkattr' option" -msgstr "" - -msgid "(UNEXISTANT EID)" -msgstr "" - -#, python-format -msgid "(suppressed) entity #%d" -msgstr "" - -msgid "**" -msgstr "0..n 0..n" - -msgid "*+" -msgstr "0..n 1..n" - -msgid "*1" -msgstr "0..n 1" - -msgid "*?" -msgstr "0..n 0..1" - -msgid "+*" -msgstr "1..n 0..n" - -msgid "++" -msgstr "1..n 1..n" - -msgid "+1" -msgstr "1..n 1" - -msgid "+?" -msgstr "1..n 0..1" - -msgid "1*" -msgstr "1 0..n" - -msgid "1+" -msgstr "1 1..n" - -msgid "11" -msgstr "1 1" - -msgid "1?" -msgstr "1 0..1" - -#, python-format -msgid "<%s not specified>" -msgstr "" - -#, python-format -msgid "" -"
      This schema of the data model excludes the meta-data, but you " -"can also display a complete schema with meta-data.
      " -msgstr "" - -msgid "" -msgstr "" - -msgid "" -msgstr "" - -msgid "?*" -msgstr "0..1 0..n" - -msgid "?+" -msgstr "0..1 1..n" - -msgid "?1" -msgstr "0..1 1" - -msgid "??" -msgstr "0..1 0..1" - -msgid "AND" -msgstr "" - -msgid "About this site" -msgstr "" - -#, python-format -msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "" - -msgid "Attributes permissions:" -msgstr "" - -# schema pot file, generated on 2009-09-16 16:46:55 -# -# singular and plural forms for each entity type -msgid "BaseTransition" -msgstr "Transition (abstract)" - -msgid "BaseTransition_plural" -msgstr "Transitions (abstract)" - -msgid "BigInt" -msgstr "Big integer" - -msgid "BigInt_plural" -msgstr "Big integers" - -msgid "Bookmark" -msgstr "Bookmark" - -msgid "Bookmark_plural" -msgstr "Bookmarks" - -msgid "Boolean" -msgstr "Boolean" - -msgid "Boolean_plural" -msgstr "Booleans" - -msgid "BoundConstraint" -msgstr "bound constraint" - -msgid "BoundaryConstraint" -msgstr "" - -msgid "Browse by entity type" -msgstr "" - -#, python-format -msgid "By %(user)s on %(dt)s [%(undo_link)s]" -msgstr "" - -msgid "Bytes" -msgstr "Bytes" - -msgid "Bytes_plural" -msgstr "Bytes" - -msgid "CWAttribute" -msgstr "Attribute" - -msgid "CWAttribute_plural" -msgstr "Attributes" - -msgid "CWCache" -msgstr "CubicWeb Cache" - -msgid "CWCache_plural" -msgstr "CubicWeb Caches" - -msgid "CWComputedRType" -msgstr "Virtual relation" - -msgid "CWComputedRType_plural" -msgstr "Virtual relations" - -msgid "CWConstraint" -msgstr "Constraint" - -msgid "CWConstraintType" -msgstr "Constraint type" - -msgid "CWConstraintType_plural" -msgstr "Constraint types" - -msgid "CWConstraint_plural" -msgstr "Constraints" - -msgid "CWDataImport" -msgstr "Data import" - -msgid "CWDataImport_plural" -msgstr "Data imports" - -msgid "CWEType" -msgstr "Entity type" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "CWEType" -msgstr "Entity type" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "CWEType" -msgstr "Entity type" - -msgid "CWEType_plural" -msgstr "Entity types" - -msgid "CWGroup" -msgstr "Group" - -msgid "CWGroup_plural" -msgstr "Groups" - -msgid "CWProperty" -msgstr "Property" - -msgid "CWProperty_plural" -msgstr "Properties" - -msgid "CWRType" -msgstr "Relation type" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "CWRType" -msgstr "Relation type" - -msgid "CWRType_plural" -msgstr "Relation types" - -msgid "CWRelation" -msgstr "Relation" - -msgid "CWRelation_plural" -msgstr "Relations" - -msgid "CWSource" -msgstr "Data source" - -msgid "CWSourceHostConfig" -msgstr "Source host configuration" - -msgid "CWSourceHostConfig_plural" -msgstr "Source host configurations" - -msgid "CWSourceSchemaConfig" -msgstr "Source schema configuration" - -msgid "CWSourceSchemaConfig_plural" -msgstr "Source schema configurations" - -msgid "CWSource_plural" -msgstr "Data sources" - -msgid "CWUniqueTogetherConstraint" -msgstr "Unicity constraint" - -msgid "CWUniqueTogetherConstraint_plural" -msgstr "Unicity constraints" - -msgid "CWUser" -msgstr "User" - -msgid "CWUser_plural" -msgstr "Users" - -#, python-format -msgid "" -"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " -"linked using this relation." -msgstr "" - -#, python-format -msgid "" -"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " -"does not exists anymore in the schema." -msgstr "" - -#, python-format -msgid "" -"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " -"anymore." 
-msgstr "" - -#, python-format -msgid "" -"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " -"exist anymore" -msgstr "" - -#, python-format -msgid "" -"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " -"supported" -msgstr "" - -msgid "Click to sort on this column" -msgstr "" - -msgid "" -"Configuration of the system source goes to the 'sources' file, not in the " -"database" -msgstr "" - -#, python-format -msgid "Created %(etype)s : %(entity)s" -msgstr "" - -msgid "DEBUG" -msgstr "" - -msgid "Date" -msgstr "Date" - -msgid "Date_plural" -msgstr "Dates" - -msgid "Datetime" -msgstr "Date and time" - -msgid "Datetime_plural" -msgstr "Dates and times" - -msgid "Decimal" -msgstr "Decimal number" - -msgid "Decimal_plural" -msgstr "Decimal numbers" - -#, python-format -msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "" - -#, python-format -msgid "Deleted %(etype)s : %(entity)s" -msgstr "" - -msgid "Detected problems" -msgstr "" - -msgid "Do you want to delete the following element(s)?" -msgstr "" - -msgid "Download schema as OWL" -msgstr "" - -msgid "ERROR" -msgstr "" - -msgid "EmailAddress" -msgstr "Email address" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "EmailAddress" -msgstr "Email address" - -msgid "EmailAddress_plural" -msgstr "Email addresses" - -msgid "Entities" -msgstr "" - -#, python-format -msgid "" -"Entity %(eid)s has changed since you started to edit it. Reload the page and " -"reapply your changes." -msgstr "" - -msgid "Entity and relation supported by this source" -msgstr "" - -msgid "ExternalUri" -msgstr "External Uri" - -msgid "ExternalUri_plural" -msgstr "External Uris" - -msgid "FATAL" -msgstr "" - -msgid "Float" -msgstr "Float" - -msgid "Float_plural" -msgstr "Floats" - -# schema pot file, generated on 2009-12-03 09:22:35 -# -# singular and plural forms for each entity type -msgid "FormatConstraint" -msgstr "format constraint" - -msgid "Garbage collection information" -msgstr "" - -msgid "Help" -msgstr "" - -msgid "INFO" -msgstr "" - -msgid "Instance" -msgstr "" - -msgid "Int" -msgstr "Integer" - -msgid "Int_plural" -msgstr "Integers" - -msgid "Interval" -msgstr "Interval" - -msgid "IntervalBoundConstraint" -msgstr "Interval constraint" - -msgid "Interval_plural" -msgstr "Intervals" - -msgid "Link:" -msgstr "" - -msgid "Looked up classes" -msgstr "" - -msgid "Manage" -msgstr "" - -msgid "Manage security" -msgstr "" - -msgid "Message threshold" -msgstr "" - -msgid "Most referenced classes" -msgstr "" - -msgid "New BaseTransition" -msgstr "XXX" - -msgid "New Bookmark" -msgstr "New bookmark" - -msgid "New CWAttribute" -msgstr "New attribute" - -msgid "New CWCache" -msgstr "New cache" - -msgid "New CWComputedRType" -msgstr "New virtual relation" - -msgid "New CWConstraint" -msgstr "New constraint" - -msgid "New CWConstraintType" -msgstr "New constraint type" - -msgid "New CWDataImport" -msgstr "New data import" - -msgid "New CWEType" -msgstr "New entity type" - -msgid "New CWGroup" -msgstr "New group" - -msgid "New CWProperty" -msgstr "New property" - -msgid "New CWRType" -msgstr "New relation type" - -msgid "New CWRelation" -msgstr "New relation" - -msgid "New CWSource" -msgstr "New source" - -msgid "New CWSourceHostConfig" -msgstr "New source host configuration" - -msgid "New CWSourceSchemaConfig" -msgstr "New source schema configuration" - -msgid "New CWUniqueTogetherConstraint" -msgstr "New unicity constraint" - -msgid "New CWUser" -msgstr "New user" - -msgid "New EmailAddress" -msgstr 
"New email address" - -msgid "New ExternalUri" -msgstr "New external URI" - -msgid "New RQLExpression" -msgstr "New RQL expression" - -msgid "New State" -msgstr "New state" - -msgid "New SubWorkflowExitPoint" -msgstr "New subworkflow exit-point" - -msgid "New TrInfo" -msgstr "New transition information" - -msgid "New Transition" -msgstr "New transition" - -msgid "New Workflow" -msgstr "New workflow" - -msgid "New WorkflowTransition" -msgstr "New workflow-transition" - -msgid "No result matching query" -msgstr "" - -msgid "Non exhaustive list of views that may apply to entities of this type" -msgstr "" - -msgid "OR" -msgstr "" - -msgid "Ownership" -msgstr "" - -msgid "Parent class:" -msgstr "" - -msgid "Password" -msgstr "Password" - -msgid "Password_plural" -msgstr "Passwords" - -msgid "Please note that this is only a shallow copy" -msgstr "" - -msgid "Powered by CubicWeb" -msgstr "" - -msgid "RQLConstraint" -msgstr "RQL constraint" - -msgid "RQLExpression" -msgstr "RQL expression" - -msgid "RQLExpression_plural" -msgstr "RQL expressions" - -msgid "RQLUniqueConstraint" -msgstr "RQL unique constraint" - -msgid "RQLVocabularyConstraint" -msgstr "RQL vocabulary constraint" - -msgid "RegexpConstraint" -msgstr "regular expression constrainte" - -msgid "Registry's content" -msgstr "" - -msgid "Relations" -msgstr "" - -msgid "Repository" -msgstr "" - -#, python-format -msgid "Schema %s" -msgstr "" - -msgid "Schema's permissions definitions" -msgstr "" - -msgid "Search for" -msgstr "" - -msgid "Site information" -msgstr "" - -msgid "SizeConstraint" -msgstr "size constraint" - -msgid "" -"Source's configuration for a particular host. One key=value per line, " -"authorized keys depending on the source's type, overriding values defined on " -"the source." -msgstr "" - -msgid "Startup views" -msgstr "" - -msgid "State" -msgstr "State" - -msgid "State_plural" -msgstr "States" - -msgid "StaticVocabularyConstraint" -msgstr "vocabulary constraint" - -msgid "String" -msgstr "String" - -msgid "String_plural" -msgstr "Strings" - -msgid "Sub-classes:" -msgstr "" - -msgid "SubWorkflowExitPoint" -msgstr "Subworkflow exit-point" - -msgid "SubWorkflowExitPoint_plural" -msgstr "subworkflow exit-points" - -msgid "Submit bug report" -msgstr "" - -msgid "Submit bug report by mail" -msgstr "" - -msgid "TZDatetime" -msgstr "International date and time" - -msgid "TZDatetime_plural" -msgstr "International dates and times" - -msgid "TZTime" -msgstr "International time" - -msgid "TZTime_plural" -msgstr "International times" - -#, python-format -msgid "The view %s can not be applied to this query" -msgstr "" - -#, python-format -msgid "The view %s could not be found" -msgstr "" - -msgid "There is no default workflow" -msgstr "" - -msgid "This BaseTransition:" -msgstr "This abstract transition:" - -msgid "This Bookmark:" -msgstr "This bookmark:" - -msgid "This CWAttribute:" -msgstr "This attribute:" - -msgid "This CWCache:" -msgstr "This cache:" - -msgid "This CWComputedRType:" -msgstr "This virtual relation:" - -msgid "This CWConstraint:" -msgstr "This constraint:" - -msgid "This CWConstraintType:" -msgstr "This constraint type:" - -msgid "This CWDataImport:" -msgstr "This data import:" - -msgid "This CWEType:" -msgstr "This entity type:" - -msgid "This CWGroup:" -msgstr "This group:" - -msgid "This CWProperty:" -msgstr "This property:" - -msgid "This CWRType:" -msgstr "This relation type:" - -msgid "This CWRelation:" -msgstr "This relation:" - -msgid "This CWSource:" -msgstr "This data source:" - -msgid "This 
CWSourceHostConfig:" -msgstr "This source host configuration:" - -msgid "This CWSourceSchemaConfig:" -msgstr "This source schema configuration:" - -msgid "This CWUniqueTogetherConstraint:" -msgstr "This unicity constraint:" - -msgid "This CWUser:" -msgstr "This user:" - -msgid "This EmailAddress:" -msgstr "This email address:" - -msgid "This ExternalUri:" -msgstr "This external URI:" - -msgid "This RQLExpression:" -msgstr "This RQL expression:" - -msgid "This State:" -msgstr "This state:" - -msgid "This SubWorkflowExitPoint:" -msgstr "This subworkflow exit-point:" - -msgid "This TrInfo:" -msgstr "This transition information:" - -msgid "This Transition:" -msgstr "This transition:" - -msgid "This Workflow:" -msgstr "This workflow:" - -msgid "This WorkflowTransition:" -msgstr "This workflow-transition:" - -msgid "" -"This action is forbidden. If you think it should be allowed, please contact " -"the site administrator." -msgstr "" - -msgid "This entity type permissions:" -msgstr "" - -msgid "Time" -msgstr "Time" - -msgid "Time_plural" -msgstr "Times" - -msgid "TrInfo" -msgstr "Transition information" - -msgid "TrInfo_plural" -msgstr "Workflow history" - -msgid "Transition" -msgstr "Transition" - -msgid "Transition_plural" -msgstr "Transitions" - -msgid "URLs from which content will be imported. You can put one url per line" -msgstr "" - -msgid "Undoable actions" -msgstr "" - -msgid "Undoing" -msgstr "" - -msgid "UniqueConstraint" -msgstr "unique constraint" - -msgid "Unknown source type" -msgstr "" - -msgid "Unreachable objects" -msgstr "" - -#, python-format -msgid "Updated %(etype)s : %(entity)s" -msgstr "" - -msgid "Used by:" -msgstr "" - -msgid "Users and groups management" -msgstr "" - -msgid "WARNING" -msgstr "" - -msgid "Web server" -msgstr "" - -msgid "Workflow" -msgstr "Workflow" - -msgid "Workflow history" -msgstr "" - -msgid "WorkflowTransition" -msgstr "Workflow-transition" - -msgid "WorkflowTransition_plural" -msgstr "Workflow-transitions" - -msgid "Workflow_plural" -msgstr "Workflows" - -msgid "" -"You can either submit a new file using the browse button above, or choose to " -"remove already uploaded file by checking the \"detach attached file\" check-" -"box, or edit file content online with the widget below." -msgstr "" - -msgid "" -"You can either submit a new file using the browse button above, or edit file " -"content online with the widget below." -msgstr "" - -msgid "You can't change this relation" -msgstr "" - -msgid "You cannot remove the system source" -msgstr "" - -msgid "You cannot rename the system source" -msgstr "" - -msgid "" -"You have no access to this view or it can not be used to display the current " -"data." -msgstr "" - -msgid "" -"You're not authorized to access this page. If you think you should, please " -"contact the site administrator." -msgstr "" - -#, python-format -msgid "[%s supervision] changes summary" -msgstr "" - -msgid "" -"a RQL expression which should return some results, else the transition won't " -"be available. This query may use X and U variables that will respectivly " -"represents the current entity and the current user." -msgstr "" - -msgid "a URI representing an object in external data store" -msgstr "" - -msgid "a float is expected" -msgstr "" - -msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" -msgstr "" - -msgid "" -"a simple cache entity characterized by a name and a validity date. 
The " -"target application is responsible for updating timestamp when necessary to " -"invalidate the cache (typically in hooks). Also, checkout the AppObject." -"get_cache() method." -msgstr "" - -msgid "abstract base class for transitions" -msgstr "" - -msgid "action menu" -msgstr "" - -msgid "action(s) on this selection" -msgstr "" - -msgid "actions" -msgstr "" - -msgid "activate" -msgstr "" - -msgid "activated" -msgstr "" - -msgid "add" -msgstr "" - -msgid "add Bookmark bookmarked_by CWUser object" -msgstr "bookmark" - -msgid "add CWAttribute add_permission RQLExpression subject" -msgstr "rql expression for add permission" - -msgid "add CWAttribute constrained_by CWConstraint subject" -msgstr "constraint" - -msgid "add CWAttribute read_permission RQLExpression subject" -msgstr "read rql expression" - -msgid "add CWAttribute relation_type CWRType object" -msgstr "attribute definition" - -msgid "add CWAttribute update_permission RQLExpression subject" -msgstr "rql expression for update permission" - -msgid "add CWEType add_permission RQLExpression subject" -msgstr "rql expression for add permission" - -msgid "add CWEType delete_permission RQLExpression subject" -msgstr "rql expression for delete permission" - -msgid "add CWEType read_permission RQLExpression subject" -msgstr "rql expression for read permission" - -msgid "add CWEType update_permission RQLExpression subject" -msgstr "rql expression for update permission" - -msgid "add CWProperty for_user CWUser object" -msgstr "property" - -msgid "add CWRelation add_permission RQLExpression subject" -msgstr "add rql expression" - -msgid "add CWRelation constrained_by CWConstraint subject" -msgstr "constraint" - -msgid "add CWRelation delete_permission RQLExpression subject" -msgstr "delete rql expression" - -msgid "add CWRelation read_permission RQLExpression subject" -msgstr "read rql expression" - -msgid "add CWRelation relation_type CWRType object" -msgstr "relation definition" - -msgid "add CWSourceHostConfig cw_host_config_of CWSource object" -msgstr "host configuration" - -msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" -msgstr "unicity constraint" - -msgid "add CWUser in_group CWGroup object" -msgstr "user" - -msgid "add CWUser use_email EmailAddress subject" -msgstr "email address" - -msgid "add State allowed_transition Transition object" -msgstr "incoming state" - -msgid "add State allowed_transition Transition subject" -msgstr "allowed transition" - -msgid "add State allowed_transition WorkflowTransition subject" -msgstr "workflow-transition" - -msgid "add State state_of Workflow object" -msgstr "state" - -msgid "add Transition condition RQLExpression subject" -msgstr "condition" - -msgid "add Transition destination_state State object" -msgstr "incoming transition" - -msgid "add Transition destination_state State subject" -msgstr "destination state" - -msgid "add Transition transition_of Workflow object" -msgstr "transition" - -msgid "add WorkflowTransition condition RQLExpression subject" -msgstr "workflow-transition" - -msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" -msgstr "subworkflow exit-point" - -msgid "add WorkflowTransition transition_of Workflow object" -msgstr "workflow-transition" - -msgid "add a BaseTransition" -msgstr "" - -msgid "add a Bookmark" -msgstr "" - -msgid "add a CWAttribute" -msgstr "" - -msgid "add a CWCache" -msgstr "" - -msgid "add a CWComputedRType" -msgstr "" - -msgid "add a CWConstraint" -msgstr "" - -msgid "add a CWConstraintType" -msgstr "" - 
-msgid "add a CWDataImport" -msgstr "" - -msgid "add a CWEType" -msgstr "" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "add a CWEType" -msgstr "add an entity type" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "add a CWEType" -msgstr "add an entity type" - -msgid "add a CWGroup" -msgstr "" - -msgid "add a CWProperty" -msgstr "" - -msgid "add a CWRType" -msgstr "" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "add a CWRType" -msgstr "add a relation type" - -msgid "add a CWRelation" -msgstr "" - -msgid "add a CWSource" -msgstr "" - -msgid "add a CWSourceHostConfig" -msgstr "" - -msgid "add a CWSourceSchemaConfig" -msgstr "" - -msgid "add a CWUniqueTogetherConstraint" -msgstr "" - -msgid "add a CWUser" -msgstr "" - -msgid "add a EmailAddress" -msgstr "" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "add a EmailAddress" -msgstr "add an email address" - -msgid "add a ExternalUri" -msgstr "" - -msgid "add a RQLExpression" -msgstr "" - -msgid "add a State" -msgstr "" - -msgid "add a SubWorkflowExitPoint" -msgstr "" - -msgid "add a TrInfo" -msgstr "" - -msgid "add a Transition" -msgstr "" - -msgid "add a Workflow" -msgstr "" - -msgid "add a WorkflowTransition" -msgstr "" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgid "add_permission" -msgstr "can be added by" - -msgctxt "CWAttribute" -msgid "add_permission" -msgstr "add permission" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgctxt "CWEType" -msgid "add_permission" -msgstr "add permission" - -msgctxt "CWRelation" -msgid "add_permission" -msgstr "add permission" - -msgid "add_permission_object" -msgstr "has permission to add" - -msgctxt "CWGroup" -msgid "add_permission_object" -msgstr "can add" - -msgctxt "RQLExpression" -msgid "add_permission_object" -msgstr "used to define add permission on" - -msgid "add_relation" -msgstr "add" - -#, python-format -msgid "added %(etype)s #%(eid)s (%(title)s)" -msgstr "" - -#, python-format -msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" - -msgid "additional type specific properties" -msgstr "" - -msgid "addrelated" -msgstr "add" - -msgid "address" -msgstr "" - -msgctxt "EmailAddress" -msgid "address" -msgstr "address" - -msgid "alias" -msgstr "" - -msgctxt "EmailAddress" -msgid "alias" -msgstr "alias" - -msgid "allow to set a specific workflow for an entity" -msgstr "" - -msgid "allowed options depends on the source type" -msgstr "" - -msgid "allowed transitions from this state" -msgstr "" - -#, python-format -msgid "allowed values for \"action\" are %s" -msgstr "" - -msgid "allowed_transition" -msgstr "allowed transition" - -msgctxt "State" -msgid "allowed_transition" -msgstr "allowed transition" - -msgid "allowed_transition_object" -msgstr "incoming states" - -msgctxt "BaseTransition" -msgid "allowed_transition_object" -msgstr "incoming states" - -msgctxt "Transition" -msgid "allowed_transition_object" -msgstr "incoming states" - -msgctxt "WorkflowTransition" -msgid "allowed_transition_object" -msgstr "incoming states" - -msgid "an electronic mail address associated to a short alias" -msgstr "" - -msgid "an error occurred" -msgstr "" - -msgid "an error occurred while processing your request" -msgstr "" - -msgid "an error occurred, the request cannot be fulfilled" -msgstr "" - -msgid "an integer is expected" -msgstr "" - -msgid "and linked" -msgstr "" - -msgid "and/or between different 
values" -msgstr "" - -msgid "anyrsetview" -msgstr "rset views" - -msgid "april" -msgstr "" - -#, python-format -msgid "archive for %(author)s" -msgstr "" - -#, python-format -msgid "archive for %(month)s/%(year)s" -msgstr "" - -#, python-format -msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" -msgstr "" - -msgid "attribute" -msgstr "" - -msgid "august" -msgstr "" - -msgid "authentication failure" -msgstr "" - -msgid "auto" -msgstr "automatic" - -msgid "autocomputed attribute used to ensure transition coherency" -msgstr "" - -msgid "automatic" -msgstr "" - -#, python-format -msgid "back to pagination (%s results)" -msgstr "" - -msgid "bad value" -msgstr "" - -msgid "badly formatted url" -msgstr "" - -msgid "base url" -msgstr "" - -msgid "bookmark has been removed" -msgstr "" - -msgid "bookmark this page" -msgstr "" - -msgid "bookmark this search" -msgstr "" - -msgid "bookmarked_by" -msgstr "bookmarked by" - -msgctxt "Bookmark" -msgid "bookmarked_by" -msgstr "bookmarked by" - -msgid "bookmarked_by_object" -msgstr "has bookmarks" - -msgctxt "CWUser" -msgid "bookmarked_by_object" -msgstr "uses bookmarks" - -msgid "bookmarks" -msgstr "" - -msgid "bookmarks are used to have user's specific internal links" -msgstr "" - -msgid "boxes" -msgstr "" - -msgid "bug report sent" -msgstr "" - -msgid "button_apply" -msgstr "apply" - -msgid "button_cancel" -msgstr "cancel" - -msgid "button_delete" -msgstr "delete" - -msgid "button_ok" -msgstr "validate" - -msgid "by" -msgstr "" - -msgid "by relation" -msgstr "" - -msgid "by_transition" -msgstr "by transition" - -msgctxt "TrInfo" -msgid "by_transition" -msgstr "by transition" - -msgid "by_transition_object" -msgstr "transition information" - -msgctxt "BaseTransition" -msgid "by_transition_object" -msgstr "transition information" - -msgctxt "Transition" -msgid "by_transition_object" -msgstr "transition information" - -msgctxt "WorkflowTransition" -msgid "by_transition_object" -msgstr "transition information" - -msgid "calendar" -msgstr "" - -msgid "can not resolve entity types:" -msgstr "" - -msgid "can only have one url" -msgstr "" - -msgid "can't be changed" -msgstr "" - -msgid "can't be deleted" -msgstr "" - -msgid "can't change this attribute" -msgstr "" - -#, python-format -msgid "can't display data, unexpected error: %s" -msgstr "" - -msgid "can't have multiple exits on the same state" -msgstr "" - -#, python-format -msgid "can't parse %(value)r (expected %(format)s)" -msgstr "" - -#, python-format -msgid "" -"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " -"%(value)s) does not exist any longer" -msgstr "" - -#, python-format -msgid "" -"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " -"exist in the schema anymore." 
-msgstr "" - -#, python-format -msgid "can't restore state of entity %s, it has been deleted inbetween" -msgstr "" - -#, python-format -msgid "" -"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" -"%(card)s" -msgstr "" - -msgid "cancel" -msgstr "" - -msgid "cancel select" -msgstr "" - -msgid "cancel this insert" -msgstr "" - -msgid "cardinality" -msgstr "cardinality" - -msgctxt "CWAttribute" -msgid "cardinality" -msgstr "cardinality" - -msgctxt "CWRelation" -msgid "cardinality" -msgstr "cardinality" - -msgid "category" -msgstr "" - -#, python-format -msgid "changed state of %(etype)s #%(eid)s (%(title)s)" -msgstr "" - -msgid "changes applied" -msgstr "" - -msgid "click here to see created entity" -msgstr "" - -msgid "click here to see edited entity" -msgstr "" - -msgid "click on the box to cancel the deletion" -msgstr "" - -msgid "click to add a value" -msgstr "" - -msgid "click to delete this value" -msgstr "" - -msgid "click to edit this field" -msgstr "" - -msgid "close all" -msgstr "" - -msgid "comment" -msgstr "" - -msgctxt "TrInfo" -msgid "comment" -msgstr "comment" - -msgid "comment_format" -msgstr "format" - -msgctxt "TrInfo" -msgid "comment_format" -msgstr "format" - -msgid "components" -msgstr "" - -msgid "components_navigation" -msgstr "page navigation" - -msgid "components_navigation_description" -msgstr "pagination component for large resultsets" - -msgid "components_rqlinput" -msgstr "rql input box" - -msgid "components_rqlinput_description" -msgstr "the rql box in the page's header" - -msgid "composite" -msgstr "" - -msgctxt "CWRelation" -msgid "composite" -msgstr "composite" - -msgid "condition" -msgstr "condition" - -msgctxt "BaseTransition" -msgid "condition" -msgstr "condition" - -msgctxt "Transition" -msgid "condition" -msgstr "condition" - -msgctxt "WorkflowTransition" -msgid "condition" -msgstr "condition" - -msgid "condition_object" -msgstr "condition of" - -msgctxt "RQLExpression" -msgid "condition_object" -msgstr "condition of" - -msgid "conditions" -msgstr "" - -msgid "config" -msgstr "" - -msgctxt "CWSource" -msgid "config" -msgstr "configuration" - -msgctxt "CWSourceHostConfig" -msgid "config" -msgstr "configuration" - -msgid "config mode" -msgstr "" - -msgid "config type" -msgstr "" - -msgid "confirm password" -msgstr "" - -msgid "constrained_by" -msgstr "constrained by" - -msgctxt "CWAttribute" -msgid "constrained_by" -msgstr "constrained by" - -msgctxt "CWRelation" -msgid "constrained_by" -msgstr "constrained by" - -msgid "constrained_by_object" -msgstr "constraints" - -msgctxt "CWConstraint" -msgid "constrained_by_object" -msgstr "constraints" - -msgid "constraint factory" -msgstr "" - -msgid "constraint_of" -msgstr "constraint of" - -msgctxt "CWUniqueTogetherConstraint" -msgid "constraint_of" -msgstr "constraint of" - -msgid "constraint_of_object" -msgstr "constrained by" - -msgctxt "CWEType" -msgid "constraint_of_object" -msgstr "constrained by" - -msgid "constraints" -msgstr "" - -msgid "constraints applying on this relation" -msgstr "" - -msgid "content type" -msgstr "" - -msgid "context" -msgstr "" - -msgid "context where this box should be displayed" -msgstr "" - -msgid "context where this component should be displayed" -msgstr "" - -msgid "context where this facet should be displayed, leave empty for both" -msgstr "" - -msgid "control subject entity's relations order" -msgstr "" - -msgid "copy" -msgstr "" - -msgid "core relation indicating a user's groups" -msgstr "" - -msgid "" -"core relation indicating owners of an entity. 
This relation implicitly put " -"the owner into the owners group for the entity" -msgstr "" - -msgid "core relation indicating the original creator of an entity" -msgstr "" - -msgid "core relation indicating the type of an entity" -msgstr "" - -msgid "" -"core relation indicating the types (including specialized types) of an entity" -msgstr "" - -msgid "could not connect to the SMTP server" -msgstr "" - -msgid "create an index for quick search on this attribute" -msgstr "" - -msgid "created on" -msgstr "" - -msgid "created_by" -msgstr "created by" - -msgid "created_by_object" -msgstr "has created" - -msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" -msgstr "creating bookmark for %(linkto)s" - -msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" -msgstr "creating attribute %(linkto)s" - -msgid "" -"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" -msgstr "creating constraint for attribute %(linkto)s" - -msgid "" -"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" -msgstr "creating constraint for relation %(linkto)s" - -msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" -msgstr "creating property for user %(linkto)s" - -msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" -msgstr "creating relation %(linkto)s" - -msgid "" -"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " -"%(linkto)s)" -msgstr "creating host configuration for source %(linkto)s" - -msgid "" -"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " -"constraint_of CWEType %(linkto)s)" -msgstr "creating unique together constraint for entity type %(linkto)s" - -msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" -msgstr "creating a new user in group %(linkto)s" - -msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" -msgstr "creating email address for user %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" -msgstr "RQL expression granting add permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" -msgstr "RQL expression granting read permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s update_permission " -"RQLExpression)" -msgstr "RQL expression granting update permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" -msgstr "creating rql expression for add permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" -msgstr "creating rql expression for delete permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" -msgstr "creating rql expression for read permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" -msgstr "creating rql expression for update permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" -msgstr "RQL expression granting add permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s delete_permission " -"RQLExpression)" -msgstr "RQL expression granting delete permission on %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" -msgstr "RQL expression granting read permission on %(linkto)s" - -msgid 
"creating RQLExpression (Transition %(linkto)s condition RQLExpression)" -msgstr "creating rql expression for transition %(linkto)s" - -msgid "" -"creating RQLExpression (WorkflowTransition %(linkto)s condition " -"RQLExpression)" -msgstr "creating rql expression for workflow-transition %(linkto)s" - -msgid "creating State (State allowed_transition Transition %(linkto)s)" -msgstr "creating a state able to trigger transition %(linkto)s" - -msgid "creating State (State state_of Workflow %(linkto)s)" -msgstr "creating state of workflow %(linkto)s" - -msgid "creating State (Transition %(linkto)s destination_state State)" -msgstr "creating destination state for transition %(linkto)s" - -msgid "" -"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " -"subworkflow_exit SubWorkflowExitPoint)" -msgstr "creating subworkflow exit-point for workflow-transition %(linkto)s" - -msgid "creating Transition (State %(linkto)s allowed_transition Transition)" -msgstr "creating triggerable transition for state %(linkto)s" - -msgid "creating Transition (Transition destination_state State %(linkto)s)" -msgstr "creating transition leading to state %(linkto)s" - -msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" -msgstr "creating transition of workflow %(linkto)s" - -msgid "" -"creating WorkflowTransition (State %(linkto)s allowed_transition " -"WorkflowTransition)" -msgstr "creating workflow-transition leading to state %(linkto)s" - -msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow " -"%(linkto)s)" -msgstr "creating workflow-transition of workflow %(linkto)s" - -msgid "creation" -msgstr "" - -msgid "creation date" -msgstr "" - -msgid "creation time of an entity" -msgstr "" - -msgid "creation_date" -msgstr "creation date" - -msgid "cstrtype" -msgstr "constraint's type" - -msgctxt "CWConstraint" -msgid "cstrtype" -msgstr "constraint type" - -msgid "cstrtype_object" -msgstr "used by" - -msgctxt "CWConstraintType" -msgid "cstrtype_object" -msgstr "constraint type of" - -msgid "csv export" -msgstr "CSV export" - -msgid "csv export (entities)" -msgstr "CSV export (entities)" - -msgid "ctxcomponents" -msgstr "contextual components" - -msgid "ctxcomponents_anonuserlink" -msgstr "user link" - -msgid "ctxcomponents_anonuserlink_description" -msgstr "" -"for anonymous users, this is a link pointing to authentication form, for " -"logged in users, this is a link that makes a box appear and listing some " -"possible user actions" - -msgid "ctxcomponents_appliname" -msgstr "application title" - -msgid "ctxcomponents_appliname_description" -msgstr "display the application title in the page's header" - -msgid "ctxcomponents_bookmarks_box" -msgstr "bookmarks box" - -msgid "ctxcomponents_bookmarks_box_description" -msgstr "box listing the user's bookmarks" - -msgid "ctxcomponents_breadcrumbs" -msgstr "breadcrumb" - -msgid "ctxcomponents_breadcrumbs_description" -msgstr "breadcrumbs bar that display a path locating the page in the site" - -msgid "ctxcomponents_download_box" -msgstr "download box" - -msgid "ctxcomponents_download_box_description" -msgstr "" - -msgid "ctxcomponents_edit_box" -msgstr "actions box" - -msgid "ctxcomponents_edit_box_description" -msgstr "box listing the applicable actions on the displayed data" - -msgid "ctxcomponents_facet.filterbox" -msgstr "facets box" - -msgid "ctxcomponents_facet.filterbox_description" -msgstr "box providing filter within current search results functionality" - -msgid "ctxcomponents_logo" -msgstr "logo" - -msgid 
"ctxcomponents_logo_description" -msgstr "the application's icon displayed in the page's header" - -msgid "ctxcomponents_metadata" -msgstr "entity's metadata" - -msgid "ctxcomponents_metadata_description" -msgstr "" - -msgid "ctxcomponents_possible_views_box" -msgstr "possible views box" - -msgid "ctxcomponents_possible_views_box_description" -msgstr "box listing the possible views for the displayed data" - -msgid "ctxcomponents_prevnext" -msgstr "previous / next entity" - -msgid "ctxcomponents_prevnext_description" -msgstr "" -"display link to go from one entity to another on entities implementing the " -"\"previous/next\" interface." - -msgid "ctxcomponents_rss" -msgstr "rss box" - -msgid "ctxcomponents_rss_description" -msgstr "RSS icon to get displayed data as a RSS thread" - -msgid "ctxcomponents_search_box" -msgstr "search box" - -msgid "ctxcomponents_search_box_description" -msgstr "search box" - -msgid "ctxcomponents_startup_views_box" -msgstr "startup views box" - -msgid "ctxcomponents_startup_views_box_description" -msgstr "box listing the possible start pages" - -msgid "ctxcomponents_userstatus" -msgstr "" - -msgid "ctxcomponents_userstatus_description" -msgstr "" - -msgid "ctxcomponents_wfhistory" -msgstr "workflow history" - -msgid "ctxcomponents_wfhistory_description" -msgstr "show the workflow's history." - -msgid "ctxtoolbar" -msgstr "toolbar" - -msgid "custom_workflow" -msgstr "custom workflow" - -msgid "custom_workflow_object" -msgstr "custom workflow of" - -msgid "cw.groups-management" -msgstr "groups" - -msgid "cw.users-management" -msgstr "users" - -msgid "cw_for_source" -msgstr "for source" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_for_source" -msgstr "for source" - -msgid "cw_for_source_object" -msgstr "mapping" - -msgctxt "CWSource" -msgid "cw_for_source_object" -msgstr "mapping" - -msgid "cw_host_config_of" -msgstr "source" - -msgctxt "CWSourceHostConfig" -msgid "cw_host_config_of" -msgstr "source" - -msgid "cw_host_config_of_object" -msgstr "host configuration" - -msgctxt "CWSource" -msgid "cw_host_config_of_object" -msgstr "host configuration" - -msgid "cw_import_of" -msgstr "source" - -msgctxt "CWDataImport" -msgid "cw_import_of" -msgstr "source" - -msgid "cw_import_of_object" -msgstr "imports" - -msgctxt "CWSource" -msgid "cw_import_of_object" -msgstr "imports" - -msgid "cw_schema" -msgstr "maps" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_schema" -msgstr "maps" - -msgid "cw_schema_object" -msgstr "mapped by" - -msgctxt "CWEType" -msgid "cw_schema_object" -msgstr "mapped by" - -msgctxt "CWRType" -msgid "cw_schema_object" -msgstr "mapped by" - -msgctxt "CWRelation" -msgid "cw_schema_object" -msgstr "mapped by" - -msgid "cw_source" -msgstr "source" - -msgid "cw_source_object" -msgstr "contains entities" - -msgid "cwetype-box" -msgstr "\"box\" view" - -msgid "cwetype-description" -msgstr "description" - -msgid "cwetype-permissions" -msgstr "permissions" - -msgid "cwetype-views" -msgstr "views" - -msgid "cwetype-workflow" -msgstr "workflow" - -msgid "cwgroup-main" -msgstr "description" - -msgid "cwgroup-permissions" -msgstr "permissions" - -msgid "cwrtype-description" -msgstr "description" - -msgid "cwrtype-permissions" -msgstr "permissions" - -msgid "cwsource-imports" -msgstr "" - -msgid "cwsource-main" -msgstr "description" - -msgid "cwsource-mapping" -msgstr "mapping" - -msgid "cwuri" -msgstr "internal uri" - -msgid "data directory url" -msgstr "" - -msgid "data model schema" -msgstr "" - -msgid "data sources" -msgstr "" - -msgid "data sources management" 
-msgstr "" - -msgid "date" -msgstr "" - -msgid "deactivate" -msgstr "" - -msgid "deactivated" -msgstr "" - -msgid "december" -msgstr "" - -msgid "default" -msgstr "" - -msgid "default text format for rich text fields." -msgstr "" - -msgid "default user workflow" -msgstr "" - -msgid "default value" -msgstr "" - -msgid "default value as gziped pickled python object" -msgstr "" - -msgid "default workflow for an entity type" -msgstr "" - -msgid "default_workflow" -msgstr "default workflow" - -msgctxt "CWEType" -msgid "default_workflow" -msgstr "default workflow" - -msgid "default_workflow_object" -msgstr "default workflow of" - -msgctxt "Workflow" -msgid "default_workflow_object" -msgstr "default workflow of" - -msgid "defaultval" -msgstr "default value" - -msgctxt "CWAttribute" -msgid "defaultval" -msgstr "default value" - -msgid "define a CubicWeb user" -msgstr "" - -msgid "define a CubicWeb users group" -msgstr "" - -msgid "" -"define a final relation: link a final relation type from a non final entity " -"to a final entity type. used to build the instance schema" -msgstr "" - -msgid "" -"define a non final relation: link a non final relation type from a non final " -"entity to a non final entity type. used to build the instance schema" -msgstr "" - -msgid "define a relation type, used to build the instance schema" -msgstr "" - -msgid "define a rql expression used to define permissions" -msgstr "" - -msgid "define a schema constraint" -msgstr "" - -msgid "define a schema constraint type" -msgstr "" - -msgid "define a virtual relation type, used to build the instance schema" -msgstr "" - -msgid "define an entity type, used to build the instance schema" -msgstr "" - -msgid "define how we get out from a sub-workflow" -msgstr "" - -msgid "defines a sql-level multicolumn unique index" -msgstr "" - -msgid "" -"defines what's the property is applied for. 
You must select this first to be " -"able to set value" -msgstr "" - -msgid "delete" -msgstr "" - -msgid "delete this bookmark" -msgstr "" - -msgid "delete this relation" -msgstr "" - -msgid "delete_permission" -msgstr "can be deleted by" - -msgctxt "CWEType" -msgid "delete_permission" -msgstr "delete permission" - -msgctxt "CWRelation" -msgid "delete_permission" -msgstr "delete_permission" - -msgid "delete_permission_object" -msgstr "has permission to delete" - -msgctxt "CWGroup" -msgid "delete_permission_object" -msgstr "has permission to delete" - -msgctxt "RQLExpression" -msgid "delete_permission_object" -msgstr "has permission to delete" - -#, python-format -msgid "deleted %(etype)s #%(eid)s (%(title)s)" -msgstr "" - -#, python-format -msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" - -msgid "depends on the constraint type" -msgstr "" - -msgid "description" -msgstr "description" - -msgctxt "BaseTransition" -msgid "description" -msgstr "description" - -msgctxt "CWAttribute" -msgid "description" -msgstr "description" - -msgctxt "CWComputedRType" -msgid "description" -msgstr "description" - -msgctxt "CWEType" -msgid "description" -msgstr "description" - -msgctxt "CWRType" -msgid "description" -msgstr "description" - -msgctxt "CWRelation" -msgid "description" -msgstr "description" - -msgctxt "State" -msgid "description" -msgstr "description" - -msgctxt "Transition" -msgid "description" -msgstr "description" - -msgctxt "Workflow" -msgid "description" -msgstr "description" - -msgctxt "WorkflowTransition" -msgid "description" -msgstr "description" - -msgid "description_format" -msgstr "format" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "format" - -msgctxt "CWAttribute" -msgid "description_format" -msgstr "format" - -msgctxt "CWComputedRType" -msgid "description_format" -msgstr "format" - -msgctxt "CWEType" -msgid "description_format" -msgstr "format" - -msgctxt "CWRType" -msgid "description_format" -msgstr "format" - -msgctxt "CWRelation" -msgid "description_format" -msgstr "format" - -msgctxt "State" -msgid "description_format" -msgstr "format" - -msgctxt "Transition" -msgid "description_format" -msgstr "format" - -msgctxt "Workflow" -msgid "description_format" -msgstr "format" - -msgctxt "WorkflowTransition" -msgid "description_format" -msgstr "format" - -msgid "destination state for this transition" -msgstr "" - -msgid "destination state must be in the same workflow as our parent transition" -msgstr "" - -msgid "destination state of a transition" -msgstr "" - -msgid "" -"destination state. No destination state means that transition should go back " -"to the state from which we've entered the subworkflow." 
-msgstr "" - -msgid "destination_state" -msgstr "destination state" - -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "destination state" - -msgctxt "Transition" -msgid "destination_state" -msgstr "destination state" - -msgid "destination_state_object" -msgstr "destination of" - -msgctxt "State" -msgid "destination_state_object" -msgstr "destination of" - -msgid "detach attached file" -msgstr "" - -msgid "display order of the box" -msgstr "" - -msgid "display order of the component" -msgstr "" - -msgid "display order of the facet" -msgstr "" - -msgid "display the box or not" -msgstr "" - -msgid "display the component or not" -msgstr "" - -msgid "display the facet or not" -msgstr "" - -msgid "download" -msgstr "" - -#, python-format -msgid "download %s" -msgstr "" - -msgid "download icon" -msgstr "" - -msgid "download schema as owl" -msgstr "" - -msgid "edit bookmarks" -msgstr "" - -msgid "edit canceled" -msgstr "" - -msgid "editable-table" -msgstr "" - -msgid "eid" -msgstr "" - -msgid "embedded html" -msgstr "" - -msgid "end_timestamp" -msgstr "end timestamp" - -msgctxt "CWDataImport" -msgid "end_timestamp" -msgstr "end timestamp" - -msgid "entities deleted" -msgstr "" - -msgid "entity and relation types can't be mapped, only attributes or relations" -msgstr "" - -msgid "entity copied" -msgstr "" - -msgid "entity created" -msgstr "" - -msgid "entity creation" -msgstr "" - -msgid "entity deleted" -msgstr "" - -msgid "entity deletion" -msgstr "" - -msgid "entity edited" -msgstr "" - -msgid "entity has no workflow set" -msgstr "" - -msgid "entity linked" -msgstr "" - -msgid "entity type" -msgstr "" - -msgid "entity types which may use this workflow" -msgstr "" - -msgid "entity update" -msgstr "" - -msgid "entityview" -msgstr "entity views" - -msgid "error" -msgstr "" - -msgid "error while publishing ReST text" -msgstr "" - -msgid "exit state must be a subworkflow state" -msgstr "" - -msgid "exit_point" -msgstr "exit point" - -msgid "exit_point_object" -msgstr "exit point of" - -#, python-format -msgid "exiting from subworkflow %s" -msgstr "" - -msgid "expression" -msgstr "" - -msgctxt "RQLExpression" -msgid "expression" -msgstr "expression" - -msgid "exprtype" -msgstr "expression's type" - -msgctxt "RQLExpression" -msgid "exprtype" -msgstr "expression type" - -msgid "extra_props" -msgstr "" - -msgctxt "CWAttribute" -msgid "extra_props" -msgstr "" - -msgid "facet-loading-msg" -msgstr "processing, please wait" - -msgid "facet.filters" -msgstr "filter" - -msgid "facetbox" -msgstr "facettes" - -msgid "facets_created_by-facet" -msgstr "\"created by\" facet" - -msgid "facets_created_by-facet_description" -msgstr "" - -msgid "facets_cw_source-facet" -msgstr "data source" - -msgid "facets_cw_source-facet_description" -msgstr "" - -msgid "facets_cwfinal-facet" -msgstr "\"final entity or relation type\" facet" - -msgid "facets_cwfinal-facet_description" -msgstr "" - -msgid "facets_datafeed.dataimport.status" -msgstr "" - -msgid "facets_datafeed.dataimport.status_description" -msgstr "" - -msgid "facets_etype-facet" -msgstr "\"entity type\" facet" - -msgid "facets_etype-facet_description" -msgstr "" - -msgid "facets_has_text-facet" -msgstr "\"has text\" facet" - -msgid "facets_has_text-facet_description" -msgstr "" - -msgid "facets_in_group-facet" -msgstr "\"in group\" facet" - -msgid "facets_in_group-facet_description" -msgstr "" - -msgid "facets_in_state-facet" -msgstr "\"in state\" facet" - -msgid "facets_in_state-facet_description" -msgstr "" - -msgid "failed" -msgstr "" - -#, 
python-format -msgid "failed to uniquify path (%s, %s)" -msgstr "" - -msgid "february" -msgstr "" - -msgid "file tree view" -msgstr "" - -msgid "final" -msgstr "" - -msgctxt "CWEType" -msgid "final" -msgstr "final" - -msgctxt "CWRType" -msgid "final" -msgstr "final" - -msgid "first name" -msgstr "" - -msgid "firstname" -msgstr "" - -msgctxt "CWUser" -msgid "firstname" -msgstr "firstname" - -msgid "foaf" -msgstr "" - -msgid "focus on this selection" -msgstr "" - -msgid "follow" -msgstr "" - -#, python-format -msgid "follow this link for more information on this %s" -msgstr "" - -msgid "for_user" -msgstr "for user" - -msgctxt "CWProperty" -msgid "for_user" -msgstr "for user" - -msgid "for_user_object" -msgstr "use properties" - -msgctxt "CWUser" -msgid "for_user_object" -msgstr "property of" - -msgid "formula" -msgstr "formula" - -msgctxt "CWAttribute" -msgid "formula" -msgstr "formula" - -msgid "friday" -msgstr "" - -msgid "from" -msgstr "" - -#, python-format -msgid "from %(date)s" -msgstr "" - -msgid "from_entity" -msgstr "from entity" - -msgctxt "CWAttribute" -msgid "from_entity" -msgstr "from entity" - -msgctxt "CWRelation" -msgid "from_entity" -msgstr "from entity" - -msgid "from_entity_object" -msgstr "subjet relation" - -msgctxt "CWEType" -msgid "from_entity_object" -msgstr "subjec relation" - -msgid "from_interval_start" -msgstr "from" - -msgid "from_state" -msgstr "from state" - -msgctxt "TrInfo" -msgid "from_state" -msgstr "from state" - -msgid "from_state_object" -msgstr "transitions from this state" - -msgctxt "State" -msgid "from_state_object" -msgstr "transitions from this state" - -msgid "full text or RQL query" -msgstr "" - -msgid "fulltext_container" -msgstr "fulltext container" - -msgctxt "CWRType" -msgid "fulltext_container" -msgstr "fulltext container" - -msgid "fulltextindexed" -msgstr "fulltext indexed" - -msgctxt "CWAttribute" -msgid "fulltextindexed" -msgstr "fulltext indexed" - -msgid "gc" -msgstr "memory leak" - -msgid "generic plot" -msgstr "" - -msgid "generic relation to link one entity to another" -msgstr "" - -msgid "" -"generic relation to specify that an external entity represent the same " -"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" -msgstr "" - -msgid "granted to groups" -msgstr "" - -#, python-format -msgid "graphical representation of %(appid)s data model" -msgstr "" - -#, python-format -msgid "" -"graphical representation of the %(etype)s entity type from %(appid)s data " -"model" -msgstr "" - -#, python-format -msgid "" -"graphical representation of the %(rtype)s relation type from %(appid)s data " -"model" -msgstr "" - -msgid "group in which a user should be to be allowed to pass this transition" -msgstr "" - -msgid "groups" -msgstr "" - -msgid "groups allowed to add entities/relations of this type" -msgstr "" - -msgid "groups allowed to delete entities/relations of this type" -msgstr "" - -msgid "groups allowed to read entities/relations of this type" -msgstr "" - -msgid "groups allowed to update entities/relations of this type" -msgstr "" - -msgid "groups grant permissions to the user" -msgstr "" - -msgid "guests" -msgstr "" - -msgid "hCalendar" -msgstr "" - -msgid "has_text" -msgstr "has text" - -msgid "header-center" -msgstr "" - -msgid "header-left" -msgstr "header (left)" - -msgid "header-right" -msgstr "header (right)" - -msgid "hide filter form" -msgstr "" - -msgid "" -"how to format date and time in the ui (see this page for format " -"description)" -msgstr "" - -msgid "" -"how to format date in the ui (see this page 
for format " -"description)" -msgstr "" - -msgid "how to format float numbers in the ui" -msgstr "" - -msgid "" -"how to format time in the ui (see this page for format " -"description)" -msgstr "" - -msgid "i18n_bookmark_url_fqs" -msgstr "parameters" - -msgid "i18n_bookmark_url_path" -msgstr "path" - -msgid "i18n_login_popup" -msgstr "login" - -msgid "i18ncard_*" -msgstr "0..n" - -msgid "i18ncard_+" -msgstr "1..n" - -msgid "i18ncard_1" -msgstr "1" - -msgid "i18ncard_?" -msgstr "0..1" - -msgid "i18nprevnext_next" -msgstr "next" - -msgid "i18nprevnext_previous" -msgstr "previous" - -msgid "i18nprevnext_up" -msgstr "up" - -msgid "iCalendar" -msgstr "" - -msgid "id of main template used to render pages" -msgstr "" - -msgid "identical to" -msgstr "" - -msgid "identical_to" -msgstr "identical to" - -msgid "identity" -msgstr "" - -msgid "identity_object" -msgstr "identity" - -msgid "" -"if full text content of subject/object entity should be added to other side " -"entity (the container)." -msgstr "" - -msgid "image" -msgstr "" - -msgid "in progress" -msgstr "" - -msgid "in_group" -msgstr "in group" - -msgctxt "CWUser" -msgid "in_group" -msgstr "in group" - -msgid "in_group_object" -msgstr "contains" - -msgctxt "CWGroup" -msgid "in_group_object" -msgstr "contains" - -msgid "in_state" -msgstr "in state" - -msgid "in_state_object" -msgstr "state of" - -msgid "in_synchronization" -msgstr "in synchronization" - -msgctxt "CWSource" -msgid "in_synchronization" -msgstr "in synchronization" - -msgid "incontext" -msgstr "in-context" - -msgid "incorrect captcha value" -msgstr "" - -#, python-format -msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" -msgstr "" - -msgid "index this attribute's value in the plain text index" -msgstr "" - -msgid "indexed" -msgstr "" - -msgctxt "CWAttribute" -msgid "indexed" -msgstr "indexed" - -msgid "indicate the current state of an entity" -msgstr "" - -msgid "" -"indicate which state should be used by default when an entity using states " -"is created" -msgstr "" - -msgid "indifferent" -msgstr "indifferent" - -msgid "info" -msgstr "" - -msgid "initial state for this workflow" -msgstr "" - -msgid "initial_state" -msgstr "initial state" - -msgctxt "Workflow" -msgid "initial_state" -msgstr "initial state" - -msgid "initial_state_object" -msgstr "initial state of" - -msgctxt "State" -msgid "initial_state_object" -msgstr "initial state of" - -msgid "inlined" -msgstr "" - -msgctxt "CWRType" -msgid "inlined" -msgstr "inlined" - -msgid "instance home" -msgstr "" - -msgid "internal entity uri" -msgstr "" - -msgid "internationalizable" -msgstr "" - -msgctxt "CWAttribute" -msgid "internationalizable" -msgstr "internationalizable" - -#, python-format -msgid "invalid action %r" -msgstr "" - -#, python-format -msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" -msgstr "" - -msgid "is" -msgstr "" - -msgid "is object of:" -msgstr "" - -msgid "is subject of:" -msgstr "" - -msgid "" -"is the subject/object entity of the relation composed of the other ? This " -"implies that when the composite is deleted, composants are also deleted." -msgstr "" - -msgid "is this attribute's value translatable" -msgstr "" - -msgid "is this relation equivalent in both direction ?" -msgstr "" - -msgid "" -"is this relation physically inlined? you should know what you're doing if " -"you are changing this!" 
-msgstr "" - -msgid "is_instance_of" -msgstr "is instance of" - -msgid "is_instance_of_object" -msgstr "instances" - -msgid "is_object" -msgstr "has instances" - -msgid "january" -msgstr "" - -msgid "json-entities-export-view" -msgstr "JSON export (entities)" - -msgid "json-export-view" -msgstr "JSON export" - -msgid "july" -msgstr "" - -msgid "june" -msgstr "" - -msgid "language of the user interface" -msgstr "" - -msgid "last connection date" -msgstr "" - -msgid "last login time" -msgstr "" - -msgid "last name" -msgstr "" - -msgid "last usage" -msgstr "" - -msgid "last_login_time" -msgstr "last login time" - -msgctxt "CWUser" -msgid "last_login_time" -msgstr "last login time" - -msgid "latest import" -msgstr "" - -msgid "latest modification time of an entity" -msgstr "" - -msgid "latest synchronization time" -msgstr "" - -msgid "latest update on" -msgstr "" - -msgid "latest_retrieval" -msgstr "latest retrieval" - -msgctxt "CWSource" -msgid "latest_retrieval" -msgstr "latest retrieval" - -msgid "left" -msgstr "" - -msgid "line" -msgstr "" - -msgid "" -"link a property to the user which want this property customization. Unless " -"you're a site manager, this relation will be handled automatically." -msgstr "" - -msgid "link a relation definition to its object entity type" -msgstr "" - -msgid "link a relation definition to its relation type" -msgstr "" - -msgid "link a relation definition to its subject entity type" -msgstr "" - -msgid "link a state to one or more workflow" -msgstr "" - -msgid "link a transition information to its object" -msgstr "" - -msgid "link a transition to one or more workflow" -msgstr "" - -msgid "link a workflow to one or more entity type" -msgstr "" - -msgid "list" -msgstr "" - -msgid "log" -msgstr "" - -msgctxt "CWDataImport" -msgid "log" -msgstr "" - -msgid "log in" -msgstr "" - -msgid "login" -msgstr "" - -msgctxt "CWUser" -msgid "login" -msgstr "login" - -msgid "login / password" -msgstr "" - -msgid "login or email" -msgstr "" - -msgid "login_action" -msgstr "log in" - -msgid "logout" -msgstr "" - -#, python-format -msgid "loop in %(rel)s relation (%(eid)s)" -msgstr "" - -msgid "main informations" -msgstr "" - -msgid "main_tab" -msgstr "description" - -msgid "mainvars" -msgstr "main vars" - -msgctxt "RQLExpression" -msgid "mainvars" -msgstr "main variables" - -msgid "manage" -msgstr "" - -msgid "manage bookmarks" -msgstr "" - -msgid "manage permissions" -msgstr "" - -msgid "managers" -msgstr "" - -msgid "mandatory relation" -msgstr "" - -msgid "march" -msgstr "" - -msgid "match_host" -msgstr "match host" - -msgctxt "CWSourceHostConfig" -msgid "match_host" -msgstr "match host" - -msgid "maximum number of characters in short description" -msgstr "" - -msgid "maximum number of entities to display in related combo box" -msgstr "" - -msgid "maximum number of objects displayed by page of results" -msgstr "" - -msgid "maximum number of related entities to display in the primary view" -msgstr "" - -msgid "may" -msgstr "" - -msgid "memory leak debugging" -msgstr "" - -msgid "message" -msgstr "" - -#, python-format -msgid "missing parameters for entity %s" -msgstr "" - -msgid "modification" -msgstr "" - -msgid "modification_date" -msgstr "modification date" - -msgid "modify" -msgstr "" - -msgid "monday" -msgstr "" - -msgid "more actions" -msgstr "" - -msgid "more info about this workflow" -msgstr "" - -msgid "multiple edit" -msgstr "" - -msgid "my custom search" -msgstr "" - -msgid "name" -msgstr "" - -msgctxt "BaseTransition" -msgid "name" -msgstr "name" - -msgctxt 
"CWCache" -msgid "name" -msgstr "name" - -msgctxt "CWComputedRType" -msgid "name" -msgstr "name" - -msgctxt "CWConstraintType" -msgid "name" -msgstr "name" - -msgctxt "CWEType" -msgid "name" -msgstr "name" - -msgctxt "CWGroup" -msgid "name" -msgstr "name" - -msgctxt "CWRType" -msgid "name" -msgstr "name" - -msgctxt "CWSource" -msgid "name" -msgstr "name" - -msgctxt "CWUniqueTogetherConstraint" -msgid "name" -msgstr "" - -msgctxt "State" -msgid "name" -msgstr "name" - -msgctxt "Transition" -msgid "name" -msgstr "name" - -msgctxt "Workflow" -msgid "name" -msgstr "name" - -msgctxt "WorkflowTransition" -msgid "name" -msgstr "name" - -msgid "name of the cache" -msgstr "" - -msgid "" -"name of the main variables which should be used in the selection if " -"necessary (comma separated)" -msgstr "" - -msgid "name of the source" -msgstr "" - -msgid "navbottom" -msgstr "page bottom" - -msgid "navcontentbottom" -msgstr "page bottom, within main content" - -msgid "navcontenttop" -msgstr "page top, within main content" - -msgid "navigation" -msgstr "" - -msgid "navigation.combobox-limit" -msgstr "\"related\" combo-box" - -msgid "navigation.page-size" -msgstr "number of results" - -msgid "navigation.related-limit" -msgstr "number of entities in the primary view " - -msgid "navigation.short-line-size" -msgstr "short description" - -msgid "navtop" -msgstr "page top" - -msgid "new" -msgstr "" - -msgid "next page" -msgstr "" - -msgid "next_results" -msgstr "next results" - -msgid "no" -msgstr "" - -msgid "no content next link" -msgstr "" - -msgid "no content prev link" -msgstr "" - -msgid "no edited fields specified" -msgstr "" - -msgid "no log to display" -msgstr "" - -msgid "no related entity" -msgstr "" - -msgid "no repository sessions found" -msgstr "" - -msgid "no selected entities" -msgstr "" - -#, python-format -msgid "no such entity type %s" -msgstr "" - -msgid "no version information" -msgstr "" - -msgid "no web sessions found" -msgstr "" - -msgid "normal" -msgstr "" - -msgid "not authorized" -msgstr "" - -msgid "not selected" -msgstr "" - -msgid "november" -msgstr "" - -msgid "num. users" -msgstr "" - -msgid "object" -msgstr "" - -msgid "object type" -msgstr "" - -msgid "october" -msgstr "" - -msgid "one month" -msgstr "" - -msgid "one week" -msgstr "" - -msgid "oneline" -msgstr "one-line" - -msgid "only select queries are authorized" -msgstr "" - -msgid "open all" -msgstr "" - -msgid "opened sessions" -msgstr "" - -msgid "opened web sessions" -msgstr "" - -msgid "options" -msgstr "" - -msgctxt "CWSourceSchemaConfig" -msgid "options" -msgstr "options" - -msgid "order" -msgstr "" - -msgid "ordernum" -msgstr "order" - -msgctxt "CWAttribute" -msgid "ordernum" -msgstr "order" - -msgctxt "CWRelation" -msgid "ordernum" -msgstr "order" - -msgid "owl" -msgstr "" - -msgid "owlabox" -msgstr "" - -msgid "owned_by" -msgstr "owned by" - -msgid "owned_by_object" -msgstr "owns" - -msgid "owners" -msgstr "" - -msgid "ownerships have been changed" -msgstr "" - -msgid "pageid-not-found" -msgstr "" -"some necessary data seem expired, please reload the page and try again." - -msgid "parser" -msgstr "" - -msgctxt "CWSource" -msgid "parser" -msgstr "parser" - -msgid "parser to use to extract entities from content retrieved at given URLs." 
-msgstr "" - -msgid "password" -msgstr "" - -msgid "password and confirmation don't match" -msgstr "" - -msgid "path" -msgstr "" - -msgctxt "Bookmark" -msgid "path" -msgstr "path" - -msgid "permalink to this message" -msgstr "" - -msgid "permission" -msgstr "" - -msgid "permissions" -msgstr "" - -msgid "pick existing bookmarks" -msgstr "" - -msgid "pkey" -msgstr "key" - -msgctxt "CWProperty" -msgid "pkey" -msgstr "key" - -msgid "please correct errors below" -msgstr "" - -msgid "please correct the following errors:" -msgstr "" - -msgid "possible views" -msgstr "" - -msgid "prefered_form" -msgstr "prefered form" - -msgctxt "EmailAddress" -msgid "prefered_form" -msgstr "prefered form" - -msgid "prefered_form_object" -msgstr "prefered over" - -msgctxt "EmailAddress" -msgid "prefered_form_object" -msgstr "prefered over" - -msgid "preferences" -msgstr "" - -msgid "previous page" -msgstr "" - -msgid "previous_results" -msgstr "previous results" - -msgid "primary" -msgstr "" - -msgid "primary_email" -msgstr "primary email" - -msgctxt "CWUser" -msgid "primary_email" -msgstr "primary email" - -msgid "primary_email_object" -msgstr "primary email of" - -msgctxt "EmailAddress" -msgid "primary_email_object" -msgstr "primary email of" - -msgid "profile" -msgstr "" - -msgid "rdef-description" -msgstr "description" - -msgid "rdef-permissions" -msgstr "permissions" - -msgid "rdf export" -msgstr "RDF export" - -msgid "read" -msgstr "" - -msgid "read_permission" -msgstr "read permission" - -msgctxt "CWAttribute" -msgid "read_permission" -msgstr "read permission" - -msgctxt "CWEType" -msgid "read_permission" -msgstr "read permission" - -msgctxt "CWRelation" -msgid "read_permission" -msgstr "read permission" - -msgid "read_permission_object" -msgstr "has permission to read" - -msgctxt "CWGroup" -msgid "read_permission_object" -msgstr "has permission to read" - -msgctxt "RQLExpression" -msgid "read_permission_object" -msgstr "has permission to read" - -msgid "regexp matching host(s) to which this config applies" -msgstr "" - -msgid "registry" -msgstr "" - -msgid "related entity has no state" -msgstr "" - -msgid "related entity has no workflow set" -msgstr "" - -msgid "relation" -msgstr "" - -#, python-format -msgid "relation %(relname)s of %(ent)s" -msgstr "" - -#, python-format -msgid "" -"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " -"type supported" -msgstr "" - -#, python-format -msgid "" -"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " -"mandatory but not supported" -msgstr "" - -#, python-format -msgid "" -"relation %s is supported but none of its definitions matches supported " -"entities" -msgstr "" - -msgid "relation add" -msgstr "" - -msgid "relation removal" -msgstr "" - -msgid "relation_type" -msgstr "relation type" - -msgctxt "CWAttribute" -msgid "relation_type" -msgstr "relation type" - -msgctxt "CWRelation" -msgid "relation_type" -msgstr "relation type" - -msgid "relation_type_object" -msgstr "relation definitions" - -msgctxt "CWRType" -msgid "relation_type_object" -msgstr "relation definitions" - -msgid "relations" -msgstr "" - -msgctxt "CWUniqueTogetherConstraint" -msgid "relations" -msgstr "relations" - -msgid "relations deleted" -msgstr "" - -msgid "relations_object" -msgstr "constrained by" - -msgctxt "CWRType" -msgid "relations_object" -msgstr "constrained by" - -msgid "relative url of the bookmarked page" -msgstr "" - -msgid "remove-inlined-entity-form" -msgstr "remove" - -msgid "require_group" -msgstr "require the group" - 
-msgctxt "BaseTransition" -msgid "require_group" -msgstr "require group" - -msgctxt "Transition" -msgid "require_group" -msgstr "require group" - -msgctxt "WorkflowTransition" -msgid "require_group" -msgstr "require group" - -msgid "require_group_object" -msgstr "required by" - -msgctxt "CWGroup" -msgid "require_group_object" -msgstr "required by" - -msgid "required" -msgstr "" - -msgid "required attribute" -msgstr "" - -msgid "required field" -msgstr "" - -msgid "resources usage" -msgstr "" - -msgid "" -"restriction part of a rql query. For entity rql expression, X and U are " -"predefined respectivly to the current object and to the request user. For " -"relation rql expression, S, O and U are predefined respectivly to the " -"current relation'subject, object and to the request user. " -msgstr "" - -msgid "revert changes" -msgstr "" - -msgid "right" -msgstr "" - -msgid "rql expression allowing to add entities/relations of this type" -msgstr "" - -msgid "rql expression allowing to delete entities/relations of this type" -msgstr "" - -msgid "rql expression allowing to read entities/relations of this type" -msgstr "" - -msgid "rql expression allowing to update entities/relations of this type" -msgstr "" - -msgid "rql expressions" -msgstr "" - -msgid "rss export" -msgstr "RSS export" - -msgid "rule" -msgstr "rule" - -msgctxt "CWComputedRType" -msgid "rule" -msgstr "rule" - -msgid "same_as" -msgstr "same as" - -msgid "sample format" -msgstr "" - -msgid "saturday" -msgstr "" - -msgid "schema-diagram" -msgstr "diagram" - -msgid "schema-entity-types" -msgstr "entities" - -msgid "schema-relation-types" -msgstr "relations" - -msgid "search" -msgstr "" - -msgid "search for association" -msgstr "" - -msgid "searching for" -msgstr "" - -msgid "security" -msgstr "" - -msgid "see more" -msgstr "" - -msgid "see them all" -msgstr "" - -msgid "see_also" -msgstr "see also" - -msgid "select" -msgstr "" - -msgid "select a" -msgstr "" - -msgid "select a key first" -msgstr "" - -msgid "select a relation" -msgstr "" - -msgid "select this entity" -msgstr "" - -msgid "selected" -msgstr "" - -msgid "semantic description of this attribute" -msgstr "" - -msgid "semantic description of this entity type" -msgstr "" - -msgid "semantic description of this relation" -msgstr "" - -msgid "semantic description of this relation type" -msgstr "" - -msgid "semantic description of this state" -msgstr "" - -msgid "semantic description of this transition" -msgstr "" - -msgid "semantic description of this workflow" -msgstr "" - -msgid "september" -msgstr "" - -msgid "server information" -msgstr "" - -msgid "severity" -msgstr "" - -msgid "" -"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " -"You should also select text/html as default text format to actually get " -"fckeditor." -msgstr "" - -#, python-format -msgid "show %s results" -msgstr "" - -msgid "show advanced fields" -msgstr "" - -msgid "show filter form" -msgstr "" - -msgid "site configuration" -msgstr "" - -msgid "site documentation" -msgstr "" - -msgid "site title" -msgstr "" - -msgid "site-wide property can't be set for user" -msgstr "" - -msgid "some later transaction(s) touch entity, undo them first" -msgstr "" - -msgid "some relations violate a unicity constraint" -msgstr "" - -msgid "sorry, the server is unable to handle this query" -msgstr "" - -msgid "" -"source's configuration. 
One key=value per line, authorized keys depending on " -"the source's type" -msgstr "" - -msgid "sparql xml" -msgstr "" - -msgid "special transition allowing to go through a sub-workflow" -msgstr "" - -msgid "specializes" -msgstr "specializes" - -msgctxt "CWEType" -msgid "specializes" -msgstr "specializes" - -msgid "specializes_object" -msgstr "specialized by" - -msgctxt "CWEType" -msgid "specializes_object" -msgstr "specialized by" - -#, python-format -msgid "specifying %s is mandatory" -msgstr "" - -msgid "" -"start timestamp of the currently in synchronization, or NULL when no " -"synchronization in progress." -msgstr "" - -msgid "start_timestamp" -msgstr "start timestamp" - -msgctxt "CWDataImport" -msgid "start_timestamp" -msgstr "start timestamp" - -msgid "startup views" -msgstr "" - -msgid "startupview" -msgstr "startup views" - -msgid "state" -msgstr "" - -msgid "state and transition don't belong the the same workflow" -msgstr "" - -msgid "state doesn't apply to this entity's type" -msgstr "" - -msgid "state doesn't belong to entity's current workflow" -msgstr "" - -msgid "state doesn't belong to entity's workflow" -msgstr "" - -msgid "" -"state doesn't belong to entity's workflow. You may want to set a custom " -"workflow for this entity first." -msgstr "" - -msgid "state doesn't belong to this workflow" -msgstr "" - -msgid "state_of" -msgstr "state of" - -msgctxt "State" -msgid "state_of" -msgstr "state of" - -msgid "state_of_object" -msgstr "use states" - -msgctxt "Workflow" -msgid "state_of_object" -msgstr "use states" - -msgid "status" -msgstr "" - -msgctxt "CWDataImport" -msgid "status" -msgstr "status" - -msgid "status change" -msgstr "" - -msgid "status changed" -msgstr "" - -#, python-format -msgid "status will change from %(st1)s to %(st2)s" -msgstr "" - -msgid "subject" -msgstr "" - -msgid "subject type" -msgstr "" - -msgid "subject/object cardinality" -msgstr "" - -msgid "subworkflow" -msgstr "" - -msgctxt "WorkflowTransition" -msgid "subworkflow" -msgstr "subworkflow" - -msgid "" -"subworkflow isn't a workflow for the same types as the transition's workflow" -msgstr "" - -msgid "subworkflow state" -msgstr "" - -msgid "subworkflow_exit" -msgstr "subworkflow exit" - -msgctxt "WorkflowTransition" -msgid "subworkflow_exit" -msgstr "subworkflow exit" - -msgid "subworkflow_exit_object" -msgstr "subworkflow exit of" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_exit_object" -msgstr "subworkflow exit of" - -msgid "subworkflow_object" -msgstr "subworkflow of" - -msgctxt "Workflow" -msgid "subworkflow_object" -msgstr "subworkflow of" - -msgid "subworkflow_state" -msgstr "subworkflow state" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_state" -msgstr "subworkflow state" - -msgid "subworkflow_state_object" -msgstr "exit point" - -msgctxt "State" -msgid "subworkflow_state_object" -msgstr "exit point" - -msgid "success" -msgstr "" - -msgid "sunday" -msgstr "" - -msgid "surname" -msgstr "" - -msgctxt "CWUser" -msgid "surname" -msgstr "" - -msgid "symmetric" -msgstr "symmetric" - -msgctxt "CWRType" -msgid "symmetric" -msgstr "symmetric" - -msgid "synchronization-interval must be greater than 1 minute" -msgstr "" - -msgid "table" -msgstr "" - -msgid "tablefilter" -msgstr "table filter" - -msgid "text" -msgstr "" - -msgid "text/cubicweb-page-template" -msgstr "cubicweb page template" - -msgid "text/html" -msgstr "html" - -msgid "text/markdown" -msgstr "markdown formatted text" - -msgid "text/plain" -msgstr "plain text" - -msgid "text/rest" -msgstr "ReST text" - -msgid 
"the URI of the object" -msgstr "" - -msgid "the prefered email" -msgstr "" - -msgid "the system source has its configuration stored on the file-system" -msgstr "" - -#, python-format -msgid "the value \"%s\" is already used, use another one" -msgstr "" - -msgid "there is no next page" -msgstr "" - -msgid "there is no previous page" -msgstr "" - -#, python-format -msgid "there is no transaction #%s" -msgstr "" - -msgid "this action is not reversible!" -msgstr "" - -msgid "this entity is currently owned by" -msgstr "" - -msgid "this parser doesn't use a mapping" -msgstr "" - -msgid "this resource does not exist" -msgstr "" - -msgid "this source doesn't use a mapping" -msgstr "" - -msgid "thursday" -msgstr "" - -msgid "timestamp" -msgstr "" - -msgctxt "CWCache" -msgid "timestamp" -msgstr "timestamp" - -msgid "timetable" -msgstr "" - -msgid "title" -msgstr "" - -msgctxt "Bookmark" -msgid "title" -msgstr "title" - -msgid "to" -msgstr "" - -#, python-format -msgid "to %(date)s" -msgstr "" - -msgid "to associate with" -msgstr "" - -msgid "to_entity" -msgstr "to entity" - -msgctxt "CWAttribute" -msgid "to_entity" -msgstr "to entity" - -msgctxt "CWRelation" -msgid "to_entity" -msgstr "to entity" - -msgid "to_entity_object" -msgstr "object relations" - -msgctxt "CWEType" -msgid "to_entity_object" -msgstr "object relations" - -msgid "to_interval_end" -msgstr "to" - -msgid "to_state" -msgstr "to state" - -msgctxt "TrInfo" -msgid "to_state" -msgstr "to state" - -msgid "to_state_object" -msgstr "transitions to this state" - -msgctxt "State" -msgid "to_state_object" -msgstr "transitions to this state" - -msgid "toggle check boxes" -msgstr "" - -msgid "tr_count" -msgstr "transition number" - -msgctxt "TrInfo" -msgid "tr_count" -msgstr "transition number" - -msgid "transaction undone" -msgstr "" - -#, python-format -msgid "transition %(tr)s isn't allowed from %(st)s" -msgstr "" - -msgid "transition doesn't belong to entity's workflow" -msgstr "" - -msgid "transition isn't allowed" -msgstr "" - -msgid "transition may not be fired" -msgstr "" - -msgid "transition_of" -msgstr "transition of" - -msgctxt "BaseTransition" -msgid "transition_of" -msgstr "transition of" - -msgctxt "Transition" -msgid "transition_of" -msgstr "transition of" - -msgctxt "WorkflowTransition" -msgid "transition_of" -msgstr "transition of" - -msgid "transition_of_object" -msgstr "use transitions" - -msgctxt "Workflow" -msgid "transition_of_object" -msgstr "use transitions" - -msgid "tree view" -msgstr "" - -msgid "tuesday" -msgstr "" - -msgid "type" -msgstr "" - -msgctxt "BaseTransition" -msgid "type" -msgstr "type" - -msgctxt "CWSource" -msgid "type" -msgstr "type" - -msgctxt "Transition" -msgid "type" -msgstr "type" - -msgctxt "WorkflowTransition" -msgid "type" -msgstr "type" - -msgid "type here a sparql query" -msgstr "" - -msgid "type of the source" -msgstr "" - -msgid "ui" -msgstr "" - -msgid "ui.date-format" -msgstr "date format" - -msgid "ui.datetime-format" -msgstr "date and time format" - -msgid "ui.default-text-format" -msgstr "text format" - -msgid "ui.encoding" -msgstr "encoding" - -msgid "ui.fckeditor" -msgstr "content editor" - -msgid "ui.float-format" -msgstr "float format" - -msgid "ui.language" -msgstr "language" - -msgid "ui.main-template" -msgstr "main template" - -msgid "ui.site-title" -msgstr "site title" - -msgid "ui.time-format" -msgstr "time format" - -msgid "unable to check captcha, please try again" -msgstr "" - -msgid "unaccessible" -msgstr "" - -msgid "unauthorized value" -msgstr "" - -msgid "undefined user" 
-msgstr "" - -msgid "undo" -msgstr "" - -msgid "unique identifier used to connect to the application" -msgstr "" - -msgid "unknown external entity" -msgstr "" - -#, python-format -msgid "unknown options %s" -msgstr "" - -#, python-format -msgid "unknown property key %s" -msgstr "" - -msgid "unknown vocabulary:" -msgstr "" - -msgid "unsupported protocol" -msgstr "" - -msgid "upassword" -msgstr "password" - -msgctxt "CWUser" -msgid "upassword" -msgstr "password" - -msgid "update" -msgstr "" - -msgid "update_permission" -msgstr "can be updated by" - -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "can be updated by" - -msgctxt "CWEType" -msgid "update_permission" -msgstr "can be updated by" - -msgid "update_permission_object" -msgstr "has permission to update" - -msgctxt "CWGroup" -msgid "update_permission_object" -msgstr "has permission to update" - -msgctxt "RQLExpression" -msgid "update_permission_object" -msgstr "has permission to update" - -msgid "update_relation" -msgstr "update" - -msgid "updated" -msgstr "" - -#, python-format -msgid "updated %(etype)s #%(eid)s (%(title)s)" -msgstr "" - -msgid "uri" -msgstr "" - -msgctxt "ExternalUri" -msgid "uri" -msgstr "uri" - -msgid "url" -msgstr "" - -msgctxt "CWSource" -msgid "url" -msgstr "url" - -msgid "" -"use to define a transition from one or multiple states to a destination " -"states in workflow's definitions. Transition without destination state will " -"go back to the state from which we arrived to the current state." -msgstr "" - -msgid "use_email" -msgstr "use email" - -msgctxt "CWUser" -msgid "use_email" -msgstr "use email" - -msgid "use_email_object" -msgstr "used by" - -msgctxt "EmailAddress" -msgid "use_email_object" -msgstr "used by" - -msgid "" -"used for cubicweb configuration. Once a property has been created you can't " -"change the key." -msgstr "" - -msgid "" -"used to associate simple states to an entity type and/or to define workflows" -msgstr "" - -msgid "user" -msgstr "" - -#, python-format -msgid "" -"user %s has made the following change(s):\n" -"\n" -msgstr "" - -msgid "user interface encoding" -msgstr "" - -msgid "user preferences" -msgstr "" - -msgid "user's email account" -msgstr "" - -msgid "users" -msgstr "" - -msgid "users and groups" -msgstr "" - -msgid "users using this bookmark" -msgstr "" - -msgid "validate modifications on selected items" -msgstr "" - -msgid "validating..." 
-msgstr "" - -msgid "value" -msgstr "" - -msgctxt "CWConstraint" -msgid "value" -msgstr "" - -msgctxt "CWProperty" -msgid "value" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be < %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be > %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" -msgstr "" - -msgid "value associated to this key is not editable manually" -msgstr "" - -#, python-format -msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" -msgstr "" - -#, python-format -msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" -msgstr "" - -msgid "vcard" -msgstr "" - -msgid "versions configuration" -msgstr "" - -msgid "view" -msgstr "" - -msgid "view all" -msgstr "" - -msgid "view detail for this entity" -msgstr "" - -msgid "view history" -msgstr "" - -msgid "view identifier" -msgstr "" - -msgid "view title" -msgstr "" - -msgid "view workflow" -msgstr "" - -msgid "view_index" -msgstr "index" - -msgid "visible" -msgstr "" - -msgid "warning" -msgstr "" - -msgid "we are not yet ready to handle this query" -msgstr "" - -msgid "wednesday" -msgstr "" - -#, python-format -msgid "welcome %s!" -msgstr "" - -msgid "wf_info_for" -msgstr "record for" - -msgid "wf_info_for_object" -msgstr "workflow history" - -msgid "wf_tab_info" -msgstr "states and transitions" - -msgid "wfgraph" -msgstr "graph" - -msgid "" -"when multiple addresses are equivalent (such as python-projects@logilab.org " -"and python-projects@lists.logilab.org), set this to indicate which is the " -"preferred form." -msgstr "" - -msgid "workflow" -msgstr "" - -#, python-format -msgid "workflow changed to \"%s\"" -msgstr "" - -msgid "workflow has no initial state" -msgstr "" - -msgid "workflow history item" -msgstr "" - -msgid "workflow isn't a workflow for this type" -msgstr "" - -msgid "workflow to which this state belongs" -msgstr "" - -msgid "workflow to which this transition belongs" -msgstr "" - -msgid "workflow_of" -msgstr "workflow of" - -msgctxt "Workflow" -msgid "workflow_of" -msgstr "workflow of" - -msgid "workflow_of_object" -msgstr "may use workflow" - -msgctxt "CWEType" -msgid "workflow_of_object" -msgstr "may use workflow" - -#, python-format -msgid "wrong query parameter line %s" -msgstr "" - -msgid "xbel export" -msgstr "XBEL export" - -msgid "xml export" -msgstr "XML export" - -msgid "xml export (entities)" -msgstr "XML export (entities)" - -msgid "yes" -msgstr "" - -msgid "you have been logged out" -msgstr "" - -msgid "you should probably delete that property" -msgstr "" - -#~ msgid "components_etypenavigation" -#~ msgstr "filtering by type" - -#~ msgid "components_etypenavigation_description" -#~ msgstr "permit to filter search results by entity type" diff -r 058bb3dc685f -r 0b59724cb3f2 i18n/es.po --- a/i18n/es.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4796 +0,0 @@ -# cubicweb i18n catalog -# Copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# Logilab -# Translators: -# CreaLibre , 2014 -msgid "" -msgstr "" -"Project-Id-Version: Cubicweb\n" -"POT-Creation-Date: 2006-01-12 17:35+CET\n" -"PO-Revision-Date: 2014-03-04 08:10+0000\n" -"Last-Translator: CreaLibre \n" -"Language-Team: Spanish (http://www.transifex.com/projects/p/cubicweb/" -"language/es/)\n" -"Language: es\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" - -#, python-format -msgid "" -"\n" -"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " -"entity\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" -msgstr "" -"\n" -"%(user)s ha cambiado su estado de <%(previous_state)s> hacia <" -"%(current_state)s> en la entidad\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" - -#, python-format -msgid " from state %(fromstate)s to state %(tostate)s\n" -msgstr " del estado %(fromstate)s hacia el estado %(tostate)s\n" - -msgid " :" -msgstr ":" - -#, python-format -msgid "\"action\" must be specified in options; allowed values are %s" -msgstr "" -"\"action\" debe estar especificada en opciones; los valores permitidos son : " -"%s" - -msgid "\"role=subject\" or \"role=object\" must be specified in options" -msgstr "" -"\"role=subject\" o \"role=object\" debe ser especificado en las opciones" - -#, python-format -msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" -msgstr "%(KEY-cstr)s restricción errónea para el valor %(KEY-value)r" - -#, python-format -msgid "%(KEY-rtype)s is part of violated unicity constraint" -msgstr "%(KEY-rtype)s pertenece a una restricción de unidad no respectada" - -#, python-format -msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" -msgstr "%(KEY-value)r no corresponde a la expresión regular %(KEY-regexp)r" - -#, python-format -msgid "%(attr)s set to %(newvalue)s" -msgstr "%(attr)s modificado a %(newvalue)s" - -#, python-format -msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" -msgstr "%(attr)s modificado de %(oldvalue)s a %(newvalue)s" - -#, python-format -msgid "%(etype)s by %(author)s" -msgstr "%(etype)s por %(author)s" - -#, python-format -msgid "%(firstname)s %(surname)s" -msgstr "%(firstname)s %(surname)s" - -#, python-format -msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" -msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" - -#, python-format -msgid "%d days" -msgstr "%d días" - -#, python-format -msgid "%d hours" -msgstr "%d horas" - -#, python-format -msgid "%d minutes" -msgstr "%d minutos" - -#, python-format -msgid "%d months" -msgstr "%d meses" - -#, python-format -msgid "%d seconds" -msgstr "%d segundos" - -#, python-format -msgid "%d weeks" -msgstr "%d semanas" - -#, python-format -msgid "%d years" -msgstr "%d años" - -#, python-format -msgid "%s could be supported" -msgstr "%s podría ser mantenido" - -#, python-format -msgid "%s error report" -msgstr "%s reporte de errores" - -#, python-format -msgid "%s software version of the database" -msgstr "versión sistema de la base para %s" - -#, python-format -msgid "%s updated" -msgstr "%s actualizado" - -#, python-format -msgid "'%s' action doesn't take any options" -msgstr "la acción '%s' no acepta opciones" - -#, python-format -msgid "" -"'%s' action for in_state relation should at least have 'linkattr=name' option" -msgstr "" -"'%s' acción en la relación in_state debe por lo menos tener la opción " -"'linkattr=name'" - -#, python-format -msgid 
"'%s' action requires 'linkattr' option" -msgstr "la acción '%s' requiere una opción 'linkattr'" - -msgid "(UNEXISTANT EID)" -msgstr "(EID INEXISTENTE" - -#, python-format -msgid "(suppressed) entity #%d" -msgstr "(eliminada) entidad #%d" - -msgid "**" -msgstr "0..n 0..n" - -msgid "*+" -msgstr "0..n 1..n" - -msgid "*1" -msgstr "0..n 1" - -msgid "*?" -msgstr "0..n 0..1" - -msgid "+*" -msgstr "1..n 0..n" - -msgid "++" -msgstr "1..n 1..n" - -msgid "+1" -msgstr "1..n 1" - -msgid "+?" -msgstr "1..n 0..1" - -msgid "1*" -msgstr "1 0..n" - -msgid "1+" -msgstr "1 1..n" - -msgid "11" -msgstr "1 1" - -msgid "1?" -msgstr "1 0..1" - -#, python-format -msgid "<%s not specified>" -msgstr "<%s no especificado>" - -#, python-format -msgid "" -"
      This schema of the data model excludes the meta-data, but you "
-"can also display a complete schema with meta-data.      "
-msgstr ""
-"
      Este esquema del modelo de datos no incluye los meta-datos, " -"pero se puede ver a un modelo completo con meta-datos." - -msgid "" -msgstr "" - -msgid "" -msgstr "" - -msgid "?*" -msgstr "0..1 0..n" - -msgid "?+" -msgstr "0..1 1..n" - -msgid "?1" -msgstr "0..1 1" - -msgid "??" -msgstr "0..1 0..1" - -msgid "AND" -msgstr "Y" - -msgid "About this site" -msgstr "Información del Sistema" - -#, python-format -msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "Relación agregada : %(entity_from)s %(rtype)s %(entity_to)s" - -msgid "Attributes permissions:" -msgstr "Permisos de atributos:" - -# schema pot file, generated on 2009-09-16 16:46:55 -# -# singular and plural forms for each entity type -msgid "BaseTransition" -msgstr "Transición (abstracta)" - -msgid "BaseTransition_plural" -msgstr "Transiciones (abstractas)" - -msgid "BigInt" -msgstr "Big integer" - -msgid "BigInt_plural" -msgstr "Big integers" - -msgid "Bookmark" -msgstr "Favorito" - -msgid "Bookmark_plural" -msgstr "Favoritos" - -msgid "Boolean" -msgstr "Booleano" - -msgid "Boolean_plural" -msgstr "Booleanos" - -msgid "BoundConstraint" -msgstr "Restricción de límite" - -msgid "BoundaryConstraint" -msgstr "Restricción de límite" - -msgid "Browse by entity type" -msgstr "Busca por tipo de entidad" - -#, python-format -msgid "By %(user)s on %(dt)s [%(undo_link)s]" -msgstr "Por %(user)s en %(dt)s [%(undo_link)s]" - -msgid "Bytes" -msgstr "Bytes" - -msgid "Bytes_plural" -msgstr "Bytes" - -msgid "CWAttribute" -msgstr "Atributo" - -msgid "CWAttribute_plural" -msgstr "Atributos" - -msgid "CWCache" -msgstr "Cache" - -msgid "CWCache_plural" -msgstr "Caches" - -msgid "CWComputedRType" -msgstr "" - -msgid "CWComputedRType_plural" -msgstr "" - -msgid "CWConstraint" -msgstr "Restricción" - -msgid "CWConstraintType" -msgstr "Tipo de Restricción" - -msgid "CWConstraintType_plural" -msgstr "Tipos de Restricción" - -msgid "CWConstraint_plural" -msgstr "Restricciones" - -msgid "CWDataImport" -msgstr "Importación de Datos" - -msgid "CWDataImport_plural" -msgstr "Importaciones de Datos" - -msgid "CWEType" -msgstr "Tipo de entidad" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "CWEType" -msgstr "Tipo de entidad" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "CWEType" -msgstr "Tipo de entidad" - -msgid "CWEType_plural" -msgstr "Tipos de entidades" - -msgid "CWGroup" -msgstr "Groupo" - -msgid "CWGroup_plural" -msgstr "Grupos" - -msgid "CWProperty" -msgstr "Propiedad" - -msgid "CWProperty_plural" -msgstr "Propiedades" - -msgid "CWRType" -msgstr "Tipo de relación" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "CWRType" -msgstr "Tipo de relación" - -msgid "CWRType_plural" -msgstr "Tipos de relación" - -msgid "CWRelation" -msgstr "Relación" - -msgid "CWRelation_plural" -msgstr "Relaciones" - -msgid "CWSource" -msgstr "Fuente de datos" - -msgid "CWSourceHostConfig" -msgstr "Configuración de Fuente" - -msgid "CWSourceHostConfig_plural" -msgstr "Configuraciones de fuente" - -msgid "CWSourceSchemaConfig" -msgstr "Configuraciones de Esquema de Fuente" - -msgid "CWSourceSchemaConfig_plural" -msgstr "Configuraciones de Esquema de Fuente" - -msgid "CWSource_plural" -msgstr "Fuentes de Datos" - -msgid "CWUniqueTogetherConstraint" -msgstr "Restricción de Singularidad" - -msgid "CWUniqueTogetherConstraint_plural" -msgstr "Restricciones de Singularidad" - -msgid "CWUser" -msgstr "Usuario" - -msgid "CWUser_plural" -msgstr "Usuarios" - -#, python-format -msgid "" -"Can't restore %(role)s relation %(rtype)s to 
entity %(eid)s which is already " -"linked using this relation." -msgstr "" -"No puede restaurar la relación %(role)s %(rtype)s en la entidad %(eid)s pues " -"ya esta ligada a otra entidad usando esa relación." - -#, python-format -msgid "" -"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " -"does not exists anymore in the schema." -msgstr "" -"No puede restaurar la relación %(rtype)s entre %(subj)s y %(obj)s, esta " -"relación ya no existe en el esquema." - -#, python-format -msgid "" -"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " -"anymore." -msgstr "" -"No puede restaurar la relación %(rtype)s, la entidad %(role)s %(eid)s ya no " -"existe." - -#, python-format -msgid "" -"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " -"exist anymore" -msgstr "" -"No puede anular el agregar la relación %(rtype)s de %(subj)s a %(obj)s, esta " -"relación ya no existe" - -#, python-format -msgid "" -"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " -"supported" -msgstr "" -"No puede anular la creación de la entidad %(eid)s de tipo %(etype)s, este " -"tipo ya no existe" - -msgid "Click to sort on this column" -msgstr "Seleccione para ordenar columna" - -msgid "" -"Configuration of the system source goes to the 'sources' file, not in the " -"database" -msgstr "" -"La configuración de la fuente sistema va en el archivo \"Sources\"/Fuentes, " -"y no en la base de datos." - -#, python-format -msgid "Created %(etype)s : %(entity)s" -msgstr "Se creó %(etype)s : %(entity)s" - -msgid "DEBUG" -msgstr "DEPURAR" - -msgid "Date" -msgstr "Fecha" - -msgid "Date_plural" -msgstr "Fechas" - -msgid "Datetime" -msgstr "Fecha y hora" - -msgid "Datetime_plural" -msgstr "Fechas y horas" - -msgid "Decimal" -msgstr "Decimal" - -msgid "Decimal_plural" -msgstr "Decimales" - -#, python-format -msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "Eliminar relación : %(entity_from)s %(rtype)s %(entity_to)s" - -#, python-format -msgid "Deleted %(etype)s : %(entity)s" -msgstr "Se eliminó %(etype)s : %(entity)s" - -msgid "Detected problems" -msgstr "Problemas detectados" - -msgid "Do you want to delete the following element(s)?" -msgstr "Desea eliminar el/los elemento(s) a continuación?" - -msgid "Download schema as OWL" -msgstr "Descargar el esquema en formato OWL" - -msgid "ERROR" -msgstr "ERROR" - -msgid "EmailAddress" -msgstr "Correo Electrónico" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "EmailAddress" -msgstr "Correo Electrónico" - -msgid "EmailAddress_plural" -msgstr "Direcciones de Correo Electrónico" - -msgid "Entities" -msgstr "Entidades" - -#, python-format -msgid "" -"Entity %(eid)s has changed since you started to edit it. Reload the page and " -"reapply your changes." 
-msgstr "" - -msgid "Entity and relation supported by this source" -msgstr "Entidades y relaciones aceptadas por esta fuente" - -msgid "ExternalUri" -msgstr "Uri externo" - -msgid "ExternalUri_plural" -msgstr "Uris externos" - -msgid "FATAL" -msgstr "FATAL" - -msgid "Float" -msgstr "Número flotante" - -msgid "Float_plural" -msgstr "Números flotantes" - -# schema pot file, generated on 2009-12-03 09:22:35 -# -# singular and plural forms for each entity type -msgid "FormatConstraint" -msgstr "Restricción de Formato" - -msgid "Garbage collection information" -msgstr "Recolector de basura en memoria" - -msgid "Help" -msgstr "Ayuda" - -msgid "INFO" -msgstr "INFO" - -msgid "Instance" -msgstr "Instancia" - -msgid "Int" -msgstr "Número entero" - -msgid "Int_plural" -msgstr "Números enteros" - -msgid "Interval" -msgstr "Duración" - -msgid "IntervalBoundConstraint" -msgstr "Restricción de intervalo" - -msgid "Interval_plural" -msgstr "Duraciones" - -msgid "Link:" -msgstr "Liga:" - -msgid "Looked up classes" -msgstr "Clases buscadas" - -msgid "Manage" -msgstr "Administración" - -msgid "Manage security" -msgstr "Gestión de seguridad" - -msgid "Message threshold" -msgstr "Límite de mensajes" - -msgid "Most referenced classes" -msgstr "Clases más referenciadas" - -msgid "New BaseTransition" -msgstr "XXX" - -msgid "New Bookmark" -msgstr "Agregar a Favoritos" - -msgid "New CWAttribute" -msgstr "Nueva definición de relación final" - -msgid "New CWCache" -msgstr "Agregar Caché" - -msgid "New CWComputedRType" -msgstr "" - -msgid "New CWConstraint" -msgstr "Agregar Restricción" - -msgid "New CWConstraintType" -msgstr "Agregar tipo de Restricción" - -msgid "New CWDataImport" -msgstr "Nueva importación de datos" - -msgid "New CWEType" -msgstr "Agregar tipo de entidad" - -msgid "New CWGroup" -msgstr "Nuevo grupo" - -msgid "New CWProperty" -msgstr "Agregar Propiedad" - -msgid "New CWRType" -msgstr "Agregar tipo de relación" - -msgid "New CWRelation" -msgstr "Nueva definición de relación final" - -msgid "New CWSource" -msgstr "Nueva fuente" - -msgid "New CWSourceHostConfig" -msgstr "Nueva configuración de fuente" - -msgid "New CWSourceSchemaConfig" -msgstr "Nueva parte de mapeo de fuente" - -msgid "New CWUniqueTogetherConstraint" -msgstr "Nueva restricción de singularidad" - -msgid "New CWUser" -msgstr "Agregar usuario" - -msgid "New EmailAddress" -msgstr "Agregar Email" - -msgid "New ExternalUri" -msgstr "Agregar Uri externa" - -msgid "New RQLExpression" -msgstr "Agregar expresión rql" - -msgid "New State" -msgstr "Agregar Estado" - -msgid "New SubWorkflowExitPoint" -msgstr "Agregar salida de sub-Workflow" - -msgid "New TrInfo" -msgstr "Agregar Información de Transición" - -msgid "New Transition" -msgstr "Agregar transición" - -msgid "New Workflow" -msgstr "Agregar Workflow" - -msgid "New WorkflowTransition" -msgstr "Agregar transición de Workflow" - -msgid "No result matching query" -msgstr "Ningún resultado corresponde a su búsqueda" - -msgid "Non exhaustive list of views that may apply to entities of this type" -msgstr "Lista no exhaustiva de vistas aplicables a este tipo de entidad" - -msgid "OR" -msgstr "O" - -msgid "Ownership" -msgstr "Propiedad" - -msgid "Parent class:" -msgstr "Clase padre:" - -msgid "Password" -msgstr "Contraseña" - -msgid "Password_plural" -msgstr "Contraseñas" - -msgid "Please note that this is only a shallow copy" -msgstr "Recuerde que sólo es una copia superficial" - -msgid "Powered by CubicWeb" -msgstr "Potenciado en CubicWeb" - -msgid "RQLConstraint" -msgstr "Restricción RQL" - 
-msgid "RQLExpression" -msgstr "Expresión RQL" - -msgid "RQLExpression_plural" -msgstr "Expresiones RQL" - -msgid "RQLUniqueConstraint" -msgstr "Restricción RQL de Unicidad" - -msgid "RQLVocabularyConstraint" -msgstr "Restricción RQL de Vocabulario" - -msgid "RegexpConstraint" -msgstr "restricción expresión regular" - -msgid "Registry's content" -msgstr "Contenido del registro" - -msgid "Relations" -msgstr "Relaciones" - -msgid "Repository" -msgstr "Repositorio" - -#, python-format -msgid "Schema %s" -msgstr "Esquema %s" - -msgid "Schema's permissions definitions" -msgstr "Definiciones de permisos del esquema" - -msgid "Search for" -msgstr "Buscar" - -msgid "Site information" -msgstr "Información del Sitio" - -msgid "SizeConstraint" -msgstr "Restricción de tamaño" - -msgid "" -"Source's configuration for a particular host. One key=value per line, " -"authorized keys depending on the source's type, overriding values defined on " -"the source." -msgstr "" -"Configuración de la fuente por un \"host\" específico. Una clave=valor por " -"línea, las claves permitidas dependen del tipo de fuente. Estos valores son " -"prioritarios a los valores definidos en la fuente." - -msgid "Startup views" -msgstr "Vistas de inicio" - -msgid "State" -msgstr "Estado" - -msgid "State_plural" -msgstr "Estados" - -msgid "StaticVocabularyConstraint" -msgstr "Restricción de vocabulario" - -msgid "String" -msgstr "Cadena de caracteres" - -msgid "String_plural" -msgstr "Cadenas de caracteres" - -msgid "Sub-classes:" -msgstr "Clases hijas:" - -msgid "SubWorkflowExitPoint" -msgstr "Salida de sub-workflow" - -msgid "SubWorkflowExitPoint_plural" -msgstr "Salidas de sub-workflow" - -msgid "Submit bug report" -msgstr "Enviar un reporte de error (bug)" - -msgid "Submit bug report by mail" -msgstr "Enviar este reporte por email" - -msgid "TZDatetime" -msgstr "Fecha y hora internacional" - -msgid "TZDatetime_plural" -msgstr "Fechas y horas internacionales" - -msgid "TZTime" -msgstr "Hora internacional" - -msgid "TZTime_plural" -msgstr "Horas internacionales" - -#, python-format -msgid "The view %s can not be applied to this query" -msgstr "La vista %s no puede ser aplicada a esta búsqueda" - -#, python-format -msgid "The view %s could not be found" -msgstr "La vista %s no ha podido ser encontrada" - -msgid "There is no default workflow" -msgstr "Esta entidad no posee workflow por defecto" - -msgid "This BaseTransition:" -msgstr "Esta transición abstracta:" - -msgid "This Bookmark:" -msgstr "Este favorito:" - -msgid "This CWAttribute:" -msgstr "Esta definición de relación final:" - -msgid "This CWCache:" -msgstr "Este Caché:" - -msgid "This CWComputedRType:" -msgstr "" - -msgid "This CWConstraint:" -msgstr "Esta Restricción:" - -msgid "This CWConstraintType:" -msgstr "Este tipo de Restricción:" - -msgid "This CWDataImport:" -msgstr "Esta importación de datos:" - -msgid "This CWEType:" -msgstr "Este tipo de Entidad:" - -msgid "This CWGroup:" -msgstr "Este grupo:" - -msgid "This CWProperty:" -msgstr "Esta propiedad:" - -msgid "This CWRType:" -msgstr "Este tipo de relación:" - -msgid "This CWRelation:" -msgstr "Esta definición de relación no final:" - -msgid "This CWSource:" -msgstr "Esta fuente:" - -msgid "This CWSourceHostConfig:" -msgstr "Esta configuración de fuente:" - -msgid "This CWSourceSchemaConfig:" -msgstr "Esta parte de mapeo de fuente:" - -msgid "This CWUniqueTogetherConstraint:" -msgstr "Esta restricción de singularidad:" - -msgid "This CWUser:" -msgstr "Este usuario:" - -msgid "This EmailAddress:" -msgstr "Esta 
dirección electrónica:" - -msgid "This ExternalUri:" -msgstr "Este Uri externo:" - -msgid "This RQLExpression:" -msgstr "Esta expresión RQL:" - -msgid "This State:" -msgstr "Este estado:" - -msgid "This SubWorkflowExitPoint:" -msgstr "Esta Salida de sub-workflow:" - -msgid "This TrInfo:" -msgstr "Esta información de transición:" - -msgid "This Transition:" -msgstr "Esta transición:" - -msgid "This Workflow:" -msgstr "Este Workflow:" - -msgid "This WorkflowTransition:" -msgstr "Esta transición de Workflow:" - -msgid "" -"This action is forbidden. If you think it should be allowed, please contact " -"the site administrator." -msgstr "" -"Esta acción le es prohibida. Si cree que Ud. debería de tener autorización, " -"favor de contactar al administrador del sitio. " - -msgid "This entity type permissions:" -msgstr "Permisos para este tipo de entidad:" - -msgid "Time" -msgstr "Hora" - -msgid "Time_plural" -msgstr "Horas" - -msgid "TrInfo" -msgstr "Información Transición" - -msgid "TrInfo_plural" -msgstr "Información Transiciones" - -msgid "Transition" -msgstr "Transición" - -msgid "Transition_plural" -msgstr "Transiciones" - -msgid "URLs from which content will be imported. You can put one url per line" -msgstr "" -"URLs desde el cual el contenido sera importado. Usted puede incluir un URL " -"por línea." - -msgid "Undoable actions" -msgstr "Acciones irreversibles" - -msgid "Undoing" -msgstr "Deshaciendo" - -msgid "UniqueConstraint" -msgstr "Restricción de Unicidad" - -msgid "Unknown source type" -msgstr "tipo de fuente desconocida" - -msgid "Unreachable objects" -msgstr "Objetos inaccesibles" - -#, python-format -msgid "Updated %(etype)s : %(entity)s" -msgstr "Se actualizó %(etype)s : %(entity)s" - -msgid "Used by:" -msgstr "Utilizado por :" - -msgid "Users and groups management" -msgstr "Usuarios y grupos de administradores" - -msgid "WARNING" -msgstr "ADVERTENCIA" - -msgid "Web server" -msgstr "Servidor web" - -msgid "Workflow" -msgstr "Workflow" - -msgid "Workflow history" -msgstr "Histórico del Workflow" - -msgid "WorkflowTransition" -msgstr "Transición de Workflow" - -msgid "WorkflowTransition_plural" -msgstr "Transiciones de Workflow" - -msgid "Workflow_plural" -msgstr "work flows" - -msgid "" -"You can either submit a new file using the browse button above, or choose to " -"remove already uploaded file by checking the \"detach attached file\" check-" -"box, or edit file content online with the widget below." -msgstr "" -"Usted puede proponer un nuevo archivo utilizando el botón\n" -"\"buscar\" aquí arriba, o eliminar el archivo ya elegido al\n" -"seleccionar el cuadro \"soltar archivo adjunto\", o editar el contenido\n" -"del archivo en línea con el componente inferior." - -msgid "" -"You can either submit a new file using the browse button above, or edit file " -"content online with the widget below." -msgstr "" -"Puede proponer un nuevo archivo utilizando el botón buscar \n" -"\"buscar\" en la parte superior, o editar el contenido del archivo en línea\n" -"en el campo siguiente." - -msgid "You can't change this relation" -msgstr "Usted no puede modificar esta relación" - -msgid "You cannot remove the system source" -msgstr "Usted no puede eliminar la fuente sistema" - -msgid "You cannot rename the system source" -msgstr "Usted no puede Renombrar la fuente sistema" - -msgid "" -"You have no access to this view or it can not be used to display the current " -"data." -msgstr "" -"No tiene permisos para accesar esta vista o No puede utilizarse para " -"desplegar los datos seleccionados." 
- -msgid "" -"You're not authorized to access this page. If you think you should, please " -"contact the site administrator." -msgstr "" -"Usted no esta autorizado a acceder a esta página. Si Usted cree que \n" -"hay un error, favor de contactar al administrador del Sistema." - -#, python-format -msgid "[%s supervision] changes summary" -msgstr "[%s supervision] descripción de cambios" - -msgid "" -"a RQL expression which should return some results, else the transition won't " -"be available. This query may use X and U variables that will respectivly " -"represents the current entity and the current user." -msgstr "" -"una expresión RQL que debe haber enviado resultados, para que la transición " -"pueda ser realizada. Esta expresión puede utilizar las variables X y U que " -"representan respectivamente la entidad en transición y el usuario actual. " - -msgid "a URI representing an object in external data store" -msgstr "una URI designando un objeto en un repositorio de datos externo" - -msgid "a float is expected" -msgstr "un número flotante es requerido" - -msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" -msgstr "se espera un número (en segundos) ó 20s, 10min, 24h ó 4d " - -msgid "" -"a simple cache entity characterized by a name and a validity date. The " -"target application is responsible for updating timestamp when necessary to " -"invalidate the cache (typically in hooks). Also, checkout the AppObject." -"get_cache() method." -msgstr "" -"un caché simple caracterizado por un nombre y una fecha de validez. Es\n" -"el código de la instancia quién es responsable de actualizar la fecha de\n" -"validez mientras el caché debe ser invalidado (en general en un hook).\n" -"Para recuperar un caché, hace falta utilizar el método\n" -"get_cache(cachename)." 
- -msgid "abstract base class for transitions" -msgstr "Clase de base abstracta para la transiciones" - -msgid "action menu" -msgstr "acciones" - -msgid "action(s) on this selection" -msgstr "Acción(es) en esta selección" - -msgid "actions" -msgstr "Acciones" - -msgid "activate" -msgstr "Activar" - -msgid "activated" -msgstr "Activado" - -msgid "add" -msgstr "Agregar" - -msgid "add Bookmark bookmarked_by CWUser object" -msgstr "Agregar a los favoritos " - -msgid "add CWAttribute add_permission RQLExpression subject" -msgstr "Expresión RQL de agregación" - -msgid "add CWAttribute constrained_by CWConstraint subject" -msgstr "Restricción" - -msgid "add CWAttribute read_permission RQLExpression subject" -msgstr "Expresión RQL de lectura" - -msgid "add CWAttribute relation_type CWRType object" -msgstr "Definición de atributo" - -msgid "add CWAttribute update_permission RQLExpression subject" -msgstr "Permiso de actualización" - -msgid "add CWEType add_permission RQLExpression subject" -msgstr "Expresión RQL de agregación" - -msgid "add CWEType delete_permission RQLExpression subject" -msgstr "Expresión RQL de eliminación" - -msgid "add CWEType read_permission RQLExpression subject" -msgstr "Expresión RQL de lectura" - -msgid "add CWEType update_permission RQLExpression subject" -msgstr "Definir una expresión RQL de actualización" - -msgid "add CWProperty for_user CWUser object" -msgstr "Propiedad" - -msgid "add CWRelation add_permission RQLExpression subject" -msgstr "Expresión RQL de agregar" - -msgid "add CWRelation constrained_by CWConstraint subject" -msgstr "Restricción" - -msgid "add CWRelation delete_permission RQLExpression subject" -msgstr "Expresión RQL de supresión" - -msgid "add CWRelation read_permission RQLExpression subject" -msgstr "Expresión RQL de lectura" - -msgid "add CWRelation relation_type CWRType object" -msgstr "Definición de relación" - -msgid "add CWSourceHostConfig cw_host_config_of CWSource object" -msgstr "configuración del host" - -msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" -msgstr "restricción de singularidad" - -msgid "add CWUser in_group CWGroup object" -msgstr "Usuario" - -msgid "add CWUser use_email EmailAddress subject" -msgstr "Email" - -msgid "add State allowed_transition Transition object" -msgstr "Estado en entrada" - -msgid "add State allowed_transition Transition subject" -msgstr "Transición en salida" - -msgid "add State allowed_transition WorkflowTransition subject" -msgstr "Transición workflow en salida" - -msgid "add State state_of Workflow object" -msgstr "Estado" - -msgid "add Transition condition RQLExpression subject" -msgstr "Restricción" - -msgid "add Transition destination_state State object" -msgstr "Transición de entrada" - -msgid "add Transition destination_state State subject" -msgstr "Estado de salida" - -msgid "add Transition transition_of Workflow object" -msgstr "Transición" - -msgid "add WorkflowTransition condition RQLExpression subject" -msgstr "Condición" - -msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" -msgstr "Salida de sub-workflow" - -msgid "add WorkflowTransition transition_of Workflow object" -msgstr "Transición Workflow" - -msgid "add a BaseTransition" -msgstr "" - -msgid "add a Bookmark" -msgstr "" - -msgid "add a CWAttribute" -msgstr "" - -msgid "add a CWCache" -msgstr "" - -msgid "add a CWComputedRType" -msgstr "" - -msgid "add a CWConstraint" -msgstr "" - -msgid "add a CWConstraintType" -msgstr "" - -msgid "add a CWDataImport" -msgstr "" - -msgid "add a CWEType" 
-msgstr "" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "add a CWEType" -msgstr "Agregar un tipo de entidad" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "add a CWEType" -msgstr "Agregar un tipo de entidad" - -msgid "add a CWGroup" -msgstr "" - -msgid "add a CWProperty" -msgstr "" - -msgid "add a CWRType" -msgstr "" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "add a CWRType" -msgstr "Agregar un tipo de relación" - -msgid "add a CWRelation" -msgstr "" - -msgid "add a CWSource" -msgstr "" - -msgid "add a CWSourceHostConfig" -msgstr "" - -msgid "add a CWSourceSchemaConfig" -msgstr "" - -msgid "add a CWUniqueTogetherConstraint" -msgstr "" - -msgid "add a CWUser" -msgstr "" - -msgid "add a EmailAddress" -msgstr "" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "add a EmailAddress" -msgstr "Agregar correo electrónico" - -msgid "add a ExternalUri" -msgstr "" - -msgid "add a RQLExpression" -msgstr "" - -msgid "add a State" -msgstr "" - -msgid "add a SubWorkflowExitPoint" -msgstr "" - -msgid "add a TrInfo" -msgstr "" - -msgid "add a Transition" -msgstr "" - -msgid "add a Workflow" -msgstr "" - -msgid "add a WorkflowTransition" -msgstr "" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgid "add_permission" -msgstr "Autorización para agregar" - -msgctxt "CWAttribute" -msgid "add_permission" -msgstr "Permiso de agregar" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgctxt "CWEType" -msgid "add_permission" -msgstr "Permiso de agregar" - -msgctxt "CWRelation" -msgid "add_permission" -msgstr "Permiso de agregar" - -msgid "add_permission_object" -msgstr "tiene permiso de agregar" - -msgctxt "CWGroup" -msgid "add_permission_object" -msgstr "tiene permiso de agregar" - -msgctxt "RQLExpression" -msgid "add_permission_object" -msgstr "tiene permiso de agregar" - -msgid "add_relation" -msgstr "agregar" - -#, python-format -msgid "added %(etype)s #%(eid)s (%(title)s)" -msgstr "se agregó %(etype)s #%(eid)s (%(title)s)" - -#, python-format -msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" -"la relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s " -"ha sido agregada" - -msgid "additional type specific properties" -msgstr "propiedades adicionales específicas al tipo" - -msgid "addrelated" -msgstr "Agregar" - -msgid "address" -msgstr "correo electrónico" - -msgctxt "EmailAddress" -msgid "address" -msgstr "correo electrónico" - -msgid "alias" -msgstr "alias" - -msgctxt "EmailAddress" -msgid "alias" -msgstr "alias" - -msgid "allow to set a specific workflow for an entity" -msgstr "permite definir un Workflow específico para una entidad" - -msgid "allowed options depends on the source type" -msgstr "las opciones permitidas dependen del tipo de fuente" - -msgid "allowed transitions from this state" -msgstr "transiciones autorizadas desde este estado" - -#, python-format -msgid "allowed values for \"action\" are %s" -msgstr "los valores permitidos por \"acción\" son %s" - -msgid "allowed_transition" -msgstr "transiciones autorizadas" - -msgctxt "State" -msgid "allowed_transition" -msgstr "transiciones autorizadas" - -msgid "allowed_transition_object" -msgstr "Estados de entrada" - -msgctxt "BaseTransition" -msgid "allowed_transition_object" -msgstr "transición autorizada de" - -msgctxt "Transition" -msgid "allowed_transition_object" -msgstr "transición autorizada de" - -msgctxt 
"WorkflowTransition" -msgid "allowed_transition_object" -msgstr "transición autorizada de" - -msgid "an electronic mail address associated to a short alias" -msgstr "una dirección electrónica asociada a este alias" - -msgid "an error occurred" -msgstr "Ha ocurrido un error" - -msgid "an error occurred while processing your request" -msgstr "un error ocurrió al procesar su demanda" - -msgid "an error occurred, the request cannot be fulfilled" -msgstr "un error ha ocurrido, la búsqueda no ha podido ser realizada" - -msgid "an integer is expected" -msgstr "un número entero es esperado" - -msgid "and linked" -msgstr "y relacionada" - -msgid "and/or between different values" -msgstr "y/o entre los diferentes valores" - -msgid "anyrsetview" -msgstr "vistas rset" - -msgid "april" -msgstr "Abril" - -#, python-format -msgid "archive for %(author)s" -msgstr "archivo de %(author)s" - -#, python-format -msgid "archive for %(month)s/%(year)s" -msgstr "archivo del %(month)s/%(year)s" - -#, python-format -msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" -msgstr "" -"La entidad #%(eid)s de tipo %(etype)s debe necesariamente tener almenos una " -"relación de tipo %(rtype)s" - -msgid "attribute" -msgstr "Atributo" - -msgid "august" -msgstr "Agosto" - -msgid "authentication failure" -msgstr "Usuario o contraseña incorrecta" - -msgid "auto" -msgstr "Automático" - -msgid "autocomputed attribute used to ensure transition coherency" -msgstr "" -"Atributo automatizado usado para asegurar la coherencia en la transición" - -msgid "automatic" -msgstr "Automático" - -#, python-format -msgid "back to pagination (%s results)" -msgstr "regresar a paginación (%s resultados)" - -msgid "bad value" -msgstr "Valor erróneo" - -msgid "badly formatted url" -msgstr "url mal formateado" - -msgid "base url" -msgstr "Url de base" - -msgid "bookmark has been removed" -msgstr "Ha sido eliminado de sus favoritos" - -msgid "bookmark this page" -msgstr "Agregar esta página a los favoritos" - -msgid "bookmark this search" -msgstr "Guardar esta búsqueda" - -msgid "bookmarked_by" -msgstr "está en los Favoritos de" - -msgctxt "Bookmark" -msgid "bookmarked_by" -msgstr "está en los Favoritos de" - -msgid "bookmarked_by_object" -msgstr "tiene como Favoritos" - -msgctxt "CWUser" -msgid "bookmarked_by_object" -msgstr "tiene como Favoritos" - -msgid "bookmarks" -msgstr "Favoritos" - -msgid "bookmarks are used to have user's specific internal links" -msgstr "los Favoritos son ligas directas a espacios guardados por el usuario" - -msgid "boxes" -msgstr "Cajas" - -msgid "bug report sent" -msgstr "Reporte de error enviado" - -msgid "button_apply" -msgstr "Aplicar" - -msgid "button_cancel" -msgstr "Cancelar" - -msgid "button_delete" -msgstr "Eliminar" - -msgid "button_ok" -msgstr "Validar" - -msgid "by" -msgstr "por" - -msgid "by relation" -msgstr "por la relación" - -msgid "by_transition" -msgstr "transición" - -msgctxt "TrInfo" -msgid "by_transition" -msgstr "transición" - -msgid "by_transition_object" -msgstr "cambio de estados" - -msgctxt "BaseTransition" -msgid "by_transition_object" -msgstr "tiene como información" - -msgctxt "Transition" -msgid "by_transition_object" -msgstr "tiene como información" - -msgctxt "WorkflowTransition" -msgid "by_transition_object" -msgstr "tiene como información" - -msgid "calendar" -msgstr "mostrar un calendario" - -msgid "can not resolve entity types:" -msgstr "Imposible de interpretar los tipos de entidades:" - -msgid "can only have one url" -msgstr "solo puede tener un URL" - -msgid 
"can't be changed" -msgstr "No puede ser modificado" - -msgid "can't be deleted" -msgstr "No puede ser eliminado" - -msgid "can't change this attribute" -msgstr "no puede modificar este atributo" - -#, python-format -msgid "can't display data, unexpected error: %s" -msgstr "imposible de mostrar los datos, a causa del siguiente error: %s" - -msgid "can't have multiple exits on the same state" -msgstr "no puede tener varias salidas en el mismo estado" - -#, python-format -msgid "can't parse %(value)r (expected %(format)s)" -msgstr "no puede analizar %(value)r (formato requerido : %(format)s)" - -#, python-format -msgid "" -"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " -"%(value)s) does not exist any longer" -msgstr "" -"no se pudo restaurar la entidad %(eid)s del tipo %(eschema)s, objetivo de " -"%(rtype)s (eid %(value)s) pues ésta ya no existe" - -#, python-format -msgid "" -"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " -"exist in the schema anymore." -msgstr "" -"no se pudo restaurar la relación %(rtype)s de la entidad %(eid)s, esta " -"relación ya no existe en el esquema. " - -#, python-format -msgid "can't restore state of entity %s, it has been deleted inbetween" -msgstr "" -"no se puede restaurar el estado de la entidad %s, se ha borrado desde " -"entonces" - -#, python-format -msgid "" -"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" -"%(card)s" -msgstr "" -"no puede poner 'inlined' = True, %(stype)s %(rtype)s %(otype)s tiene " -"cardinalidad %(card)s" - -msgid "cancel" -msgstr "anular" - -msgid "cancel select" -msgstr "Cancelar la selección" - -msgid "cancel this insert" -msgstr "Cancelar esta inserción" - -msgid "cardinality" -msgstr "cardinalidad" - -msgctxt "CWAttribute" -msgid "cardinality" -msgstr "cardinalidad" - -msgctxt "CWRelation" -msgid "cardinality" -msgstr "cardinalidad" - -msgid "category" -msgstr "categoría" - -#, python-format -msgid "changed state of %(etype)s #%(eid)s (%(title)s)" -msgstr "Cambiar del estado de %(etype)s #%(eid)s (%(title)s)" - -msgid "changes applied" -msgstr "Cambios realizados" - -msgid "click here to see created entity" -msgstr "Ver la entidad creada" - -msgid "click here to see edited entity" -msgstr "seleccione aquí para ver la entidad modificada" - -msgid "click on the box to cancel the deletion" -msgstr "Seleccione la zona de edición para cancelar la eliminación" - -msgid "click to add a value" -msgstr "seleccione para agregar un valor" - -msgid "click to delete this value" -msgstr "seleccione para eliminar este valor" - -msgid "click to edit this field" -msgstr "seleccione para editar este campo" - -msgid "close all" -msgstr "cerrar todos" - -msgid "comment" -msgstr "Comentario" - -msgctxt "TrInfo" -msgid "comment" -msgstr "Comentario" - -msgid "comment_format" -msgstr "Formato" - -msgctxt "TrInfo" -msgid "comment_format" -msgstr "Formato" - -msgid "components" -msgstr "Componentes" - -msgid "components_navigation" -msgstr "Navigación por página" - -msgid "components_navigation_description" -msgstr "" -"Componente que permite presentar en varias páginas los resultados de " -"búsqueda cuando son mayores a un número predeterminado " - -msgid "components_rqlinput" -msgstr "Barra RQL" - -msgid "components_rqlinput_description" -msgstr "La barra para realizar consultas en RQL, en el encabezado de página" - -msgid "composite" -msgstr "composite" - -msgctxt "CWRelation" -msgid "composite" -msgstr "composite" - -msgid "condition" -msgstr "condición" - -msgctxt 
"BaseTransition" -msgid "condition" -msgstr "condición" - -msgctxt "Transition" -msgid "condition" -msgstr "condición" - -msgctxt "WorkflowTransition" -msgid "condition" -msgstr "condición" - -msgid "condition_object" -msgstr "condición de" - -msgctxt "RQLExpression" -msgid "condition_object" -msgstr "condición de" - -msgid "conditions" -msgstr "condiciones" - -msgid "config" -msgstr "configuración" - -msgctxt "CWSource" -msgid "config" -msgstr "configuración" - -msgctxt "CWSourceHostConfig" -msgid "config" -msgstr "configuración" - -msgid "config mode" -msgstr "Modo de configuración" - -msgid "config type" -msgstr "Tipo de configuración" - -msgid "confirm password" -msgstr "Confirmar contraseña" - -msgid "constrained_by" -msgstr "Restricción impuesta por" - -msgctxt "CWAttribute" -msgid "constrained_by" -msgstr "Restricción impuesta por" - -msgctxt "CWRelation" -msgid "constrained_by" -msgstr "Restricción impuesta por" - -msgid "constrained_by_object" -msgstr "Restricción de" - -msgctxt "CWConstraint" -msgid "constrained_by_object" -msgstr "Restricción de" - -msgid "constraint factory" -msgstr "Fábrica de restricciones" - -msgid "constraint_of" -msgstr "restricción de" - -msgctxt "CWUniqueTogetherConstraint" -msgid "constraint_of" -msgstr "restricción de" - -msgid "constraint_of_object" -msgstr "restringida por" - -msgctxt "CWEType" -msgid "constraint_of_object" -msgstr "restringida por" - -msgid "constraints" -msgstr "Restricciones" - -msgid "constraints applying on this relation" -msgstr "Restricciones que se aplican a esta relación" - -msgid "content type" -msgstr "tipo MIME" - -msgid "context" -msgstr "Contexto" - -msgid "context where this box should be displayed" -msgstr "Contexto en el cual la caja debe aparecer en el sistema" - -msgid "context where this component should be displayed" -msgstr "Contexto en el cual el componente debe aparecer en el sistema" - -msgid "context where this facet should be displayed, leave empty for both" -msgstr "" -"Contexto en el cual esta faceta debe ser mostrada, dejar vacia para ambos" - -msgid "control subject entity's relations order" -msgstr "Controla el orden de relaciones de la entidad sujeto" - -msgid "copy" -msgstr "Copiar" - -msgid "core relation indicating a user's groups" -msgstr "" -"Relación sistema que indica los grupos a los cuales pertenece un usuario" - -msgid "" -"core relation indicating owners of an entity. This relation implicitly put " -"the owner into the owners group for the entity" -msgstr "" -"Relación sistema que indica el(los) propietario(s) de una entidad. Esta " -"relación pone de manera implícita al propietario en el grupo de propietarios " -"de una entidad." - -msgid "core relation indicating the original creator of an entity" -msgstr "Relación sistema que indica el creador de una entidad." - -msgid "core relation indicating the type of an entity" -msgstr "Relación sistema que indica el tipo de entidad." 
- -msgid "" -"core relation indicating the types (including specialized types) of an entity" -msgstr "" -"Relación sistema indicando los tipos (incluídos los tipos padres) de una " -"entidad" - -msgid "could not connect to the SMTP server" -msgstr "Imposible de conectarse al servidor SMTP" - -msgid "create an index for quick search on this attribute" -msgstr "Crear un índice para accelerar las búsquedas sobre este atributo" - -msgid "created on" -msgstr "creado el" - -msgid "created_by" -msgstr "creado por" - -msgid "created_by_object" -msgstr "ha creado" - -msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" -msgstr "Creando Favorito" - -msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" -msgstr "Creación del atributo %(linkto)s" - -msgid "" -"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" -msgstr "Creación condicionada por el atributo %(linkto)s" - -msgid "" -"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" -msgstr "Creación condicionada por la relación %(linkto)s" - -msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" -msgstr "Creación de una propiedad por el usuario %(linkto)s" - -msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" -msgstr "Creación de la relación %(linkto)s" - -msgid "" -"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " -"%(linkto)s)" -msgstr "creación de una configuración host para la fuente %(linkto)s" - -msgid "" -"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " -"constraint_of CWEType %(linkto)s)" -msgstr "creación de una restricción de singularidad en %(linkto)s" - -msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" -msgstr "Creación de un usuario para agregar al grupo %(linkto)s" - -msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" -msgstr "Creación de una dirección electrónica para el usuario %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" -msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" -msgstr "creación de una expresión RQL por el derecho de lectura de %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s update_permission " -"RQLExpression)" -msgstr "" -"creación de una expresión RQL por el derecho de actualización de %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" -msgstr "" -"Creación de una expresión RQL para la autorización de agregar %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" -msgstr "" -"Creación de una expresión RQL para la autorización de eliminar %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" -msgstr "Creación de una expresión RQL para permitir leer %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" -msgstr "Creación de una expresión RQL para permitir actualizar %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" -msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s delete_permission " -"RQLExpression)" -msgstr "Creación de una expresión RQL para permitir eliminar %(linkto)s" - -msgid "" -"creating 
RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" -msgstr "Creación de una expresión RQL para permitir leer %(linkto)s" - -msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" -msgstr "Creación de una expresión RQL para la transición %(linkto)s" - -msgid "" -"creating RQLExpression (WorkflowTransition %(linkto)s condition " -"RQLExpression)" -msgstr "Creación de una expresión RQL para la transición Workflow %(linkto)s" - -msgid "creating State (State allowed_transition Transition %(linkto)s)" -msgstr "Creación de un estado que puede ir hacia la transición %(linkto)s" - -msgid "creating State (State state_of Workflow %(linkto)s)" -msgstr "Creando un Estado del Workflow" - -msgid "creating State (Transition %(linkto)s destination_state State)" -msgstr "Creación de un Estado Destinación de la Transición %(linkto)s" - -msgid "" -"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " -"subworkflow_exit SubWorkflowExitPoint)" -msgstr "creación de un punto de Salida de la Transición Workflow %(linkto)s" - -msgid "creating Transition (State %(linkto)s allowed_transition Transition)" -msgstr "Creación de una transición autorizada desde el Estado %(linkto)s" - -msgid "creating Transition (Transition destination_state State %(linkto)s)" -msgstr "Creación de un transición hacia el Estado %(linkto)s" - -msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" -msgstr "Creación de una Transición Workflow %(linkto)s" - -msgid "" -"creating WorkflowTransition (State %(linkto)s allowed_transition " -"WorkflowTransition)" -msgstr "" -"Creación de una Transición Workflow permitida desde el estado %(linkto)s" - -msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow " -"%(linkto)s)" -msgstr "Creación de una Transición Workflow del Workflow %(linkto)s" - -msgid "creation" -msgstr "Creación" - -msgid "creation date" -msgstr "Fecha de Creación" - -msgid "creation time of an entity" -msgstr "Fecha de creación de una entidad" - -msgid "creation_date" -msgstr "Fecha de Creación" - -msgid "cstrtype" -msgstr "Tipo de restricción" - -msgctxt "CWConstraint" -msgid "cstrtype" -msgstr "Tipo" - -msgid "cstrtype_object" -msgstr "utilizado por" - -msgctxt "CWConstraintType" -msgid "cstrtype_object" -msgstr "Tipo de restricciones" - -msgid "csv export" -msgstr "Exportar en CSV" - -msgid "csv export (entities)" -msgstr "Exportar a CSV (entidades)" - -msgid "ctxcomponents" -msgstr "Componentes contextuales" - -msgid "ctxcomponents_anonuserlink" -msgstr "Liga usuario" - -msgid "ctxcomponents_anonuserlink_description" -msgstr "" -"Muestra un enlace hacia el formulario de conexión para los usuarios " -"anónimos, o una caja que contiene los enlaces del usuario conectado. 
" - -msgid "ctxcomponents_appliname" -msgstr "Nombre de la aplicación" - -msgid "ctxcomponents_appliname_description" -msgstr "Muestra el nombre de la aplicación en el encabezado de la página" - -msgid "ctxcomponents_bookmarks_box" -msgstr "Caja de Favoritos" - -msgid "ctxcomponents_bookmarks_box_description" -msgstr "Muestra y permite administrar los favoritos del usuario" - -msgid "ctxcomponents_breadcrumbs" -msgstr "Ruta de Navegación" - -msgid "ctxcomponents_breadcrumbs_description" -msgstr "Muestra la ruta que permite localizar la página actual en el Sistema" - -msgid "ctxcomponents_download_box" -msgstr "Configuración de caja de descargas" - -msgid "ctxcomponents_download_box_description" -msgstr "Caja que contiene los elementos descargados" - -msgid "ctxcomponents_edit_box" -msgstr "Caja de Acciones" - -msgid "ctxcomponents_edit_box_description" -msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados" - -msgid "ctxcomponents_facet.filterbox" -msgstr "Filtros" - -msgid "ctxcomponents_facet.filterbox_description" -msgstr "Muestra los filtros aplicables a una búsqueda realizada" - -msgid "ctxcomponents_logo" -msgstr "logo" - -msgid "ctxcomponents_logo_description" -msgstr "El logo de la aplicación, en el encabezado de página" - -msgid "ctxcomponents_metadata" -msgstr "Metadatos de la Entidad" - -msgid "ctxcomponents_metadata_description" -msgstr "espacio que incluye los metadatos de la entidad actual" - -msgid "ctxcomponents_possible_views_box" -msgstr "Caja de Vistas Posibles" - -msgid "ctxcomponents_possible_views_box_description" -msgstr "Muestra las vistas posibles a aplicar a los datos seleccionados" - -msgid "ctxcomponents_prevnext" -msgstr "Elemento anterior / siguiente" - -msgid "ctxcomponents_prevnext_description" -msgstr "" -"Muestra las ligas que permiten pasar de una entidad a otra en las entidades " -"que implementan la interface \"anterior/siguiente\"." - -msgid "ctxcomponents_rss" -msgstr "Ícono RSS" - -msgid "ctxcomponents_rss_description" -msgstr "Muestra el ícono RSS para vistas RSS" - -msgid "ctxcomponents_search_box" -msgstr "Caja de búsqueda" - -msgid "ctxcomponents_search_box_description" -msgstr "" -"Permite realizar una búsqueda simple para cualquier tipo de dato en la " -"aplicación" - -msgid "ctxcomponents_startup_views_box" -msgstr "Caja Vistas de inicio" - -msgid "ctxcomponents_startup_views_box_description" -msgstr "Muestra las vistas de inicio de la aplicación" - -msgid "ctxcomponents_userstatus" -msgstr "estado del usuario" - -msgid "ctxcomponents_userstatus_description" -msgstr "establece el estado del usuario" - -msgid "ctxcomponents_wfhistory" -msgstr "Histórico del workflow." - -msgid "ctxcomponents_wfhistory_description" -msgstr "" -"Sección que muestra el reporte histórico de las transiciones del workflow. " -"Aplica solo en entidades con workflow." 
- -msgid "ctxtoolbar" -msgstr "Barra de herramientas" - -msgid "custom_workflow" -msgstr "Workflow específico" - -msgid "custom_workflow_object" -msgstr "Workflow de" - -msgid "cw.groups-management" -msgstr "grupos" - -msgid "cw.users-management" -msgstr "usuarios" - -msgid "cw_for_source" -msgstr "fuente" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_for_source" -msgstr "fuente" - -msgid "cw_for_source_object" -msgstr "elemento de mapeo" - -msgctxt "CWSource" -msgid "cw_for_source_object" -msgstr "elemento de mapeo" - -msgid "cw_host_config_of" -msgstr "configuración del host de" - -msgctxt "CWSourceHostConfig" -msgid "cw_host_config_of" -msgstr "configuración del host de" - -msgid "cw_host_config_of_object" -msgstr "tiene la configuración del host" - -msgctxt "CWSource" -msgid "cw_host_config_of_object" -msgstr "tiene la configuración del host" - -msgid "cw_import_of" -msgstr "fuente" - -msgctxt "CWDataImport" -msgid "cw_import_of" -msgstr "fuente" - -msgid "cw_import_of_object" -msgstr "importación" - -msgctxt "CWSource" -msgid "cw_import_of_object" -msgstr "importación" - -msgid "cw_schema" -msgstr "esquema" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_schema" -msgstr "esquema" - -msgid "cw_schema_object" -msgstr "mapeado por" - -msgctxt "CWEType" -msgid "cw_schema_object" -msgstr "mapeado por" - -msgctxt "CWRType" -msgid "cw_schema_object" -msgstr "mapeado por" - -msgctxt "CWRelation" -msgid "cw_schema_object" -msgstr "mapeado por" - -msgid "cw_source" -msgstr "desde la fuente de datos" - -msgid "cw_source_object" -msgstr "entidades" - -msgid "cwetype-box" -msgstr "Vista \"caja\"" - -msgid "cwetype-description" -msgstr "Descripción" - -msgid "cwetype-permissions" -msgstr "Permisos" - -msgid "cwetype-views" -msgstr "Vistas" - -msgid "cwetype-workflow" -msgstr "Workflow" - -msgid "cwgroup-main" -msgstr "Descripción" - -msgid "cwgroup-permissions" -msgstr "Permisos" - -msgid "cwrtype-description" -msgstr "Descripción" - -msgid "cwrtype-permissions" -msgstr "Permisos" - -msgid "cwsource-imports" -msgstr "importación" - -msgid "cwsource-main" -msgstr "descripción" - -msgid "cwsource-mapping" -msgstr "mapeo" - -msgid "cwuri" -msgstr "Uri Interna" - -msgid "data directory url" -msgstr "Url del repertorio de datos" - -msgid "data model schema" -msgstr "Esquema del Sistema" - -msgid "data sources" -msgstr "fuente de datos" - -msgid "data sources management" -msgstr "administración de fuentes de datos" - -msgid "date" -msgstr "Fecha" - -msgid "deactivate" -msgstr "Desactivar" - -msgid "deactivated" -msgstr "Desactivado" - -msgid "december" -msgstr "Diciembre" - -msgid "default" -msgstr "Valor por defecto" - -msgid "default text format for rich text fields." 
-msgstr "" -"Formato de texto que se utilizará por defecto para los campos de tipo texto" - -msgid "default user workflow" -msgstr "Workflow por defecto de los usuarios" - -msgid "default value" -msgstr "Valor por defecto" - -msgid "default value as gziped pickled python object" -msgstr "valor por defecto, en la forma de objeto python, al usar pickle y gzip" - -msgid "default workflow for an entity type" -msgstr "Workflow por defecto para un tipo de entidad" - -msgid "default_workflow" -msgstr "Workflow por defecto" - -msgctxt "CWEType" -msgid "default_workflow" -msgstr "Workflow por defecto" - -msgid "default_workflow_object" -msgstr "Workflow por defecto de" - -msgctxt "Workflow" -msgid "default_workflow_object" -msgstr "Workflow por defecto de" - -msgid "defaultval" -msgstr "Valor por defecto" - -msgctxt "CWAttribute" -msgid "defaultval" -msgstr "Valor por defecto" - -msgid "define a CubicWeb user" -msgstr "Define un usuario CubicWeb" - -msgid "define a CubicWeb users group" -msgstr "Define un grupo de usuarios CubicWeb" - -msgid "" -"define a final relation: link a final relation type from a non final entity " -"to a final entity type. used to build the instance schema" -msgstr "" -"Define una relación final: liga un tipo de relación final desde una entidad " -"NO final hacia un tipo de entidad final. Se usa para crear el esquema de la " -"instancia." - -msgid "" -"define a non final relation: link a non final relation type from a non final " -"entity to a non final entity type. used to build the instance schema" -msgstr "" -"Define una relación NO final: liga un tipo de relación NO final desde una " -"entidad NO final hacia un tipo de entidad NO final. Se usa para crear el " -"esquema de la instancia." - -msgid "define a relation type, used to build the instance schema" -msgstr "" -"Define un tipo de relación, usado para construir el esquema de la instancia." - -msgid "define a rql expression used to define permissions" -msgstr "Expresión RQL utilizada para definir los derechos de acceso" - -msgid "define a schema constraint" -msgstr "Define una condición de esquema" - -msgid "define a schema constraint type" -msgstr "Define un tipo de condición de esquema" - -msgid "define a virtual relation type, used to build the instance schema" -msgstr "" - -msgid "define an entity type, used to build the instance schema" -msgstr "" -"Define un tipo de entidad, usado para construir el esquema de la instancia." - -msgid "define how we get out from a sub-workflow" -msgstr "Define como salir de un sub-Workflow" - -msgid "defines a sql-level multicolumn unique index" -msgstr "define un índice SQL único a través de varias columnas" - -msgid "" -"defines what's the property is applied for. You must select this first to be " -"able to set value" -msgstr "" -"Define a que se aplica la propiedad . 
Debe de seleccionar esto antes de " -"establecer un valor" - -msgid "delete" -msgstr "Eliminar" - -msgid "delete this bookmark" -msgstr "Eliminar este favorito" - -msgid "delete this relation" -msgstr "Eliminar esta relación" - -msgid "delete_permission" -msgstr "Permiso de eliminar" - -msgctxt "CWEType" -msgid "delete_permission" -msgstr "Permiso de eliminar" - -msgctxt "CWRelation" -msgid "delete_permission" -msgstr "Permiso de eliminar" - -msgid "delete_permission_object" -msgstr "posee permiso para eliminar" - -msgctxt "CWGroup" -msgid "delete_permission_object" -msgstr "puede eliminar" - -msgctxt "RQLExpression" -msgid "delete_permission_object" -msgstr "puede eliminar" - -#, python-format -msgid "deleted %(etype)s #%(eid)s (%(title)s)" -msgstr "Eliminación de la entidad %(etype)s #%(eid)s (%(title)s)" - -#, python-format -msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" -"La relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s " -"ha sido suprimida." - -msgid "depends on the constraint type" -msgstr "Depende del tipo de restricción" - -msgid "description" -msgstr "Descripción" - -msgctxt "BaseTransition" -msgid "description" -msgstr "Descripción" - -msgctxt "CWAttribute" -msgid "description" -msgstr "Descripción" - -msgctxt "CWComputedRType" -msgid "description" -msgstr "" - -msgctxt "CWEType" -msgid "description" -msgstr "Descripción" - -msgctxt "CWRType" -msgid "description" -msgstr "Descripción" - -msgctxt "CWRelation" -msgid "description" -msgstr "Descripción" - -msgctxt "State" -msgid "description" -msgstr "Descripción" - -msgctxt "Transition" -msgid "description" -msgstr "Descripción" - -msgctxt "Workflow" -msgid "description" -msgstr "Descripción" - -msgctxt "WorkflowTransition" -msgid "description" -msgstr "Descripción" - -msgid "description_format" -msgstr "Formato" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "Formato" - -msgctxt "CWAttribute" -msgid "description_format" -msgstr "Formato" - -msgctxt "CWComputedRType" -msgid "description_format" -msgstr "" - -msgctxt "CWEType" -msgid "description_format" -msgstr "Formato" - -msgctxt "CWRType" -msgid "description_format" -msgstr "Formato" - -msgctxt "CWRelation" -msgid "description_format" -msgstr "Formato" - -msgctxt "State" -msgid "description_format" -msgstr "Formato" - -msgctxt "Transition" -msgid "description_format" -msgstr "Formato" - -msgctxt "Workflow" -msgid "description_format" -msgstr "Formato" - -msgctxt "WorkflowTransition" -msgid "description_format" -msgstr "Formato" - -msgid "destination state for this transition" -msgstr "Estados accesibles para esta transición" - -msgid "destination state must be in the same workflow as our parent transition" -msgstr "" -"El estado de destino debe pertenecer al mismo Workflow que la transición " -"padre." - -msgid "destination state of a transition" -msgstr "Estado destino de una transición" - -msgid "" -"destination state. No destination state means that transition should go back " -"to the state from which we've entered the subworkflow." -msgstr "" -"Estado destino de la transición. Si el Estado destino no ha sido " -"especificado, la transición regresará hacia el estado que tenía la entidad " -"al entrar en el Sub-Workflow." 
- -msgid "destination_state" -msgstr "Estado destino" - -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "Estado destino" - -msgctxt "Transition" -msgid "destination_state" -msgstr "Estado destino" - -msgid "destination_state_object" -msgstr "Destino de" - -msgctxt "State" -msgid "destination_state_object" -msgstr "Estado final de" - -msgid "detach attached file" -msgstr "soltar el archivo existente" - -msgid "display order of the box" -msgstr "Orden de aparición de la caja" - -msgid "display order of the component" -msgstr "Orden de aparición del componente" - -msgid "display order of the facet" -msgstr "Orden de aparición de la faceta" - -msgid "display the box or not" -msgstr "Mostrar o no la caja" - -msgid "display the component or not" -msgstr "Mostrar o no el componente" - -msgid "display the facet or not" -msgstr "Mostrar o no la faceta" - -msgid "download" -msgstr "Descargar" - -#, python-format -msgid "download %s" -msgstr "Descargar %s" - -msgid "download icon" -msgstr "ícono de descarga" - -msgid "download schema as owl" -msgstr "Descargar esquema en formato OWL" - -msgid "edit bookmarks" -msgstr "Editar favoritos" - -msgid "edit canceled" -msgstr "Edición cancelada" - -msgid "editable-table" -msgstr "Tabla modificable" - -msgid "eid" -msgstr "eid" - -msgid "embedded html" -msgstr "Html incrustado" - -msgid "end_timestamp" -msgstr "horario final" - -msgctxt "CWDataImport" -msgid "end_timestamp" -msgstr "horario final" - -msgid "entities deleted" -msgstr "Entidades eliminadas" - -msgid "entity and relation types can't be mapped, only attributes or relations" -msgstr "" -"los tipos de entidad y relación no pueden ser mapeados, solo los atributos y " -"las relaciones" - -msgid "entity copied" -msgstr "Entidad copiada" - -msgid "entity created" -msgstr "Entidad creada" - -msgid "entity creation" -msgstr "Creación de entidad" - -msgid "entity deleted" -msgstr "Entidad eliminada" - -msgid "entity deletion" -msgstr "Eliminación de entidad" - -msgid "entity edited" -msgstr "Entidad modificada" - -msgid "entity has no workflow set" -msgstr "La entidad no tiene Workflow" - -msgid "entity linked" -msgstr "Entidad asociada" - -msgid "entity type" -msgstr "Tipo de entidad" - -msgid "entity types which may use this workflow" -msgstr "Tipos de entidades que pueden utilizar este Workflow" - -msgid "entity update" -msgstr "Actualización de la Entidad" - -msgid "entityview" -msgstr "vistas de entidades" - -msgid "error" -msgstr "error" - -msgid "error while publishing ReST text" -msgstr "" -"Se ha producido un error durante la interpretación del texto en formato ReST" - -msgid "exit state must be a subworkflow state" -msgstr "El estado de salida debe de ser un estado del Sub-Workflow" - -msgid "exit_point" -msgstr "Estado de Salida" - -msgid "exit_point_object" -msgstr "Estado de Salida de" - -#, python-format -msgid "exiting from subworkflow %s" -msgstr "Salida del subworkflow %s" - -msgid "expression" -msgstr "Expresión" - -msgctxt "RQLExpression" -msgid "expression" -msgstr "RQL de la expresión" - -msgid "exprtype" -msgstr "Tipo de la expresión" - -msgctxt "RQLExpression" -msgid "exprtype" -msgstr "Tipo" - -msgid "extra_props" -msgstr "propiedades adicionales" - -msgctxt "CWAttribute" -msgid "extra_props" -msgstr "propiedades adicionales" - -msgid "facet-loading-msg" -msgstr "procesando, espere por favor" - -msgid "facet.filters" -msgstr "Filtros" - -msgid "facetbox" -msgstr "Caja de facetas" - -msgid "facets_created_by-facet" -msgstr "Faceta \"creada por\"" - -msgid 
"facets_created_by-facet_description" -msgstr "Faceta creada por" - -msgid "facets_cw_source-facet" -msgstr "faceta \"fuente de datos\"" - -msgid "facets_cw_source-facet_description" -msgstr "fuente de datos" - -msgid "facets_cwfinal-facet" -msgstr "Faceta \"final\"" - -msgid "facets_cwfinal-facet_description" -msgstr "Faceta para las entidades \"finales\"" - -msgid "facets_datafeed.dataimport.status" -msgstr "estado de la importación" - -msgid "facets_datafeed.dataimport.status_description" -msgstr "Estado de la importación de datos" - -msgid "facets_etype-facet" -msgstr "Faceta \"es de tipo\"" - -msgid "facets_etype-facet_description" -msgstr "Faceta es de tipo" - -msgid "facets_has_text-facet" -msgstr "Faceta \"contiene el texto\"" - -msgid "facets_has_text-facet_description" -msgstr "Faceta contiene el texto" - -msgid "facets_in_group-facet" -msgstr "Faceta \"forma parte del grupo\"" - -msgid "facets_in_group-facet_description" -msgstr "Faceta en grupo" - -msgid "facets_in_state-facet" -msgstr "Faceta \"en el estado\"" - -msgid "facets_in_state-facet_description" -msgstr "Faceta en el estado" - -msgid "failed" -msgstr "fallido" - -#, python-format -msgid "failed to uniquify path (%s, %s)" -msgstr "No se pudo obtener un dato único (%s, %s)" - -msgid "february" -msgstr "Febrero" - -msgid "file tree view" -msgstr "Arborescencia (archivos)" - -msgid "final" -msgstr "Final" - -msgctxt "CWEType" -msgid "final" -msgstr "Final" - -msgctxt "CWRType" -msgid "final" -msgstr "Final" - -msgid "first name" -msgstr "Nombre" - -msgid "firstname" -msgstr "Nombre" - -msgctxt "CWUser" -msgid "firstname" -msgstr "Nombre" - -msgid "foaf" -msgstr "Amigo de un Amigo, FOAF" - -msgid "focus on this selection" -msgstr "muestre esta selección" - -msgid "follow" -msgstr "Seguir la liga" - -#, python-format -msgid "follow this link for more information on this %s" -msgstr "Seleccione esta liga para obtener mayor información sobre %s" - -msgid "for_user" -msgstr "Para el usuario" - -msgctxt "CWProperty" -msgid "for_user" -msgstr "Propiedad del Usuario" - -msgid "for_user_object" -msgstr "Utiliza las propiedades" - -msgctxt "CWUser" -msgid "for_user_object" -msgstr "Tiene como preferencia" - -msgid "formula" -msgstr "" - -msgctxt "CWAttribute" -msgid "formula" -msgstr "" - -msgid "friday" -msgstr "Viernes" - -msgid "from" -msgstr "De" - -#, python-format -msgid "from %(date)s" -msgstr "de %(date)s" - -msgid "from_entity" -msgstr "De la entidad" - -msgctxt "CWAttribute" -msgid "from_entity" -msgstr "Atributo de la entidad" - -msgctxt "CWRelation" -msgid "from_entity" -msgstr "Relación de la entidad" - -msgid "from_entity_object" -msgstr "Relación sujeto" - -msgctxt "CWEType" -msgid "from_entity_object" -msgstr "Entidad de" - -msgid "from_interval_start" -msgstr "De" - -msgid "from_state" -msgstr "Del Estado" - -msgctxt "TrInfo" -msgid "from_state" -msgstr "Estado de Inicio" - -msgid "from_state_object" -msgstr "Transiciones desde este estado" - -msgctxt "State" -msgid "from_state_object" -msgstr "Estado de Inicio de" - -msgid "full text or RQL query" -msgstr "Texto de búsqueda o demanda RQL" - -msgid "fulltext_container" -msgstr "Contenedor de texto indexado" - -msgctxt "CWRType" -msgid "fulltext_container" -msgstr "Objeto a indexar" - -msgid "fulltextindexed" -msgstr "Indexación de texto" - -msgctxt "CWAttribute" -msgid "fulltextindexed" -msgstr "Texto indexado" - -msgid "gc" -msgstr "fuga de memoria" - -msgid "generic plot" -msgstr "Gráfica Genérica" - -msgid "generic relation to link one entity to another" -msgstr 
"Relación genérica para ligar entidades" - -msgid "" -"generic relation to specify that an external entity represent the same " -"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" -msgstr "" -"Relación genérica que indicar que una entidad es idéntica a otro recurso web " -"(ver http://www.w3.org/TR/owl-ref/#sameAs-def)." - -msgid "granted to groups" -msgstr "Otorgado a los grupos" - -#, python-format -msgid "graphical representation of %(appid)s data model" -msgstr "Representación gráfica del modelo de datos de %(appid)s" - -#, python-format -msgid "" -"graphical representation of the %(etype)s entity type from %(appid)s data " -"model" -msgstr "" -"Representación gráfica del modelo de datos para el tipo de entidad %(etype)s " -"de %(appid)s" - -#, python-format -msgid "" -"graphical representation of the %(rtype)s relation type from %(appid)s data " -"model" -msgstr "" -"Representación gráfica del modelo de datos para el tipo de relación " -"%(rtype)s de %(appid)s" - -msgid "group in which a user should be to be allowed to pass this transition" -msgstr "Grupo en el cual el usuario debe estar lograr la transición" - -msgid "groups" -msgstr "Grupos" - -msgid "groups allowed to add entities/relations of this type" -msgstr "grupos autorizados a agregar entidades/relaciones de este tipo" - -msgid "groups allowed to delete entities/relations of this type" -msgstr "grupos autorizados a eliminar entidades/relaciones de este tipo" - -msgid "groups allowed to read entities/relations of this type" -msgstr "grupos autorizados a leer entidades/relaciones de este tipo" - -msgid "groups allowed to update entities/relations of this type" -msgstr "grupos autorizados a actualizar entidades/relaciones de este tipo" - -msgid "groups grant permissions to the user" -msgstr "Los grupos otorgan los permisos al usuario" - -msgid "guests" -msgstr "Invitados" - -msgid "hCalendar" -msgstr "hCalendar" - -msgid "has_text" -msgstr "Contiene el texto" - -msgid "header-center" -msgstr "header - centro" - -msgid "header-left" -msgstr "encabezado (izquierdo)" - -msgid "header-right" -msgstr "encabezado (derecho)" - -msgid "hide filter form" -msgstr "Esconder el filtro" - -msgid "" -"how to format date and time in the ui (see this page for format " -"description)" -msgstr "" -"Formato de fecha y hora que se utilizará por defecto en la interfaz (mayor información del formato)" - -msgid "" -"how to format date in the ui (see this page for format " -"description)" -msgstr "" -"Formato de fecha que se utilizará por defecto en la interfaz (mayor información del formato)" - -msgid "how to format float numbers in the ui" -msgstr "" -"Formato de números flotantes que se utilizará por defecto en la interfaz" - -msgid "" -"how to format time in the ui (see this page for format " -"description)" -msgstr "" -"Formato de hora que se utilizará por defecto en la interfaz (mayor información del formato)" - -msgid "i18n_bookmark_url_fqs" -msgstr "Parámetros" - -msgid "i18n_bookmark_url_path" -msgstr "Ruta" - -msgid "i18n_login_popup" -msgstr "Identificarse" - -msgid "i18ncard_*" -msgstr "0..n" - -msgid "i18ncard_+" -msgstr "1..n" - -msgid "i18ncard_1" -msgstr "1" - -msgid "i18ncard_?" 
-msgstr "0..1" - -msgid "i18nprevnext_next" -msgstr "Siguiente" - -msgid "i18nprevnext_previous" -msgstr "Anterior" - -msgid "i18nprevnext_up" -msgstr "Padre" - -msgid "iCalendar" -msgstr "iCalendar" - -msgid "id of main template used to render pages" -msgstr "ID del template principal" - -msgid "identical to" -msgstr "Idéntico a" - -msgid "identical_to" -msgstr "idéntico a" - -msgid "identity" -msgstr "es idéntico a" - -msgid "identity_object" -msgstr "es idéntico a" - -msgid "" -"if full text content of subject/object entity should be added to other side " -"entity (the container)." -msgstr "" -"Si el texto indexado de la entidad sujeto/objeto debe ser agregado a la " -"entidad al otro extremo de la relación (el contenedor)." - -msgid "image" -msgstr "Imagen" - -msgid "in progress" -msgstr "en progreso" - -msgid "in_group" -msgstr "En el grupo" - -msgctxt "CWUser" -msgid "in_group" -msgstr "Forma parte del grupo" - -msgid "in_group_object" -msgstr "Miembros" - -msgctxt "CWGroup" -msgid "in_group_object" -msgstr "Contiene los usuarios" - -msgid "in_state" -msgstr "Estado" - -msgid "in_state_object" -msgstr "Estado de" - -msgid "in_synchronization" -msgstr "sincronizado" - -msgctxt "CWSource" -msgid "in_synchronization" -msgstr "sincronizado" - -msgid "incontext" -msgstr "En el contexto" - -msgid "incorrect captcha value" -msgstr "Valor del Captcha incorrecto" - -#, python-format -msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" -msgstr "el valor (%(KEY-value)r) es incorrecto para el tipo \"%(KEY-type)s\"" - -msgid "index this attribute's value in the plain text index" -msgstr "Indexar el valor de este atributo en el índice de texto simple" - -msgid "indexed" -msgstr "Indexado" - -msgctxt "CWAttribute" -msgid "indexed" -msgstr "Indexado" - -msgid "indicate the current state of an entity" -msgstr "Indica el estado actual de una entidad" - -msgid "" -"indicate which state should be used by default when an entity using states " -"is created" -msgstr "" -"Indica cual estado deberá ser utilizado por defecto al crear una entidad" - -msgid "indifferent" -msgstr "indifferente" - -msgid "info" -msgstr "Información del Sistema" - -msgid "initial state for this workflow" -msgstr "Estado inicial para este Workflow" - -msgid "initial_state" -msgstr "Estado inicial" - -msgctxt "Workflow" -msgid "initial_state" -msgstr "Estado inicial" - -msgid "initial_state_object" -msgstr "Estado inicial de" - -msgctxt "State" -msgid "initial_state_object" -msgstr "Estado inicial de" - -msgid "inlined" -msgstr "Inlined" - -msgctxt "CWRType" -msgid "inlined" -msgstr "Inlined" - -msgid "instance home" -msgstr "Repertorio de la Instancia" - -msgid "internal entity uri" -msgstr "Uri Interna" - -msgid "internationalizable" -msgstr "Internacionalizable" - -msgctxt "CWAttribute" -msgid "internationalizable" -msgstr "Internacionalizable" - -#, python-format -msgid "invalid action %r" -msgstr "Acción %r invalida" - -#, python-format -msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" -msgstr "Valor %(KEY-value)s es incorrecto, seleccione entre %(KEY-choices)s" - -msgid "is" -msgstr "es" - -msgid "is object of:" -msgstr "es objeto de" - -msgid "is subject of:" -msgstr "es sujeto de" - -msgid "" -"is the subject/object entity of the relation composed of the other ? This " -"implies that when the composite is deleted, composants are also deleted." -msgstr "" -"Es la entidad sujeto/objeto de la relación une agregación de el otro ? 
De " -"ser así, el destruir el composite destruirá de igual manera sus componentes " - -msgid "is this attribute's value translatable" -msgstr "Es el valor de este atributo traducible ?" - -msgid "is this relation equivalent in both direction ?" -msgstr "Es esta relación equivalente en los ambos sentidos ?" - -msgid "" -"is this relation physically inlined? you should know what you're doing if " -"you are changing this!" -msgstr "" -"Es esta relación estilo INLINED en la base de datos ? Usted debe saber lo " -"que hace si cambia esto !" - -msgid "is_instance_of" -msgstr "es una instancia de" - -msgid "is_instance_of_object" -msgstr "tiene como instancias" - -msgid "is_object" -msgstr "tiene por instancia" - -msgid "january" -msgstr "Enero" - -msgid "json-entities-export-view" -msgstr "Exportación JSON (de entidades)" - -msgid "json-export-view" -msgstr "Exportación JSON" - -msgid "july" -msgstr "Julio" - -msgid "june" -msgstr "Junio" - -msgid "language of the user interface" -msgstr "Idioma que se utilizará por defecto en la interfaz usuario" - -msgid "last connection date" -msgstr "Ultima conexión" - -msgid "last login time" -msgstr "Ultima conexión" - -msgid "last name" -msgstr "Apellido" - -msgid "last usage" -msgstr "Ultimo uso" - -msgid "last_login_time" -msgstr "Ultima fecha de conexión" - -msgctxt "CWUser" -msgid "last_login_time" -msgstr "Ultima conexión" - -msgid "latest import" -msgstr "importaciones recientes" - -msgid "latest modification time of an entity" -msgstr "Fecha de la última modificación de una entidad " - -msgid "latest synchronization time" -msgstr "fecha de la última sincronización" - -msgid "latest update on" -msgstr "Actualizado el" - -msgid "latest_retrieval" -msgstr "última sincronización" - -msgctxt "CWSource" -msgid "latest_retrieval" -msgstr "fecha de la última sincronización de la fuente" - -msgid "left" -msgstr "izquierda" - -msgid "line" -msgstr "línea" - -msgid "" -"link a property to the user which want this property customization. Unless " -"you're a site manager, this relation will be handled automatically." -msgstr "" -"Liga una propiedad al usuario que desea esta personalización. Salvo que " -"usted sea un administrador del sistema, esta relación será administrada de " -"forma automática." 
- -msgid "link a relation definition to its object entity type" -msgstr "Liga una definición de relación a su tipo de entidad objeto" - -msgid "link a relation definition to its relation type" -msgstr "Liga una definición de relación a su tipo de relación" - -msgid "link a relation definition to its subject entity type" -msgstr "Liga una definición de relación a su tipo de entidad" - -msgid "link a state to one or more workflow" -msgstr "Liga un estado a uno o más Workflow" - -msgid "link a transition information to its object" -msgstr "Liga una transición de informacion hacia los objetos asociados" - -msgid "link a transition to one or more workflow" -msgstr "Liga una transición a uno o más Workflow" - -msgid "link a workflow to one or more entity type" -msgstr "Liga un Workflow a uno a más tipos de entidad" - -msgid "list" -msgstr "Lista" - -msgid "log" -msgstr "log" - -msgctxt "CWDataImport" -msgid "log" -msgstr "log" - -msgid "log in" -msgstr "Acceder" - -msgid "login" -msgstr "Usuario" - -msgctxt "CWUser" -msgid "login" -msgstr "Usuario" - -msgid "login / password" -msgstr "usuario / contraseña" - -msgid "login or email" -msgstr "Usuario o dirección de correo" - -msgid "login_action" -msgstr "Ingresa tus datos" - -msgid "logout" -msgstr "Desconectarse" - -#, python-format -msgid "loop in %(rel)s relation (%(eid)s)" -msgstr "loop detectado en %(rel)s de la entidad #%(eid)s" - -msgid "main informations" -msgstr "Informaciones Generales" - -msgid "main_tab" -msgstr "descripción" - -msgid "mainvars" -msgstr "Variables principales" - -msgctxt "RQLExpression" -msgid "mainvars" -msgstr "Variables principales" - -msgid "manage" -msgstr "Administración Sistema" - -msgid "manage bookmarks" -msgstr "Gestión de favoritos" - -msgid "manage permissions" -msgstr "Gestión de permisos" - -msgid "managers" -msgstr "Administradores" - -msgid "mandatory relation" -msgstr "Relación obligatoria" - -msgid "march" -msgstr "Marzo" - -msgid "match_host" -msgstr "para el host" - -msgctxt "CWSourceHostConfig" -msgid "match_host" -msgstr "para el host" - -msgid "maximum number of characters in short description" -msgstr "Máximo de caracteres en las descripciones cortas" - -msgid "maximum number of entities to display in related combo box" -msgstr "Máximo de entidades a mostrar en las listas dinámicas" - -msgid "maximum number of objects displayed by page of results" -msgstr "Máximo de elementos mostrados por página de resultados" - -msgid "maximum number of related entities to display in the primary view" -msgstr "Máximo de entidades relacionadas a mostrar en la vista primaria" - -msgid "may" -msgstr "Mayo" - -msgid "memory leak debugging" -msgstr "depuración (debugging) de fuga de memoria" - -msgid "message" -msgstr "mensaje" - -#, python-format -msgid "missing parameters for entity %s" -msgstr "Parámetros faltantes a la entidad %s" - -msgid "modification" -msgstr "modificación" - -msgid "modification_date" -msgstr "Fecha de modificación" - -msgid "modify" -msgstr "Modificar" - -msgid "monday" -msgstr "Lunes" - -msgid "more actions" -msgstr "Más acciones" - -msgid "more info about this workflow" -msgstr "Más información acerca de este workflow" - -msgid "multiple edit" -msgstr "Edición multiple" - -msgid "my custom search" -msgstr "Mi búsqueda personalizada" - -msgid "name" -msgstr "Nombre" - -msgctxt "BaseTransition" -msgid "name" -msgstr "Nombre" - -msgctxt "CWCache" -msgid "name" -msgstr "Nombre" - -msgctxt "CWComputedRType" -msgid "name" -msgstr "" - -msgctxt "CWConstraintType" -msgid "name" -msgstr "Nombre" 
- -msgctxt "CWEType" -msgid "name" -msgstr "Nombre" - -msgctxt "CWGroup" -msgid "name" -msgstr "Nombre" - -msgctxt "CWRType" -msgid "name" -msgstr "Nombre" - -msgctxt "CWSource" -msgid "name" -msgstr "nombre" - -msgctxt "CWUniqueTogetherConstraint" -msgid "name" -msgstr "nombre" - -msgctxt "State" -msgid "name" -msgstr "nombre" - -msgctxt "Transition" -msgid "name" -msgstr "Nombre" - -msgctxt "Workflow" -msgid "name" -msgstr "Nombre" - -msgctxt "WorkflowTransition" -msgid "name" -msgstr "Nombre" - -msgid "name of the cache" -msgstr "Nombre del Caché" - -msgid "" -"name of the main variables which should be used in the selection if " -"necessary (comma separated)" -msgstr "" -"Nombre de las variables principales que deberían ser utilizadas en la " -"selección de ser necesario (separarlas con comas)" - -msgid "name of the source" -msgstr "nombre de la fuente" - -msgid "navbottom" -msgstr "Pie de página" - -msgid "navcontentbottom" -msgstr "Pie de página del contenido principal" - -msgid "navcontenttop" -msgstr "Encabezado" - -msgid "navigation" -msgstr "Navegación" - -msgid "navigation.combobox-limit" -msgstr "ComboBox" - -msgid "navigation.page-size" -msgstr "Paginación" - -msgid "navigation.related-limit" -msgstr "Entidades relacionadas" - -msgid "navigation.short-line-size" -msgstr "Descripción corta" - -msgid "navtop" -msgstr "Encabezado del contenido principal" - -msgid "new" -msgstr "Nuevo" - -msgid "next page" -msgstr "página siguiente" - -msgid "next_results" -msgstr "Siguientes resultados" - -msgid "no" -msgstr "No" - -msgid "no content next link" -msgstr "no hay liga siguiente" - -msgid "no content prev link" -msgstr "no existe liga previa" - -msgid "no edited fields specified" -msgstr "ningún campo por editar especificado" - -msgid "no log to display" -msgstr "no arrojó elementos para mostrar" - -msgid "no related entity" -msgstr "No posee entidad asociada" - -msgid "no repository sessions found" -msgstr "Ninguna sesión encontrada" - -msgid "no selected entities" -msgstr "No hay entidades seleccionadas" - -#, python-format -msgid "no such entity type %s" -msgstr "El tipo de entidad '%s' no existe" - -msgid "no version information" -msgstr "No existe la información de version" - -msgid "no web sessions found" -msgstr "Ninguna sesión web encontrada" - -msgid "normal" -msgstr "Normal" - -msgid "not authorized" -msgstr "No autorizado" - -msgid "not selected" -msgstr "No seleccionado" - -msgid "november" -msgstr "Noviembre" - -msgid "num. 
users" -msgstr "Número de Usuarios" - -msgid "object" -msgstr "Objeto" - -msgid "object type" -msgstr "Tipo de Objeto" - -msgid "october" -msgstr "Octubre" - -msgid "one month" -msgstr "Un mes" - -msgid "one week" -msgstr "Una semana" - -msgid "oneline" -msgstr "En una línea" - -msgid "only select queries are authorized" -msgstr "Solo están permitidas consultas de lectura" - -msgid "open all" -msgstr "Abrir todos" - -msgid "opened sessions" -msgstr "Sesiones abiertas" - -msgid "opened web sessions" -msgstr "Sesiones Web abiertas" - -msgid "options" -msgstr "Opciones" - -msgctxt "CWSourceSchemaConfig" -msgid "options" -msgstr "opciones" - -msgid "order" -msgstr "Orden" - -msgid "ordernum" -msgstr "Orden" - -msgctxt "CWAttribute" -msgid "ordernum" -msgstr "Número de Orden" - -msgctxt "CWRelation" -msgid "ordernum" -msgstr "Número de Orden" - -msgid "owl" -msgstr "OWL" - -msgid "owlabox" -msgstr "OWLabox" - -msgid "owned_by" -msgstr "Pertenece a" - -msgid "owned_by_object" -msgstr "Pertenece al objeto" - -msgid "owners" -msgstr "Proprietarios" - -msgid "ownerships have been changed" -msgstr "Derechos de propiedad modificados" - -msgid "pageid-not-found" -msgstr "Página no encontrada." - -msgid "parser" -msgstr "analizador (parser)" - -msgctxt "CWSource" -msgid "parser" -msgstr "analizador (parser)" - -msgid "parser to use to extract entities from content retrieved at given URLs." -msgstr "" -"analizador (parser) que sirve para extraer entidades y relaciones del " -"contenido recuperado de las URLs." - -msgid "password" -msgstr "Contraseña" - -msgid "password and confirmation don't match" -msgstr "Su contraseña y confirmación no concuerdan" - -msgid "path" -msgstr "Ruta" - -msgctxt "Bookmark" -msgid "path" -msgstr "Ruta" - -msgid "permalink to this message" -msgstr "liga permanente a este mensaje" - -msgid "permission" -msgstr "Permiso" - -msgid "permissions" -msgstr "Permisos" - -msgid "pick existing bookmarks" -msgstr "Seleccionar favoritos existentes" - -msgid "pkey" -msgstr "Clave" - -msgctxt "CWProperty" -msgid "pkey" -msgstr "Código de la Propiedad" - -msgid "please correct errors below" -msgstr "Por favor corregir los errores señalados en la parte inferior" - -msgid "please correct the following errors:" -msgstr "Por favor corregir los siguientes errores:" - -msgid "possible views" -msgstr "Vistas posibles" - -msgid "prefered_form" -msgstr "Forma preferida" - -msgctxt "EmailAddress" -msgid "prefered_form" -msgstr "Email principal" - -msgid "prefered_form_object" -msgstr "Formato preferido sobre" - -msgctxt "EmailAddress" -msgid "prefered_form_object" -msgstr "Email principal de" - -msgid "preferences" -msgstr "Preferencias" - -msgid "previous page" -msgstr "página anterior" - -msgid "previous_results" -msgstr "Resultados Anteriores" - -msgid "primary" -msgstr "Primaria" - -msgid "primary_email" -msgstr "Dirección principal de correo electrónico" - -msgctxt "CWUser" -msgid "primary_email" -msgstr "Dirección principal de correo electrónico" - -msgid "primary_email_object" -msgstr "Dirección de email principal (objeto)" - -msgctxt "EmailAddress" -msgid "primary_email_object" -msgstr "Dirección principal de correo electrónico de" - -msgid "profile" -msgstr "perfil" - -msgid "rdef-description" -msgstr "Descripción" - -msgid "rdef-permissions" -msgstr "Permisos" - -msgid "rdf export" -msgstr "Exportación RDF" - -msgid "read" -msgstr "Lectura" - -msgid "read_permission" -msgstr "Permiso de lectura" - -msgctxt "CWAttribute" -msgid "read_permission" -msgstr "Permiso de Lectura" - -msgctxt 
"CWEType" -msgid "read_permission" -msgstr "Permiso de Lectura" - -msgctxt "CWRelation" -msgid "read_permission" -msgstr "Permiso de Lectura" - -msgid "read_permission_object" -msgstr "Tiene acceso de lectura a" - -msgctxt "CWGroup" -msgid "read_permission_object" -msgstr "Puede leer" - -msgctxt "RQLExpression" -msgid "read_permission_object" -msgstr "Puede leer" - -msgid "regexp matching host(s) to which this config applies" -msgstr "" -"expresión regular de los nombres de hosts a los cuales esta configuración " -"aplica" - -msgid "registry" -msgstr "Registro" - -msgid "related entity has no state" -msgstr "La entidad relacionada no posee Estado" - -msgid "related entity has no workflow set" -msgstr "La entidad relacionada no posee Workflow definido" - -msgid "relation" -msgstr "relación" - -#, python-format -msgid "relation %(relname)s of %(ent)s" -msgstr "relación %(relname)s de %(ent)s" - -#, python-format -msgid "" -"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " -"type supported" -msgstr "" -"la relación %(rtype)s con %(etype)s como %(role)s es aceptada pero ningún " -"tipo target es aceptado" - -#, python-format -msgid "" -"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " -"mandatory but not supported" -msgstr "" -"la relación %(type)s con %(etype)s como %(role)s y tipo objetivo %(target)s " -"es obligatoria pero no mantenida" - -#, python-format -msgid "" -"relation %s is supported but none of its definitions matches supported " -"entities" -msgstr "" -"la relación %s es aceptada pero ninguna de sus definiciones corresponden a " -"los tipos de entidades aceptadas" - -msgid "relation add" -msgstr "Agregar Relación" - -msgid "relation removal" -msgstr "Eliminar Relación" - -msgid "relation_type" -msgstr "Tipo de Relación" - -msgctxt "CWAttribute" -msgid "relation_type" -msgstr "Tipo de Relación" - -msgctxt "CWRelation" -msgid "relation_type" -msgstr "Tipo de Relación" - -msgid "relation_type_object" -msgstr "Definición de Relaciones" - -msgctxt "CWRType" -msgid "relation_type_object" -msgstr "Definición de Relaciones" - -msgid "relations" -msgstr "relaciones" - -msgctxt "CWUniqueTogetherConstraint" -msgid "relations" -msgstr "relaciones" - -msgid "relations deleted" -msgstr "Relaciones Eliminadas" - -msgid "relations_object" -msgstr "relaciones de" - -msgctxt "CWRType" -msgid "relations_object" -msgstr "relaciones de" - -msgid "relative url of the bookmarked page" -msgstr "Url relativa de la página" - -msgid "remove-inlined-entity-form" -msgstr "Eliminar" - -msgid "require_group" -msgstr "Requiere el grupo" - -msgctxt "BaseTransition" -msgid "require_group" -msgstr "Restringida al Grupo" - -msgctxt "Transition" -msgid "require_group" -msgstr "Restringida al Grupo" - -msgctxt "WorkflowTransition" -msgid "require_group" -msgstr "Restringida al Grupo" - -msgid "require_group_object" -msgstr "Posee derechos sobre" - -msgctxt "CWGroup" -msgid "require_group_object" -msgstr "Posee derechos sobre" - -msgid "required" -msgstr "Requerido" - -msgid "required attribute" -msgstr "Atributo requerido" - -msgid "required field" -msgstr "Campo requerido" - -msgid "resources usage" -msgstr "Recursos utilizados" - -msgid "" -"restriction part of a rql query. For entity rql expression, X and U are " -"predefined respectivly to the current object and to the request user. For " -"relation rql expression, S, O and U are predefined respectivly to the " -"current relation'subject, object and to the request user. 
" -msgstr "" -"Parte restrictiva de una consulta RQL. En una expresión ligada a una " -"entidad, X y U son respectivamente asignadas a la Entidad y el Usuario en " -"curso.En una expresión ligada a una relación, S, O y U son respectivamente " -"asignados al Sujeto/Objeto de la relación y al Usuario actual." - -msgid "revert changes" -msgstr "Anular modificación" - -msgid "right" -msgstr "Derecha" - -msgid "rql expression allowing to add entities/relations of this type" -msgstr "Expresión RQL que permite AGREGAR entidades/relaciones de este tipo" - -msgid "rql expression allowing to delete entities/relations of this type" -msgstr "Expresión RQL que permite ELIMINAR entidades/relaciones de este tipo" - -msgid "rql expression allowing to read entities/relations of this type" -msgstr "Expresión RQL que permite LEER entidades/relaciones de este tipo" - -msgid "rql expression allowing to update entities/relations of this type" -msgstr "Expresión RQL que permite ACTUALIZAR entidades/relaciones de este tipo" - -msgid "rql expressions" -msgstr "Expresiones RQL" - -msgid "rss export" -msgstr "Exportación RSS" - -msgid "rule" -msgstr "" - -msgctxt "CWComputedRType" -msgid "rule" -msgstr "" - -msgid "same_as" -msgstr "Idéntico a" - -msgid "sample format" -msgstr "Ejemplo" - -msgid "saturday" -msgstr "Sábado" - -msgid "schema-diagram" -msgstr "Gráfica" - -msgid "schema-entity-types" -msgstr "Entidades" - -msgid "schema-relation-types" -msgstr "Relaciones" - -msgid "search" -msgstr "Buscar" - -msgid "search for association" -msgstr "Búsqueda por asociación" - -msgid "searching for" -msgstr "Buscando" - -msgid "security" -msgstr "Seguridad" - -msgid "see more" -msgstr "ver más" - -msgid "see them all" -msgstr "Ver todos" - -msgid "see_also" -msgstr "Ver además" - -msgid "select" -msgstr "Seleccionar" - -msgid "select a" -msgstr "Seleccione un" - -msgid "select a key first" -msgstr "Seleccione una clave" - -msgid "select a relation" -msgstr "Seleccione una relación" - -msgid "select this entity" -msgstr "Seleccionar esta entidad" - -msgid "selected" -msgstr "Seleccionado" - -msgid "semantic description of this attribute" -msgstr "Descripción semántica de este atributo" - -msgid "semantic description of this entity type" -msgstr "Descripción semántica de este tipo de entidad" - -msgid "semantic description of this relation" -msgstr "Descripción semántica de esta relación" - -msgid "semantic description of this relation type" -msgstr "Descripción semántica de este tipo de relación" - -msgid "semantic description of this state" -msgstr "Descripción semántica de este estado" - -msgid "semantic description of this transition" -msgstr "Descripcion semántica de esta transición" - -msgid "semantic description of this workflow" -msgstr "Descripcion semántica de este Workflow" - -msgid "september" -msgstr "Septiembre" - -msgid "server information" -msgstr "Información del servidor" - -msgid "severity" -msgstr "severidad" - -msgid "" -"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " -"You should also select text/html as default text format to actually get " -"fckeditor." -msgstr "" -"Indica si los campos de tipo texto deberán ser editados usando fckeditor " -"(un\n" -"editor HTML WYSIWYG). Deberá también elegir text/html\n" -"como formato de texto por defecto para poder utilizar fckeditor." 
- -#, python-format -msgid "show %s results" -msgstr "Mostrar %s resultados" - -msgid "show advanced fields" -msgstr "Mostrar campos avanzados" - -msgid "show filter form" -msgstr "Mostrar el Filtro" - -msgid "site configuration" -msgstr "Configuración Sistema" - -msgid "site documentation" -msgstr "Documentación Sistema" - -msgid "site title" -msgstr "Nombre del Sistema" - -msgid "site-wide property can't be set for user" -msgstr "Una propiedad específica al Sistema no puede ser propia al usuario" - -msgid "some later transaction(s) touch entity, undo them first" -msgstr "" -"Las transacciones más recientes modificaron esta entidad, anúlelas primero" - -msgid "some relations violate a unicity constraint" -msgstr "algunas relaciones no respetan la restricción de unicidad" - -msgid "sorry, the server is unable to handle this query" -msgstr "Lo sentimos, el servidor no puede manejar esta consulta" - -msgid "" -"source's configuration. One key=value per line, authorized keys depending on " -"the source's type" -msgstr "" -"configuración de fuentes. Una clave=valor por línea, las claves permitidas " -"dependen del tipo de la fuente." - -msgid "sparql xml" -msgstr "XML Sparql" - -msgid "special transition allowing to go through a sub-workflow" -msgstr "Transición especial que permite ir en un Sub-Workflow" - -msgid "specializes" -msgstr "Deriva de" - -msgctxt "CWEType" -msgid "specializes" -msgstr "Especializa" - -msgid "specializes_object" -msgstr "Especializado por" - -msgctxt "CWEType" -msgid "specializes_object" -msgstr "Especializado por" - -#, python-format -msgid "specifying %s is mandatory" -msgstr "especificar %s es obligatorio" - -msgid "" -"start timestamp of the currently in synchronization, or NULL when no " -"synchronization in progress." -msgstr "" -"horario de inicio de la sincronización en curso, o NULL cuando no existe " -"sincronización en curso" - -msgid "start_timestamp" -msgstr "horario inicio" - -msgctxt "CWDataImport" -msgid "start_timestamp" -msgstr "horario inicio" - -msgid "startup views" -msgstr "Vistas de inicio" - -msgid "startupview" -msgstr "Vistas de Inicio" - -msgid "state" -msgstr "Estado" - -msgid "state and transition don't belong the the same workflow" -msgstr "El Estado y la Transición no pertenecen al mismo Workflow" - -msgid "state doesn't apply to this entity's type" -msgstr "Este Estado no aplica a este tipo de Entidad" - -msgid "state doesn't belong to entity's current workflow" -msgstr "El Estado no pertenece al Workflow actual de la Entidad" - -msgid "state doesn't belong to entity's workflow" -msgstr "El Estado no pertenece al Workflow de la Entidad" - -msgid "" -"state doesn't belong to entity's workflow. You may want to set a custom " -"workflow for this entity first." -msgstr "" -"El Estado no pertenece al Workflow Actual de la Entidad. 
Usted deseaquizás " -"especificar que esta entidad debe utilizar este Workflow" - -msgid "state doesn't belong to this workflow" -msgstr "El Estado no pertenece a este Workflow" - -msgid "state_of" -msgstr "Estado de" - -msgctxt "State" -msgid "state_of" -msgstr "Estado de" - -msgid "state_of_object" -msgstr "Tiene por Estado" - -msgctxt "Workflow" -msgid "state_of_object" -msgstr "Tiene por Estado" - -msgid "status" -msgstr "estado" - -msgctxt "CWDataImport" -msgid "status" -msgstr "estado" - -msgid "status change" -msgstr "Cambio de Estatus" - -msgid "status changed" -msgstr "Estatus cambiado" - -#, python-format -msgid "status will change from %(st1)s to %(st2)s" -msgstr "El estatus cambiará de %(st1)s a %(st2)s" - -msgid "subject" -msgstr "Sujeto" - -msgid "subject type" -msgstr "Tipo del sujeto" - -msgid "subject/object cardinality" -msgstr "Cardinalidad Sujeto/Objeto" - -msgid "subworkflow" -msgstr "Sub-Workflow" - -msgctxt "WorkflowTransition" -msgid "subworkflow" -msgstr "Sub-Workflow" - -msgid "" -"subworkflow isn't a workflow for the same types as the transition's workflow" -msgstr "" -"Le Sub-Workflow no se aplica a los mismos tipos que el Workflow de esta " -"transición" - -msgid "subworkflow state" -msgstr "Estado de Sub-Workflow" - -msgid "subworkflow_exit" -msgstr "Salida del Sub-Workflow" - -msgctxt "WorkflowTransition" -msgid "subworkflow_exit" -msgstr "Salida del Sub-Workflow" - -msgid "subworkflow_exit_object" -msgstr "Salida Sub-Workflow de" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_exit_object" -msgstr "Salida Sub-Workflow de" - -msgid "subworkflow_object" -msgstr "Sub-Workflow de" - -msgctxt "Workflow" -msgid "subworkflow_object" -msgstr "Sub-Workflow de" - -msgid "subworkflow_state" -msgstr "Estado de Sub-Workflow" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_state" -msgstr "Estado de Sub-Workflow" - -msgid "subworkflow_state_object" -msgstr "Estado de Salida de" - -msgctxt "State" -msgid "subworkflow_state_object" -msgstr "Estado de Salida de" - -msgid "success" -msgstr "éxito" - -msgid "sunday" -msgstr "Domingo" - -msgid "surname" -msgstr "Apellido" - -msgctxt "CWUser" -msgid "surname" -msgstr "Apellido" - -msgid "symmetric" -msgstr "Simétrico" - -msgctxt "CWRType" -msgid "symmetric" -msgstr "Simétrico" - -msgid "synchronization-interval must be greater than 1 minute" -msgstr "synchronization-interval debe ser mayor a 1 minuto" - -msgid "table" -msgstr "Tabla" - -msgid "tablefilter" -msgstr "Tablero de Filtrado" - -msgid "text" -msgstr "Texto" - -msgid "text/cubicweb-page-template" -msgstr "Usar Page Templates" - -msgid "text/html" -msgstr "Usar HTML" - -msgid "text/markdown" -msgstr "" - -msgid "text/plain" -msgstr "Usar Texto simple" - -msgid "text/rest" -msgstr "Texto en REST" - -msgid "the URI of the object" -msgstr "El URI del Objeto" - -msgid "the prefered email" -msgstr "Dirección principal de email" - -msgid "the system source has its configuration stored on the file-system" -msgstr "" -"el sistema fuente tiene su configuración almacenada en el sistema de archivos" - -#, python-format -msgid "the value \"%s\" is already used, use another one" -msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro" - -msgid "there is no next page" -msgstr "no existe página siguiente" - -msgid "there is no previous page" -msgstr "no existe página anterior" - -#, python-format -msgid "there is no transaction #%s" -msgstr "no existe la transacción #%s" - -msgid "this action is not reversible!" -msgstr "Esta acción es irreversible!." 
- -msgid "this entity is currently owned by" -msgstr "Esta Entidad es propiedad de" - -msgid "this parser doesn't use a mapping" -msgstr "este analizador (parser) no utiliza mapeo" - -msgid "this resource does not exist" -msgstr "Este recurso no existe" - -msgid "this source doesn't use a mapping" -msgstr "esta fuente no utiliza mapeo" - -msgid "thursday" -msgstr "Jueves" - -msgid "timestamp" -msgstr "Fecha" - -msgctxt "CWCache" -msgid "timestamp" -msgstr "Válido desde" - -msgid "timetable" -msgstr "Tablero de tiempos" - -msgid "title" -msgstr "Nombre" - -msgctxt "Bookmark" -msgid "title" -msgstr "Nombre" - -msgid "to" -msgstr "a" - -#, python-format -msgid "to %(date)s" -msgstr "a %(date)s" - -msgid "to associate with" -msgstr "Para asociar con" - -msgid "to_entity" -msgstr "Hacia la entidad" - -msgctxt "CWAttribute" -msgid "to_entity" -msgstr "Por la entidad" - -msgctxt "CWRelation" -msgid "to_entity" -msgstr "Por la entidad" - -msgid "to_entity_object" -msgstr "Objeto de la Relación" - -msgctxt "CWEType" -msgid "to_entity_object" -msgstr "Objeto de la Relación" - -msgid "to_interval_end" -msgstr "a" - -msgid "to_state" -msgstr "Hacia el Estado" - -msgctxt "TrInfo" -msgid "to_state" -msgstr "Hacia el Estado" - -msgid "to_state_object" -msgstr "Transición hacia este Estado" - -msgctxt "State" -msgid "to_state_object" -msgstr "Transición hacia este Estado" - -msgid "toggle check boxes" -msgstr "Cambiar valor" - -msgid "tr_count" -msgstr "n° de transición" - -msgctxt "TrInfo" -msgid "tr_count" -msgstr "n° de transición" - -msgid "transaction undone" -msgstr "transacción anulada" - -#, python-format -msgid "transition %(tr)s isn't allowed from %(st)s" -msgstr "La transición %(tr)s no esta permitida desde el Estado %(st)s" - -msgid "transition doesn't belong to entity's workflow" -msgstr "La transición no pertenece al Workflow de la Entidad" - -msgid "transition isn't allowed" -msgstr "La transición no esta permitida" - -msgid "transition may not be fired" -msgstr "La transición no puede ser lanzada" - -msgid "transition_of" -msgstr "Transición de" - -msgctxt "BaseTransition" -msgid "transition_of" -msgstr "Transición de" - -msgctxt "Transition" -msgid "transition_of" -msgstr "Transición de" - -msgctxt "WorkflowTransition" -msgid "transition_of" -msgstr "Transición de" - -msgid "transition_of_object" -msgstr "Utiliza las transiciones" - -msgctxt "Workflow" -msgid "transition_of_object" -msgstr "Utiliza las transiciones" - -msgid "tree view" -msgstr "Vista Jerárquica" - -msgid "tuesday" -msgstr "Martes" - -msgid "type" -msgstr "Tipo" - -msgctxt "BaseTransition" -msgid "type" -msgstr "Tipo" - -msgctxt "CWSource" -msgid "type" -msgstr "tipo" - -msgctxt "Transition" -msgid "type" -msgstr "Tipo" - -msgctxt "WorkflowTransition" -msgid "type" -msgstr "Tipo" - -msgid "type here a sparql query" -msgstr "Escriba aquí su consulta en Sparql" - -msgid "type of the source" -msgstr "tipo de la fuente" - -msgid "ui" -msgstr "Interfaz Genérica" - -msgid "ui.date-format" -msgstr "Formato de Fecha" - -msgid "ui.datetime-format" -msgstr "Formato de Fecha y Hora" - -msgid "ui.default-text-format" -msgstr "Formato de texto" - -msgid "ui.encoding" -msgstr "Codificación" - -msgid "ui.fckeditor" -msgstr "Editor de texto FCK" - -msgid "ui.float-format" -msgstr "Números flotantes" - -msgid "ui.language" -msgstr "Lenguaje" - -msgid "ui.main-template" -msgstr "Plantilla Principal" - -msgid "ui.site-title" -msgstr "Nombre del Sistema" - -msgid "ui.time-format" -msgstr "Formato de hora" - -msgid "unable to check captcha, 
please try again" -msgstr "Imposible de verificar el Captcha, inténtelo otra vez" - -msgid "unaccessible" -msgstr "Inaccesible" - -msgid "unauthorized value" -msgstr "Valor no permitido" - -msgid "undefined user" -msgstr "usuario indefinido" - -msgid "undo" -msgstr "Anular" - -msgid "unique identifier used to connect to the application" -msgstr "Identificador único utilizado para conectarse al Sistema" - -msgid "unknown external entity" -msgstr "Entidad externa desconocida" - -#, python-format -msgid "unknown options %s" -msgstr "opciones desconocidas: %s" - -#, python-format -msgid "unknown property key %s" -msgstr "Clave de Propiedad desconocida: %s" - -msgid "unknown vocabulary:" -msgstr "Vocabulario desconocido: " - -msgid "unsupported protocol" -msgstr "protocolo no soportado" - -msgid "upassword" -msgstr "Contraseña" - -msgctxt "CWUser" -msgid "upassword" -msgstr "Contraseña" - -msgid "update" -msgstr "Modificación" - -msgid "update_permission" -msgstr "Puede ser modificado por" - -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "Puede ser modificado por" - -msgctxt "CWEType" -msgid "update_permission" -msgstr "Puede ser modificado por" - -msgid "update_permission_object" -msgstr "Tiene permiso de modificar" - -msgctxt "CWGroup" -msgid "update_permission_object" -msgstr "Puede modificar" - -msgctxt "RQLExpression" -msgid "update_permission_object" -msgstr "Puede modificar" - -msgid "update_relation" -msgstr "Modificar" - -msgid "updated" -msgstr "Actualizado" - -#, python-format -msgid "updated %(etype)s #%(eid)s (%(title)s)" -msgstr "actualización de la entidad %(etype)s #%(eid)s (%(title)s)" - -msgid "uri" -msgstr "URI" - -msgctxt "ExternalUri" -msgid "uri" -msgstr "URI" - -msgid "url" -msgstr "url" - -msgctxt "CWSource" -msgid "url" -msgstr "url" - -msgid "" -"use to define a transition from one or multiple states to a destination " -"states in workflow's definitions. Transition without destination state will " -"go back to the state from which we arrived to the current state." -msgstr "" -"Se utiliza en una definición de procesos para agregar una transición desde " -"uno o varios estados hacia un estado destino. Una transición sin Estado " -"destino regresará al Estado anterior del Estado actual" - -msgid "use_email" -msgstr "Correo electrónico" - -msgctxt "CWUser" -msgid "use_email" -msgstr "Usa el Correo Electrónico" - -msgid "use_email_object" -msgstr "Email utilizado por" - -msgctxt "EmailAddress" -msgid "use_email_object" -msgstr "Utilizado por" - -msgid "" -"used for cubicweb configuration. Once a property has been created you can't " -"change the key." -msgstr "" -"Se utiliza para la configuración de CubicWeb. 
Una vez que la propiedad ha " -"sido creada no puede cambiar la clave" - -msgid "" -"used to associate simple states to an entity type and/or to define workflows" -msgstr "" -"Se utiliza para asociar estados simples a un tipo de entidad y/o para " -"definir Workflows" - -msgid "user" -msgstr "Usuario" - -#, python-format -msgid "" -"user %s has made the following change(s):\n" -"\n" -msgstr "" -"El usuario %s ha efectuado los siguentes cambios:\n" -"\n" - -msgid "user interface encoding" -msgstr "Encoding de la interfaz de usuario" - -msgid "user preferences" -msgstr "Preferencias" - -msgid "user's email account" -msgstr "email del usuario" - -msgid "users" -msgstr "Usuarios" - -msgid "users and groups" -msgstr "usuarios y grupos" - -msgid "users using this bookmark" -msgstr "Usuarios utilizando este Favorito" - -msgid "validate modifications on selected items" -msgstr "Valida modificaciones sobre elementos seleccionados" - -msgid "validating..." -msgstr "Validando ..." - -msgid "value" -msgstr "Valor" - -msgctxt "CWConstraint" -msgid "value" -msgstr "Valor" - -msgctxt "CWProperty" -msgid "value" -msgstr "Vampr" - -#, python-format -msgid "value %(KEY-value)s must be < %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" -msgstr "el valor %(KEY-value)s debe ser <= %(KEY-boundary)s" - -#, python-format -msgid "value %(KEY-value)s must be > %(KEY-boundary)s" -msgstr "" - -#, python-format -msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" -msgstr "el valor %(KEY-value)s debe ser >= %(KEY-boundary)s" - -msgid "value associated to this key is not editable manually" -msgstr "El valor asociado a este elemento no es editable manualmente" - -#, python-format -msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" -msgstr "el valor máximo es %(KEY-max)s y encontramos %(KEY-size)s" - -#, python-format -msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" -msgstr "el valor mínimo debe ser %(KEY-min)s y encontramos %(KEY-size)s" - -msgid "vcard" -msgstr "vcard" - -msgid "versions configuration" -msgstr "Configuración de Versión" - -msgid "view" -msgstr "Ver" - -msgid "view all" -msgstr "Ver todos" - -msgid "view detail for this entity" -msgstr "Ver a detalle esta entidad" - -msgid "view history" -msgstr "Ver histórico" - -msgid "view identifier" -msgstr "Identificador" - -msgid "view title" -msgstr "Nombre" - -msgid "view workflow" -msgstr "Ver Workflow" - -msgid "view_index" -msgstr "Inicio" - -msgid "visible" -msgstr "Visible" - -msgid "warning" -msgstr "atención" - -msgid "we are not yet ready to handle this query" -msgstr "Aún no podemos manejar este tipo de consulta Sparql" - -msgid "wednesday" -msgstr "Miércoles" - -#, python-format -msgid "welcome %s!" -msgstr "Bienvenido %s." - -msgid "wf_info_for" -msgstr "Histórico de" - -msgid "wf_info_for_object" -msgstr "Histórico de transiciones" - -msgid "wf_tab_info" -msgstr "Descripción" - -msgid "wfgraph" -msgstr "Gráfica del Workflow" - -msgid "" -"when multiple addresses are equivalent (such as python-projects@logilab.org " -"and python-projects@lists.logilab.org), set this to indicate which is the " -"preferred form." -msgstr "" -"Cuando varias direcciones email son equivalentes (como python-" -"projects@logilab.org y python-projects@lists.logilab.org), aquí se indica " -"cual es la forma preferida." 
- -msgid "workflow" -msgstr "Workflow" - -#, python-format -msgid "workflow changed to \"%s\"" -msgstr "Workflow cambiado a \"%s\"" - -msgid "workflow has no initial state" -msgstr "El Workflow no posee Estado Inicial" - -msgid "workflow history item" -msgstr "Elemento histórico del Workflow" - -msgid "workflow isn't a workflow for this type" -msgstr "El Workflow no se aplica a este Tipo de Entidad" - -msgid "workflow to which this state belongs" -msgstr "Workflow al cual pertenece este estado" - -msgid "workflow to which this transition belongs" -msgstr "Workflow al cual pertenece esta transición" - -msgid "workflow_of" -msgstr "Workflow de" - -msgctxt "Workflow" -msgid "workflow_of" -msgstr "Workflow de" - -msgid "workflow_of_object" -msgstr "Utiliza el Workflow" - -msgctxt "CWEType" -msgid "workflow_of_object" -msgstr "Utiliza el Workflow" - -#, python-format -msgid "wrong query parameter line %s" -msgstr "Parámetro erróneo de consulta línea %s" - -msgid "xbel export" -msgstr "Exportación XBEL" - -msgid "xml export" -msgstr "Exportar XML" - -msgid "xml export (entities)" -msgstr "Exportación XML (entidades)" - -msgid "yes" -msgstr "Sí" - -msgid "you have been logged out" -msgstr "Ha terminado la sesión" - -msgid "you should probably delete that property" -msgstr "probablamente debería suprimir esta propriedad" - -#~ msgid "%s relation should not be in mapped" -#~ msgstr "la relación %s no debería estar mapeada" - -#~ msgid "Any" -#~ msgstr "Cualquiera" - -#~ msgid "Browse by category" -#~ msgstr "Busca por categoría" - -#~ msgid "No account? Try public access at %s" -#~ msgstr "No esta registrado? Use el acceso público en %s" - -#~ msgid "anonymous" -#~ msgstr "anónimo" - -#~ msgid "attribute/relation can't be mapped, only entity and relation types" -#~ msgstr "" -#~ "los atributos y las relaciones no pueden ser mapeados, solamente los " -#~ "tipos de entidad y de relación" - -#~ msgid "can't connect to source %s, some data may be missing" -#~ msgstr "no se puede conectar a la fuente %s, algunos datos pueden faltar" - -#~ msgid "can't mix dontcross and maycross options" -#~ msgstr "no puede mezclar las opciones dontcross y maycross" - -#~ msgid "can't mix dontcross and write options" -#~ msgstr "no puede mezclar las opciones dontcross y write" - -#~ msgid "components_etypenavigation" -#~ msgstr "Filtar por tipo" - -#~ msgid "components_etypenavigation_description" -#~ msgstr "Permite filtrar por tipo de entidad los resultados de una búsqueda" - -#~ msgid "error while querying source %s, some data may be missing" -#~ msgstr "" -#~ "Un error ha ocurrido al interrogar %s, es posible que los \n" -#~ "datos visibles se encuentren incompletos" - -#~ msgid "inlined relation %(rtype)s of %(etype)s should be supported" -#~ msgstr "" -#~ "la relación %(rtype)s del tipo de entidad %(etype)s debe ser aceptada " -#~ "('inlined')" - -#~ msgid "no edited fields specified for entity %s" -#~ msgstr "Ningún campo editable especificado para la entidad %s" - -#~ msgid "timeline" -#~ msgstr "Escala de Tiempo" - -#~ msgid "unknown option(s): %s" -#~ msgstr "opcion(es) desconocida(s): %s" - -#~ msgid "value %(KEY-value)s must be %(KEY-op)s %(KEY-boundary)s" -#~ msgstr "El valor %(KEY-value)s debe ser %(KEY-op)s %(KEY-boundary)s" - -#~ msgid "web sessions without CNX" -#~ msgstr "sesiones web sin conexión asociada" - -#~ msgid "workflow already has a state of that name" -#~ msgstr "el workflow posee ya un estado con ese nombre" - -#~ msgid "workflow already has a transition of that name" -#~ msgstr "El 
Workflow posee ya una transición con ese nombre" - -#~ msgid "you may want to specify something for %s" -#~ msgstr "usted desea quizás especificar algo para la relación %s" - -#~ msgid "" -#~ "you should un-inline relation %s which is supported and may be crossed " -#~ msgstr "" -#~ "usted debe quitar la puesta en línea de la relación %s que es aceptada y " -#~ "puede ser cruzada" diff -r 058bb3dc685f -r 0b59724cb3f2 i18n/fr.po --- a/i18n/fr.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4725 +0,0 @@ -# cubicweb i18n catalog -# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# Logilab -msgid "" -msgstr "" -"Project-Id-Version: cubicweb 2.46.0\n" -"PO-Revision-Date: 2014-06-24 13:29+0200\n" -"Last-Translator: Logilab Team \n" -"Language-Team: fr \n" -"Language: \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" - -#, python-format -msgid "" -"\n" -"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for " -"entity\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" -msgstr "" -"\n" -"%(user)s a changé l'état de <%(previous_state)s> vers <%(current_state)s> " -"pour l'entité\n" -"'%(title)s'\n" -"\n" -"%(comment)s\n" -"\n" -"url: %(url)s\n" - -#, python-format -msgid " from state %(fromstate)s to state %(tostate)s\n" -msgstr " de l'état %(fromstate)s vers l'état %(tostate)s\n" - -msgid " :" -msgstr " :" - -#, python-format -msgid "\"action\" must be specified in options; allowed values are %s" -msgstr "" -"\"action\" doit être specifié dans les options; les valeurs autorisées " -"sont : %s" - -msgid "\"role=subject\" or \"role=object\" must be specified in options" -msgstr "" -"\"role=subject\" ou \"role=object\" doit être specifié dans les options" - -#, python-format -msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" -msgstr "la valeur %(KEY-value)r ne satisfait pas la contrainte %(KEY-cstr)s" - -#, python-format -msgid "%(KEY-rtype)s is part of violated unicity constraint" -msgstr "%(KEY-rtype)s appartient à une contrainte d'unicité transgressée" - -#, python-format -msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" -msgstr "" -"%(KEY-value)r ne correspond pas à l'expression régulière %(KEY-regexp)r" - -#, python-format -msgid "%(attr)s set to %(newvalue)s" -msgstr "%(attr)s modifié à %(newvalue)s" - -#, python-format -msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" -msgstr "%(attr)s modifié de %(oldvalue)s à %(newvalue)s" - -#, python-format -msgid "%(etype)s by %(author)s" -msgstr "%(etype)s par %(author)s" - -#, python-format -msgid "%(firstname)s %(surname)s" -msgstr "%(firstname)s %(surname)s" - -#, python-format -msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)" -msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" - -#, python-format -msgid "%d days" -msgstr "%d jours" - -#, python-format -msgid "%d hours" -msgstr "%d heures" - -#, python-format -msgid "%d minutes" -msgstr "%d minutes" - -#, python-format -msgid "%d months" -msgstr "%d mois" - -#, python-format -msgid "%d seconds" -msgstr "%d secondes" - -#, python-format -msgid "%d weeks" -msgstr "%d semaines" - -#, python-format -msgid "%d years" -msgstr "%d années" - -#, python-format -msgid "%s could be supported" -msgstr "%s pourrait être supporté" - -#, python-format -msgid "%s error report" -msgstr "%s rapport d'erreur" - -#, python-format -msgid "%s software version of the database" -msgstr "version logicielle de la base 
pour %s" - -#, python-format -msgid "%s updated" -msgstr "%s mis à jour" - -#, python-format -msgid "'%s' action doesn't take any options" -msgstr "l'action '%s' ne prend pas d'option" - -#, python-format -msgid "" -"'%s' action for in_state relation should at least have 'linkattr=name' option" -msgstr "" -"l'action '%s' pour la relation in_state doit au moins avoir l'option " -"'linkattr=name'" - -#, python-format -msgid "'%s' action requires 'linkattr' option" -msgstr "l'action '%s' nécessite une option 'linkattr'" - -msgid "(UNEXISTANT EID)" -msgstr "(EID INTROUVABLE)" - -#, python-format -msgid "(suppressed) entity #%d" -msgstr "entité #%d (supprimée)" - -msgid "**" -msgstr "0..n 0..n" - -msgid "*+" -msgstr "0..n 1..n" - -msgid "*1" -msgstr "0..n 1" - -msgid "*?" -msgstr "0..n 0..1" - -msgid "+*" -msgstr "1..n 0..n" - -msgid "++" -msgstr "1..n 1..n" - -msgid "+1" -msgstr "1..n 1" - -msgid "+?" -msgstr "1..n 0..1" - -msgid "1*" -msgstr "1 0..n" - -msgid "1+" -msgstr "1 1..n" - -msgid "11" -msgstr "1 1" - -msgid "1?" -msgstr "1 0..1" - -#, python-format -msgid "<%s not specified>" -msgstr "<%s non spécifié>" - -#, python-format -msgid "" -"
This schema of the data model excludes the meta-data, but you " -"can also display a complete schema with meta-data." -msgstr "" -"Ce schéma du modèle de données exclut les méta-données, mais " -"vous pouvez afficher un schéma complet.
      " - -msgid "" -msgstr "" - -msgid "" -msgstr "" - -msgid "?*" -msgstr "0..1 0..n" - -msgid "?+" -msgstr "0..1 1..n" - -msgid "?1" -msgstr "0..1 1" - -msgid "??" -msgstr "0..1 0..1" - -msgid "AND" -msgstr "ET" - -msgid "About this site" -msgstr "À propos de ce site" - -#, python-format -msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "Relation ajoutée : %(entity_from)s %(rtype)s %(entity_to)s" - -msgid "Attributes permissions:" -msgstr "Permissions des attributs" - -# schema pot file, generated on 2009-09-16 16:46:55 -# -# singular and plural forms for each entity type -msgid "BaseTransition" -msgstr "Transition (abstraite)" - -msgid "BaseTransition_plural" -msgstr "Transitions (abstraites)" - -msgid "BigInt" -msgstr "Entier long" - -msgid "BigInt_plural" -msgstr "Entiers longs" - -msgid "Bookmark" -msgstr "Signet" - -msgid "Bookmark_plural" -msgstr "Signets" - -msgid "Boolean" -msgstr "Booléen" - -msgid "Boolean_plural" -msgstr "Booléen" - -msgid "BoundConstraint" -msgstr "contrainte de bornes" - -msgid "BoundaryConstraint" -msgstr "contrainte de bornes" - -msgid "Browse by entity type" -msgstr "Naviguer par type d'entité" - -#, python-format -msgid "By %(user)s on %(dt)s [%(undo_link)s]" -msgstr "Par %(user)s le %(dt)s [%(undo_link)s] " - -msgid "Bytes" -msgstr "Donnée binaires" - -msgid "Bytes_plural" -msgstr "Données binaires" - -msgid "CWAttribute" -msgstr "Attribut" - -msgid "CWAttribute_plural" -msgstr "Attributs" - -msgid "CWCache" -msgstr "Cache applicatif" - -msgid "CWCache_plural" -msgstr "Caches applicatifs" - -msgid "CWComputedRType" -msgstr "Relation virtuelle" - -msgid "CWComputedRType_plural" -msgstr "Relations virtuelles" - -msgid "CWConstraint" -msgstr "Contrainte" - -msgid "CWConstraintType" -msgstr "Type de contrainte" - -msgid "CWConstraintType_plural" -msgstr "Types de contrainte" - -msgid "CWConstraint_plural" -msgstr "Contraintes" - -msgid "CWDataImport" -msgstr "Import de données" - -msgid "CWDataImport_plural" -msgstr "Imports de données" - -msgid "CWEType" -msgstr "Type d'entité" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "CWEType" -msgstr "Type d'entité" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "CWEType" -msgstr "Type d'entité" - -msgid "CWEType_plural" -msgstr "Types d'entité" - -msgid "CWGroup" -msgstr "Groupe" - -msgid "CWGroup_plural" -msgstr "Groupes" - -msgid "CWProperty" -msgstr "Propriété" - -msgid "CWProperty_plural" -msgstr "Propriétés" - -msgid "CWRType" -msgstr "Type de relation" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "CWRType" -msgstr "Type de relation" - -msgid "CWRType_plural" -msgstr "Types de relation" - -msgid "CWRelation" -msgstr "Relation" - -msgid "CWRelation_plural" -msgstr "Relations" - -msgid "CWSource" -msgstr "Source de données" - -msgid "CWSourceHostConfig" -msgstr "Configuration de source" - -msgid "CWSourceHostConfig_plural" -msgstr "Configurations de source" - -msgid "CWSourceSchemaConfig" -msgstr "Configuration de schéma de source" - -msgid "CWSourceSchemaConfig_plural" -msgstr "Configurations de schéma de source" - -msgid "CWSource_plural" -msgstr "Source de données" - -msgid "CWUniqueTogetherConstraint" -msgstr "Contrainte d'unicité" - -msgid "CWUniqueTogetherConstraint_plural" -msgstr "Contraintes d'unicité" - -msgid "CWUser" -msgstr "Utilisateur" - -msgid "CWUser_plural" -msgstr "Utilisateurs" - -#, python-format -msgid "" -"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " -"linked using this relation." 
-msgstr "" -"Ne peut restaurer la relation %(role)s %(rtype)s vers l'entité %(eid)s qui " -"est déja lié à une autre entité par cette relation." - -#, python-format -msgid "" -"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " -"does not exists anymore in the schema." -msgstr "" -"Ne peut restaurer la relation %(rtype)s entre %(subj)s et %(obj)s, cette " -"relation n'existe plus dans le schéma." - -#, python-format -msgid "" -"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " -"anymore." -msgstr "" -"Ne peut restaurer la relation %(rtype)s, l'entité %(role)s %(eid)s n'existe " -"plus." - -#, python-format -msgid "" -"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " -"exist anymore" -msgstr "" -"Ne peut annuler l'ajout de relation %(rtype)s de %(subj)s vers %(obj)s, " -"cette relation n'existe plus" - -#, python-format -msgid "" -"Can't undo creation of entity %(eid)s of type %(etype)s, type no more " -"supported" -msgstr "" -"Ne peut annuler la création de l'entité %(eid)s de type %(etype)s, ce type " -"n'existe plus" - -msgid "Click to sort on this column" -msgstr "Cliquer pour trier sur cette colonne" - -msgid "" -"Configuration of the system source goes to the 'sources' file, not in the " -"database" -msgstr "" -"La configuration de la source système va dans le fichier 'sources' et non " -"dans la base de données" - -#, python-format -msgid "Created %(etype)s : %(entity)s" -msgstr "Entité %(etype)s crée : %(entity)s" - -msgid "DEBUG" -msgstr "DEBUG" - -msgid "Date" -msgstr "Date" - -msgid "Date_plural" -msgstr "Dates" - -msgid "Datetime" -msgstr "Date et heure" - -msgid "Datetime_plural" -msgstr "Dates et heures" - -msgid "Decimal" -msgstr "Nombre décimal" - -msgid "Decimal_plural" -msgstr "Nombres décimaux" - -#, python-format -msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "Relation supprimée : %(entity_from)s %(rtype)s %(entity_to)s" - -#, python-format -msgid "Deleted %(etype)s : %(entity)s" -msgstr "Entité %(etype)s supprimée : %(entity)s" - -msgid "Detected problems" -msgstr "Problèmes détectés" - -msgid "Do you want to delete the following element(s)?" -msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?" - -msgid "Download schema as OWL" -msgstr "Télécharger le schéma au format OWL" - -msgid "ERROR" -msgstr "ERREUR" - -msgid "EmailAddress" -msgstr "Adresse électronique" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "EmailAddress" -msgstr "Adresse électronique" - -msgid "EmailAddress_plural" -msgstr "Adresses électroniques" - -msgid "Entities" -msgstr "entités" - -#, python-format -msgid "" -"Entity %(eid)s has changed since you started to edit it. Reload the page and " -"reapply your changes." -msgstr "" -"L'entité %(eid)s a été modifiée depuis votre demande d'édition. Veuillez " -"recharger cette page et réappliquer vos changements." 
- -msgid "Entity and relation supported by this source" -msgstr "Entités et relations supportés par cette source" - -msgid "ExternalUri" -msgstr "Uri externe" - -msgid "ExternalUri_plural" -msgstr "Uri externes" - -msgid "FATAL" -msgstr "FATAL" - -msgid "Float" -msgstr "Nombre flottant" - -msgid "Float_plural" -msgstr "Nombres flottants" - -# schema pot file, generated on 2009-12-03 09:22:35 -# -# singular and plural forms for each entity type -msgid "FormatConstraint" -msgstr "contrainte de format" - -msgid "Garbage collection information" -msgstr "Information sur le ramasse-miette" - -msgid "Help" -msgstr "Aide" - -msgid "INFO" -msgstr "INFO" - -msgid "Instance" -msgstr "Instance" - -msgid "Int" -msgstr "Nombre entier" - -msgid "Int_plural" -msgstr "Nombres entiers" - -msgid "Interval" -msgstr "Durée" - -msgid "IntervalBoundConstraint" -msgstr "contrainte d'interval" - -msgid "Interval_plural" -msgstr "Durées" - -msgid "Link:" -msgstr "Lien :" - -msgid "Looked up classes" -msgstr "Classes recherchées" - -msgid "Manage" -msgstr "Administration" - -msgid "Manage security" -msgstr "Gestion de la sécurité" - -msgid "Message threshold" -msgstr "Niveau du message" - -msgid "Most referenced classes" -msgstr "Classes les plus référencées" - -msgid "New BaseTransition" -msgstr "XXX" - -msgid "New Bookmark" -msgstr "Nouveau signet" - -msgid "New CWAttribute" -msgstr "Nouvelle définition de relation finale" - -msgid "New CWCache" -msgstr "Nouveau cache applicatif" - -msgid "New CWComputedRType" -msgstr "Nouvelle relation virtuelle" - -msgid "New CWConstraint" -msgstr "Nouvelle contrainte" - -msgid "New CWConstraintType" -msgstr "Nouveau type de contrainte" - -msgid "New CWDataImport" -msgstr "Nouvel import de données" - -msgid "New CWEType" -msgstr "Nouveau type d'entité" - -msgid "New CWGroup" -msgstr "Nouveau groupe" - -msgid "New CWProperty" -msgstr "Nouvelle propriété" - -msgid "New CWRType" -msgstr "Nouveau type de relation" - -msgid "New CWRelation" -msgstr "Nouvelle définition de relation non finale" - -msgid "New CWSource" -msgstr "Nouvelle source" - -msgid "New CWSourceHostConfig" -msgstr "Nouvelle configuration de source" - -msgid "New CWSourceSchemaConfig" -msgstr "Nouvelle partie de mapping de source" - -msgid "New CWUniqueTogetherConstraint" -msgstr "Nouvelle contrainte unique_together" - -msgid "New CWUser" -msgstr "Nouvel utilisateur" - -msgid "New EmailAddress" -msgstr "Nouvelle adresse électronique" - -msgid "New ExternalUri" -msgstr "Nouvelle Uri externe" - -msgid "New RQLExpression" -msgstr "Nouvelle expression rql" - -msgid "New State" -msgstr "Nouvel état" - -msgid "New SubWorkflowExitPoint" -msgstr "Nouvelle sortie de sous-workflow" - -msgid "New TrInfo" -msgstr "Nouvelle information de transition" - -msgid "New Transition" -msgstr "Nouvelle transition" - -msgid "New Workflow" -msgstr "Nouveau workflow" - -msgid "New WorkflowTransition" -msgstr "Nouvelle transition workflow" - -msgid "No result matching query" -msgstr "Aucun résultat ne correspond à la requête" - -msgid "Non exhaustive list of views that may apply to entities of this type" -msgstr "Liste non exhaustive des vues s'appliquant à ce type d'entité" - -msgid "OR" -msgstr "OU" - -msgid "Ownership" -msgstr "Propriété" - -msgid "Parent class:" -msgstr "Classe parente" - -msgid "Password" -msgstr "Mot de passe" - -msgid "Password_plural" -msgstr "Mots de passe" - -msgid "Please note that this is only a shallow copy" -msgstr "Attention, cela n'effectue qu'une copie de surface" - -msgid "Powered by CubicWeb" -msgstr 
"Construit avec CubicWeb" - -msgid "RQLConstraint" -msgstr "contrainte rql" - -msgid "RQLExpression" -msgstr "Expression RQL" - -msgid "RQLExpression_plural" -msgstr "Expressions RQL" - -msgid "RQLUniqueConstraint" -msgstr "contrainte rql d'unicité" - -msgid "RQLVocabularyConstraint" -msgstr "contrainte rql de vocabulaire" - -msgid "RegexpConstraint" -msgstr "contrainte expression régulière" - -msgid "Registry's content" -msgstr "Contenu du registre" - -msgid "Relations" -msgstr "Relations" - -msgid "Repository" -msgstr "Entrepôt de données" - -#, python-format -msgid "Schema %s" -msgstr "Schéma %s" - -msgid "Schema's permissions definitions" -msgstr "Permissions définies dans le schéma" - -msgid "Search for" -msgstr "Rechercher" - -msgid "Site information" -msgstr "Information du site" - -msgid "SizeConstraint" -msgstr "contrainte de taille" - -msgid "" -"Source's configuration for a particular host. One key=value per line, " -"authorized keys depending on the source's type, overriding values defined on " -"the source." -msgstr "" -"Configuration de la source pour un hôte spécifique. Une clé=valeur par " -"ligne, les clés autorisées dépendantes du type de source. Les valeurs " -"surchargent celles définies sur la source." - -msgid "Startup views" -msgstr "Vues de départ" - -msgid "State" -msgstr "État" - -msgid "State_plural" -msgstr "États" - -msgid "StaticVocabularyConstraint" -msgstr "contrainte de vocabulaire" - -msgid "String" -msgstr "Chaîne de caractères" - -msgid "String_plural" -msgstr "Chaînes de caractères" - -msgid "Sub-classes:" -msgstr "Classes filles :" - -msgid "SubWorkflowExitPoint" -msgstr "Sortie de sous-workflow" - -msgid "SubWorkflowExitPoint_plural" -msgstr "Sorties de sous-workflow" - -msgid "Submit bug report" -msgstr "Soumettre un rapport de bug" - -msgid "Submit bug report by mail" -msgstr "Soumettre ce rapport par email" - -msgid "TZDatetime" -msgstr "Date et heure internationale" - -msgid "TZDatetime_plural" -msgstr "Dates et heures internationales" - -msgid "TZTime" -msgstr "Heure internationale" - -msgid "TZTime_plural" -msgstr "Heures internationales" - -#, python-format -msgid "The view %s can not be applied to this query" -msgstr "La vue %s ne peut être appliquée à cette requête" - -#, python-format -msgid "The view %s could not be found" -msgstr "La vue %s est introuvable" - -msgid "There is no default workflow" -msgstr "Ce type d'entité n'a pas de workflow par défault" - -msgid "This BaseTransition:" -msgstr "Cette transition abstraite :" - -msgid "This Bookmark:" -msgstr "Ce signet :" - -msgid "This CWAttribute:" -msgstr "Cette définition de relation finale :" - -msgid "This CWCache:" -msgstr "Ce cache applicatif :" - -msgid "This CWComputedRType:" -msgstr "Cette relation virtuelle :" - -msgid "This CWConstraint:" -msgstr "Cette contrainte :" - -msgid "This CWConstraintType:" -msgstr "Ce type de contrainte :" - -msgid "This CWDataImport:" -msgstr "Cet import de données :" - -msgid "This CWEType:" -msgstr "Ce type d'entité :" - -msgid "This CWGroup:" -msgstr "Ce groupe :" - -msgid "This CWProperty:" -msgstr "Cette propriété :" - -msgid "This CWRType:" -msgstr "Ce type de relation :" - -msgid "This CWRelation:" -msgstr "Cette définition de relation :" - -msgid "This CWSource:" -msgstr "Cette source :" - -msgid "This CWSourceHostConfig:" -msgstr "Cette configuration de source :" - -msgid "This CWSourceSchemaConfig:" -msgstr "Cette partie de mapping de source :" - -msgid "This CWUniqueTogetherConstraint:" -msgstr "Cette contrainte unique_together :" - 
-msgid "This CWUser:" -msgstr "Cet utilisateur :" - -msgid "This EmailAddress:" -msgstr "Cette adresse électronique :" - -msgid "This ExternalUri:" -msgstr "Cette Uri externe :" - -msgid "This RQLExpression:" -msgstr "Cette expression RQL :" - -msgid "This State:" -msgstr "Cet état :" - -msgid "This SubWorkflowExitPoint:" -msgstr "Cette sortie de sous-workflow :" - -msgid "This TrInfo:" -msgstr "Cette information de transition :" - -msgid "This Transition:" -msgstr "Cette transition :" - -msgid "This Workflow:" -msgstr "Ce workflow :" - -msgid "This WorkflowTransition:" -msgstr "Cette transition workflow :" - -msgid "" -"This action is forbidden. If you think it should be allowed, please contact " -"the site administrator." -msgstr "" -"Cette action est interdite. Si toutefois vous pensez qu'elle devrait être " -"autorisée, veuillez contacter l'administrateur du site." - -msgid "This entity type permissions:" -msgstr "Permissions pour ce type d'entité" - -msgid "Time" -msgstr "Heure" - -msgid "Time_plural" -msgstr "Heures" - -msgid "TrInfo" -msgstr "Information transition" - -msgid "TrInfo_plural" -msgstr "Information transitions" - -msgid "Transition" -msgstr "Transition" - -msgid "Transition_plural" -msgstr "Transitions" - -msgid "URLs from which content will be imported. You can put one url per line" -msgstr "" -"URLs depuis lesquelles le contenu sera importé. Vous pouvez mettre une URL " -"par ligne." - -msgid "Undoable actions" -msgstr "Action annulables" - -msgid "Undoing" -msgstr "Annuler" - -msgid "UniqueConstraint" -msgstr "contrainte d'unicité" - -msgid "Unknown source type" -msgstr "Type de source inconnue" - -msgid "Unreachable objects" -msgstr "Objets inaccessibles" - -#, python-format -msgid "Updated %(etype)s : %(entity)s" -msgstr "Entité %(etype)s mise à jour : %(entity)s" - -msgid "Used by:" -msgstr "Utilisé par :" - -msgid "Users and groups management" -msgstr "Gestion des utilisateurs et groupes" - -msgid "WARNING" -msgstr "AVERTISSEMENT" - -msgid "Web server" -msgstr "Serveur web" - -msgid "Workflow" -msgstr "Workflow" - -msgid "Workflow history" -msgstr "Historique des changements d'état" - -msgid "WorkflowTransition" -msgstr "Transition workflow" - -msgid "WorkflowTransition_plural" -msgstr "Transitions workflow" - -msgid "Workflow_plural" -msgstr "Workflows" - -msgid "" -"You can either submit a new file using the browse button above, or choose to " -"remove already uploaded file by checking the \"detach attached file\" check-" -"box, or edit file content online with the widget below." -msgstr "" -"Vous pouvez soit soumettre un nouveau fichier en utilisant le bouton\n" -"\"parcourir\" ci-dessus, soit supprimer le fichier déjà présent en\n" -"cochant la case \"détacher fichier attaché\", soit éditer le contenu\n" -"du fichier en ligne avec le champ ci-dessous." - -msgid "" -"You can either submit a new file using the browse button above, or edit file " -"content online with the widget below." -msgstr "" -"Vous pouvez soit soumettre un nouveau fichier en utilisant le bouton\n" -"\"parcourir\" ci-dessus, soit éditer le contenu du fichier en ligne\n" -"avec le champ ci-dessous." 
- -msgid "You can't change this relation" -msgstr "Vous ne pouvez pas modifier cette relation" - -msgid "You cannot remove the system source" -msgstr "Vous ne pouvez pas supprimer la source système" - -msgid "You cannot rename the system source" -msgstr "Vous ne pouvez pas renommer la source système" - -msgid "" -"You have no access to this view or it can not be used to display the current " -"data." -msgstr "" -"Vous n'avez pas accès à cette vue ou elle ne peut pas afficher ces données." - -msgid "" -"You're not authorized to access this page. If you think you should, please " -"contact the site administrator." -msgstr "" -"Vous n'êtes pas autorisé à accéder à cette page. Si toutefois vous pensez\n" -"que c'est une erreur, veuillez contacter l'administrateur du site." - -#, python-format -msgid "[%s supervision] changes summary" -msgstr "[%s supervision] description des changements" - -msgid "" -"a RQL expression which should return some results, else the transition won't " -"be available. This query may use X and U variables that will respectivly " -"represents the current entity and the current user." -msgstr "" -"une expression RQL devant retourner des résultats pour que la transition " -"puisse être passée. Cette expression peut utiliser les variables X et U qui " -"représentent respectivement l'entité à laquelle on veut appliquer la " -"transition et l'utilisateur courant." - -msgid "a URI representing an object in external data store" -msgstr "une Uri désignant un objet dans un entrepôt de données externe" - -msgid "a float is expected" -msgstr "un nombre flottant est attendu" - -msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" -msgstr "un nombre (en seconde) ou 20s, 10min, 24h ou 4d sont attendus" - -msgid "" -"a simple cache entity characterized by a name and a validity date. The " -"target application is responsible for updating timestamp when necessary to " -"invalidate the cache (typically in hooks). Also, checkout the AppObject." -"get_cache() method." -msgstr "" -"un cache simple caractérisé par un nom et une date de validité. C'est\n" -"le code de l'instance qui est responsable de mettre à jour la date de\n" -"validité lorsque le cache doit être invalidé (en général dans un hook).\n" -"Pour récupérer un cache, il faut utiliser utiliser la méthode\n" -"get_cache(cachename)." 
- -msgid "abstract base class for transitions" -msgstr "classe de base abstraite pour les transitions" - -msgid "action menu" -msgstr "actions" - -msgid "action(s) on this selection" -msgstr "action(s) sur cette sélection" - -msgid "actions" -msgstr "actions" - -msgid "activate" -msgstr "activer" - -msgid "activated" -msgstr "activé" - -msgid "add" -msgstr "ajouter" - -msgid "add Bookmark bookmarked_by CWUser object" -msgstr "signet" - -msgid "add CWAttribute add_permission RQLExpression subject" -msgstr "définir une expression RQL d'ajout" - -msgid "add CWAttribute constrained_by CWConstraint subject" -msgstr "contrainte" - -msgid "add CWAttribute read_permission RQLExpression subject" -msgstr "expression rql de lecture" - -msgid "add CWAttribute relation_type CWRType object" -msgstr "définition d'attribut" - -msgid "add CWAttribute update_permission RQLExpression subject" -msgstr "permission de mise à jour" - -msgid "add CWEType add_permission RQLExpression subject" -msgstr "définir une expression RQL d'ajout" - -msgid "add CWEType delete_permission RQLExpression subject" -msgstr "définir une expression RQL de suppression" - -msgid "add CWEType read_permission RQLExpression subject" -msgstr "définir une expression RQL de lecture" - -msgid "add CWEType update_permission RQLExpression subject" -msgstr "définir une expression RQL de mise à jour" - -msgid "add CWProperty for_user CWUser object" -msgstr "propriété" - -msgid "add CWRelation add_permission RQLExpression subject" -msgstr "expression rql d'ajout" - -msgid "add CWRelation constrained_by CWConstraint subject" -msgstr "contrainte" - -msgid "add CWRelation delete_permission RQLExpression subject" -msgstr "expression rql de suppression" - -msgid "add CWRelation read_permission RQLExpression subject" -msgstr "expression rql de lecture" - -msgid "add CWRelation relation_type CWRType object" -msgstr "définition de relation" - -msgid "add CWSourceHostConfig cw_host_config_of CWSource object" -msgstr "configuration d'hôte" - -msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" -msgstr "contrainte unique_together" - -msgid "add CWUser in_group CWGroup object" -msgstr "utilisateur" - -msgid "add CWUser use_email EmailAddress subject" -msgstr "adresse email" - -msgid "add State allowed_transition Transition object" -msgstr "état en entrée" - -msgid "add State allowed_transition Transition subject" -msgstr "transition en sortie" - -msgid "add State allowed_transition WorkflowTransition subject" -msgstr "transition workflow en sortie" - -msgid "add State state_of Workflow object" -msgstr "état" - -msgid "add Transition condition RQLExpression subject" -msgstr "condition" - -msgid "add Transition destination_state State object" -msgstr "transition en entrée" - -msgid "add Transition destination_state State subject" -msgstr "état de sortie" - -msgid "add Transition transition_of Workflow object" -msgstr "transition" - -msgid "add WorkflowTransition condition RQLExpression subject" -msgstr "condition" - -msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" -msgstr "sortie de sous-workflow" - -msgid "add WorkflowTransition transition_of Workflow object" -msgstr "transition workflow" - -msgid "add a BaseTransition" -msgstr "" - -msgid "add a Bookmark" -msgstr "" - -msgid "add a CWAttribute" -msgstr "" - -msgid "add a CWCache" -msgstr "" - -msgid "add a CWComputedRType" -msgstr "" - -msgid "add a CWConstraint" -msgstr "" - -msgid "add a CWConstraintType" -msgstr "" - -msgid "add a CWDataImport" -msgstr "" - 
-msgid "add a CWEType" -msgstr "" - -msgctxt "inlined:CWRelation.from_entity.subject" -msgid "add a CWEType" -msgstr "ajouter un type d'entité sujet" - -msgctxt "inlined:CWRelation.to_entity.subject" -msgid "add a CWEType" -msgstr "ajouter un type d'entité objet" - -msgid "add a CWGroup" -msgstr "" - -msgid "add a CWProperty" -msgstr "" - -msgid "add a CWRType" -msgstr "" - -msgctxt "inlined:CWRelation.relation_type.subject" -msgid "add a CWRType" -msgstr "ajouter un type de relation" - -msgid "add a CWRelation" -msgstr "" - -msgid "add a CWSource" -msgstr "" - -msgid "add a CWSourceHostConfig" -msgstr "" - -msgid "add a CWSourceSchemaConfig" -msgstr "" - -msgid "add a CWUniqueTogetherConstraint" -msgstr "" - -msgid "add a CWUser" -msgstr "" - -msgid "add a EmailAddress" -msgstr "" - -msgctxt "inlined:CWUser.use_email.subject" -msgid "add a EmailAddress" -msgstr "ajouter une adresse électronique" - -msgid "add a ExternalUri" -msgstr "" - -msgid "add a RQLExpression" -msgstr "" - -msgid "add a State" -msgstr "" - -msgid "add a SubWorkflowExitPoint" -msgstr "" - -msgid "add a TrInfo" -msgstr "" - -msgid "add a Transition" -msgstr "" - -msgid "add a Workflow" -msgstr "" - -msgid "add a WorkflowTransition" -msgstr "" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgid "add_permission" -msgstr "peut ajouter" - -msgctxt "CWAttribute" -msgid "add_permission" -msgstr "permission d'ajout" - -# subject and object forms for each relation type -# (no object form for final relation types) -msgctxt "CWEType" -msgid "add_permission" -msgstr "permission d'ajout" - -msgctxt "CWRelation" -msgid "add_permission" -msgstr "permission d'ajout" - -msgid "add_permission_object" -msgstr "a la permission d'ajouter" - -msgctxt "CWGroup" -msgid "add_permission_object" -msgstr "a la permission d'ajouter" - -msgctxt "RQLExpression" -msgid "add_permission_object" -msgstr "a la permission d'ajouter" - -msgid "add_relation" -msgstr "ajouter" - -#, python-format -msgid "added %(etype)s #%(eid)s (%(title)s)" -msgstr "ajout de l'entité %(etype)s #%(eid)s (%(title)s)" - -#, python-format -msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" -"la relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #" -"%(eidto)s a été ajoutée" - -msgid "additional type specific properties" -msgstr "propriétés supplémentaires spécifiques au type" - -msgid "addrelated" -msgstr "ajouter" - -msgid "address" -msgstr "adresse électronique" - -msgctxt "EmailAddress" -msgid "address" -msgstr "adresse électronique" - -msgid "alias" -msgstr "alias" - -msgctxt "EmailAddress" -msgid "alias" -msgstr "alias" - -msgid "allow to set a specific workflow for an entity" -msgstr "permet de spécifier un workflow donné pour une entité" - -msgid "allowed options depends on the source type" -msgstr "les options autorisées dépendent du type de la source" - -msgid "allowed transitions from this state" -msgstr "transitions autorisées depuis cet état" - -#, python-format -msgid "allowed values for \"action\" are %s" -msgstr "les valeurs autorisées pour \"action\" sont %s" - -msgid "allowed_transition" -msgstr "transitions autorisées" - -msgctxt "State" -msgid "allowed_transition" -msgstr "transitions autorisées" - -msgid "allowed_transition_object" -msgstr "états en entrée" - -msgctxt "BaseTransition" -msgid "allowed_transition_object" -msgstr "transition autorisée de" - -msgctxt "Transition" -msgid "allowed_transition_object" -msgstr "transition 
autorisée de" - -msgctxt "WorkflowTransition" -msgid "allowed_transition_object" -msgstr "transition autorisée de" - -msgid "an electronic mail address associated to a short alias" -msgstr "une adresse électronique associée à un alias" - -msgid "an error occurred" -msgstr "une erreur est survenue" - -msgid "an error occurred while processing your request" -msgstr "une erreur est survenue pendant le traitement de votre requête" - -msgid "an error occurred, the request cannot be fulfilled" -msgstr "une erreur est survenue, la requête ne peut être complétée" - -msgid "an integer is expected" -msgstr "un nombre entier est attendu" - -msgid "and linked" -msgstr "et liée" - -msgid "and/or between different values" -msgstr "et/ou entre les différentes valeurs" - -msgid "anyrsetview" -msgstr "vues pour tout rset" - -msgid "april" -msgstr "avril" - -#, python-format -msgid "archive for %(author)s" -msgstr "archive pour l'auteur %(author)s" - -#, python-format -msgid "archive for %(month)s/%(year)s" -msgstr "archive pour le mois %(month)s/%(year)s" - -#, python-format -msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" -msgstr "" -"l'entité #%(eid)s de type %(etype)s doit nécessairement être reliée à une\n" -"autre via la relation %(rtype)s" - -msgid "attribute" -msgstr "attribut" - -msgid "august" -msgstr "août" - -msgid "authentication failure" -msgstr "Identifiant ou mot de passe incorrect" - -msgid "auto" -msgstr "automatique" - -msgid "autocomputed attribute used to ensure transition coherency" -msgstr "" -"attribut calculé automatiquement pour assurer la cohérence de la transition" - -msgid "automatic" -msgstr "automatique" - -#, python-format -msgid "back to pagination (%s results)" -msgstr "retour à la vue paginée (%s résultats)" - -msgid "bad value" -msgstr "mauvaise valeur" - -msgid "badly formatted url" -msgstr "URL mal formattée" - -msgid "base url" -msgstr "url de base" - -msgid "bookmark has been removed" -msgstr "le signet a été retiré" - -msgid "bookmark this page" -msgstr "poser un signet ici" - -msgid "bookmark this search" -msgstr "mémoriser cette recherche" - -msgid "bookmarked_by" -msgstr "utilisé par" - -msgctxt "Bookmark" -msgid "bookmarked_by" -msgstr "utilisé par" - -msgid "bookmarked_by_object" -msgstr "utilise le(s) signet(s)" - -msgctxt "CWUser" -msgid "bookmarked_by_object" -msgstr "utilise le(s) signet(s)" - -msgid "bookmarks" -msgstr "signets" - -msgid "bookmarks are used to have user's specific internal links" -msgstr "" -"les signets sont utilisés pour gérer des liens internes par utilisateur" - -msgid "boxes" -msgstr "boîtes" - -msgid "bug report sent" -msgstr "rapport d'erreur envoyé" - -msgid "button_apply" -msgstr "appliquer" - -msgid "button_cancel" -msgstr "annuler" - -msgid "button_delete" -msgstr "supprimer" - -msgid "button_ok" -msgstr "valider" - -msgid "by" -msgstr "par" - -msgid "by relation" -msgstr "via la relation" - -msgid "by_transition" -msgstr "transition" - -msgctxt "TrInfo" -msgid "by_transition" -msgstr "transition" - -msgid "by_transition_object" -msgstr "changement d'états" - -msgctxt "BaseTransition" -msgid "by_transition_object" -msgstr "a pour information" - -msgctxt "Transition" -msgid "by_transition_object" -msgstr "a pour information" - -msgctxt "WorkflowTransition" -msgid "by_transition_object" -msgstr "a pour information" - -msgid "calendar" -msgstr "afficher un calendrier" - -msgid "can not resolve entity types:" -msgstr "impossible d'interpréter les types d'entités :" - -msgid "can only have one url" -msgstr 
"ne supporte qu'une seule URL" - -msgid "can't be changed" -msgstr "ne peut-être modifié" - -msgid "can't be deleted" -msgstr "ne peut-être supprimé" - -msgid "can't change this attribute" -msgstr "cet attribut ne peut pas être modifié" - -#, python-format -msgid "can't display data, unexpected error: %s" -msgstr "impossible d'afficher les données à cause de l'erreur suivante: %s" - -msgid "can't have multiple exits on the same state" -msgstr "ne peut avoir plusieurs sorties sur le même état" - -#, python-format -msgid "can't parse %(value)r (expected %(format)s)" -msgstr "ne peut analyser %(value)r (format attendu : %(format)s)" - -#, python-format -msgid "" -"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " -"%(value)s) does not exist any longer" -msgstr "" -"impossible de rétablir l'entité %(eid)s de type %(eschema)s, cible de la " -"relation %(rtype)s (eid %(value)s) n'existe plus" - -#, python-format -msgid "" -"can't restore relation %(rtype)s of entity %(eid)s, this relation does not " -"exist in the schema anymore." -msgstr "" -"impossible de rétablir la relation %(rtype)s sur l'entité %(eid)s, cette " -"relation n'existe plus dans le schéma." - -#, python-format -msgid "can't restore state of entity %s, it has been deleted inbetween" -msgstr "" -"impossible de rétablir l'état de l'entité %s, elle a été supprimée entre-" -"temps" - -#, python-format -msgid "" -"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality=" -"%(card)s" -msgstr "" -"ne peut mettre 'inlined'=Vrai, %(stype)s %(rtype)s %(otype)s a pour " -"cardinalité %(card)s" - -msgid "cancel" -msgstr "annuler" - -msgid "cancel select" -msgstr "annuler la sélection" - -msgid "cancel this insert" -msgstr "annuler cette insertion" - -msgid "cardinality" -msgstr "cardinalité" - -msgctxt "CWAttribute" -msgid "cardinality" -msgstr "cardinalité" - -msgctxt "CWRelation" -msgid "cardinality" -msgstr "cardinalité" - -msgid "category" -msgstr "categorie" - -#, python-format -msgid "changed state of %(etype)s #%(eid)s (%(title)s)" -msgstr "changement de l'état de %(etype)s #%(eid)s (%(title)s)" - -msgid "changes applied" -msgstr "changements appliqués" - -msgid "click here to see created entity" -msgstr "cliquez ici pour voir l'entité créée" - -msgid "click here to see edited entity" -msgstr "cliquez ici pour voir l'entité modifiée" - -msgid "click on the box to cancel the deletion" -msgstr "cliquez dans la zone d'édition pour annuler la suppression" - -msgid "click to add a value" -msgstr "cliquer pour ajouter une valeur" - -msgid "click to delete this value" -msgstr "cliquer pour supprimer cette valeur" - -msgid "click to edit this field" -msgstr "cliquez pour éditer ce champ" - -msgid "close all" -msgstr "tout fermer" - -msgid "comment" -msgstr "commentaire" - -msgctxt "TrInfo" -msgid "comment" -msgstr "commentaire" - -msgid "comment_format" -msgstr "format" - -msgctxt "TrInfo" -msgid "comment_format" -msgstr "format" - -msgid "components" -msgstr "composants" - -msgid "components_navigation" -msgstr "navigation par page" - -msgid "components_navigation_description" -msgstr "" -"composant permettant de présenter sur plusieurs pages les requêtes renvoyant " -"plus d'un certain nombre de résultat" - -msgid "components_rqlinput" -msgstr "barre rql" - -msgid "components_rqlinput_description" -msgstr "la barre de requête rql, dans l'en-tête de page" - -msgid "composite" -msgstr "composite" - -msgctxt "CWRelation" -msgid "composite" -msgstr "composite" - -msgid "condition" -msgstr "condition" - 
-msgctxt "BaseTransition" -msgid "condition" -msgstr "condition" - -msgctxt "Transition" -msgid "condition" -msgstr "condition" - -msgctxt "WorkflowTransition" -msgid "condition" -msgstr "condition" - -msgid "condition_object" -msgstr "condition de" - -msgctxt "RQLExpression" -msgid "condition_object" -msgstr "condition de" - -msgid "conditions" -msgstr "conditions" - -msgid "config" -msgstr "configuration" - -msgctxt "CWSource" -msgid "config" -msgstr "configuration" - -msgctxt "CWSourceHostConfig" -msgid "config" -msgstr "configuration" - -msgid "config mode" -msgstr "mode de configuration" - -msgid "config type" -msgstr "type de configuration" - -msgid "confirm password" -msgstr "confirmer le mot de passe" - -msgid "constrained_by" -msgstr "contraint par" - -msgctxt "CWAttribute" -msgid "constrained_by" -msgstr "contraint par" - -msgctxt "CWRelation" -msgid "constrained_by" -msgstr "contraint par" - -msgid "constrained_by_object" -msgstr "contrainte de" - -msgctxt "CWConstraint" -msgid "constrained_by_object" -msgstr "contrainte de" - -msgid "constraint factory" -msgstr "fabrique de contraintes" - -msgid "constraint_of" -msgstr "contrainte de" - -msgctxt "CWUniqueTogetherConstraint" -msgid "constraint_of" -msgstr "contrainte de" - -msgid "constraint_of_object" -msgstr "contraint par" - -msgctxt "CWEType" -msgid "constraint_of_object" -msgstr "contraint par" - -msgid "constraints" -msgstr "contraintes" - -msgid "constraints applying on this relation" -msgstr "contraintes s'appliquant à cette relation" - -msgid "content type" -msgstr "type MIME" - -msgid "context" -msgstr "contexte" - -msgid "context where this box should be displayed" -msgstr "contexte dans lequel la boite devrait être affichée" - -msgid "context where this component should be displayed" -msgstr "contexte où ce composant doit être affiché" - -msgid "context where this facet should be displayed, leave empty for both" -msgstr "" -"contexte où cette facette doit être affichée. Laissez ce champ vide pour " -"l'avoir dans les deux." - -msgid "control subject entity's relations order" -msgstr "contrôle l'ordre des relations de l'entité sujet" - -msgid "copy" -msgstr "copier" - -msgid "core relation indicating a user's groups" -msgstr "" -"relation système indiquant les groupes auxquels appartient l'utilisateur" - -msgid "" -"core relation indicating owners of an entity. This relation implicitly put " -"the owner into the owners group for the entity" -msgstr "" -"relation système indiquant le(s) propriétaire(s) d'une entité. Cette " -"relation place implicitement les utilisateurs liés dans le groupe des " -"propriétaires pour cette entité" - -msgid "core relation indicating the original creator of an entity" -msgstr "relation système indiquant le créateur d'une entité." 
- -msgid "core relation indicating the type of an entity" -msgstr "relation système indiquant le type de l'entité" - -msgid "" -"core relation indicating the types (including specialized types) of an entity" -msgstr "" -"relation système indiquant les types (y compris les types parents) d'une " -"entité" - -msgid "could not connect to the SMTP server" -msgstr "impossible de se connecter au serveur SMTP" - -msgid "create an index for quick search on this attribute" -msgstr "créer un index pour accélérer les recherches sur cet attribut" - -msgid "created on" -msgstr "créé le" - -msgid "created_by" -msgstr "créé par" - -msgid "created_by_object" -msgstr "a créé" - -msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)" -msgstr "création d'un signet pour %(linkto)s" - -msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)" -msgstr "création d'un attribut %(linkto)s" - -msgid "" -"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)" -msgstr "création d'une contrainte pour l'attribut %(linkto)s" - -msgid "" -"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)" -msgstr "création d'une contrainte pour la relation %(linkto)s" - -msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)" -msgstr "création d'une propriété pour l'utilisateur %(linkto)s" - -msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" -msgstr "création relation %(linkto)s" - -msgid "" -"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource " -"%(linkto)s)" -msgstr "création d'une configuration d'hôte pour la source %(linkto)s" - -msgid "" -"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " -"constraint_of CWEType %(linkto)s)" -msgstr "création d'une contrainte unique_together sur %(linkto)s" - -msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" -msgstr "création d'un utilisateur à rajouter au groupe %(linkto)s" - -msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)" -msgstr "création d'une adresse électronique pour l'utilisateur %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" -msgstr "création d'une expression rql pour le droit d'ajout de %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" -msgstr "création d'une expression rql pour le droit de lecture de %(linkto)s" - -msgid "" -"creating RQLExpression (CWAttribute %(linkto)s update_permission " -"RQLExpression)" -msgstr "" -"création d'une expression rql pour le droit de mise à jour de %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" -msgstr "création d'une expression RQL pour la permission d'ajout de %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)" -msgstr "" -"création d'une expression RQL pour la permission de suppression de %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" -msgstr "création d'une expression RQL pour la permission de lire %(linkto)s" - -msgid "" -"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" -msgstr "" -"création d'une expression RQL pour la permission de mise à jour de %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" -msgstr "création d'une expression rql pour le droit d'ajout de %(linkto)s" - -msgid "" -"creating RQLExpression 
(CWRelation %(linkto)s delete_permission " -"RQLExpression)" -msgstr "" -"création d'une expression rql pour le droit de suppression de %(linkto)s" - -msgid "" -"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" -msgstr "création d'une expression rql pour le droit de lecture de %(linkto)s" - -msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" -msgstr "création d'une expression RQL pour la transition %(linkto)s" - -msgid "" -"creating RQLExpression (WorkflowTransition %(linkto)s condition " -"RQLExpression)" -msgstr "création d'une expression RQL pour la transition workflow %(linkto)s" - -msgid "creating State (State allowed_transition Transition %(linkto)s)" -msgstr "création d'un état pouvant aller vers la transition %(linkto)s" - -msgid "creating State (State state_of Workflow %(linkto)s)" -msgstr "création d'un état du workflow %(linkto)s" - -msgid "creating State (Transition %(linkto)s destination_state State)" -msgstr "création d'un état destination de la transition %(linkto)s" - -msgid "" -"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " -"subworkflow_exit SubWorkflowExitPoint)" -msgstr "création d'un point de sortie de la transition workflow %(linkto)s" - -msgid "creating Transition (State %(linkto)s allowed_transition Transition)" -msgstr "création d'une transition autorisée depuis l'état %(linkto)s" - -msgid "creating Transition (Transition destination_state State %(linkto)s)" -msgstr "création d'une transition vers l'état %(linkto)s" - -msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" -msgstr "création d'une transition du workflow %(linkto)s" - -msgid "" -"creating WorkflowTransition (State %(linkto)s allowed_transition " -"WorkflowTransition)" -msgstr "création d'une transition workflow autorisée depuis l'état %(linkto)s" - -msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow " -"%(linkto)s)" -msgstr "création d'une transition workflow du workflow %(linkto)s" - -msgid "creation" -msgstr "création" - -msgid "creation date" -msgstr "date de création" - -msgid "creation time of an entity" -msgstr "date de création d'une entité" - -msgid "creation_date" -msgstr "date de création" - -msgid "cstrtype" -msgstr "type de contrainte" - -msgctxt "CWConstraint" -msgid "cstrtype" -msgstr "type" - -msgid "cstrtype_object" -msgstr "utilisé par" - -msgctxt "CWConstraintType" -msgid "cstrtype_object" -msgstr "type des contraintes" - -msgid "csv export" -msgstr "export CSV" - -msgid "csv export (entities)" -msgstr "export CSV (entités)" - -msgid "ctxcomponents" -msgstr "composants contextuels" - -msgid "ctxcomponents_anonuserlink" -msgstr "lien utilisateur" - -msgid "ctxcomponents_anonuserlink_description" -msgstr "" -"affiche un lien vers le formulaire d'authentification pour les utilisateurs " -"anonymes, sinon une boite contenant notamment des liens propres à " -"l'utilisateur connectés" - -msgid "ctxcomponents_appliname" -msgstr "titre de l'application" - -msgid "ctxcomponents_appliname_description" -msgstr "affiche le titre de l'application dans l'en-tête de page" - -msgid "ctxcomponents_bookmarks_box" -msgstr "boîte signets" - -msgid "ctxcomponents_bookmarks_box_description" -msgstr "boîte contenant les signets de l'utilisateur" - -msgid "ctxcomponents_breadcrumbs" -msgstr "fil d'ariane" - -msgid "ctxcomponents_breadcrumbs_description" -msgstr "" -"affiche un chemin permettant de localiser la page courante dans le site" - -msgid "ctxcomponents_download_box" -msgstr 
"boîte de téléchargement" - -msgid "ctxcomponents_download_box_description" -msgstr "boîte contenant un lien permettant de télécharger la ressource" - -msgid "ctxcomponents_edit_box" -msgstr "boîte d'actions" - -msgid "ctxcomponents_edit_box_description" -msgstr "" -"boîte affichant les différentes actions possibles sur les données affichées" - -msgid "ctxcomponents_facet.filterbox" -msgstr "boîte à facettes" - -msgid "ctxcomponents_facet.filterbox_description" -msgstr "" -"boîte permettant de filtrer parmi les résultats d'une recherche à l'aide de " -"facettes" - -msgid "ctxcomponents_logo" -msgstr "logo" - -msgid "ctxcomponents_logo_description" -msgstr "le logo de l'application, dans l'en-tête de page" - -msgid "ctxcomponents_metadata" -msgstr "méta-données de l'entité" - -msgid "ctxcomponents_metadata_description" -msgstr "" - -msgid "ctxcomponents_possible_views_box" -msgstr "boîte des vues possibles" - -msgid "ctxcomponents_possible_views_box_description" -msgstr "boîte affichant les vues possibles pour les données courantes" - -msgid "ctxcomponents_prevnext" -msgstr "élément précedent / suivant" - -msgid "ctxcomponents_prevnext_description" -msgstr "" -"affiche des liens permettant de passer d'une entité à une autre sur les " -"entités implémentant l'interface \"précédent/suivant\"." - -msgid "ctxcomponents_rss" -msgstr "icône RSS" - -msgid "ctxcomponents_rss_description" -msgstr "l'icône RSS permettant de récupérer la vue RSS des données affichées" - -msgid "ctxcomponents_search_box" -msgstr "boîte de recherche" - -msgid "ctxcomponents_search_box_description" -msgstr "boîte avec un champ de recherche simple" - -msgid "ctxcomponents_startup_views_box" -msgstr "boîte des vues de départs" - -msgid "ctxcomponents_startup_views_box_description" -msgstr "boîte affichant les vues de départs de l'application" - -msgid "ctxcomponents_userstatus" -msgstr "état de l'utilisateur" - -msgid "ctxcomponents_userstatus_description" -msgstr "" - -msgid "ctxcomponents_wfhistory" -msgstr "historique du workflow." - -msgid "ctxcomponents_wfhistory_description" -msgstr "" -"section affichant l'historique du workflow pour les entités ayant un " -"workflow." 
- -msgid "ctxtoolbar" -msgstr "barre d'outils" - -msgid "custom_workflow" -msgstr "workflow spécifique" - -msgid "custom_workflow_object" -msgstr "workflow de" - -msgid "cw.groups-management" -msgstr "groupes" - -msgid "cw.users-management" -msgstr "utilisateurs" - -msgid "cw_for_source" -msgstr "source" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_for_source" -msgstr "source" - -msgid "cw_for_source_object" -msgstr "élément de mapping" - -msgctxt "CWSource" -msgid "cw_for_source_object" -msgstr "élément de mapping" - -msgid "cw_host_config_of" -msgstr "host configuration of" - -msgctxt "CWSourceHostConfig" -msgid "cw_host_config_of" -msgstr "host configuration of" - -msgid "cw_host_config_of_object" -msgstr "has host configuration" - -msgctxt "CWSource" -msgid "cw_host_config_of_object" -msgstr "has host configuration" - -msgid "cw_import_of" -msgstr "source" - -msgctxt "CWDataImport" -msgid "cw_import_of" -msgstr "source" - -msgid "cw_import_of_object" -msgstr "imports" - -msgctxt "CWSource" -msgid "cw_import_of_object" -msgstr "imports" - -msgid "cw_schema" -msgstr "schéma" - -msgctxt "CWSourceSchemaConfig" -msgid "cw_schema" -msgstr "schéma" - -msgid "cw_schema_object" -msgstr "mappé par" - -msgctxt "CWEType" -msgid "cw_schema_object" -msgstr "mappé par" - -msgctxt "CWRType" -msgid "cw_schema_object" -msgstr "mappé par" - -msgctxt "CWRelation" -msgid "cw_schema_object" -msgstr "mappé par" - -msgid "cw_source" -msgstr "source" - -msgid "cw_source_object" -msgstr "entités" - -msgid "cwetype-box" -msgstr "vue \"boîte\"" - -msgid "cwetype-description" -msgstr "description" - -msgid "cwetype-permissions" -msgstr "permissions" - -msgid "cwetype-views" -msgstr "vues" - -msgid "cwetype-workflow" -msgstr "workflow" - -msgid "cwgroup-main" -msgstr "description" - -msgid "cwgroup-permissions" -msgstr "permissions" - -msgid "cwrtype-description" -msgstr "description" - -msgid "cwrtype-permissions" -msgstr "permissions" - -msgid "cwsource-imports" -msgstr "imports" - -msgid "cwsource-main" -msgstr "description" - -msgid "cwsource-mapping" -msgstr "mapping" - -msgid "cwuri" -msgstr "uri interne" - -msgid "data directory url" -msgstr "url du répertoire de données" - -msgid "data model schema" -msgstr "schéma du modèle de données" - -msgid "data sources" -msgstr "sources de données" - -msgid "data sources management" -msgstr "gestion des sources de données" - -msgid "date" -msgstr "date" - -msgid "deactivate" -msgstr "désactiver" - -msgid "deactivated" -msgstr "désactivé" - -msgid "december" -msgstr "décembre" - -msgid "default" -msgstr "valeur par défaut" - -msgid "default text format for rich text fields." 
-msgstr "format de texte par défaut pour les champs textes" - -msgid "default user workflow" -msgstr "workflow par défaut des utilisateurs" - -msgid "default value" -msgstr "valeur par défaut" - -msgid "default value as gziped pickled python object" -msgstr "valeur par défaut, sous forme d'objet python picklé zippé" - -msgid "default workflow for an entity type" -msgstr "workflow par défaut pour un type d'entité" - -msgid "default_workflow" -msgstr "workflow par défaut" - -msgctxt "CWEType" -msgid "default_workflow" -msgstr "workflow par défaut" - -msgid "default_workflow_object" -msgstr "workflow par défaut de" - -msgctxt "Workflow" -msgid "default_workflow_object" -msgstr "workflow par défaut de" - -msgid "defaultval" -msgstr "valeur par défaut" - -msgctxt "CWAttribute" -msgid "defaultval" -msgstr "valeur par défaut" - -msgid "define a CubicWeb user" -msgstr "défini un utilisateur CubicWeb" - -msgid "define a CubicWeb users group" -msgstr "défini un groupe d'utilisateur CubicWeb" - -msgid "" -"define a final relation: link a final relation type from a non final entity " -"to a final entity type. used to build the instance schema" -msgstr "" -"définit une relation non finale: lie un type de relation non finale depuis " -"une entité vers un type d'entité non final. Utilisé pour construire le " -"schéma de l'instance" - -msgid "" -"define a non final relation: link a non final relation type from a non final " -"entity to a non final entity type. used to build the instance schema" -msgstr "" -"définit une relation 'attribut', utilisé pour construire le schéma de " -"l'instance" - -msgid "define a relation type, used to build the instance schema" -msgstr "définit un type de relation" - -msgid "define a rql expression used to define permissions" -msgstr "définit une expression rql donnant une permission" - -msgid "define a schema constraint" -msgstr "définit une contrainte de schema" - -msgid "define a schema constraint type" -msgstr "définit un type de contrainte de schema" - -msgid "define a virtual relation type, used to build the instance schema" -msgstr "définit une relation virtuelle" - -msgid "define an entity type, used to build the instance schema" -msgstr "définit un type d'entité" - -msgid "define how we get out from a sub-workflow" -msgstr "définit comment sortir d'un sous-workflow" - -msgid "defines a sql-level multicolumn unique index" -msgstr "définit un index SQL unique sur plusieurs colonnes" - -msgid "" -"defines what's the property is applied for. You must select this first to be " -"able to set value" -msgstr "" -"définit à quoi la propriété est appliquée. 
Vous devez sélectionner cela " -"avant de pouvoir fixer une valeur" - -msgid "delete" -msgstr "supprimer" - -msgid "delete this bookmark" -msgstr "supprimer ce signet" - -msgid "delete this relation" -msgstr "supprimer cette relation" - -msgid "delete_permission" -msgstr "permission de supprimer" - -msgctxt "CWEType" -msgid "delete_permission" -msgstr "permission de supprimer" - -msgctxt "CWRelation" -msgid "delete_permission" -msgstr "permission de supprimer" - -msgid "delete_permission_object" -msgstr "a la permission de supprimer" - -msgctxt "CWGroup" -msgid "delete_permission_object" -msgstr "peut supprimer" - -msgctxt "RQLExpression" -msgid "delete_permission_object" -msgstr "peut supprimer" - -#, python-format -msgid "deleted %(etype)s #%(eid)s (%(title)s)" -msgstr "suppression de l'entité %(etype)s #%(eid)s (%(title)s)" - -#, python-format -msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" -"%(eidto)s" -msgstr "" -"relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #%(eidto)s " -"supprimée" - -msgid "depends on the constraint type" -msgstr "dépend du type de contrainte" - -msgid "description" -msgstr "description" - -msgctxt "BaseTransition" -msgid "description" -msgstr "description" - -msgctxt "CWAttribute" -msgid "description" -msgstr "description" - -msgctxt "CWComputedRType" -msgid "description" -msgstr "description" - -msgctxt "CWEType" -msgid "description" -msgstr "description" - -msgctxt "CWRType" -msgid "description" -msgstr "description" - -msgctxt "CWRelation" -msgid "description" -msgstr "description" - -msgctxt "State" -msgid "description" -msgstr "description" - -msgctxt "Transition" -msgid "description" -msgstr "description" - -msgctxt "Workflow" -msgid "description" -msgstr "description" - -msgctxt "WorkflowTransition" -msgid "description" -msgstr "description" - -msgid "description_format" -msgstr "format" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "format" - -msgctxt "CWAttribute" -msgid "description_format" -msgstr "format" - -msgctxt "CWComputedRType" -msgid "description_format" -msgstr "format" - -msgctxt "CWEType" -msgid "description_format" -msgstr "format" - -msgctxt "CWRType" -msgid "description_format" -msgstr "format" - -msgctxt "CWRelation" -msgid "description_format" -msgstr "format" - -msgctxt "State" -msgid "description_format" -msgstr "format" - -msgctxt "Transition" -msgid "description_format" -msgstr "format" - -msgctxt "Workflow" -msgid "description_format" -msgstr "format" - -msgctxt "WorkflowTransition" -msgid "description_format" -msgstr "format" - -msgid "destination state for this transition" -msgstr "états accessibles par cette transition" - -msgid "destination state must be in the same workflow as our parent transition" -msgstr "" -"l'état de destination doit être dans le même workflow que la transition " -"parente" - -msgid "destination state of a transition" -msgstr "état d'arrivée d'une transition" - -msgid "" -"destination state. No destination state means that transition should go back " -"to the state from which we've entered the subworkflow." -msgstr "" -"état de destination de la transition. Si aucun état de destination n'est " -"spécifié, la transition ira vers l'état depuis lequel l'entité est entrée " -"dans le sous-workflow." 
- -msgid "destination_state" -msgstr "état de destination" - -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "état de destination" - -msgctxt "Transition" -msgid "destination_state" -msgstr "état de destination" - -msgid "destination_state_object" -msgstr "destination de" - -msgctxt "State" -msgid "destination_state_object" -msgstr "état final de" - -msgid "detach attached file" -msgstr "détacher le fichier existant" - -msgid "display order of the box" -msgstr "ordre d'affichage de la boîte" - -msgid "display order of the component" -msgstr "ordre d'affichage du composant" - -msgid "display order of the facet" -msgstr "ordre d'affichage de la facette" - -msgid "display the box or not" -msgstr "afficher la boîte ou non" - -msgid "display the component or not" -msgstr "afficher le composant ou non" - -msgid "display the facet or not" -msgstr "afficher la facette ou non" - -msgid "download" -msgstr "télécharger" - -#, python-format -msgid "download %s" -msgstr "télécharger %s" - -msgid "download icon" -msgstr "icône de téléchargement" - -msgid "download schema as owl" -msgstr "télécharger le schéma OWL" - -msgid "edit bookmarks" -msgstr "éditer les signets" - -msgid "edit canceled" -msgstr "édition annulée" - -msgid "editable-table" -msgstr "table éditable" - -msgid "eid" -msgstr "eid" - -msgid "embedded html" -msgstr "HTML contenu" - -msgid "end_timestamp" -msgstr "horodate de fin" - -msgctxt "CWDataImport" -msgid "end_timestamp" -msgstr "horodate de fin" - -msgid "entities deleted" -msgstr "entités supprimées" - -msgid "entity and relation types can't be mapped, only attributes or relations" -msgstr "" -"les types d'entités et de relations ne peuvent être mappés, uniquement les " -"relations" - -msgid "entity copied" -msgstr "entité copiée" - -msgid "entity created" -msgstr "entité créée" - -msgid "entity creation" -msgstr "création d'entité" - -msgid "entity deleted" -msgstr "entité supprimée" - -msgid "entity deletion" -msgstr "suppression d'entité" - -msgid "entity edited" -msgstr "entité éditée" - -msgid "entity has no workflow set" -msgstr "l'entité n'a pas de workflow" - -msgid "entity linked" -msgstr "entité liée" - -msgid "entity type" -msgstr "type d'entité" - -msgid "entity types which may use this workflow" -msgstr "types d'entité pouvant utiliser ce workflow" - -msgid "entity update" -msgstr "mise à jour d'entité" - -msgid "entityview" -msgstr "vues d'entité" - -msgid "error" -msgstr "erreur" - -msgid "error while publishing ReST text" -msgstr "" -"une erreur s'est produite lors de l'interprétation du texte au format ReST" - -msgid "exit state must be a subworkflow state" -msgstr "l'état de sortie doit être un état du sous-workflow" - -msgid "exit_point" -msgstr "état de sortie" - -msgid "exit_point_object" -msgstr "état de sortie de" - -#, python-format -msgid "exiting from subworkflow %s" -msgstr "sortie du sous-workflow %s" - -msgid "expression" -msgstr "expression" - -msgctxt "RQLExpression" -msgid "expression" -msgstr "rql de l'expression" - -msgid "exprtype" -msgstr "type de l'expression" - -msgctxt "RQLExpression" -msgid "exprtype" -msgstr "type" - -msgid "extra_props" -msgstr "" - -msgctxt "CWAttribute" -msgid "extra_props" -msgstr "propriétés additionnelles" - -msgid "facet-loading-msg" -msgstr "en cours de traitement, merci de patienter" - -msgid "facet.filters" -msgstr "facettes" - -msgid "facetbox" -msgstr "boîte à facettes" - -msgid "facets_created_by-facet" -msgstr "facette \"créé par\"" - -msgid "facets_created_by-facet_description" -msgstr "" - 
-msgid "facets_cw_source-facet" -msgstr "facette \"source de données\"" - -msgid "facets_cw_source-facet_description" -msgstr "" - -msgid "facets_cwfinal-facet" -msgstr "facette \"type d'entité ou de relation final\"" - -msgid "facets_cwfinal-facet_description" -msgstr "" - -msgid "facets_datafeed.dataimport.status" -msgstr "état de l'iport" - -msgid "facets_datafeed.dataimport.status_description" -msgstr "" - -msgid "facets_etype-facet" -msgstr "facette \"est de type\"" - -msgid "facets_etype-facet_description" -msgstr "" - -msgid "facets_has_text-facet" -msgstr "facette \"contient le texte\"" - -msgid "facets_has_text-facet_description" -msgstr "" - -msgid "facets_in_group-facet" -msgstr "facette \"fait partie du groupe\"" - -msgid "facets_in_group-facet_description" -msgstr "" - -msgid "facets_in_state-facet" -msgstr "facette \"dans l'état\"" - -msgid "facets_in_state-facet_description" -msgstr "" - -msgid "failed" -msgstr "échec" - -#, python-format -msgid "failed to uniquify path (%s, %s)" -msgstr "ne peut obtenir un nom de fichier unique (%s, %s)" - -msgid "february" -msgstr "février" - -msgid "file tree view" -msgstr "arborescence (fichiers)" - -msgid "final" -msgstr "final" - -msgctxt "CWEType" -msgid "final" -msgstr "final" - -msgctxt "CWRType" -msgid "final" -msgstr "final" - -msgid "first name" -msgstr "prénom" - -msgid "firstname" -msgstr "prénom" - -msgctxt "CWUser" -msgid "firstname" -msgstr "prénom" - -msgid "foaf" -msgstr "foaf" - -msgid "focus on this selection" -msgstr "afficher cette sélection" - -msgid "follow" -msgstr "suivre le lien" - -#, python-format -msgid "follow this link for more information on this %s" -msgstr "suivez ce lien pour plus d'information sur ce %s" - -msgid "for_user" -msgstr "pour l'utilisateur" - -msgctxt "CWProperty" -msgid "for_user" -msgstr "propriété de l'utilisateur" - -msgid "for_user_object" -msgstr "utilise les propriétés" - -msgctxt "CWUser" -msgid "for_user_object" -msgstr "a pour préférence" - -msgid "formula" -msgstr "formule" - -msgctxt "CWAttribute" -msgid "formula" -msgstr "formule" - -msgid "friday" -msgstr "vendredi" - -msgid "from" -msgstr "de" - -#, python-format -msgid "from %(date)s" -msgstr "du %(date)s" - -msgid "from_entity" -msgstr "de l'entité" - -msgctxt "CWAttribute" -msgid "from_entity" -msgstr "attribut de l'entité" - -msgctxt "CWRelation" -msgid "from_entity" -msgstr "relation de l'entité" - -msgid "from_entity_object" -msgstr "relation sujet" - -msgctxt "CWEType" -msgid "from_entity_object" -msgstr "entité de" - -msgid "from_interval_start" -msgstr "De" - -msgid "from_state" -msgstr "de l'état" - -msgctxt "TrInfo" -msgid "from_state" -msgstr "état de départ" - -msgid "from_state_object" -msgstr "transitions depuis cet état" - -msgctxt "State" -msgid "from_state_object" -msgstr "état de départ de" - -msgid "full text or RQL query" -msgstr "texte à rechercher ou requête RQL" - -msgid "fulltext_container" -msgstr "conteneur du texte indexé" - -msgctxt "CWRType" -msgid "fulltext_container" -msgstr "objet à indexer" - -msgid "fulltextindexed" -msgstr "indexation du texte" - -msgctxt "CWAttribute" -msgid "fulltextindexed" -msgstr "texte indexé" - -msgid "gc" -msgstr "fuite mémoire" - -msgid "generic plot" -msgstr "tracé de courbes standard" - -msgid "generic relation to link one entity to another" -msgstr "relation générique pour lier une entité à une autre" - -msgid "" -"generic relation to specify that an external entity represent the same " -"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" -msgstr "" 
-"relation générique permettant d'indiquer qu'une entité est identique à une " -"autre ressource web (voir http://www.w3.org/TR/owl-ref/#sameAs-def)." - -msgid "granted to groups" -msgstr "accordée aux groupes" - -#, python-format -msgid "graphical representation of %(appid)s data model" -msgstr "réprésentation graphique du modèle de données de %(appid)s" - -#, python-format -msgid "" -"graphical representation of the %(etype)s entity type from %(appid)s data " -"model" -msgstr "" -"réprésentation graphique du modèle de données pour le type d'entité " -"%(etype)s de %(appid)s" - -#, python-format -msgid "" -"graphical representation of the %(rtype)s relation type from %(appid)s data " -"model" -msgstr "" -"réprésentation graphique du modèle de données pour le type de relation " -"%(rtype)s de %(appid)s" - -msgid "group in which a user should be to be allowed to pass this transition" -msgstr "" -"groupe dans lequel l'utilisateur doit être pour pouvoir passer la transition" - -msgid "groups" -msgstr "groupes" - -msgid "groups allowed to add entities/relations of this type" -msgstr "groupes autorisés à ajouter des entités/relations de ce type" - -msgid "groups allowed to delete entities/relations of this type" -msgstr "groupes autorisés à supprimer des entités/relations de ce type" - -msgid "groups allowed to read entities/relations of this type" -msgstr "groupes autorisés à lire des entités/relations de ce type" - -msgid "groups allowed to update entities/relations of this type" -msgstr "groupes autorisés à mettre à jour des entités/relations de ce type" - -msgid "groups grant permissions to the user" -msgstr "les groupes donnent des permissions à l'utilisateur" - -msgid "guests" -msgstr "invités" - -msgid "hCalendar" -msgstr "hCalendar" - -msgid "has_text" -msgstr "contient le texte" - -msgid "header-center" -msgstr "en-tête (centre)" - -msgid "header-left" -msgstr "en-tête (gauche)" - -msgid "header-right" -msgstr "en-tête (droite)" - -msgid "hide filter form" -msgstr "cacher le filtre" - -msgid "" -"how to format date and time in the ui (see this page for format " -"description)" -msgstr "" -"comment formater l'horodate dans l'interface (description du " -"format)" - -msgid "" -"how to format date in the ui (see this page for format " -"description)" -msgstr "" -"comment formater la date dans l'interface (description du format)" - -msgid "how to format float numbers in the ui" -msgstr "comment formater les nombres flottants dans l'interface" - -msgid "" -"how to format time in the ui (see this page for format " -"description)" -msgstr "" -"comment formater l'heure dans l'interface (description du format)" - -msgid "i18n_bookmark_url_fqs" -msgstr "paramètres" - -msgid "i18n_bookmark_url_path" -msgstr "chemin" - -msgid "i18n_login_popup" -msgstr "s'identifier" - -msgid "i18ncard_*" -msgstr "0..n" - -msgid "i18ncard_+" -msgstr "1..n" - -msgid "i18ncard_1" -msgstr "1" - -msgid "i18ncard_?" -msgstr "0..1" - -msgid "i18nprevnext_next" -msgstr "suivant" - -msgid "i18nprevnext_previous" -msgstr "précédent" - -msgid "i18nprevnext_up" -msgstr "parent" - -msgid "iCalendar" -msgstr "iCalendar" - -msgid "id of main template used to render pages" -msgstr "id du template principal" - -msgid "identical to" -msgstr "identique à" - -msgid "identical_to" -msgstr "identique à" - -msgid "identity" -msgstr "est identique à" - -msgid "identity_object" -msgstr "est identique à" - -msgid "" -"if full text content of subject/object entity should be added to other side " -"entity (the container)." 
-msgstr "" -"si le text indexé de l'entité sujet/objet doit être ajouté à l'entité à " -"l'autre extrémité de la relation (le conteneur)." - -msgid "image" -msgstr "image" - -msgid "in progress" -msgstr "en cours" - -msgid "in_group" -msgstr "dans le groupe" - -msgctxt "CWUser" -msgid "in_group" -msgstr "fait partie du groupe" - -msgid "in_group_object" -msgstr "membres" - -msgctxt "CWGroup" -msgid "in_group_object" -msgstr "contient les utilisateurs" - -msgid "in_state" -msgstr "état" - -msgid "in_state_object" -msgstr "état de" - -msgid "in_synchronization" -msgstr "en cours de synchronisation" - -msgctxt "CWSource" -msgid "in_synchronization" -msgstr "en cours de synchronisation" - -msgid "incontext" -msgstr "dans le contexte" - -msgid "incorrect captcha value" -msgstr "valeur de captcha incorrecte" - -#, python-format -msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" -msgstr "la valeur %(KEY-value)s est incorrecte pour le type \"%(KEY-type)s\"" - -msgid "index this attribute's value in the plain text index" -msgstr "indexer la valeur de cet attribut dans l'index plein texte" - -msgid "indexed" -msgstr "index" - -msgctxt "CWAttribute" -msgid "indexed" -msgstr "indexé" - -msgid "indicate the current state of an entity" -msgstr "indique l'état courant d'une entité" - -msgid "" -"indicate which state should be used by default when an entity using states " -"is created" -msgstr "" -"indique quel état devrait être utilisé par défaut lorsqu'une entité est créée" - -msgid "indifferent" -msgstr "indifférent" - -msgid "info" -msgstr "information" - -msgid "initial state for this workflow" -msgstr "état initial pour ce workflow" - -msgid "initial_state" -msgstr "état initial" - -msgctxt "Workflow" -msgid "initial_state" -msgstr "état initial" - -msgid "initial_state_object" -msgstr "état initial de" - -msgctxt "State" -msgid "initial_state_object" -msgstr "état initial de" - -msgid "inlined" -msgstr "mise en ligne" - -msgctxt "CWRType" -msgid "inlined" -msgstr "mise en ligne" - -msgid "instance home" -msgstr "répertoire de l'instance" - -msgid "internal entity uri" -msgstr "uri interne" - -msgid "internationalizable" -msgstr "internationalisable" - -msgctxt "CWAttribute" -msgid "internationalizable" -msgstr "internationalisable" - -#, python-format -msgid "invalid action %r" -msgstr "action %r invalide" - -#, python-format -msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" -msgstr "" -"la valeur %(KEY-value)s est incorrecte, elle doit être parmi %(KEY-choices)s" - -msgid "is" -msgstr "de type" - -msgid "is object of:" -msgstr "est object de" - -msgid "is subject of:" -msgstr "est sujet de" - -msgid "" -"is the subject/object entity of the relation composed of the other ? This " -"implies that when the composite is deleted, composants are also deleted." -msgstr "" -"Est-ce que l'entité sujet/objet de la relation est une agrégation de " -"l'autre ?Si c'est le cas, détruire le composite détruira ses composants " -"également" - -msgid "is this attribute's value translatable" -msgstr "est-ce que la valeur de cet attribut est traduisible ?" - -msgid "is this relation equivalent in both direction ?" -msgstr "est que cette relation est équivalent dans les deux sens ?" - -msgid "" -"is this relation physically inlined? you should know what you're doing if " -"you are changing this!" -msgstr "" -"est ce que cette relation est mise en ligne dans la base de données ?vous " -"devez savoir ce que vous faites si vous changez cela !" 
- -msgid "is_instance_of" -msgstr "est une instance de" - -msgid "is_instance_of_object" -msgstr "type de" - -msgid "is_object" -msgstr "a pour instance" - -msgid "january" -msgstr "janvier" - -msgid "json-entities-export-view" -msgstr "export JSON (entités)" - -msgid "json-export-view" -msgstr "export JSON" - -msgid "july" -msgstr "juillet" - -msgid "june" -msgstr "juin" - -msgid "language of the user interface" -msgstr "langue pour l'interface utilisateur" - -msgid "last connection date" -msgstr "dernière date de connexion" - -msgid "last login time" -msgstr "dernière date de connexion" - -msgid "last name" -msgstr "nom" - -msgid "last usage" -msgstr "dernier usage" - -msgid "last_login_time" -msgstr "dernière date de connexion" - -msgctxt "CWUser" -msgid "last_login_time" -msgstr "dernière date de connexion" - -msgid "latest import" -msgstr "dernier import" - -msgid "latest modification time of an entity" -msgstr "date de dernière modification d'une entité" - -msgid "latest synchronization time" -msgstr "date de la dernière synchronisation" - -msgid "latest update on" -msgstr "dernière mise à jour" - -msgid "latest_retrieval" -msgstr "dernière synchronisation" - -msgctxt "CWSource" -msgid "latest_retrieval" -msgstr "date de la dernière synchronisation de la source." - -msgid "left" -msgstr "gauche" - -msgid "line" -msgstr "ligne" - -msgid "" -"link a property to the user which want this property customization. Unless " -"you're a site manager, this relation will be handled automatically." -msgstr "" -"lie une propriété à l'utilisateur désirant cette personnalisation. A moins " -"que vous ne soyez gestionnaire du site, cette relation est gérée " -"automatiquement." - -msgid "link a relation definition to its object entity type" -msgstr "lie une définition de relation à son type d'entité objet" - -msgid "link a relation definition to its relation type" -msgstr "lie une définition de relation à son type d'entité" - -msgid "link a relation definition to its subject entity type" -msgstr "lie une définition de relation à son type d'entité sujet" - -msgid "link a state to one or more workflow" -msgstr "lie un état à un ou plusieurs workflow" - -msgid "link a transition information to its object" -msgstr "lié une enregistrement de transition vers l'objet associé" - -msgid "link a transition to one or more workflow" -msgstr "lie une transition à un ou plusieurs workflow" - -msgid "link a workflow to one or more entity type" -msgstr "lie un workflow à un ou plusieurs types d'entité" - -msgid "list" -msgstr "liste" - -msgid "log" -msgstr "journal" - -msgctxt "CWDataImport" -msgid "log" -msgstr "journal" - -msgid "log in" -msgstr "s'identifier" - -msgid "login" -msgstr "identifiant" - -msgctxt "CWUser" -msgid "login" -msgstr "identifiant" - -msgid "login / password" -msgstr "identifiant / mot de passe" - -msgid "login or email" -msgstr "identifiant ou email" - -msgid "login_action" -msgstr "identifiez vous" - -msgid "logout" -msgstr "se déconnecter" - -#, python-format -msgid "loop in %(rel)s relation (%(eid)s)" -msgstr "boucle détectée en parcourant la relation %(rel)s de l'entité #%(eid)s" - -msgid "main informations" -msgstr "Informations générales" - -msgid "main_tab" -msgstr "description" - -msgid "mainvars" -msgstr "variables principales" - -msgctxt "RQLExpression" -msgid "mainvars" -msgstr "variables principales" - -msgid "manage" -msgstr "gestion du site" - -msgid "manage bookmarks" -msgstr "gérer les signets" - -msgid "manage permissions" -msgstr "gestion des permissions" - -msgid 
"managers" -msgstr "administrateurs" - -msgid "mandatory relation" -msgstr "relation obligatoire" - -msgid "march" -msgstr "mars" - -msgid "match_host" -msgstr "pour l'hôte" - -msgctxt "CWSourceHostConfig" -msgid "match_host" -msgstr "pour l'hôte" - -msgid "maximum number of characters in short description" -msgstr "nombre maximum de caractères dans les descriptions courtes" - -msgid "maximum number of entities to display in related combo box" -msgstr "nombre maximum d'entités à afficher dans les listes déroulantes" - -msgid "maximum number of objects displayed by page of results" -msgstr "nombre maximum d'entités affichées par pages" - -msgid "maximum number of related entities to display in the primary view" -msgstr "nombre maximum d'entités liées à afficher dans la vue primaire" - -msgid "may" -msgstr "mai" - -msgid "memory leak debugging" -msgstr "Déboguage des fuites de mémoire" - -msgid "message" -msgstr "message" - -#, python-format -msgid "missing parameters for entity %s" -msgstr "paramètres manquants pour l'entité %s" - -msgid "modification" -msgstr "modification" - -msgid "modification_date" -msgstr "date de modification" - -msgid "modify" -msgstr "modifier" - -msgid "monday" -msgstr "lundi" - -msgid "more actions" -msgstr "plus d'actions" - -msgid "more info about this workflow" -msgstr "plus d'information sur ce workflow" - -msgid "multiple edit" -msgstr "édition multiple" - -msgid "my custom search" -msgstr "ma recherche personnalisée" - -msgid "name" -msgstr "nom" - -msgctxt "BaseTransition" -msgid "name" -msgstr "nom" - -msgctxt "CWCache" -msgid "name" -msgstr "nom" - -msgctxt "CWComputedRType" -msgid "name" -msgstr "nom" - -msgctxt "CWConstraintType" -msgid "name" -msgstr "nom" - -msgctxt "CWEType" -msgid "name" -msgstr "nom" - -msgctxt "CWGroup" -msgid "name" -msgstr "nom" - -msgctxt "CWRType" -msgid "name" -msgstr "nom" - -msgctxt "CWSource" -msgid "name" -msgstr "nom" - -msgctxt "CWUniqueTogetherConstraint" -msgid "name" -msgstr "nom" - -msgctxt "State" -msgid "name" -msgstr "nom" - -msgctxt "Transition" -msgid "name" -msgstr "nom" - -msgctxt "Workflow" -msgid "name" -msgstr "nom" - -msgctxt "WorkflowTransition" -msgid "name" -msgstr "nom" - -msgid "name of the cache" -msgstr "nom du cache applicatif" - -msgid "" -"name of the main variables which should be used in the selection if " -"necessary (comma separated)" -msgstr "" -"nom des variables principales qui devrait être utilisées dans la sélection " -"si nécessaire (les séparer par des virgules)" - -msgid "name of the source" -msgstr "nom de la source" - -msgid "navbottom" -msgstr "bas de page" - -msgid "navcontentbottom" -msgstr "bas de page du contenu principal" - -msgid "navcontenttop" -msgstr "haut de page" - -msgid "navigation" -msgstr "navigation" - -msgid "navigation.combobox-limit" -msgstr "nombre d'entités dans les listes déroulantes" - -msgid "navigation.page-size" -msgstr "nombre de résultats" - -msgid "navigation.related-limit" -msgstr "nombre d'entités dans la vue primaire" - -msgid "navigation.short-line-size" -msgstr "taille des descriptions courtes" - -msgid "navtop" -msgstr "haut de page du contenu principal" - -msgid "new" -msgstr "nouveau" - -msgid "next page" -msgstr "page suivante" - -msgid "next_results" -msgstr "résultats suivants" - -msgid "no" -msgstr "non" - -msgid "no content next link" -msgstr "pas de lien 'suivant'" - -msgid "no content prev link" -msgstr "pas de lien 'précédent'" - -msgid "no edited fields specified" -msgstr "aucun champ à éditer spécifié" - -msgid "no log to display" 
-msgstr "rien à afficher" - -msgid "no related entity" -msgstr "pas d'entité liée" - -msgid "no repository sessions found" -msgstr "aucune session trouvée" - -msgid "no selected entities" -msgstr "pas d'entité sélectionnée" - -#, python-format -msgid "no such entity type %s" -msgstr "le type d'entité '%s' n'existe pas" - -msgid "no version information" -msgstr "pas d'information de version" - -msgid "no web sessions found" -msgstr "aucune session trouvée" - -msgid "normal" -msgstr "normal" - -msgid "not authorized" -msgstr "non autorisé" - -msgid "not selected" -msgstr "non sélectionné" - -msgid "november" -msgstr "novembre" - -msgid "num. users" -msgstr "nombre d'utilisateurs" - -msgid "object" -msgstr "objet" - -msgid "object type" -msgstr "type de l'objet" - -msgid "october" -msgstr "octobre" - -msgid "one month" -msgstr "un mois" - -msgid "one week" -msgstr "une semaine" - -msgid "oneline" -msgstr "une ligne" - -msgid "only select queries are authorized" -msgstr "seules les requêtes de sélections sont autorisées" - -msgid "open all" -msgstr "tout ouvrir" - -msgid "opened sessions" -msgstr "sessions ouvertes" - -msgid "opened web sessions" -msgstr "sessions web ouvertes" - -msgid "options" -msgstr "options" - -msgctxt "CWSourceSchemaConfig" -msgid "options" -msgstr "options" - -msgid "order" -msgstr "ordre" - -msgid "ordernum" -msgstr "ordre" - -msgctxt "CWAttribute" -msgid "ordernum" -msgstr "numéro d'ordre" - -msgctxt "CWRelation" -msgid "ordernum" -msgstr "numéro d'ordre" - -msgid "owl" -msgstr "owl" - -msgid "owlabox" -msgstr "owl ABox" - -msgid "owned_by" -msgstr "appartient à" - -msgid "owned_by_object" -msgstr "possède" - -msgid "owners" -msgstr "propriétaires" - -msgid "ownerships have been changed" -msgstr "les droits de propriété ont été modifiés" - -msgid "pageid-not-found" -msgstr "" -"des données nécessaires semblent expirées, veuillez recharger la page et " -"recommencer." - -msgid "parser" -msgstr "parseur" - -msgctxt "CWSource" -msgid "parser" -msgstr "parseur" - -msgid "parser to use to extract entities from content retrieved at given URLs." 
-msgstr "" -"parseur à utiliser pour extraire entités et relations du contenu récupéré " -"aux URLs données" - -msgid "password" -msgstr "mot de passe" - -msgid "password and confirmation don't match" -msgstr "le mot de passe et la confirmation sont différents" - -msgid "path" -msgstr "chemin" - -msgctxt "Bookmark" -msgid "path" -msgstr "chemin" - -msgid "permalink to this message" -msgstr "lien permanent vers ce message" - -msgid "permission" -msgstr "permission" - -msgid "permissions" -msgstr "permissions" - -msgid "pick existing bookmarks" -msgstr "récupérer des signets existants" - -msgid "pkey" -msgstr "clé" - -msgctxt "CWProperty" -msgid "pkey" -msgstr "code de la propriété" - -msgid "please correct errors below" -msgstr "veuillez corriger les erreurs ci-dessous" - -msgid "please correct the following errors:" -msgstr "veuillez corriger les erreurs suivantes :" - -msgid "possible views" -msgstr "vues possibles" - -msgid "prefered_form" -msgstr "forme préférée" - -msgctxt "EmailAddress" -msgid "prefered_form" -msgstr "forme préférée" - -msgid "prefered_form_object" -msgstr "forme préférée à" - -msgctxt "EmailAddress" -msgid "prefered_form_object" -msgstr "forme préférée de" - -msgid "preferences" -msgstr "préférences" - -msgid "previous page" -msgstr "page précédente" - -msgid "previous_results" -msgstr "résultats précédents" - -msgid "primary" -msgstr "primaire" - -msgid "primary_email" -msgstr "adresse email principale" - -msgctxt "CWUser" -msgid "primary_email" -msgstr "email principal" - -msgid "primary_email_object" -msgstr "adresse email principale (object)" - -msgctxt "EmailAddress" -msgid "primary_email_object" -msgstr "adresse principale de" - -msgid "profile" -msgstr "profil" - -msgid "rdef-description" -msgstr "description" - -msgid "rdef-permissions" -msgstr "permissions" - -msgid "rdf export" -msgstr "export RDF" - -msgid "read" -msgstr "lecture" - -msgid "read_permission" -msgstr "permission de lire" - -msgctxt "CWAttribute" -msgid "read_permission" -msgstr "permission de lire" - -msgctxt "CWEType" -msgid "read_permission" -msgstr "permission de lire" - -msgctxt "CWRelation" -msgid "read_permission" -msgstr "permission de lire" - -msgid "read_permission_object" -msgstr "a la permission de lire" - -msgctxt "CWGroup" -msgid "read_permission_object" -msgstr "peut lire" - -msgctxt "RQLExpression" -msgid "read_permission_object" -msgstr "peut lire" - -msgid "regexp matching host(s) to which this config applies" -msgstr "" -"expression régulière des noms d'hôtes auxquels cette configuration s'applique" - -msgid "registry" -msgstr "registre" - -msgid "related entity has no state" -msgstr "l'entité lié n'a pas d'état" - -msgid "related entity has no workflow set" -msgstr "l'entité lié n'a pas de workflow" - -msgid "relation" -msgstr "relation" - -#, python-format -msgid "relation %(relname)s of %(ent)s" -msgstr "relation %(relname)s de %(ent)s" - -#, python-format -msgid "" -"relation %(rtype)s with %(etype)s as %(role)s is supported but no target " -"type supported" -msgstr "" -"la relation %(rtype)s avec %(etype)s comme %(role)s est supportée mais aucun " -"type cible n'est supporté" - -#, python-format -msgid "" -"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is " -"mandatory but not supported" -msgstr "" -"la relation %(rtype)s avec %(etype)s comme %(role)s est obligatoire mais non " -"supportée" - -#, python-format -msgid "" -"relation %s is supported but none of its definitions matches supported " -"entities" -msgstr "" -"la relation %s est 
supportée mais aucune de ses définitions ne correspondent " -"aux types d'entités supportés" - -msgid "relation add" -msgstr "ajout de relation" - -msgid "relation removal" -msgstr "suppression de relation" - -msgid "relation_type" -msgstr "type de relation" - -msgctxt "CWAttribute" -msgid "relation_type" -msgstr "type de relation" - -msgctxt "CWRelation" -msgid "relation_type" -msgstr "type de relation" - -msgid "relation_type_object" -msgstr "définition" - -msgctxt "CWRType" -msgid "relation_type_object" -msgstr "définition" - -msgid "relations" -msgstr "relations" - -msgctxt "CWUniqueTogetherConstraint" -msgid "relations" -msgstr "relations" - -msgid "relations deleted" -msgstr "relations supprimées" - -msgid "relations_object" -msgstr "relations de" - -msgctxt "CWRType" -msgid "relations_object" -msgstr "relations de" - -msgid "relative url of the bookmarked page" -msgstr "url relative de la page" - -msgid "remove-inlined-entity-form" -msgstr "supprimer" - -msgid "require_group" -msgstr "nécessite le groupe" - -msgctxt "BaseTransition" -msgid "require_group" -msgstr "restreinte au groupe" - -msgctxt "Transition" -msgid "require_group" -msgstr "restreinte au groupe" - -msgctxt "WorkflowTransition" -msgid "require_group" -msgstr "restreinte au groupe" - -msgid "require_group_object" -msgstr "a les droits" - -msgctxt "CWGroup" -msgid "require_group_object" -msgstr "a les droits" - -msgid "required" -msgstr "requis" - -msgid "required attribute" -msgstr "attribut requis" - -msgid "required field" -msgstr "champ requis" - -msgid "resources usage" -msgstr "resources utilisées" - -msgid "" -"restriction part of a rql query. For entity rql expression, X and U are " -"predefined respectivly to the current object and to the request user. For " -"relation rql expression, S, O and U are predefined respectivly to the " -"current relation'subject, object and to the request user. " -msgstr "" -"partie restriction de la requête rql. Pour une expression s'appliquant à une " -"entité, X et U sont respectivement prédéfinis à l'entité et à l'utilisateur " -"courant. Pour une expression s'appliquant à une relation, S, O et U sont " -"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur " -"courant." 
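As a sketch of the RQL expressions described in the entry just above (X and U predefined for entity expressions; S, O and U for relation expressions), schema permissions might be written as follows. This is illustrative only: the Note type and note_of relation are hypothetical, while ERQLExpression/RRQLExpression and the owned_by relation are the standard CubicWeb names:

    from yams.buildobjs import EntityType, RelationDefinition, String
    from cubicweb.schema import ERQLExpression, RRQLExpression

    class Note(EntityType):   # hypothetical entity type
        __permissions__ = {
            'read':   ('managers', 'users'),
            'add':    ('managers', 'users'),
            # X is the current entity, U the request user
            'update': ('managers', ERQLExpression('X owned_by U')),
            'delete': ('managers', ERQLExpression('X owned_by U')),
        }
        content = String()

    class note_of(RelationDefinition):   # hypothetical relation
        __permissions__ = {
            'read':   ('managers', 'users'),
            # S and O are the relation's subject and object, U the request user
            'add':    ('managers', RRQLExpression('S owned_by U')),
            'delete': ('managers', RRQLExpression('S owned_by U')),
        }
        subject = 'Note'
        object = 'CWUser'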
- -msgid "revert changes" -msgstr "annuler les changements" - -msgid "right" -msgstr "droite" - -msgid "rql expression allowing to add entities/relations of this type" -msgstr "expression rql autorisant à ajouter des entités/relations de ce type" - -msgid "rql expression allowing to delete entities/relations of this type" -msgstr "expression rql autorisant à supprimer des entités/relations de ce type" - -msgid "rql expression allowing to read entities/relations of this type" -msgstr "expression rql autorisant à lire des entités/relations de ce type" - -msgid "rql expression allowing to update entities/relations of this type" -msgstr "" -"expression rql autorisant à mettre à jour des entités/relations de ce type" - -msgid "rql expressions" -msgstr "conditions rql" - -msgid "rss export" -msgstr "export RSS" - -msgid "rule" -msgstr "règle" - -msgctxt "CWComputedRType" -msgid "rule" -msgstr "règle" - -msgid "same_as" -msgstr "identique à" - -msgid "sample format" -msgstr "exemple" - -msgid "saturday" -msgstr "samedi" - -msgid "schema-diagram" -msgstr "diagramme" - -msgid "schema-entity-types" -msgstr "types d'entités" - -msgid "schema-relation-types" -msgstr "types de relations" - -msgid "search" -msgstr "rechercher" - -msgid "search for association" -msgstr "rechercher pour associer" - -msgid "searching for" -msgstr "Recherche de" - -msgid "security" -msgstr "sécurité" - -msgid "see more" -msgstr "voir plus" - -msgid "see them all" -msgstr "les voir toutes" - -msgid "see_also" -msgstr "voir aussi" - -msgid "select" -msgstr "sélectionner" - -msgid "select a" -msgstr "sélectionner un" - -msgid "select a key first" -msgstr "sélectionnez d'abord une clé" - -msgid "select a relation" -msgstr "sélectionner une relation" - -msgid "select this entity" -msgstr "sélectionner cette entité" - -msgid "selected" -msgstr "sélectionné" - -msgid "semantic description of this attribute" -msgstr "description sémantique de cet attribut" - -msgid "semantic description of this entity type" -msgstr "description sémantique de ce type d'entité" - -msgid "semantic description of this relation" -msgstr "description sémantique de cette relation" - -msgid "semantic description of this relation type" -msgstr "description sémantique de ce type de relation" - -msgid "semantic description of this state" -msgstr "description sémantique de cet état" - -msgid "semantic description of this transition" -msgstr "description sémantique de cette transition" - -msgid "semantic description of this workflow" -msgstr "description sémantique de ce workflow" - -msgid "september" -msgstr "septembre" - -msgid "server information" -msgstr "informations serveur" - -msgid "severity" -msgstr "sévérité" - -msgid "" -"should html fields being edited using fckeditor (a HTML WYSIWYG editor). " -"You should also select text/html as default text format to actually get " -"fckeditor." -msgstr "" -"indique si les champs HTML doivent être édités avec fckeditor (un\n" -"éditeur HTML WYSIWYG). Il est également conseillé de choisir text/html\n" -"comme format de texte par défaut pour pouvoir utiliser fckeditor." 
- -#, python-format -msgid "show %s results" -msgstr "montrer %s résultats" - -msgid "show advanced fields" -msgstr "montrer les champs avancés" - -msgid "show filter form" -msgstr "afficher le filtre" - -msgid "site configuration" -msgstr "configuration du site" - -msgid "site documentation" -msgstr "documentation du site" - -msgid "site title" -msgstr "titre du site" - -msgid "site-wide property can't be set for user" -msgstr "une propriété spécifique au site ne peut être propre à un utilisateur" - -msgid "some later transaction(s) touch entity, undo them first" -msgstr "" -"des transactions plus récentes modifient cette entité, annulez les d'abord" - -msgid "some relations violate a unicity constraint" -msgstr "certaines relations transgressent une contrainte d'unicité" - -msgid "sorry, the server is unable to handle this query" -msgstr "désolé, le serveur ne peut traiter cette requête" - -msgid "" -"source's configuration. One key=value per line, authorized keys depending on " -"the source's type" -msgstr "" -"Configuration de la source. Une clé=valeur par ligne, les clés autorisées " -"dépendantes du type de source. Les valeur surchargent celles définies sur la " -"source." - -msgid "sparql xml" -msgstr "XML Sparql" - -msgid "special transition allowing to go through a sub-workflow" -msgstr "transition spécial permettant d'aller dans un sous-workfow" - -msgid "specializes" -msgstr "dérive de" - -msgctxt "CWEType" -msgid "specializes" -msgstr "spécialise" - -msgid "specializes_object" -msgstr "parent de" - -msgctxt "CWEType" -msgid "specializes_object" -msgstr "parent de" - -#, python-format -msgid "specifying %s is mandatory" -msgstr "spécifier %s est obligatoire" - -msgid "" -"start timestamp of the currently in synchronization, or NULL when no " -"synchronization in progress." -msgstr "" -"horodate de départ de la synchronisation en cours, ou NULL s'il n'y en a pas." - -msgid "start_timestamp" -msgstr "horodate de début" - -msgctxt "CWDataImport" -msgid "start_timestamp" -msgstr "horodate de début" - -msgid "startup views" -msgstr "vues de départ" - -msgid "startupview" -msgstr "vues de départ" - -msgid "state" -msgstr "état" - -msgid "state and transition don't belong the the same workflow" -msgstr "l'état et la transition n'appartiennent pas au même workflow" - -msgid "state doesn't apply to this entity's type" -msgstr "cet état ne s'applique pas à ce type d'entité" - -msgid "state doesn't belong to entity's current workflow" -msgstr "l'état n'appartient pas au workflow courant de l'entité" - -msgid "state doesn't belong to entity's workflow" -msgstr "l'état n'appartient pas au workflow de l'entité" - -msgid "" -"state doesn't belong to entity's workflow. You may want to set a custom " -"workflow for this entity first." -msgstr "" -"l'état n'appartient pas au workflow courant de l'entité. Vous désirez peut-" -"être spécifier que cette entité doit utiliser ce workflow." 
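For illustration of the "one key=value per line" source configuration format mentioned in the "source's configuration" entry above: the keys below are taken from messages in this catalog (url, synchronization-interval), but the values are made-up examples and the accepted keys depend on the source's type:

    url=http://example.org/feed.xml
    synchronization-interval=30min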
- -msgid "state doesn't belong to this workflow" -msgstr "l'état n'appartient pas à ce workflow" - -msgid "state_of" -msgstr "état de" - -msgctxt "State" -msgid "state_of" -msgstr "état de" - -msgid "state_of_object" -msgstr "a pour état" - -msgctxt "Workflow" -msgid "state_of_object" -msgstr "contient les états" - -msgid "status" -msgstr "état" - -msgctxt "CWDataImport" -msgid "status" -msgstr "état" - -msgid "status change" -msgstr "changer l'état" - -msgid "status changed" -msgstr "changement d'état" - -#, python-format -msgid "status will change from %(st1)s to %(st2)s" -msgstr "l'entité passera de l'état %(st1)s à l'état %(st2)s" - -msgid "subject" -msgstr "sujet" - -msgid "subject type" -msgstr "type du sujet" - -msgid "subject/object cardinality" -msgstr "cardinalité sujet/objet" - -msgid "subworkflow" -msgstr "sous-workflow" - -msgctxt "WorkflowTransition" -msgid "subworkflow" -msgstr "sous-workflow" - -msgid "" -"subworkflow isn't a workflow for the same types as the transition's workflow" -msgstr "" -"le sous-workflow ne s'applique pas aux mêmes types que le workflow de cette " -"transition" - -msgid "subworkflow state" -msgstr "état de sous-workflow" - -msgid "subworkflow_exit" -msgstr "sortie de sous-workflow" - -msgctxt "WorkflowTransition" -msgid "subworkflow_exit" -msgstr "sortie du sous-workflow" - -msgid "subworkflow_exit_object" -msgstr "états de sortie" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_exit_object" -msgstr "états de sortie" - -msgid "subworkflow_object" -msgstr "utilisé par la transition" - -msgctxt "Workflow" -msgid "subworkflow_object" -msgstr "sous workflow de" - -msgid "subworkflow_state" -msgstr "état du sous-workflow" - -msgctxt "SubWorkflowExitPoint" -msgid "subworkflow_state" -msgstr "état" - -msgid "subworkflow_state_object" -msgstr "état de sortie de" - -msgctxt "State" -msgid "subworkflow_state_object" -msgstr "état de sortie de" - -msgid "success" -msgstr "succès" - -msgid "sunday" -msgstr "dimanche" - -msgid "surname" -msgstr "nom" - -msgctxt "CWUser" -msgid "surname" -msgstr "nom de famille" - -msgid "symmetric" -msgstr "symétrique" - -msgctxt "CWRType" -msgid "symmetric" -msgstr "symétrique" - -msgid "synchronization-interval must be greater than 1 minute" -msgstr "synchronization-interval doit être supérieur à 1 minute" - -msgid "table" -msgstr "table" - -msgid "tablefilter" -msgstr "filtre de tableau" - -msgid "text" -msgstr "text" - -msgid "text/cubicweb-page-template" -msgstr "contenu dynamique" - -msgid "text/html" -msgstr "html" - -msgid "text/markdown" -msgstr "texte au format markdown" - -msgid "text/plain" -msgstr "texte pur" - -msgid "text/rest" -msgstr "ReST" - -msgid "the URI of the object" -msgstr "l'Uri de l'objet" - -msgid "the prefered email" -msgstr "l'adresse électronique principale" - -msgid "the system source has its configuration stored on the file-system" -msgstr "la source système a sa configuration stockée sur le système de fichier" - -#, python-format -msgid "the value \"%s\" is already used, use another one" -msgstr "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur" - -msgid "there is no next page" -msgstr "Il n'y a pas de page suivante" - -msgid "there is no previous page" -msgstr "Il n'y a pas de page précédente" - -#, python-format -msgid "there is no transaction #%s" -msgstr "Il n'y a pas de transaction #%s" - -msgid "this action is not reversible!" -msgstr "" -"Attention ! Cette opération va détruire les données de façon irréversible." 
- -msgid "this entity is currently owned by" -msgstr "cette entité appartient à" - -msgid "this parser doesn't use a mapping" -msgstr "ce parseur n'utilise pas de mapping" - -msgid "this resource does not exist" -msgstr "cette ressource est introuvable" - -msgid "this source doesn't use a mapping" -msgstr "cette source n'utilise pas de mapping" - -msgid "thursday" -msgstr "jeudi" - -msgid "timestamp" -msgstr "date" - -msgctxt "CWCache" -msgid "timestamp" -msgstr "valide depuis" - -msgid "timetable" -msgstr "emploi du temps" - -msgid "title" -msgstr "titre" - -msgctxt "Bookmark" -msgid "title" -msgstr "libellé" - -msgid "to" -msgstr "à" - -#, python-format -msgid "to %(date)s" -msgstr "au %(date)s" - -msgid "to associate with" -msgstr "pour associer à" - -msgid "to_entity" -msgstr "vers l'entité" - -msgctxt "CWAttribute" -msgid "to_entity" -msgstr "pour l'entité" - -msgctxt "CWRelation" -msgid "to_entity" -msgstr "pour l'entité" - -msgid "to_entity_object" -msgstr "objet de la relation" - -msgctxt "CWEType" -msgid "to_entity_object" -msgstr "objet de la relation" - -msgid "to_interval_end" -msgstr "à" - -msgid "to_state" -msgstr "vers l'état" - -msgctxt "TrInfo" -msgid "to_state" -msgstr "état de destination" - -msgid "to_state_object" -msgstr "transitions vers cet état" - -msgctxt "State" -msgid "to_state_object" -msgstr "transition vers cet état" - -msgid "toggle check boxes" -msgstr "afficher/masquer les cases à cocher" - -msgid "tr_count" -msgstr "n° de transition" - -msgctxt "TrInfo" -msgid "tr_count" -msgstr "n° de transition" - -msgid "transaction undone" -msgstr "transaction annulée" - -#, python-format -msgid "transition %(tr)s isn't allowed from %(st)s" -msgstr "la transition %(tr)s n'est pas autorisée depuis l'état %(st)s" - -msgid "transition doesn't belong to entity's workflow" -msgstr "la transition n'appartient pas au workflow de l'entité" - -msgid "transition isn't allowed" -msgstr "la transition n'est pas autorisée" - -msgid "transition may not be fired" -msgstr "la transition ne peut-être déclenchée" - -msgid "transition_of" -msgstr "transition de" - -msgctxt "BaseTransition" -msgid "transition_of" -msgstr "transition de" - -msgctxt "Transition" -msgid "transition_of" -msgstr "transition de" - -msgctxt "WorkflowTransition" -msgid "transition_of" -msgstr "transition de" - -msgid "transition_of_object" -msgstr "a pour transition" - -msgctxt "Workflow" -msgid "transition_of_object" -msgstr "a pour transition" - -msgid "tree view" -msgstr "arborescence" - -msgid "tuesday" -msgstr "mardi" - -msgid "type" -msgstr "type" - -msgctxt "BaseTransition" -msgid "type" -msgstr "type" - -msgctxt "CWSource" -msgid "type" -msgstr "type" - -msgctxt "Transition" -msgid "type" -msgstr "type" - -msgctxt "WorkflowTransition" -msgid "type" -msgstr "type" - -msgid "type here a sparql query" -msgstr "Tapez une requête sparql" - -msgid "type of the source" -msgstr "type de la source" - -msgid "ui" -msgstr "propriétés génériques de l'interface" - -msgid "ui.date-format" -msgstr "format de date" - -msgid "ui.datetime-format" -msgstr "format de date et de l'heure" - -msgid "ui.default-text-format" -msgstr "format de texte" - -msgid "ui.encoding" -msgstr "encodage" - -msgid "ui.fckeditor" -msgstr "éditeur du contenu" - -msgid "ui.float-format" -msgstr "format des flottants" - -msgid "ui.language" -msgstr "langue" - -msgid "ui.main-template" -msgstr "gabarit principal" - -msgid "ui.site-title" -msgstr "titre du site" - -msgid "ui.time-format" -msgstr "format de l'heure" - -msgid "unable to check 
captcha, please try again" -msgstr "impossible de vérifier le captcha, veuillez réessayer" - -msgid "unaccessible" -msgstr "inaccessible" - -msgid "unauthorized value" -msgstr "valeur non autorisée" - -msgid "undefined user" -msgstr "utilisateur inconnu" - -msgid "undo" -msgstr "annuler" - -msgid "unique identifier used to connect to the application" -msgstr "identifiant unique utilisé pour se connecter à l'application" - -msgid "unknown external entity" -msgstr "entité (externe) introuvable" - -#, python-format -msgid "unknown options %s" -msgstr "options inconnues : %s" - -#, python-format -msgid "unknown property key %s" -msgstr "clé de propriété inconnue : %s" - -msgid "unknown vocabulary:" -msgstr "vocabulaire inconnu : " - -msgid "unsupported protocol" -msgstr "protocole non supporté" - -msgid "upassword" -msgstr "mot de passe" - -msgctxt "CWUser" -msgid "upassword" -msgstr "mot de passe" - -msgid "update" -msgstr "modification" - -msgid "update_permission" -msgstr "permission de modification" - -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "permission de modifier" - -msgctxt "CWEType" -msgid "update_permission" -msgstr "permission de modifier" - -msgid "update_permission_object" -msgstr "a la permission de modifier" - -msgctxt "CWGroup" -msgid "update_permission_object" -msgstr "peut modifier" - -msgctxt "RQLExpression" -msgid "update_permission_object" -msgstr "peut modifier" - -msgid "update_relation" -msgstr "modifier" - -msgid "updated" -msgstr "mis à jour" - -#, python-format -msgid "updated %(etype)s #%(eid)s (%(title)s)" -msgstr "modification de l'entité %(etype)s #%(eid)s (%(title)s)" - -msgid "uri" -msgstr "uri" - -msgctxt "ExternalUri" -msgid "uri" -msgstr "uri" - -msgid "url" -msgstr "url" - -msgctxt "CWSource" -msgid "url" -msgstr "url" - -msgid "" -"use to define a transition from one or multiple states to a destination " -"states in workflow's definitions. Transition without destination state will " -"go back to the state from which we arrived to the current state." -msgstr "" -"utilisé dans une définition de processus pour ajouter une transition depuis " -"un ou plusieurs états vers un état de destination. Une transition sans état " -"de destination retournera à l'état précédent l'état courant." - -msgid "use_email" -msgstr "adresse électronique" - -msgctxt "CWUser" -msgid "use_email" -msgstr "utilise l'adresse électronique" - -msgid "use_email_object" -msgstr "adresse utilisée par" - -msgctxt "EmailAddress" -msgid "use_email_object" -msgstr "utilisée par" - -msgid "" -"used for cubicweb configuration. Once a property has been created you can't " -"change the key." -msgstr "" -"utilisé pour la configuration de l'application. 
Une fois qu'une propriété a " -"été créée, vous ne pouvez plus changez la clé associée" - -msgid "" -"used to associate simple states to an entity type and/or to define workflows" -msgstr "associe les états à un type d'entité pour définir un workflow" - -msgid "user" -msgstr "utilisateur" - -#, python-format -msgid "" -"user %s has made the following change(s):\n" -"\n" -msgstr "" -"l'utilisateur %s a effectué le(s) changement(s) suivant(s):\n" -"\n" - -msgid "user interface encoding" -msgstr "encodage utilisé dans l'interface utilisateur" - -msgid "user preferences" -msgstr "préférences utilisateur" - -msgid "user's email account" -msgstr "email de l'utilisateur" - -msgid "users" -msgstr "utilisateurs" - -msgid "users and groups" -msgstr "utilisateurs et groupes" - -msgid "users using this bookmark" -msgstr "utilisateurs utilisant ce signet" - -msgid "validate modifications on selected items" -msgstr "valider les modifications apportées aux éléments sélectionnés" - -msgid "validating..." -msgstr "chargement en cours ..." - -msgid "value" -msgstr "valeur" - -msgctxt "CWConstraint" -msgid "value" -msgstr "contrainte" - -msgctxt "CWProperty" -msgid "value" -msgstr "valeur" - -#, python-format -msgid "value %(KEY-value)s must be < %(KEY-boundary)s" -msgstr "la valeur %(KEY-value)s doit être strictement inférieure à %(KEY-boundary)s" - -#, python-format -msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" -msgstr "" -"la valeur %(KEY-value)s doit être inférieure ou égale à %(KEY-boundary)s" - -#, python-format -msgid "value %(KEY-value)s must be > %(KEY-boundary)s" -msgstr "la valeur %(KEY-value)s doit être strictement supérieure à %(KEY-boundary)s" - -#, python-format -msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" -msgstr "" -"la valeur %(KEY-value)s doit être supérieure ou égale à %(KEY-boundary)s" - -msgid "value associated to this key is not editable manually" -msgstr "la valeur associée à cette clé n'est pas éditable manuellement" - -#, python-format -msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" -msgstr "" -"la taille maximum est %(KEY-max)s mais cette valeur est de taille " -"%(KEY-size)s" - -#, python-format -msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" -msgstr "" -"la taille minimum est %(KEY-min)s mais cette valeur est de taille " -"%(KEY-size)s" - -msgid "vcard" -msgstr "vcard" - -msgid "versions configuration" -msgstr "configuration de version" - -msgid "view" -msgstr "voir" - -msgid "view all" -msgstr "voir tous" - -msgid "view detail for this entity" -msgstr "voir les détails de cette entité" - -msgid "view history" -msgstr "voir l'historique" - -msgid "view identifier" -msgstr "identifiant" - -msgid "view title" -msgstr "titre" - -msgid "view workflow" -msgstr "voir les états possibles" - -msgid "view_index" -msgstr "accueil" - -msgid "visible" -msgstr "visible" - -msgid "warning" -msgstr "attention" - -msgid "we are not yet ready to handle this query" -msgstr "" -"nous ne sommes pas capable de gérer ce type de requête sparql pour le moment" - -msgid "wednesday" -msgstr "mercredi" - -#, python-format -msgid "welcome %s!" -msgstr "bienvenue %s !" 
- -msgid "wf_info_for" -msgstr "historique de" - -msgid "wf_info_for_object" -msgstr "historique des transitions" - -msgid "wf_tab_info" -msgstr "description" - -msgid "wfgraph" -msgstr "image du workflow" - -msgid "" -"when multiple addresses are equivalent (such as python-projects@logilab.org " -"and python-projects@lists.logilab.org), set this to indicate which is the " -"preferred form." -msgstr "" -"quand plusieurs addresses sont équivalentes (comme python-projects@logilab." -"org et python-projects@lists.logilab.org), indique laquelle est la forme " -"préférentielle." - -msgid "workflow" -msgstr "workflow" - -#, python-format -msgid "workflow changed to \"%s\"" -msgstr "workflow changé à \"%s\"" - -msgid "workflow has no initial state" -msgstr "le workflow n'a pas d'état initial" - -msgid "workflow history item" -msgstr "entrée de l'historique de workflow" - -msgid "workflow isn't a workflow for this type" -msgstr "le workflow ne s'applique pas à ce type d'entité" - -msgid "workflow to which this state belongs" -msgstr "workflow auquel cet état appartient" - -msgid "workflow to which this transition belongs" -msgstr "workflow auquel cette transition appartient" - -msgid "workflow_of" -msgstr "workflow de" - -msgctxt "Workflow" -msgid "workflow_of" -msgstr "workflow de" - -msgid "workflow_of_object" -msgstr "a pour workflow" - -msgctxt "CWEType" -msgid "workflow_of_object" -msgstr "a pour workflow" - -#, python-format -msgid "wrong query parameter line %s" -msgstr "mauvais paramètre de requête ligne %s" - -msgid "xbel export" -msgstr "export XBEL" - -msgid "xml export" -msgstr "export XML" - -msgid "xml export (entities)" -msgstr "export XML (entités)" - -msgid "yes" -msgstr "oui" - -msgid "you have been logged out" -msgstr "vous avez été déconnecté" - -msgid "you should probably delete that property" -msgstr "vous devriez probablement supprimer cette propriété" diff -r 058bb3dc685f -r 0b59724cb3f2 mail.py --- a/mail.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,154 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Common utilies to format / send emails.""" - -__docformat__ = "restructuredtext en" - -from base64 import b64encode, b64decode -from time import time -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -from email.mime.image import MIMEImage -from email.header import Header -from email.utils import formatdate -from socket import gethostname - -from six import PY2, PY3, text_type - - -def header(ustring): - if PY3: - return Header(ustring, 'utf-8') - return Header(ustring.encode('UTF-8'), 'UTF-8') - -def addrheader(uaddr, uname=None): - # even if an email address should be ascii, encode it using utf8 since - # automatic tests may generate non ascii email address - if PY2: - addr = uaddr.encode('UTF-8') - else: - addr = uaddr - if uname: - val = '%s <%s>' % (header(uname).encode(), addr) - else: - val = addr - assert isinstance(val, str) # bytes in py2, ascii-encoded unicode in py3 - return val - - -def construct_message_id(appid, eid, withtimestamp=True): - if withtimestamp: - addrpart = 'eid=%s×tamp=%.10f' % (eid, time()) - else: - addrpart = 'eid=%s' % eid - # we don't want any equal sign nor trailing newlines - leftpart = b64encode(addrpart.encode('ascii'), b'.-').decode('ascii').rstrip().rstrip('=') - return '<%s@%s.%s>' % (leftpart, appid, gethostname()) - - -def parse_message_id(msgid, appid): - if msgid[0] == '<': - msgid = msgid[1:] - if msgid[-1] == '>': - msgid = msgid[:-1] - try: - values, qualif = msgid.split('@') - padding = len(values) % 4 - values = b64decode(str(values + '='*padding), '.-').decode('ascii') - values = dict(v.split('=') for v in values.split('&')) - fromappid, host = qualif.split('.', 1) - except Exception: - return None - if appid != fromappid or host != gethostname(): - return None - return values - - -def format_mail(uinfo, to_addrs, content, subject="", - cc_addrs=(), msgid=None, references=(), config=None): - """Sends an Email to 'e_addr' with content 'content', and subject 'subject' - - to_addrs and cc_addrs are expected to be a list of email address without - name - """ - assert isinstance(content, text_type), repr(content) - msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8') - # safety: keep only the first newline - try: - subject = subject.splitlines()[0] - msg['Subject'] = header(subject) - except IndexError: - pass # no subject - if uinfo.get('email'): - email = uinfo['email'] - elif config and config['sender-addr']: - email = text_type(config['sender-addr']) - else: - email = u'' - if uinfo.get('name'): - name = uinfo['name'] - elif config and config['sender-name']: - name = text_type(config['sender-name']) - else: - name = u'' - msg['From'] = addrheader(email, name) - if config and config['sender-addr'] and config['sender-addr'] != email: - appaddr = addrheader(config['sender-addr'], config['sender-name']) - msg['Reply-to'] = '%s, %s' % (msg['From'], appaddr) - elif email: - msg['Reply-to'] = msg['From'] - if config is not None: - msg['X-CW'] = config.appid - unique_addrs = lambda addrs: sorted(set(addr for addr in addrs if addr is not None)) - msg['To'] = ', '.join(addrheader(addr) for addr in unique_addrs(to_addrs)) - if cc_addrs: - msg['Cc'] = ', '.join(addrheader(addr) for addr in unique_addrs(cc_addrs)) - if msgid: - msg['Message-id'] = msgid - if references: - msg['References'] = ', '.join(references) - msg['Date'] = formatdate() - return msg - - -class HtmlEmail(MIMEMultipart): - - def __init__(self, subject, textcontent, htmlcontent, - sendermail=None, sendername=None, recipients=None, 
ccrecipients=None): - MIMEMultipart.__init__(self, 'related') - self['Subject'] = header(subject) - self.preamble = 'This is a multi-part message in MIME format.' - # Attach alternative text message - alternative = MIMEMultipart('alternative') - self.attach(alternative) - msgtext = MIMEText(textcontent.encode('UTF-8'), 'plain', 'UTF-8') - alternative.attach(msgtext) - # Attach html message - msghtml = MIMEText(htmlcontent.encode('UTF-8'), 'html', 'UTF-8') - alternative.attach(msghtml) - if sendermail or sendername: - self['From'] = addrheader(sendermail, sendername) - if recipients: - self['To'] = ', '.join(addrheader(addr) for addr in recipients if addr is not None) - if ccrecipients: - self['Cc'] = ', '.join(addrheader(addr) for addr in ccrecipients if addr is not None) - - def attach_image(self, data, htmlId): - image = MIMEImage(data) - image.add_header('Content-ID', '<%s>' % htmlId) - self.attach(image) diff -r 058bb3dc685f -r 0b59724cb3f2 md5crypt.py --- a/md5crypt.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,121 +0,0 @@ -# md5crypt.py -# -# 0423.2000 by michal wallace http://www.sabren.com/ -# based on perl's Crypt::PasswdMD5 by Luis Munoz (lem@cantv.net) -# based on /usr/src/libcrypt/crypt.c from FreeBSD 2.2.5-RELEASE -# -# MANY THANKS TO -# -# Carey Evans - http://home.clear.net.nz/pages/c.evans/ -# Dennis Marti - http://users.starpower.net/marti1/ -# -# For the patches that got this thing working! -# -# modification by logilab: -# * remove usage of the string module -# * don't include the magic string in the output string -# for true crypt.crypt compatibility -# * use hashlib module instead of md5 -######################################################### -"""md5crypt.py - Provides interoperable MD5-based crypt() function - -SYNOPSIS - - import md5crypt.py - - cryptedpassword = md5crypt.md5crypt(password, salt); - -DESCRIPTION - -unix_md5_crypt() provides a crypt()-compatible interface to the -rather new MD5-based crypt() function found in modern operating systems. -It's based on the implementation found on FreeBSD 2.2.[56]-RELEASE and -contains the following license in it: - - "THE BEER-WARE LICENSE" (Revision 42): - wrote this file. As long as you retain this notice you - can do whatever you want with this stuff. If we meet some day, and you think - this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp -""" - -MAGIC = b'$1$' # Magic string -ITOA64 = b"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - -from hashlib import md5 # pylint: disable=E0611 - -from six import text_type, indexbytes -from six.moves import range - - -def to64 (v, n): - ret = bytearray() - while (n - 1 >= 0): - n = n - 1 - ret.append(ITOA64[v & 0x3f]) - v = v >> 6 - return ret - -def crypt(pw, salt): - if isinstance(pw, text_type): - pw = pw.encode('utf-8') - if isinstance(salt, text_type): - salt = salt.encode('ascii') - # Take care of the magic string if present - if salt.startswith(MAGIC): - salt = salt[len(MAGIC):] - # salt can have up to 8 characters: - salt = salt.split(b'$', 1)[0] - salt = salt[:8] - ctx = pw + MAGIC + salt - final = md5(pw + salt + pw).digest() - for pl in range(len(pw), 0, -16): - if pl > 16: - ctx = ctx + final[:16] - else: - ctx = ctx + final[:pl] - # Now the 'weird' xform (??) - i = len(pw) - while i: - if i & 1: - ctx = ctx + b'\0' #if ($i & 1) { $ctx->add(pack("C", 0)); } - else: - ctx = ctx + pw[0] - i = i >> 1 - final = md5(ctx).digest() - # The following is supposed to make - # things run slower. 
- # my question: WTF??? - for i in range(1000): - ctx1 = b'' - if i & 1: - ctx1 = ctx1 + pw - else: - ctx1 = ctx1 + final[:16] - if i % 3: - ctx1 = ctx1 + salt - if i % 7: - ctx1 = ctx1 + pw - if i & 1: - ctx1 = ctx1 + final[:16] - else: - ctx1 = ctx1 + pw - final = md5(ctx1).digest() - # Final xform - passwd = b'' - passwd += to64((indexbytes(final, 0) << 16) - |(indexbytes(final, 6) << 8) - |(indexbytes(final, 12)),4) - passwd += to64((indexbytes(final, 1) << 16) - |(indexbytes(final, 7) << 8) - |(indexbytes(final, 13)), 4) - passwd += to64((indexbytes(final, 2) << 16) - |(indexbytes(final, 8) << 8) - |(indexbytes(final, 14)), 4) - passwd += to64((indexbytes(final, 3) << 16) - |(indexbytes(final, 9) << 8) - |(indexbytes(final, 15)), 4) - passwd += to64((indexbytes(final, 4) << 16) - |(indexbytes(final, 10) << 8) - |(indexbytes(final, 5)), 4) - passwd += to64((indexbytes(final, 11)), 2) - return passwd diff -r 058bb3dc685f -r 0b59724cb3f2 migration.py --- a/migration.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,553 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""utilities for instances migration""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import os -import logging -import tempfile -from os.path import exists, join, basename, splitext -from itertools import chain -from warnings import warn - -from six import string_types - -from logilab.common import IGNORED_EXTENSIONS -from logilab.common.decorators import cached -from logilab.common.configuration import REQUIRED, read_old_config -from logilab.common.shellutils import ASK -from logilab.common.changelog import Version -from logilab.common.deprecation import deprecated - -from cubicweb import ConfigurationError, ExecutionError -from cubicweb.cwconfig import CubicWebConfiguration as cwcfg -from cubicweb.toolsutils import show_diffs - -def filter_scripts(config, directory, fromversion, toversion, quiet=True): - """return a list of paths of migration files to consider to upgrade - from a version to a greater one - """ - from logilab.common.changelog import Version # doesn't work with appengine - assert fromversion - assert toversion - assert isinstance(fromversion, tuple), fromversion.__class__ - assert isinstance(toversion, tuple), toversion.__class__ - assert fromversion <= toversion, (fromversion, toversion) - if not exists(directory): - if not quiet: - print(directory, "doesn't exists, no migration path") - return [] - if fromversion == toversion: - return [] - result = [] - for fname in os.listdir(directory): - if fname.endswith(IGNORED_EXTENSIONS): - continue - fpath = join(directory, fname) - try: - tver, mode = fname.split('_', 1) - except ValueError: - continue - mode = mode.split('.', 1)[0] - if not config.accept_mode(mode): - continue - try: - tver = Version(tver) - except ValueError: - continue - if tver <= fromversion: - continue - if tver > toversion: - continue - result.append((tver, fpath)) - # be sure scripts are executed in order - return sorted(result) - - -def execscript_confirm(scriptpath): - """asks for confirmation before executing a script and provides the - ability to show the script's content - """ - while True: - answer = ASK.ask('Execute %r ?' 
% scriptpath, - ('Y','n','show','abort'), 'Y') - if answer == 'abort': - raise SystemExit(1) - elif answer == 'n': - return False - elif answer == 'show': - stream = open(scriptpath) - scriptcontent = stream.read() - stream.close() - print() - print(scriptcontent) - print() - else: - return True - -def yes(*args, **kwargs): - return True - - -class MigrationHelper(object): - """class holding CubicWeb Migration Actions used by migration scripts""" - - def __init__(self, config, interactive=True, verbosity=1): - self.config = config - if config: - # no config on shell to a remote instance - self.config.init_log(logthreshold=logging.ERROR) - # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything - self.verbosity = verbosity - self.need_wrap = True - if not interactive or not verbosity: - self.confirm = yes - self.execscript_confirm = yes - else: - self.execscript_confirm = execscript_confirm - self._option_changes = [] - self.__context = {'confirm': self.confirm, - 'config': self.config, - 'interactive_mode': interactive, - } - self._context_stack = [] - - def __getattribute__(self, name): - try: - return object.__getattribute__(self, name) - except AttributeError: - cmd = 'cmd_%s' % name - # search self.__class__ to avoid infinite recursion - if hasattr(self.__class__, cmd): - meth = getattr(self, cmd) - return lambda *args, **kwargs: self.interact(args, kwargs, - meth=meth) - raise - raise AttributeError(name) - - def migrate(self, vcconf, toupgrade, options): - """upgrade the given set of cubes - - `cubes` is an ordered list of 3-uple: - (cube, fromversion, toversion) - """ - if options.fs_only: - # monkey path configuration.accept_mode so database mode (e.g. Any) - # won't be accepted - orig_accept_mode = self.config.accept_mode - def accept_mode(mode): - if mode == 'Any': - return False - return orig_accept_mode(mode) - self.config.accept_mode = accept_mode - # may be an iterator - toupgrade = tuple(toupgrade) - vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade) - ctx = self.__context - ctx['versions_map'] = vmap - if self.config.accept_mode('Any') and 'cubicweb' in vmap: - migrdir = self.config.migration_scripts_dir() - self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py')) - for cube, fromversion, toversion in toupgrade: - if cube == 'cubicweb': - migrdir = self.config.migration_scripts_dir() - else: - migrdir = self.config.cube_migration_scripts_dir(cube) - scripts = filter_scripts(self.config, migrdir, fromversion, toversion) - if scripts: - prevversion = None - for version, script in scripts: - # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call - # cube_upgraded once all script of X.Y.Z have been executed - if prevversion is not None and version != prevversion: - self.cube_upgraded(cube, prevversion) - prevversion = version - self.cmd_process_script(script) - self.cube_upgraded(cube, toversion) - else: - self.cube_upgraded(cube, toversion) - - def cube_upgraded(self, cube, version): - pass - - def shutdown(self): - pass - - def interact(self, args, kwargs, meth): - """execute the given method according to user's confirmation""" - msg = 'Execute command: %s(%s) ?' 
% ( - meth.__name__[4:], - ', '.join([repr(arg) for arg in args] + - ['%s=%r' % (n,v) for n,v in kwargs.items()])) - if 'ask_confirm' in kwargs: - ask_confirm = kwargs.pop('ask_confirm') - else: - ask_confirm = True - if not ask_confirm or self.confirm(msg): - return meth(*args, **kwargs) - - def confirm(self, question, # pylint: disable=E0202 - shell=True, abort=True, retry=False, pdb=False, default='y'): - """ask for confirmation and return true on positive answer - - if `retry` is true the r[etry] answer may return 2 - """ - possibleanswers = ['y', 'n'] - if abort: - possibleanswers.append('abort') - if pdb: - possibleanswers.append('pdb') - if shell: - possibleanswers.append('shell') - if retry: - possibleanswers.append('retry') - try: - answer = ASK.ask(question, possibleanswers, default) - except (EOFError, KeyboardInterrupt): - answer = 'abort' - if answer == 'n': - return False - if answer == 'retry': - return 2 - if answer == 'abort': - raise SystemExit(1) - if answer == 'shell': - self.interactive_shell() - return self.confirm(question, shell, abort, retry, pdb, default) - if answer == 'pdb': - import pdb - pdb.set_trace() - return self.confirm(question, shell, abort, retry, pdb, default) - return True - - def interactive_shell(self): - self.confirm = yes - self.need_wrap = False - # avoid '_' to be added to builtins by sys.display_hook - def do_not_add___to_builtins(obj): - if obj is not None: - print(repr(obj)) - sys.displayhook = do_not_add___to_builtins - local_ctx = self._create_context() - try: - import readline - from cubicweb.toolsutils import CWShellCompleter - except ImportError: - # readline not available - pass - else: - rql_completer = CWShellCompleter(local_ctx) - readline.set_completer(rql_completer.complete) - readline.parse_and_bind('tab: complete') - home_key = 'HOME' - if sys.platform == 'win32': - home_key = 'USERPROFILE' - histfile = os.path.join(os.environ[home_key], ".cwshell_history") - try: - readline.read_history_file(histfile) - except IOError: - pass - from code import interact - banner = """entering the migration python shell -just type migration commands or arbitrary python code and type ENTER to execute it -type "exit" or Ctrl-D to quit the shell and resume operation""" - # give custom readfunc to avoid http://bugs.python.org/issue1288615 - def unicode_raw_input(prompt): - return unicode(raw_input(prompt), sys.stdin.encoding) - interact(banner, readfunc=unicode_raw_input, local=local_ctx) - try: - readline.write_history_file(histfile) - except IOError: - pass - # delete instance's confirm attribute to avoid questions - del self.confirm - self.need_wrap = True - - @cached - def _create_context(self): - """return a dictionary to use as migration script execution context""" - context = self.__context - for attr in dir(self): - if attr.startswith('cmd_'): - if self.need_wrap: - context[attr[4:]] = getattr(self, attr[4:]) - else: - context[attr[4:]] = getattr(self, attr) - return context - - def update_context(self, key, value): - for context in self._context_stack: - context[key] = value - self.__context[key] = value - - def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - """execute a migration script in interactive mode - - Display the migration script path, ask for confirmation and execute it - if confirmed - - Allowed input file formats for migration scripts: - - `python` (.py) - - `sql` (.sql) - - `doctest` (.txt or .rst) - - .. 
warning:: sql migration scripts are not available in web-only instance - - You can pass script parameters with using double dash (--) in the - command line - - Context environment can have these variables defined: - - __name__ : will be determine by funcname parameter - - __file__ : is the name of the script if it exists - - __args__ : script arguments coming from command-line - - :param migrscript: name of the script - :param funcname: defines __name__ inside the shell (or use __main__) - :params args: optional arguments for funcname - :keyword scriptargs: optional arguments of the script - """ - ftypes = {'python': ('.py',), - 'doctest': ('.txt', '.rst'), - 'sql': ('.sql',)} - # sql migration scripts are not available in web-only instance - if not hasattr(self, "session"): - ftypes.pop('sql') - migrscript = os.path.normpath(migrscript) - for (script_mode, ftype) in ftypes.items(): - if migrscript.endswith(ftype): - break - else: - ftypes = ', '.join(chain(*ftypes.values())) - msg = 'ignoring %s, not a valid script extension (%s)' - raise ExecutionError(msg % (migrscript, ftypes)) - if not self.execscript_confirm(migrscript): - return - scriptlocals = self._create_context().copy() - scriptlocals.update({'__file__': migrscript, - '__args__': kwargs.pop("scriptargs", [])}) - self._context_stack.append(scriptlocals) - if script_mode == 'python': - if funcname is None: - pyname = '__main__' - else: - pyname = splitext(basename(migrscript))[0] - scriptlocals['__name__'] = pyname - with open(migrscript, 'rb') as fobj: - fcontent = fobj.read() - try: - code = compile(fcontent, migrscript, 'exec') - except SyntaxError: - # try without print_function - code = compile(fcontent, migrscript, 'exec', 0, True) - warn('[3.22] script %r should be updated to work with print_function' - % migrscript, DeprecationWarning) - exec(code, scriptlocals) - if funcname is not None: - try: - func = scriptlocals[funcname] - self.info('found %s in locals', funcname) - assert callable(func), '%s (%s) is not callable' % (func, funcname) - except KeyError: - self.critical('no %s in script %s', funcname, migrscript) - return None - return func(*args, **kwargs) - elif script_mode == 'sql': - from cubicweb.server.sqlutils import sqlexec - sqlexec(open(migrscript).read(), self.session.system_sql) - self.commit() - else: # script_mode == 'doctest' - import doctest - return doctest.testfile(migrscript, module_relative=False, - optionflags=doctest.ELLIPSIS, - # verbose mode when user input is expected - verbose=self.verbosity==2, - report=True, - encoding='utf-8', - globs=scriptlocals) - self._context_stack.pop() - - def cmd_option_renamed(self, oldname, newname): - """a configuration option has been renamed""" - self._option_changes.append(('renamed', oldname, newname)) - - def cmd_option_group_changed(self, option, oldgroup, newgroup): - """a configuration option has been moved in another group""" - self._option_changes.append(('moved', option, oldgroup, newgroup)) - - def cmd_option_added(self, optname): - """a configuration option has been added""" - self._option_changes.append(('added', optname)) - - def cmd_option_removed(self, optname): - """a configuration option has been removed""" - # can safely be ignored - #self._option_changes.append(('removed', optname)) - - def cmd_option_type_changed(self, optname, oldtype, newvalue): - """a configuration option's type has changed""" - self._option_changes.append(('typechanged', optname, oldtype, newvalue)) - - def cmd_add_cubes(self, cubes): - """modify the list of used cubes 
in the in-memory config - returns newly inserted cubes, including dependencies - """ - if isinstance(cubes, string_types): - cubes = (cubes,) - origcubes = self.config.cubes() - newcubes = [p for p in self.config.expand_cubes(cubes) - if not p in origcubes] - if newcubes: - self.config.add_cubes(newcubes) - return newcubes - - @deprecated('[3.20] use drop_cube() instead of remove_cube()') - def cmd_remove_cube(self, cube, removedeps=False): - return self.cmd_drop_cube(cube, removedeps) - - def cmd_drop_cube(self, cube, removedeps=False): - if removedeps: - toremove = self.config.expand_cubes([cube]) - else: - toremove = (cube,) - origcubes = self.config._cubes - basecubes = [c for c in origcubes if not c in toremove] - # don't fake-add any new ones, or we won't be able to really-add them later - self.config._cubes = tuple(cube for cube in self.config.expand_cubes(basecubes) - if cube in origcubes) - removed = [p for p in origcubes if not p in self.config._cubes] - if not cube in removed and cube in origcubes: - raise ConfigurationError("can't remove cube %s, " - "used as a dependency" % cube) - return removed - - def rewrite_configuration(self): - configfile = self.config.main_config_file() - if self._option_changes: - read_old_config(self.config, self._option_changes, configfile) - fd, newconfig = tempfile.mkstemp() - for optdescr in self._option_changes: - if optdescr[0] == 'added': - optdict = self.config.get_option_def(optdescr[1]) - if optdict.get('default') is REQUIRED: - self.config.input_option(optdescr[1], optdict) - self.config.generate_config(open(newconfig, 'w')) - show_diffs(configfile, newconfig, askconfirm=self.confirm is not yes) - os.close(fd) - if exists(newconfig): - os.unlink(newconfig) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(MigrationHelper, getLogger('cubicweb.migration')) - - -def version_strictly_lower(a, b): - if a is None: - return True - if b is None: - return False - if a: - a = Version(a) - if b: - b = Version(b) - return a < b - -def max_version(a, b): - return str(max(Version(a), Version(b))) - -class ConfigurationProblem(object): - """Each cube has its own list of dependencies on other cubes/versions. - - The ConfigurationProblem is used to record the loaded cubes, then to detect - inconsistencies in their dependencies. - - See configuration management on wikipedia for litterature. 
- """ - - def __init__(self, config): - self.config = config - self.cubes = {'cubicweb': cwcfg.cubicweb_version()} - - def add_cube(self, name, version): - self.cubes[name] = version - - def solve(self): - self.warnings = [] - self.errors = [] - self.dependencies = {} - self.reverse_dependencies = {} - self.constraints = {} - # read dependencies - for cube in self.cubes: - if cube == 'cubicweb': continue - self.dependencies[cube] = dict(self.config.cube_dependencies(cube)) - self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube) - # compute reverse dependencies - for cube, dependencies in self.dependencies.items(): - for name, constraint in dependencies.items(): - self.reverse_dependencies.setdefault(name,set()) - if constraint: - try: - oper, version = constraint.split() - self.reverse_dependencies[name].add( (oper, version, cube) ) - except Exception: - self.warnings.append( - 'cube %s depends on %s but constraint badly ' - 'formatted: %s' % (cube, name, constraint)) - else: - self.reverse_dependencies[name].add( (None, None, cube) ) - # check consistency - for cube, versions in sorted(self.reverse_dependencies.items()): - oper, version, source = None, None, None - # simplify constraints - if versions: - for constraint in versions: - op, ver, src = constraint - if oper is None: - oper = op - version = ver - source = src - elif op == '>=' and oper == '>=': - if version_strictly_lower(version, ver): - version = ver - source = src - elif op == None: - continue - else: - print('unable to handle %s in %s, set to `%s %s` ' - 'but currently up to `%s %s`' % - (cube, source, oper, version, op, ver)) - # "solve" constraint satisfaction problem - if cube not in self.cubes: - self.errors.append( ('add', cube, version, source) ) - elif versions: - lower_strict = version_strictly_lower(self.cubes[cube], version) - if oper in ('>=','=','=='): - if lower_strict: - self.errors.append( ('update', cube, version, source) ) - elif oper is None: - pass # no constraint on version - else: - print('unknown operator', oper) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cmp_schema.py --- a/misc/cmp_schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,24 +0,0 @@ -"""This module compare the Schema on the file system to the one in the database""" - -from cStringIO import StringIO -from cubicweb.web.schemaviewer import SchemaViewer -from logilab.common.ureports import TextWriter -import difflib - -viewer = SchemaViewer() -layout_db = viewer.visit_schema(schema, display_relations=True) -layout_fs = viewer.visit_schema(fsschema, display_relations=True) -writer = TextWriter() -stream_db = StringIO() -stream_fs = StringIO() -writer.format(layout_db, stream=stream_db) -writer.format(layout_fs, stream=stream_fs) - -stream_db.seek(0) -stream_fs.seek(0) -db = stream_db.getvalue().splitlines() -fs = stream_fs.getvalue().splitlines() -open('db_schema.txt', 'w').write(stream_db.getvalue()) -open('fs_schema.txt', 'w').write(stream_fs.getvalue()) -#for diff in difflib.ndiff(fs, db): -# print diff diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/gfx/bg.png Binary file misc/cwdesklets/gfx/bg.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/gfx/border-left.png Binary file misc/cwdesklets/gfx/border-left.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/gfx/logo_cw.png Binary file misc/cwdesklets/gfx/logo_cw.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/gfx/rss.png Binary file misc/cwdesklets/gfx/rss.png 
has changed diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/rql_query.display --- a/misc/cwdesklets/rql_query.display Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/rqlsensor/__init__.py --- a/misc/cwdesklets/rqlsensor/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,118 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -import webbrowser -reload(webbrowser) - -from sensor.Sensor import Sensor -from utils import datatypes, i18n - -from cubicweb.dbapi import connect - -_ = str - -class RQLSensor(Sensor): - - def __init__(self, *args): - global _; _ = i18n.Translator("rql-desklet") - Sensor.__init__(self) - # define configuration - self._set_config_type("appid", datatypes.TYPE_STRING, "") - self._set_config_type("user", datatypes.TYPE_STRING, "") - self._set_config_type("passwd", datatypes.TYPE_SECRET_STRING, "") - self._set_config_type("rql", datatypes.TYPE_STRING, "") - self._set_config_type("url", datatypes.TYPE_STRING, "") - self._set_config_type("delay", datatypes.TYPE_STRING, "600") - # default timer - self._add_timer(20, self.__update) - - def get_configurator(self): - configurator = self._new_configurator() - configurator.set_name(_("RQL")) - configurator.add_title(_("CubicWeb source settings")) - configurator.add_entry(_("ID",), "appid", _("The application id of this source")) - configurator.add_entry(_("User",), "user", _("The user to connect to this source")) - configurator.add_entry(_("Password",), "passwd", _("The user's password to connect to this source")) - configurator.add_entry(_("URL",), "url", _("The url of the web interface for this source")) - configurator.add_entry(_("RQL",), "rql", _("The rql query")) - configurator.add_entry(_("Update interval",), "delay", _("Delay in seconds between updates")) - return configurator - - - def call_action(self, action, path, args=[]): - index = path[-1] - output = self._new_output() - if action=="enter-line": - # change background - output.set('resultbg[%s]' % index, 'yellow') - elif action=="leave-line": - # change background - output.set('resultbg[%s]' % index, 'black') - elif action=="click-line": - # open url - output.set('resultbg[%s]' % index, 'black') - webbrowser.open(self._urls[index]) - self._send_output(output) - - def __get_connection(self): - try: - return self._v_cnx - except AttributeError: - appid, user, passwd = self._get_config("appid"), self._get_config("user"), self._get_config("passwd") - cnx = connect(database=appid, login=user, password=passwd) - self._v_cnx = cnx - return cnx - - def __run_query(self, output): - base = self._get_config('url') - rql = self._get_config('rql') - cnx = 
self.__get_connection() - cursor = cnx.cursor() - try: - rset = cursor.execute(rql) - except Exception: - del self._v_cnx - raise - self._urls = [] - output.set('layout', 'vertical, 14') - output.set('length', rset.rowcount) - i = 0 - for line in rset: - output.set('result[%s]' % i, ', '.join([str(v) for v in line[1:]])) - output.set('resultbg[%s]' % i, 'black') - try: - self._urls.append(base % 'Any X WHERE X eid %s' % line[0]) - except Exception: - self._urls.append('') - i += 1 - - def __update(self): - output = self._new_output() - try: - self.__run_query(output) - except Exception as ex: - import traceback - traceback.print_exc() - output.set('layout', 'vertical, 10') - output.set('length', 1) - output.set('result[0]', str(ex)) - self._send_output(output) - self._add_timer(int(self._get_config('delay'))*1000, self.__update) - - -def new_sensor(args): - return RQLSensor(*args) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwdesklets/web_query.display --- a/misc/cwdesklets/web_query.display Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwfs/A_FAIRE --- a/misc/cwfs/A_FAIRE Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -TACHES -====== - --- crire objet stocke/manipule les donnes - --- extraire tests de chane de caractre - -* utiliser sqlite - -* crire fonction prend chemin en argument et renvoie contenu - -* extraire tests (chane de caractre) de spec - -* utiliser yams pour schma \ No newline at end of file diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwfs/cwfs-spec.txt --- a/misc/cwfs/cwfs-spec.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,180 +0,0 @@ -======================= - Specification cubicwebfs -======================= - -Remarque: cubicwebfs c'est le siamois de yamsfs -en fait c'est un yamsfs avec une interrogation -de base RQL - -Modle -------- - -Description du modle; -:: - societe - nom - ville - - affaire - ref - - document - annee - mois - jour - type {RAP,CLI,OFR,FCT} - fichier - -document concerne affaire -affaire concerne societe - -Contenu de la base exemple ---------------------------- - -societe | nom | ville | - | CETIAD | Dijon | - | EDF R&D | Clamart | - | Logilab | Paris | - -affaire | ref | concerne | - | CTIA01 | CETIAD | - | EDFR01 | EDF R&D | - | EDFR02 | EDF R&D | - -document | annee | mois | jour | type | concerne | fichier | - | 2004 | 09 | 06 | PRE | CTIA01 | depodoc/2004/09/CTIA01-040906-PRE-1-01.pdf | - | 2005 | 02 | 01 | CLI | EDFR01 | depodoc/2005/02/EDFR01-050201-CLI-1-01.pdf | - | 2005 | 03 | 22 | OFR | EDFR01 | depodoc/2005/02/EDFR01-050322-OFR-1-01.pdf | - - -Exemples de chemins/recherches -------------------------------- - -Cherche documents de mars 2005; -:: - /document/annee/2005/mois/03/ - - -Dont le contenu successif serait; - -Test:: - - $ ls /document - annee/ mois/ jour/ type/ - affaire/ concerne/ CTIA01-040906-PRE-1-01.pdf - EDFR01-050201-CLI-1-01.pdf EDFR01-050322-OFR-1-01.pdf - - $ ls /document/annee/ - 2004/ 2005/ - - $ ls /document/annee/2005/ - mois/ jour/ type/ affaire/ - concerne/ EDFR01-050201-CLI-1-01.pdf EDFR01-050322-OFR-1-01.pdf - - $ ls /document/annee/2005/mois/ - 02/ 03/ - - $ ls /document/annee/2005/mois/03/ - jour/ type/ affaire/ concerne/ - EDFR01-050322-OFR-1-01.pdf - - -Question: est-ce que fichier/ ne va pas nous manquer ? 
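To make the first example of this spec concrete in English: "cherche documents de mars 2005" means "find the documents of March 2005", and the path /document/annee/2005/mois/03/ is meant to translate into an RQL query restricting the annee and mois attributes. A toy sketch of that mapping (path_to_rql and the variable A are illustrative names only; the actual prototype is the SytPathParser class in misc/cwfs/cwfs.py further down in this patch)::

    def path_to_rql(path):
        # toy version: entity type first, then attribute/value pairs
        parts = [p for p in path.split('/') if p]
        etype, rest = parts[0].capitalize(), parts[1:]
        restrictions = ['A is %s' % etype]
        for attr, value in zip(rest[::2], rest[1::2]):
            restrictions.append('A %s %s' % (attr, value))
        return 'Any A WHERE %s' % ', '.join(restrictions)

    # path_to_rql('/document/annee/2005/mois/03/')
    # -> 'Any A WHERE A is Document, A annee 2005, A mois 03'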
- - -Cherche documents relatifs CTIA01; -:: - /affaire/ref/CTIA01/document/ - -Dont le contenu des rpertoires successifs serait: - -Test:: - - $ ls /affaire/ - ref/ societe/ concerne/ document/ - concerne_par/ CTIA01 EDFR01 EDFR02 - - $ ls /affaire/ref/ - CTIA01/ EDFR01/ EDFR02/ - - $ ls /affaire/ref/CTIA01/ - societe/ concerne/ document/ concerne_par/ - - $ ls /affaire/ref/CTIA01/document/ - annee/ mois/ jour/ type/ - CTIA01-040906-PRE-1-01.pdf - - -Cherche documents des affaires qui concernent CETIAD; -:: - /societe/nom/CETIAD/affaire/document/ - -Dont le contenu des rpertoires successifs serait; - -Test:: - - $ ls /societe/ - nom/ ville/ affaire/ concerne_par/ - CETIAD EDF R&D Logilab - - $ ls /societe/nom/ - CETIAD EDF R&D Logilab - - $ ls /societe/nom/CETIAD/ - ville/ affaire/ concerne_par/ CETIAD Logilab - - $ ls /societe/nom/CETIAD/affaire/ - ref/ societe/ concerne/ document/ - concerne_par/ CTIA01 - - $ ls /societe/nom/CETIAD/affaire/document/ - annee/ mois/ jour/ type/ - affaire/ concerne/ CTIA01-040906-PRE-1-01.pdf - - -En particulier, pour la recherche ci-dessus on ne peut pas crire; -:: - /document/affaire/concerne/societe/CETIAD/ - -La logique est que si on est dans un rpertoire document, il faut -qu'il contienne des documents. - -Cherche documents de 2002 qui concernent des affaires -qui concernent CETIAD; -:: - /societe/CETIAD/affaire/document/annee/2002/ - -Question: est-ce que les relations doivent tre des composants -du chemin ? -Question : si les relations ne font pas partie du chemin, il faudrait -pouvoir faire des recherches en utilisant des relations anonymes (ce -qui est impossible en RQL par exemple); -:: - /document/affaire/... s'il existe plusieurs relations entre - les entits document et affaire, on ne peut pas s'en sortir - -Question: que va-t-il se passer pour des chemins du type; -:: - /affaire/CTIA*/document/ - -Nicolas: mon avis on a rien faire, car c'est le shell qui -s'en occupe. De la mme faon, le systme de fichier n'a pas - se proccuper de ~/ et les programmes reoivent pas le "qqch*" -en argument, mais directement la liste. - -Attention: si jamais l'arborescence est sans fond, les -commandes rcursives vont prendre du temps... - -Attention: dans un premier temps, un systme de fichiers en -lecture seule est satisfaisant. on verra ensuite pour l'dition. -pour l'dition, on peut s'inspirer du external editor de zope -et avoir un format d'change XML entre le serveur et l'diteur. - -Le cas suivant est dbile, faut-il l'interdire ? -:: - /document/affaire/societe/concerne_par/affaire/concerne_par/document - - -NB: manque dtail d'un cas comme /document/annee/2005/concerne/affaire/ - - diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwfs/cwfs.py --- a/misc/cwfs/cwfs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,175 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -class Schema : - - def __init__(self, schema) : - self._schema = schema - - def get_attrs(self, entity) : - return self._schema[entity][0] - - def get_relations(self, entity) : - return self._schema[entity][1] - - def get_attr_index(self, entity, attr) : - return list(self._schema[entity][0]).index(attr) - -SCHEMA = Schema({'societe': ( ('nom','ville'), - [('concerne_par','affaire'), - ] ), - 'affaire': ( ('ref',), - [('concerne','societe'), - ('concerne_par', 'document') - ] ), - 'document':( ('fichier', 'annee','mois','jour','type'), - [('concerne','affaire'), - ] ), - }) - - - -DATA = { 'societe': [ ('CETIAD', 'Dijon'), - ('EDF_R&D', 'Clamart'), - ('Logilab', 'Paris'), - ], - 'affaire': [ ('CTIA01', 'CETIAD'), - ('EDFR01', 'EDF_R&D'), - ('EDFR02', 'EDF_R&D'), - ], - 'document':[ ('CTIA01-040906-PRE-1-01.pdf','2004','09','06','PRE','CTIA01'), - ('EDFR01-050201-CLI-1-01.pdf','2005','02','01','CLI','EDFR01'), - ('EDFR01-050322-OFR-1-01.pdf','2005','03','22','OFR','EDFR01'), - ], - } - -def get_data(entity, where=[]) : - for value in DATA[entity] : - for index, val in where : - if value[index] != val : - break - else : - yield value - -class PathParser : - - def __init__(self, schema, path) : - self.schema = schema - self.path = path - self._components = iter([comp for comp in self.path.split('/') if comp]) - self._entity = None - self._attr = None - self._rel = None - self._restrictions = [] - - def parse(self) : - self._entity = next(self._components) - try: - self.process_entity() - except StopIteration : - pass - - def process_entity(self) : - _next = next(self._components) - if _next in self.schema.get_attrs(self._entity) : - self._attr = _next - _next = next(self._components) - self._restrictions.append( (self._entity, self._attr, _next) ) - self._attr = None - self._rel = None - self.process_entity() - - def get_list(self) : - if self._rel : - return - elif self._attr : - where = [] - for e,a,v in self._restrictions : - i = self.schema.get_attr_index(e, a) - where.append( (i,v) ) - i = self.schema.get_attr_index(self._entity, self._attr) - for values in get_data(self._entity,where) : - yield values[i]+'/' - else : - attr_restrict = [a for e,a,v in self._restrictions] - for attr in self.schema.get_attrs(self._entity) : - if attr not in attr_restrict : - yield attr+'/' - for data in DATA[self._entity]: - yield data[0] - for nom, entity in self.schema.get_relations(self._entity) : - yield nom+'/' - yield entity+'/' - -def ls(path) : - p = PathParser(SCHEMA,path) - p.parse() - return list(p.get_list()) - - -class SytPathParser : - - def __init__(self, schema, path) : - self.schema = schema - self.path = path - self._components = iter([comp for comp in self.path.split('/') if comp]) - self._e_type = None - self._restrictions = [] - self._alphabet = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') - - def parse(self): - self._var = self._alphabet.pop(0) - self._e_type = next(self._components) - e_type = self._e_type.capitalize() - self._restrictions.append('%s is %s' % (self._var, e_type)) - try: - self.process_entity() - except StopIteration : - pass - return 'Any %s WHERE %s' % (self._var, ', '.join(self._restrictions)) - - def process_entity(self) : - _next = next(self._components) - if _next in self.schema.get_attrs(self._e_type) : - attr = _next - try: - _next = next(self._components) - self._restrictions.append('%s %s %s' % (self._var, attr, _next)) - except 
StopIteration: - a_var = self._alphabet.pop(0) - self._restrictions.append('%s %s %s' % (self._var, attr, a_var) ) - self._var = a_var - raise - elif _next in [r for r,e in self.schema.get_relations(self._e_type)]: - rel = _next - r_var = self._alphabet.pop(0) - self._restrictions.append('%s %s %s' % (self._var, rel, r_var)) - self._var = r_var - try: - _next = next(self._components) - self._restrictions.append('%s is %s' % (r_var, _next.capitalize())) - except StopIteration: - raise - self.process_entity() - - -def to_rql(path) : - p = SytPathParser(SCHEMA,path) - return p.parse() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwfs/cwfs_test.py --- a/misc/cwfs/cwfs_test.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,66 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from logilab.common.testlib import TestCase, unittest_main - -import cubicwebfs -import sre - -def spec_parser(filename) : - """ - extract tests from specification - """ - sections = [] - buffer = "" - in_section = False - for line in open(filename) : - if line.startswith('Test::'): - in_section = True - buffer = "" - elif in_section : - if line.startswith(" ") or not line.strip() : - buffer += line.lstrip() - else : - sections.append(buffer) - in_section = False - tests = [] - for section in sections : - subsections = [t for t in section.strip().split('$ ls') if t] - for subsection in subsections : - path, results = subsection.splitlines()[0], subsection.splitlines()[1:] - path = path.strip() - items = set([i for i in sre.split('[\t\n]', '\n'.join(results)) if i]) - tests.append((path, items)) - return tests - -tests = spec_parser("cubicwebfs-spec.txt") - -class monTC(TestCase) : - pass - -for index, (path, results) in enumerate(tests) : - def f(self, p=path, r=results) : - res = set(cubicwebfs.ls(p)) - self.assertEqual(r, res) #, 'en trop %s\nmanque %s' % (r-results,results-r)) - f.__doc__ = "%s %s"%(index,path) - setattr(monTC,'test_%s'%index,f) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/cwzope/cwzope.py --- a/misc/cwzope/cwzope.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from AccessControl import getSecurityManager - -from cubicweb.dbapi import connect, Connection, Cursor -from cubicweb.common.utils import ResultSet, ResultSetIterator, ResultSetRow, Entity - -Connection.__allow_access_to_unprotected_subobjects__ = 1 -Cursor.__allow_access_to_unprotected_subobjects__ = 1 -ResultSet.__allow_access_to_unprotected_subobjects__ = 1 -ResultSetIterator.__allow_access_to_unprotected_subobjects__ = 1 -ResultSetRow.__allow_access_to_unprotected_subobjects__ = 1 -Entity.__allow_access_to_unprotected_subobjects__ = 1 - -CNX_CACHE = {} - -def get_connection(context, user=None, password=None, - host=None, database=None, group='cubicweb'): - """get a connection on an cubicweb server""" - request = context.REQUEST - zope_user = getSecurityManager().getUser() - if user is None: - user = zope_user.getId() - key = (user, host, database) - try: - return CNX_CACHE[key] - except KeyError: - if password is None: - password = zope_user._getPassword() - cnx = connect(user, password, host, database, group) - CNX_CACHE[key] = cnx - return cnx diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.0_Any.py --- a/misc/migration/3.10.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -from six import text_type - -from cubicweb.server.session import hooks_control - -for uri, cfg in config.read_sources_file().items(): - if uri in ('system', 'admin'): - continue - repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg.copy()) - -add_entity_type('CWSource') -add_relation_definition('CWSource', 'cw_source', 'CWSource') -add_entity_type('CWSourceHostConfig') - -with hooks_control(session, session.HOOKS_ALLOW_ALL, 'cw.sources'): - create_entity('CWSource', type=u'native', name=u'system') -commit() - -sql('INSERT INTO cw_source_relation(eid_from,eid_to) ' - 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s ' - 'WHERE s.cw_name=e.type') -commit() - -for uri, cfg in config.read_sources_file().items(): - if uri in ('system', 'admin'): - continue - repo.sources_by_uri.pop(uri) - config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items() - if key != 'adapter' and value is not None) - create_entity('CWSource', name=text_type(uri), type=text_type(cfg['adapter']), - config=config) -commit() - -# rename cwprops for boxes/contentnavigation -for x in rql('Any X,XK WHERE X pkey XK, ' - 'X pkey ~= "boxes.%" OR ' - 'X pkey ~= "contentnavigation.%"').entities(): - x.cw_set(pkey=u'ctxcomponents.' 
+ x.pkey.split('.', 1)[1]) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.0_common.py --- a/misc/migration/3.10.0_common.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -option_group_changed('cleanup-session-time', 'web', 'main') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.4_Any.py --- a/misc/migration/3.10.4_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -for eschema in schema.entities(): - if not (eschema.final or 'cw_source' in eschema.subjrels): - add_relation_definition(eschema.type, 'cw_source', 'CWSource', ask_confirm=False) - -sql('INSERT INTO cw_source_relation(eid_from, eid_to) ' - 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s ' - 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)') -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.5_Any.py --- a/misc/migration/3.10.5_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -sync_schema_props_perms('CWSourceHostConfig', syncperms=False) - -sql('INSERT INTO cw_source_relation(eid_from, eid_to) ' - 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s ' - 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)') -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.7_Any.py --- a/misc/migration/3.10.7_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -add_attribute('TrInfo', 'tr_count') -sync_schema_props_perms('TrInfo') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.8_Any.py --- a/misc/migration/3.10.8_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sync_schema_props_perms('CWSource', syncprops=False) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.10.9_Any.py --- a/misc/migration/3.10.9_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,32 +0,0 @@ -import sys - -if confirm('fix some corrupted entities noticed on several instances?'): - rql('DELETE CWConstraint X WHERE NOT E constrained_by X') - rql('SET X is_instance_of Y WHERE X is Y, NOT X is_instance_of Y') - commit() - -if confirm('fix existing cwuri?'): - from logilab.common.shellutils import progress - from cubicweb.server.session import hooks_control - rset = rql('Any X, XC WHERE X cwuri XC, X cwuri ~= "%/eid/%"') - title = "%i entities to fix" % len(rset) - nbops = rset.rowcount - enabled = interactive_mode - with progress(title=title, nbops=nbops, size=30, enabled=enabled) as pb: - for i, row in enumerate(rset): - with hooks_control(session, session.HOOKS_DENY_ALL, 'integrity'): - data = {'eid': row[0], 'cwuri': row[1].replace(u'/eid', u'')} - rql('SET X cwuri %(cwuri)s WHERE X eid %(eid)s', data) - if not i % 100: # commit every 100 entities to limit memory consumption - pb.text = "%i committed" % i - commit(ask_confirm=False) - pb.update() - commit(ask_confirm=False) - -try: - from cubicweb import devtools - option_group_changed('anonymous-user', 'main', 'web') - option_group_changed('anonymous-password', 'main', 'web') -except ImportError: - # cubicweb-dev unavailable, nothing needed - pass diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.11.0_Any.py --- a/misc/migration/3.11.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ 
-from datetime import datetime - -for rtype in ('cw_support', 'cw_dont_cross', 'cw_may_cross'): - drop_relation_type(rtype) - -add_entity_type('CWSourceSchemaConfig') - -if not 'url' in schema['CWSource'].subjrels: - add_attribute('CWSource', 'url') - add_attribute('CWSource', 'parser') - add_attribute('CWSource', 'latest_retrieval') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.12.9_Any.py --- a/misc/migration/3.12.9_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sync_schema_props_perms('cw_source') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.13.0_Any.py --- a/misc/migration/3.13.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -sync_schema_props_perms('cw_source', syncprops=False) -if schema['BigInt'].eid is None: - add_entity_type('BigInt') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.13.3_Any.py --- a/misc/migration/3.13.3_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -drop_relation_definition('CWSourceSchemaConfig', 'cw_schema', 'CWAttribute') -sync_schema_props_perms('cw_schema') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.13.6_Any.py --- a/misc/migration/3.13.6_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sync_schema_props_perms('CWSourceSchemaConfig') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.13.8_Any.py --- a/misc/migration/3.13.8_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -change_attribute_type('CWCache', 'timestamp', 'TZDatetime') -change_attribute_type('CWUser', 'last_login_time', 'TZDatetime') -change_attribute_type('CWSource', 'latest_retrieval', 'TZDatetime') -drop_attribute('CWSource', 'synchronizing') -add_attribute('CWSource', 'in_synchronization') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.14.0_Any.py --- a/misc/migration/3.14.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,15 +0,0 @@ -from __future__ import print_function - -config['rql-cache-size'] = config['rql-cache-size'] * 10 - -add_entity_type('CWDataImport') - -from cubicweb.schema import CONSTRAINTS, guess_rrqlexpr_mainvars -for rqlcstr in rql('Any X,XT,XV WHERE X is CWConstraint, X cstrtype XT, X value XV,' - 'X cstrtype XT, XT name IN ("RQLUniqueConstraint","RQLConstraint","RQLVocabularyConstraint"),' - 'NOT X value ~= ";%"').entities(): - expression = rqlcstr.value - mainvars = guess_rrqlexpr_mainvars(expression) - yamscstr = CONSTRAINTS[rqlcstr.type](expression, mainvars) - rqlcstr.cw_set(value=yamscstr.serialize()) - print('updated', rqlcstr.type, rqlcstr.value.strip()) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.14.7_Any.py --- a/misc/migration/3.14.7_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -# migrate default format for TriInfo `comment_format` attribute -sync_schema_props_perms('TrInfo') - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.15.0_Any.py --- a/misc/migration/3.15.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -sync_schema_props_perms('EmailAddress') - -for source in rql('CWSource X WHERE X type "ldapuser"').entities(): - config = source.dictconfig - host = config.pop('host', u'ldap') - protocol = config.pop('protocol', u'ldap') - source.cw_set(url=u'%s://%s' % (protocol, host)) - 
source.update_config(skip_unknown=True, **config) - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.15.0_common.py --- a/misc/migration/3.15.0_common.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -import ConfigParser -try: - undo_actions = config.cfgfile_parser.get('MAIN', 'undo-support', False) -except ConfigParser.NoOptionError: - pass # this conf. file was probably already migrated -else: - config.global_set_option('undo-enabled', bool(undo_actions)) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.15.4_Any.py --- a/misc/migration/3.15.4_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ -from __future__ import print_function - -from logilab.common.shellutils import generate_password -from cubicweb.server.utils import crypt_password - -for user in rql('CWUser U WHERE U cw_source S, S name "system", U upassword P, U login L').entities(): - salt = user.upassword.getvalue() - if crypt_password('', salt) == salt: - passwd = generate_password() - print('setting random password for user %s' % user.login) - user.set_attributes(upassword=passwd) - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.15.9_Any.py --- a/misc/migration/3.15.9_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -sync_schema_props_perms(('State', 'state_of', 'Workflow'), commit=False) -sync_schema_props_perms(('State', 'name', 'String')) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.16.1_Any.py --- a/misc/migration/3.16.1_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -sync_schema_props_perms(('State', 'state_of', 'Workflow'), commit=False) -sync_schema_props_perms(('State', 'name', 'String')) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.17.0_Any.py --- a/misc/migration/3.17.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -add_attribute('CWAttribute', 'extra_props') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.17.11_Any.py --- a/misc/migration/3.17.11_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -for table, column in [ - ('transactions', 'tx_time'), - ('tx_entity_actions', 'tx_uuid'), - ('tx_relation_actions', 'tx_uuid')]: - repo.system_source.create_index(session, table, column) - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.18.2_Any.py --- a/misc/migration/3.18.2_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -install_custom_sql_scripts() -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.18.4_Any.py --- a/misc/migration/3.18.4_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -sync_schema_props_perms('CWSource') -sync_schema_props_perms('CWSourceHostConfig') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.19.0_Any.py --- a/misc/migration/3.19.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -sql('DROP TABLE "deleted_entities"') - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.20.0_Any.py --- a/misc/migration/3.20.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -sync_schema_props_perms('state_of') -sync_schema_props_perms('transition_of') -sync_schema_props_perms('State') 
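As an aside on the misc/migration/ scripts relocated by this changeset: each file name encodes the version it upgrades to plus a mode ("Any" for scripts that need the database, "_common" for configuration-level steps), which is exactly what filter_scripts() in the removed migration.py above splits on before sorting the scripts by version. A minimal sketch of that naming convention (parse_migration_script is an illustrative name; logilab.common is assumed to be installed, as in the original code)::

    from logilab.common.changelog import Version

    def parse_migration_script(fname):
        # '3.20.0_Any.py' -> (Version('3.20.0'), 'Any')
        tver, mode = fname.split('_', 1)
        return Version(tver), mode.split('.', 1)[0]

    # scripts between the installed and target versions are then sorted on the
    # Version part so they run in ascending order, e.g. 3.10.0 before 3.20.0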
-sync_schema_props_perms('BaseTransition') -sync_schema_props_perms('Transition') -sync_schema_props_perms('WorkflowTransition') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.20.7_Any.py --- a/misc/migration/3.20.7_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -if repo.system_source.dbdriver == 'postgres': - install_custom_sql_scripts() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.20.8_Any.py --- a/misc/migration/3.20.8_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sync_schema_props_perms('cwuri') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.21.0_Any.py --- a/misc/migration/3.21.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,174 +0,0 @@ -from __future__ import print_function - -from cubicweb.schema import PURE_VIRTUAL_RTYPES -from cubicweb.server.schema2sql import rschema_has_table - - -def add_foreign_keys(): - source = repo.system_source - if not source.dbhelper.alter_column_support: - return - for rschema in schema.relations(): - if rschema.inlined: - add_foreign_keys_inlined(rschema) - elif rschema_has_table(rschema, skip_relations=PURE_VIRTUAL_RTYPES): - add_foreign_keys_relation(rschema) - for eschema in schema.entities(): - if eschema.final: - continue - add_foreign_key_etype(eschema) - - -def add_foreign_keys_relation(rschema): - args = {'r': rschema.type} - count = sql('SELECT COUNT(*) FROM (' - ' SELECT eid_from FROM %(r)s_relation' - ' UNION' - ' SELECT eid_to FROM %(r)s_relation' - ' EXCEPT' - ' SELECT eid FROM entities) AS eids' % args, - ask_confirm=False)[0][0] - if count: - print('%s references %d unknown entities, deleting' % (rschema, count)) - sql('DELETE FROM %(r)s_relation ' - 'WHERE eid_from IN (SELECT eid_from FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args) - sql('DELETE FROM %(r)s_relation ' - 'WHERE eid_to IN (SELECT eid_to FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args) - - args['from_fk'] = '%(r)s_relation_eid_from_fkey' % args - args['to_fk'] = '%(r)s_relation_eid_to_fkey' % args - args['table'] = '%(r)s_relation' % args - if repo.system_source.dbdriver == 'postgres': - sql('ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(from_fk)s' % args, - ask_confirm=False) - sql('ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(to_fk)s' % args, - ask_confirm=False) - elif repo.system_source.dbdriver.startswith('sqlserver'): - sql("IF OBJECT_ID('%(from_fk)s', 'F') IS NOT NULL " - "ALTER TABLE %(table)s DROP CONSTRAINT %(from_fk)s" % args, - ask_confirm=False) - sql("IF OBJECT_ID('%(to_fk)s', 'F') IS NOT NULL " - "ALTER TABLE %(table)s DROP CONSTRAINT %(to_fk)s" % args, - ask_confirm=False) - sql('ALTER TABLE %(table)s ADD CONSTRAINT %(from_fk)s ' - 'FOREIGN KEY (eid_from) REFERENCES entities (eid)' % args, - ask_confirm=False) - sql('ALTER TABLE %(table)s ADD CONSTRAINT %(to_fk)s ' - 'FOREIGN KEY (eid_to) REFERENCES entities (eid)' % args, - ask_confirm=False) - - -def add_foreign_keys_inlined(rschema): - for eschema in rschema.subjects(): - args = {'e': eschema.type, 'r': rschema.type} - args['c'] = 'cw_%(e)s_cw_%(r)s_fkey' % args - - if eschema.rdef(rschema).cardinality[0] == '1': - broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IS NULL' % args, - ask_confirm=False) - if broken_eids: - print('Required relation %(e)s.%(r)s missing' % args) - args['eids'] = ', '.join(str(eid) for eid, in broken_eids) - rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args) 
- broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IN (SELECT cw_%(r)s FROM cw_%(e)s ' - 'EXCEPT SELECT eid FROM entities)' % args, - ask_confirm=False) - if broken_eids: - print('Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args) - args['eids'] = ', '.join(str(eid) for eid, in broken_eids) - rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args) - else: - if sql('SELECT COUNT(*) FROM (' - ' SELECT cw_%(r)s FROM cw_%(e)s WHERE cw_%(r)s IS NOT NULL' - ' EXCEPT' - ' SELECT eid FROM entities) AS eids' % args, - ask_confirm=False)[0][0]: - print('%(e)s.%(r)s references unknown entities, deleting relation' % args) - sql('UPDATE cw_%(e)s SET cw_%(r)s = NULL WHERE cw_%(r)s IS NOT NULL AND cw_%(r)s IN ' - '(SELECT cw_%(r)s FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args) - - if repo.system_source.dbdriver == 'postgres': - sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, - ask_confirm=False) - elif repo.system_source.dbdriver.startswith('sqlserver'): - sql("IF OBJECT_ID('%(c)s', 'F') IS NOT NULL " - "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args, - ask_confirm=False) - sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s ' - 'FOREIGN KEY (cw_%(r)s) references entities(eid)' % args, - ask_confirm=False) - - -def add_foreign_key_etype(eschema): - args = {'e': eschema.type} - if sql('SELECT COUNT(*) FROM (' - ' SELECT cw_eid FROM cw_%(e)s' - ' EXCEPT' - ' SELECT eid FROM entities) AS eids' % args, - ask_confirm=False)[0][0]: - print('%(e)s has nonexistent entities, deleting' % args) - sql('DELETE FROM cw_%(e)s WHERE cw_eid IN ' - '(SELECT cw_eid FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args) - args['c'] = 'cw_%(e)s_cw_eid_fkey' % args - if repo.system_source.dbdriver == 'postgres': - sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, - ask_confirm=False) - elif repo.system_source.dbdriver.startswith('sqlserver'): - sql("IF OBJECT_ID('%(c)s', 'F') IS NOT NULL " - "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args, - ask_confirm=False) - sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s ' - 'FOREIGN KEY (cw_eid) REFERENCES entities (eid)' % args, - ask_confirm=False) - - -add_foreign_keys() - -cu = session.cnxset.cu -helper = repo.system_source.dbhelper - -helper.drop_index(cu, 'entities', 'extid', False) -# don't use create_index because it doesn't work for columns that may be NULL -# on sqlserver -for query in helper.sqls_create_multicol_unique_index('entities', ['extid']): - cu.execute(query) - -if 'moved_entities' not in helper.list_tables(cu): - sql(''' - CREATE TABLE moved_entities ( - eid INTEGER PRIMARY KEY NOT NULL, - extid VARCHAR(256) UNIQUE - ) - ''') - -moved_entities = sql('SELECT -eid, extid FROM entities WHERE eid < 0', - ask_confirm=False) -if moved_entities: - cu.executemany('INSERT INTO moved_entities (eid, extid) VALUES (%s, %s)', - moved_entities) - sql('DELETE FROM entities WHERE eid < 0') - -commit() - -sync_schema_props_perms('CWEType') - -sync_schema_props_perms('cwuri') - -from cubicweb.server.schema2sql import check_constraint - -for cwconstraint in rql('Any C WHERE R constrained_by C').entities(): - cwrdef = cwconstraint.reverse_constrained_by[0] - rdef = cwrdef.yams_schema() - cstr = rdef.constraint_by_eid(cwconstraint.eid) - if cstr.type() not in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): - continue - cstrname, check = check_constraint(rdef.subject, rdef.object, rdef.rtype.type, - cstr, helper, prefix='cw_') - args = {'e': 
rdef.subject.type, 'c': cstrname, 'v': check} - if repo.system_source.dbdriver == 'postgres': - sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args) - elif repo.system_source.dbdriver.startswith('sqlserver'): - sql("IF OBJECT_ID('%(c)s', 'C') IS NOT NULL " - "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args) - sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args) -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.21.1_Any.py --- a/misc/migration/3.21.1_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -# re-read ComputedRelation permissions from schema.py now that we're -# able to serialize them -for computedrtype in schema.iter_computed_relations(): - sync_schema_props_perms(computedrtype.type) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.21.2_Any.py --- a/misc/migration/3.21.2_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -sync_schema_props_perms('cwuri') - -helper = repo.system_source.dbhelper -cu = session.cnxset.cu -helper.set_null_allowed(cu, 'moved_entities', 'extid', 'VARCHAR(256)', False) - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.22.0_Any.py --- a/misc/migration/3.22.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -if confirm('use Europe/Paris as timezone?'): - timezone = 'Europe/Paris' -else: - import pytz - while True: - timezone = raw_input('enter your timezone') - if timezone in pytz.common_timezones: - break - -dbdriver = repo.system_source.dbdriver -if dbdriver == 'postgres': - sql("SET TIME ZONE '%s'" % timezone) - -for entity in schema.entities(): - if entity.final: - continue - change_attribute_type(entity.type, 'creation_date', 'TZDatetime', ask_confirm=False) - change_attribute_type(entity.type, 'modification_date', 'TZDatetime', ask_confirm=False) - -if dbdriver == 'postgres': - sql("SET TIME ZONE UTC") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.3.5_Any.py --- a/misc/migration/3.3.5_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.4.0_Any.py --- a/misc/migration/3.4.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.4.0_common.py --- a/misc/migration/3.4.0_common.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.4.3_Any.py --- a/misc/migration/3.4.3_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.5.0_Any.py --- a/misc/migration/3.5.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. 
Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.5.10_Any.py --- a/misc/migration/3.5.10_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.5.3_Any.py --- a/misc/migration/3.5.3_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.6.1_Any.py --- a/misc/migration/3.6.1_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.7.0_Any.py --- a/misc/migration/3.7.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,40 +0,0 @@ -typemap = repo.system_source.dbhelper.TYPE_MAPPING -sqls = """ -CREATE TABLE transactions ( - tx_uuid CHAR(32) PRIMARY KEY NOT NULL, - tx_user INTEGER NOT NULL, - tx_time %s NOT NULL -);; -CREATE INDEX transactions_tx_user_idx ON transactions(tx_user);; - -CREATE TABLE tx_entity_actions ( - tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE, - txa_action CHAR(1) NOT NULL, - txa_public %s NOT NULL, - txa_order INTEGER, - eid INTEGER NOT NULL, - etype VARCHAR(64) NOT NULL, - changes %s -);; -CREATE INDEX tx_entity_actions_txa_action_idx ON tx_entity_actions(txa_action);; -CREATE INDEX tx_entity_actions_txa_public_idx ON tx_entity_actions(txa_public);; -CREATE INDEX tx_entity_actions_eid_idx ON tx_entity_actions(eid);; -CREATE INDEX tx_entity_actions_etype_idx ON tx_entity_actions(etype);; - -CREATE TABLE tx_relation_actions ( - tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE, - txa_action CHAR(1) NOT NULL, - txa_public %s NOT NULL, - txa_order INTEGER, - eid_from INTEGER NOT NULL, - eid_to INTEGER NOT NULL, - rtype VARCHAR(256) NOT NULL -);; -CREATE INDEX tx_relation_actions_txa_action_idx ON tx_relation_actions(txa_action);; -CREATE INDEX tx_relation_actions_txa_public_idx ON tx_relation_actions(txa_public);; -CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);; -CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to) -""" % (typemap['Datetime'], - typemap['Boolean'], typemap['Bytes'], typemap['Boolean']) -for statement in sqls.split(';;'): - sql(statement) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.7.2_Any.py --- a/misc/migration/3.7.2_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -sql('DROP FUNCTION IF EXISTS _fsopen(bytea)') -sql('DROP FUNCTION IF EXISTS fspath(bigint, text, text)') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.7.4_Any.py --- a/misc/migration/3.7.4_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sync_schema_props_perms('TrInfo', syncprops=False) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.7.5_Any.py --- a/misc/migration/3.7.5_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -if versions_map['cubicweb'][0] == (3, 7, 4): - config['http-session-time'] *= 60 - 
config['cleanup-session-time'] *= 60 - config['cleanup-anonymous-session-time'] *= 60 diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.8.1_Any.py --- a/misc/migration/3.8.1_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -rql('SET X name "BoundaryConstraint" ' - 'WHERE X is CWConstraintType, X name "BoundConstraint"') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.8.3_Any.py --- a/misc/migration/3.8.3_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -if 'same_as' in schema: - sync_schema_props_perms('same_as', syncperms=False) -sync_schema_props_perms('Bookmark', syncperms=False) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.8.3_common.py --- a/misc/migration/3.8.3_common.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -option_group_changed('port', 'main', 'web') -option_group_changed('query-log-file', 'main', 'web') -option_group_changed('profile', 'main', 'web') -option_group_changed('max-post-length', 'main', 'web') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.8.5_Any.py --- a/misc/migration/3.8.5_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -from __future__ import print_function - -def migrate_varchar_to_nvarchar(): - dbdriver = config.system_source_config['db-driver'] - if dbdriver != "sqlserver2005": - return - - introspection_sql = """\ -SELECT table_schema, table_name, column_name, is_nullable, character_maximum_length -FROM information_schema.columns -WHERE data_type = 'VARCHAR' and table_name <> 'SYSDIAGRAMS' -""" - has_index_sql = """\ -SELECT i.name AS index_name, - i.type_desc, - i.is_unique, - i.is_unique_constraint -FROM sys.indexes AS i, sys.index_columns as j, sys.columns as k -WHERE is_hypothetical = 0 AND i.index_id <> 0 -AND i.object_id = j.object_id -AND i.index_id = j.index_id -AND i.object_id = OBJECT_ID('%(table)s') -AND k.name = '%(col)s' -AND k.object_id=i.object_id -AND j.column_id = k.column_id;""" - - generated_statements = [] - for schema, table, column, is_nullable, length in sql(introspection_sql, ask_confirm=False): - qualified_table = '[%s].[%s]' % (schema, table) - rset = sql(has_index_sql % {'table': qualified_table, 'col':column}, - ask_confirm = False) - drops = [] - creates = [] - for idx_name, idx_type, idx_unique, is_unique_constraint in rset: - if is_unique_constraint: - drops.append('ALTER TABLE %s DROP CONSTRAINT %s' % (qualified_table, idx_name)) - creates.append('ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)' % (qualified_table, idx_name, column)) - else: - drops.append('DROP INDEX %s ON %s' % (idx_name, qualified_table)) - if idx_unique: - unique = 'UNIQUE' - else: - unique = '' - creates.append('CREATE %s %s INDEX %s ON %s(%s)' % (unique, idx_type, idx_name, qualified_table, column)) - - if length == -1: - length = 'max' - if is_nullable == 'YES': - not_null = 'NULL' - else: - not_null = 'NOT NULL' - alter_sql = 'ALTER TABLE %s ALTER COLUMN %s NVARCHAR(%s) %s' % (qualified_table, column, length, not_null) - generated_statements+= drops + [alter_sql] + creates - - - for statement in generated_statements: - print(statement) - sql(statement, ask_confirm=False) - commit() - -migrate_varchar_to_nvarchar() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.9.0_Any.py --- a/misc/migration/3.9.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -if 
repo.system_source.dbdriver == 'postgres': - sql('ALTER TABLE appears ADD COLUMN weight float') - sql('UPDATE appears SET weight=1.0 ') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/3.9.5_Any.py --- a/misc/migration/3.9.5_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -if not rql('CWConstraintType X WHERE X name "RQLUniqueConstraint"', - ask_confirm=False): - rql('INSERT CWConstraintType X: X name "RQLUniqueConstraint"', - ask_confirm=False) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/bootstrapmigration_repository.py --- a/misc/migration/bootstrapmigration_repository.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,459 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""allways executed before all others in server migration - -it should only include low level schema changes -""" -from __future__ import print_function - -from six import text_type - -from cubicweb import ConfigurationError -from cubicweb.server.session import hooks_control -from cubicweb.server import schemaserial as ss - -applcubicwebversion, cubicwebversion = versions_map['cubicweb'] - -def _add_relation_definition_no_perms(subjtype, rtype, objtype): - rschema = fsschema.rschema(rtype) - rdef = rschema.rdefs[(subjtype, objtype)] - rdef.rtype = schema.rschema(rtype) - rdef.subject = schema.eschema(subjtype) - rdef.object = schema.eschema(objtype) - ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None)) - commit(ask_confirm=False) - -def replace_eid_sequence_with_eid_numrange(session): - dbh = session.repo.system_source.dbhelper - cursor = session.cnxset.cu - try: - cursor.execute(dbh.sql_sequence_current_state('entities_id_seq')) - lasteid = cursor.fetchone()[0] - except: # programming error, already migrated - return - - cursor.execute(dbh.sql_drop_sequence('entities_id_seq')) - cursor.execute(dbh.sql_create_numrange('entities_id_seq')) - cursor.execute(dbh.sql_restart_numrange('entities_id_seq', initial_value=lasteid)) - session.commit() - -if applcubicwebversion <= (3, 13, 0) and cubicwebversion >= (3, 13, 1): - sql('ALTER TABLE entities ADD asource VARCHAR(64)') - sql('UPDATE entities SET asource=cw_name ' - 'FROM cw_CWSource, cw_source_relation ' - 'WHERE entities.eid=cw_source_relation.eid_from AND cw_source_relation.eid_to=cw_CWSource.cw_eid') - commit() - -if applcubicwebversion <= (3, 14, 4) and cubicwebversion >= (3, 14, 4): - from cubicweb.server import schema2sql as y2sql - dbhelper = repo.system_source.dbhelper - rdefdef = schema['CWSource'].rdef('name') - attrtype = y2sql.type_from_constraints(dbhelper, rdefdef.object, rdefdef.constraints).split()[0] - cursor = session.cnxset.cu - sql('UPDATE entities SET asource = source WHERE asource is NULL') - 
dbhelper.change_col_type(cursor, 'entities', 'asource', attrtype, False) - dbhelper.change_col_type(cursor, 'entities', 'source', attrtype, False) - - # we now have a functional asource column, start using the normal eid_type_source method - if repo.system_source.eid_type_source == repo.system_source.eid_type_source_pre_131: - del repo.system_source.eid_type_source - -if applcubicwebversion < (3, 19, 0) and cubicwebversion >= (3, 19, 0): - try: - # need explicit drop of the indexes on some database systems (sqlserver) - sql(repo.system_source.dbhelper.sql_drop_index('entities', 'mtime')) - sql('ALTER TABLE "entities" DROP COLUMN "mtime"') - sql('ALTER TABLE "entities" DROP COLUMN "source"') - except: # programming error, already migrated - print("Failed to drop mtime or source database columns") - print("'entities' table of the database has probably been already updated") - - commit() - - replace_eid_sequence_with_eid_numrange(session) - -if applcubicwebversion < (3, 20, 0) and cubicwebversion >= (3, 20, 0): - ss._IGNORED_PROPS.append('formula') - add_attribute('CWAttribute', 'formula', commit=False) - ss._IGNORED_PROPS.remove('formula') - commit() - add_entity_type('CWComputedRType') - commit() - -if schema['TZDatetime'].eid is None: - add_entity_type('TZDatetime', auto=False) -if schema['TZTime'].eid is None: - add_entity_type('TZTime', auto=False) - -if applcubicwebversion < (3, 18, 0) and cubicwebversion >= (3, 18, 0): - driver = config.system_source_config['db-driver'] - if not (driver == 'postgres' or driver.startswith('sqlserver')): - import sys - print('This migration is not supported for backends other than sqlserver or postgres (yet).', file=sys.stderr) - sys.exit(1) - - add_relation_definition('CWAttribute', 'add_permission', 'CWGroup') - add_relation_definition('CWAttribute', 'add_permission', 'RQLExpression') - - # a bad defaultval in 3.13.8 schema was fixed in 3.13.9, but the migration was missed - rql('SET ATTR defaultval NULL WHERE ATTR from_entity E, E name "CWSource", ATTR relation_type T, T name "in_synchronization"') - - # the migration gets confused when we change rdefs out from under it. So - # explicitly remove this size constraint so it doesn't stick around and break - # things later. 
- rdefeid = schema['defaultval'].rdefs.values()[0].eid - rql('DELETE CWConstraint C WHERE C cstrtype T, T name "SizeConstraint", R constrained_by C, R eid %(eid)s', {'eid': rdefeid}) - - sync_schema_props_perms('defaultval') - - def convert_defaultval(cwattr, default): - from decimal import Decimal - import yams - from cubicweb import Binary - if default is None: - return - if isinstance(default, Binary): - # partially migrated instance, try to be idempotent - return default - atype = cwattr.to_entity[0].name - if atype == 'Boolean': - # boolean attributes with default=False were stored as '' - assert default in ('True', 'False', ''), repr(default) - default = default == 'True' - elif atype in ('Int', 'BigInt'): - default = int(default) - elif atype == 'Float': - default = float(default) - elif atype == 'Decimal': - default = Decimal(default) - elif atype in ('Date', 'Datetime', 'TZDatetime', 'Time'): - try: - # handle NOW and TODAY, keep them stored as strings - yams.KEYWORD_MAP[atype][default.upper()] - default = default.upper() - except KeyError: - # otherwise get an actual date or datetime - default = yams.DATE_FACTORY_MAP[atype](default) - else: - assert atype == 'String', atype - default = text_type(default) - return Binary.zpickle(default) - - dbh = repo.system_source.dbhelper - - - sql('ALTER TABLE cw_cwattribute ADD new_defaultval %s' % dbh.TYPE_MAPPING['Bytes']) - - for cwattr in rql('CWAttribute X').entities(): - olddefault = cwattr.defaultval - if olddefault is not None: - req = "UPDATE cw_cwattribute SET new_defaultval = %(val)s WHERE cw_eid = %(eid)s" - args = {'val': dbh.binary_value(convert_defaultval(cwattr, olddefault).getvalue()), 'eid': cwattr.eid} - sql(req, args, ask_confirm=False) - - sql('ALTER TABLE cw_cwattribute DROP COLUMN cw_defaultval') - if driver == 'postgres': - sql('ALTER TABLE cw_cwattribute RENAME COLUMN new_defaultval TO cw_defaultval') - else: # sqlserver - sql("sp_rename 'cw_cwattribute.new_defaultval', 'cw_defaultval', 'COLUMN'") - - - # Set object type to "Bytes" for CWAttribute's "defaultval" attribute - rql('SET X to_entity B WHERE X is CWAttribute, X from_entity Y, Y name "CWAttribute", ' - 'X relation_type Z, Z name "defaultval", B name "Bytes", NOT X to_entity B') - - oldrdef = schema['CWAttribute'].rdef('defaultval') - import yams.buildobjs as ybo - newrdef = ybo.RelationDefinition('CWAttribute', 'defaultval', 'Bytes') - newrdef.eid = oldrdef.eid - schema.add_relation_def(newrdef) - schema.del_relation_def('CWAttribute', 'defaultval', 'String') - - commit() - - sync_schema_props_perms('defaultval') - - for rschema in schema.relations(): - if rschema.symmetric: - subjects = set(repr(e.type) for e in rschema.subjects()) - objects = set(repr(e.type) for e in rschema.objects()) - assert subjects == objects - martians = set(str(eid) for eid, in sql('SELECT eid_to FROM %s_relation, entities WHERE eid_to = eid AND type NOT IN (%s)' % - (rschema.type, ','.join(subjects)))) - martians |= set(str(eid) for eid, in sql('SELECT eid_from FROM %s_relation, entities WHERE eid_from = eid AND type NOT IN (%s)' % - (rschema.type, ','.join(subjects)))) - if martians: - martians = ','.join(martians) - print('deleting broken relations %s for eids %s' % (rschema.type, martians)) - sql('DELETE FROM %s_relation WHERE eid_from IN (%s) OR eid_to IN (%s)' % (rschema.type, martians, martians)) - with session.deny_all_hooks_but(): - rql('SET X %(r)s Y WHERE Y %(r)s X, NOT X %(r)s Y' % {'r': rschema.type}) - commit() - - - # multi columns unique constraints regeneration - 
from cubicweb.server import schemaserial - - # syncschema hooks would try to remove indices but - # 1) we already do that below - # 2) the hook expects the CWUniqueTogetherConstraint.name attribute that hasn't - # yet been added - with session.allow_all_hooks_but('syncschema'): - rql('DELETE CWUniqueTogetherConstraint C') - commit() - add_attribute('CWUniqueTogetherConstraint', 'name') - - # low-level wipe code for postgres & sqlserver, plain sql ... - if driver == 'postgres': - for indexname, in sql('select indexname from pg_indexes'): - if indexname.startswith('unique_'): - print('dropping index', indexname) - sql('DROP INDEX %s' % indexname) - commit() - elif driver.startswith('sqlserver'): - for viewname, in sql('select name from sys.views'): - if viewname.startswith('utv_'): - print('dropping view (index should be cascade-deleted)', viewname) - sql('DROP VIEW %s' % viewname) - commit() - - # recreate the constraints, hook will lead to low-level recreation - for eschema in sorted(schema.entities()): - if eschema._unique_together: - print('recreate unique indexes for', eschema) - rql_args = schemaserial.uniquetogether2rqls(eschema) - for rql, args in rql_args: - args['x'] = eschema.eid - session.execute(rql, args) - commit() - - # all attributes perms have to be refreshed ... - for rschema in sorted(schema.relations()): - if rschema.final: - if rschema.type in fsschema: - print('sync perms for', rschema.type) - sync_schema_props_perms(rschema.type, syncprops=False, ask_confirm=False, commit=False) - else: - print('WARNING: attribute %s missing from fs schema' % rschema.type) - commit() - -if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0): - try: - add_cube('sioc', update_database=False) - except ConfigurationError: - if not confirm('In cubicweb 3.17 sioc views have been moved to the sioc ' - 'cube, which is not installed. Continue anyway?'): - raise - try: - add_cube('embed', update_database=False) - except ConfigurationError: - if not confirm('In cubicweb 3.17 embedding views have been moved to the embed ' - 'cube, which is not installed. Continue anyway?'): - raise - try: - add_cube('geocoding', update_database=False) - except ConfigurationError: - if not confirm('In cubicweb 3.17 geocoding views have been moved to the geocoding ' - 'cube, which is not installed. Continue anyway?'): - raise - - -if applcubicwebversion <= (3, 14, 0) and cubicwebversion >= (3, 14, 0): - if 'require_permission' in schema and not 'localperms'in repo.config.cubes(): - from cubicweb import ExecutionError - try: - add_cube('localperms', update_database=False) - except ConfigurationError: - raise ExecutionError('In cubicweb 3.14, CWPermission and related stuff ' - 'has been moved to cube localperms. 
Install it first.') - - -if applcubicwebversion == (3, 6, 0) and cubicwebversion >= (3, 6, 0): - CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T', - ask_confirm=False)) - _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'CWGroup') - _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'RQLExpression') - rql('SET X update_permission Y WHERE X is CWAttribute, X add_permission Y') - drop_relation_definition('CWAttribute', 'delete_permission', 'CWGroup') - drop_relation_definition('CWAttribute', 'delete_permission', 'RQLExpression') - -elif applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0): - CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T', - ask_confirm=False)) - session.set_cnxset() - permsdict = ss.deserialize_ertype_permissions(session) - - with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'): - for rschema in repo.schema.relations(): - rpermsdict = permsdict.get(rschema.eid, {}) - for rdef in rschema.rdefs.values(): - for action in rdef.ACTIONS: - actperms = [] - for something in rpermsdict.get(action == 'update' and 'add' or action, ()): - if isinstance(something, tuple): - actperms.append(rdef.rql_expression(*something)) - else: # group name - actperms.append(something) - rdef.set_action_permissions(action, actperms) - for action in ('read', 'add', 'delete'): - _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'CWGroup') - _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'RQLExpression') - for action in ('read', 'update'): - _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'CWGroup') - _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'RQLExpression') - for action in ('read', 'add', 'delete'): - rql('SET X %s_permission Y WHERE X is CWRelation, ' - 'RT %s_permission Y, X relation_type RT, Y is CWGroup' % (action, action)) - rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' - 'X %s_permission Y WHERE X is CWRelation, ' - 'X relation_type RT, RT %s_permission Y2, Y2 exprtype YET, ' - 'Y2 mainvars YMV, Y2 expression YEX' % (action, action)) - rql('SET X read_permission Y WHERE X is CWAttribute, ' - 'RT read_permission Y, X relation_type RT, Y is CWGroup') - rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' - 'X read_permission Y WHERE X is CWAttribute, ' - 'X relation_type RT, RT read_permission Y2, Y2 exprtype YET, ' - 'Y2 mainvars YMV, Y2 expression YEX') - rql('SET X update_permission Y WHERE X is CWAttribute, ' - 'RT add_permission Y, X relation_type RT, Y is CWGroup') - rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' - 'X update_permission Y WHERE X is CWAttribute, ' - 'X relation_type RT, RT add_permission Y2, Y2 exprtype YET, ' - 'Y2 mainvars YMV, Y2 expression YEX') - for action in ('read', 'add', 'delete'): - drop_relation_definition('CWRType', '%s_permission' % action, 'CWGroup', commit=False) - drop_relation_definition('CWRType', '%s_permission' % action, 'RQLExpression') - sync_schema_props_perms('read_permission', syncperms=False) # fix read_permission cardinality - -if applcubicwebversion < (3, 9, 6) and cubicwebversion >= (3, 9, 6) and not 'CWUniqueTogetherConstraint' in schema: - add_entity_type('CWUniqueTogetherConstraint') - -if not ('CWUniqueTogetherConstraint', 'CWRType') in schema['relations'].rdefs: - add_relation_definition('CWUniqueTogetherConstraint', 'relations', 
'CWRType') - rql('SET C relations RT WHERE C relations RDEF, RDEF relation_type RT') - commit() - drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWAttribute') - drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWRelation') - - -if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0): - - with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'): - session.set_shared_data('do-not-insert-cwuri', True) - add_relation_type('cwuri') - base_url = session.base_url() - for eid, in rql('Any X', ask_confirm=False): - type, source, extid = session.describe(eid) - if source == 'system': - rql('SET X cwuri %(u)s WHERE X eid %(x)s', - {'x': eid, 'u': u'%s%s' % (base_url, eid)}) - isession.commit() - session.set_shared_data('do-not-insert-cwuri', False) - -if applcubicwebversion < (3, 5, 0) and cubicwebversion >= (3, 5, 0): - # check that migration is not doomed - rset = rql('Any X,Y WHERE X transition_of E, Y transition_of E, ' - 'X name N, Y name N, NOT X identity Y', - ask_confirm=False) - if rset: - from logilab.common.shellutils import ASK - if not ASK.confirm('Migration will fail because of transitions with the same name. ' - 'Continue anyway ?'): - import sys - sys.exit(1) - # proceed with migration - add_entity_type('Workflow') - add_entity_type('BaseTransition') - add_entity_type('WorkflowTransition') - add_entity_type('SubWorkflowExitPoint') - # drop explicit 'State allowed_transition Transition' since it should be - # infered due to yams inheritance. However we've to disable the schema - # sync hook first to avoid to destroy existing data... - try: - from cubicweb.hooks import syncschema - repo.vreg.unregister(syncschema.AfterDelRelationTypeHook) - try: - drop_relation_definition('State', 'allowed_transition', 'Transition') - finally: - repo.vreg.register(syncschema.AfterDelRelationTypeHook) - except ImportError: # syncschema is in CW >= 3.6 only - from cubicweb.server.schemahooks import after_del_relation_type - repo.hm.unregister_hook(after_del_relation_type, - 'after_delete_relation', 'relation_type') - try: - drop_relation_definition('State', 'allowed_transition', 'Transition') - finally: - repo.hm.register_hook(after_del_relation_type, - 'after_delete_relation', 'relation_type') - schema.rebuild_infered_relations() # need to be explicitly called once everything is in place - - for et in rql('DISTINCT Any ET,ETN WHERE S state_of ET, ET name ETN', - ask_confirm=False).entities(): - wf = add_workflow(u'default %s workflow' % et.name, et.name, - ask_confirm=False) - rql('SET S state_of WF WHERE S state_of ET, ET eid %(et)s, WF eid %(wf)s', - {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False) - rql('SET T transition_of WF WHERE T transition_of ET, ET eid %(et)s, WF eid %(wf)s', - {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False) - rql('SET WF initial_state S WHERE ET initial_state S, ET eid %(et)s, WF eid %(wf)s', - {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False) - - - rql('DELETE TrInfo TI WHERE NOT TI from_state S') - rql('SET TI by_transition T WHERE TI from_state FS, TI to_state TS, ' - 'FS allowed_transition T, T destination_state TS') - commit() - - drop_relation_definition('State', 'state_of', 'CWEType') - drop_relation_definition('Transition', 'transition_of', 'CWEType') - drop_relation_definition('CWEType', 'initial_state', 'State') - - sync_schema_props_perms() - -if applcubicwebversion < (3, 2, 2) and cubicwebversion >= (3, 2, 1): - from base64 import b64encode - for eid, extid in sql('SELECT eid, extid 
FROM entities ' - 'WHERE extid is NOT NULL', - ask_confirm=False): - sql('UPDATE entities SET extid=%(extid)s WHERE eid=%(eid)s', - {'extid': b64encode(extid), 'eid': eid}, ask_confirm=False) - commit() - -if applcubicwebversion < (3, 2, 0) and cubicwebversion >= (3, 2, 0): - add_cube('card', update_database=False) - - -if applcubicwebversion < (3, 21, 1) and cubicwebversion >= (3, 21, 1): - add_relation_definition('CWComputedRType', 'read_permission', 'CWGroup') - add_relation_definition('CWComputedRType', 'read_permission', 'RQLExpression') - - -def sync_constraint_types(): - """Make sure the repository knows about all constraint types defined in the code""" - from cubicweb.schema import CONSTRAINTS - repo_constraints = set(row[0] for row in rql('Any N WHERE X is CWConstraintType, X name N')) - - for cstrtype in set(CONSTRAINTS) - repo_constraints: - if cstrtype == 'BoundConstraint': - # was renamed to BoundaryConstraint, we don't need the old name - continue - rql('INSERT CWConstraintType X: X name %(name)s', {'name': cstrtype}) - - commit() - -sync_constraint_types() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/migration/postcreate.py --- a/misc/migration/postcreate.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,77 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""cubicweb post creation script, set user's workflow""" -from __future__ import print_function - -from six import text_type - -from cubicweb import _ - - -# insert versions -create_entity('CWProperty', pkey=u'system.version.cubicweb', - value=text_type(config.cubicweb_version())) -for cube in config.cubes(): - create_entity('CWProperty', pkey=u'system.version.%s' % cube.lower(), - value=text_type(config.cube_version(cube))) - -# some entities have been added before schema entities, fix the 'is' and -# 'is_instance_of' relations -for rtype in ('is', 'is_instance_of'): - sql('INSERT INTO %s_relation ' - 'SELECT X.eid, ET.cw_eid FROM entities as X, cw_CWEType as ET ' - 'WHERE X.type=ET.cw_name AND NOT EXISTS(' - ' SELECT 1 from %s_relation ' - ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % (rtype, rtype)) - -# user workflow -userwf = add_workflow(_('default user workflow'), 'CWUser') -activated = userwf.add_state(_('activated'), initial=True) -deactivated = userwf.add_state(_('deactivated')) -userwf.add_transition(_('deactivate'), (activated,), deactivated, - requiredgroups=(u'managers',)) -userwf.add_transition(_('activate'), (deactivated,), activated, - requiredgroups=(u'managers',)) - -# create anonymous user if all-in-one config and anonymous user has been specified -if hasattr(config, 'anonymous_user'): - anonlogin, anonpwd = config.anonymous_user() - if anonlogin == session.user.login: - print('you are using a manager account as anonymous user.') - print('Hopefully this is not a production instance...') - elif anonlogin: - from cubicweb.server import create_user - create_user(session, text_type(anonlogin), anonpwd, u'guests') - -# need this since we already have at least one user in the database (the default admin) -for user in rql('Any X WHERE X is CWUser').entities(): - rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': user.eid, 's': activated.eid}) - -# on interactive mode, ask for level 0 persistent options -if interactive_mode: - cfg = config.persistent_options_configuration() - cfg.input_config(inputlevel=0) - for section, options in cfg.options_by_section(): - for optname, optdict, value in options: - key = u'%s.%s' % (section, optname) - default = cfg.option_default(optname, optdict) - # only record values differing from default - if value != default: - rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', - {'k': key, 'v': value}) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/chpasswd.py --- a/misc/scripts/chpasswd.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -import sys -import getpass - -from cubicweb import Binary -from cubicweb.server.utils import crypt_password - - -if __args__: - login = __args__.pop() -else: - login = raw_input("login? ") - -rset = rql('Any U WHERE U is CWUser, U login %(login)s', {'login': login}) - -if len(rset) != 1: - sys.exit("user '%s' does not exist!" % login) - -pass1 = getpass.getpass(prompt='Enter new password? ') -pass2 = getpass.getpass(prompt='Confirm? ') - -if pass1 != pass2: - sys.exit("passwords don't match!") - -crypted = crypt_password(pass1) - -cwuser = rset.get_entity(0,0) -cwuser.cw_set(upassword=Binary(crypted)) -commit() - -print("password updated.") diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/cwuser_ldap2system.py --- a/misc/scripts/cwuser_ldap2system.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,42 +0,0 @@ -from __future__ import print_function - -import base64 -from cubicweb.server.utils import crypt_password - -dbdriver = config.system_source_config['db-driver'] -from logilab.database import get_db_helper -dbhelper = get_db_helper(driver) - -insert = ('INSERT INTO cw_cwuser (cw_creation_date,' - ' cw_eid,' - ' cw_modification_date,' - ' cw_login,' - ' cw_firstname,' - ' cw_surname,' - ' cw_last_login_time,' - ' cw_upassword,' - ' cw_cwuri) ' - "VALUES (%(mtime)s, %(eid)s, %(mtime)s, %(login)s, " - " %(firstname)s, %(surname)s, %(mtime)s, %(pwd)s, 'foo');") -update = "UPDATE entities SET source='system' WHERE eid=%(eid)s;" -rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False) -for eid, type, source, extid, mtime in rset: - if type != 'CWUser': - print("don't know what to do with entity type", type) - continue - if not source.lower().startswith('ldap'): - print("don't know what to do with source type", source) - continue - extid = base64.decodestring(extid) - ldapinfos = [x.strip().split('=') for x in extid.split(',')] - login = ldapinfos[0][1] - firstname = login.capitalize() - surname = login.capitalize() - args = dict(eid=eid, type=type, source=source, login=login, - firstname=firstname, surname=surname, mtime=mtime, - pwd=dbhelper.binary_value(crypt_password('toto'))) - print(args) - sql(insert, args) - sql(update, args) - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/detect_cycle.py --- a/misc/scripts/detect_cycle.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -from __future__ import print_function - -try: - rtype, = __args__ -except ValueError: - print('USAGE: cubicweb-ctl shell detect_cycle.py -- ') - print() - -graph = {} -for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype): - graph.setdefault(fromeid, []).append(toeid) - -from logilab.common.graph import get_cycles - -for cycle in get_cycles(graph): - print('cycle', '->'.join(str(n) for n in cycle)) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/ldap_change_base_dn.py --- a/misc/scripts/ldap_change_base_dn.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ -from __future__ import print_function - -from base64 import b64decode, b64encode -try: - uri, newdn = __args__ -except ValueError: - print('USAGE: cubicweb-ctl shell ldap_change_base_dn.py -- ') - print() - print('you should not have updated your sources file yet') - -olddn = repo.sources_by_uri[uri].config['user-base-dn'] - -assert olddn != newdn - -raw_input("Ensure you've stopped the instance, type enter when done.") - -for eid, extid in sql("SELECT eid, extid FROM 
entities WHERE source='%s'" % uri): - olduserdn = b64decode(extid) - newuserdn = olduserdn.replace(olddn, newdn) - if newuserdn != olduserdn: - print(olduserdn, '->', newuserdn) - sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid)) - -commit() - -print('you can now update the sources file to the new dn and restart the instance') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/ldapuser2ldapfeed.py --- a/misc/scripts/ldapuser2ldapfeed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,98 +0,0 @@ -"""turn a pyro source into a datafeed source - -Once this script is run, execute c-c db-check to cleanup relation tables. -""" -from __future__ import print_function - -import sys -from collections import defaultdict -from logilab.common.shellutils import generate_password - -try: - source_name, = __args__ - source = repo.sources_by_uri[source_name] -except ValueError: - print('you should specify the source name as script argument (i.e. after --' - ' on the command line)') - sys.exit(1) -except KeyError: - print('%s is not an active source' % source_name) - sys.exit(1) - -# check source is reachable before doing anything -if not source.get_connection().cnx: - print('%s is not reachable. Fix this before running this script' % source_name) - sys.exit(1) - -raw_input('Ensure you have shutdown all instances of this application before continuing.' - ' Type enter when ready.') - -system_source = repo.system_source - -from datetime import datetime -from cubicweb.server.edition import EditedEntity - - -print('******************** backport entity content ***************************') - -todelete = defaultdict(list) -extids = set() -duplicates = [] -for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities(): - etype = entity.cw_etype - if not source.support_entity(etype): - print("source doesn't support %s, delete %s" % (etype, entity.eid)) - todelete[etype].append(entity) - continue - try: - entity.complete() - except Exception: - print('%s %s much probably deleted, delete it (extid %s)' % ( - etype, entity.eid, entity.cw_metainformation()['extid'])) - todelete[etype].append(entity) - continue - print('get back', etype, entity.eid) - entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) - if not entity.creation_date: - entity.cw_edited['creation_date'] = datetime.utcnow() - if not entity.modification_date: - entity.cw_edited['modification_date'] = datetime.utcnow() - if not entity.upassword: - entity.cw_edited['upassword'] = generate_password() - extid = entity.cw_metainformation()['extid'] - if not entity.cwuri: - entity.cw_edited['cwuri'] = '%s/?dn=%s' % ( - source.urls[0], extid.decode('utf-8', 'ignore')) - print(entity.cw_edited) - if extid in extids: - duplicates.append(extid) - continue - extids.add(extid) - system_source.add_entity(session, entity) - sql("UPDATE entities SET source='system' " - "WHERE eid=%(eid)s", {'eid': entity.eid}) - -# only cleanup entities table, remaining stuff should be cleaned by a c-c -# db-check to be run after this script -if duplicates: - print('found %s duplicate entries' % len(duplicates)) - from pprint import pprint - pprint(duplicates) - -print(len(todelete), 'entities will be deleted') -for etype, entities in todelete.items(): - print('deleting', etype, [e.login for e in entities]) - system_source.delete_info_multi(session, entities, source_name) - - - -source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0) 
-source_ent.cw_set(type=u"ldapfeed", parser=u"ldapfeed") - - -if raw_input('Commit?') in 'yY': - print('committing') - commit() -else: - rollback() - print('rolled back') diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/pyroforge2datafeed.py --- a/misc/scripts/pyroforge2datafeed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,134 +0,0 @@ -"""turn a pyro source into a datafeed source - -Once this script is run, execute c-c db-check to cleanup relation tables. -""" -from __future__ import print_function - -import sys - -try: - source_name, = __args__ - source = repo.sources_by_uri[source_name] -except ValueError: - print('you should specify the source name as script argument (i.e. after --' - ' on the command line)') - sys.exit(1) -except KeyError: - print('%s is not an active source' % source_name) - sys.exit(1) - -# check source is reachable before doing anything -try: - source.get_connection()._repo -except AttributeError: - print('%s is not reachable. Fix this before running this script' % source_name) - sys.exit(1) - -raw_input('Ensure you have shutdown all instances of this application before continuing.' - ' Type enter when ready.') - -system_source = repo.system_source - -from base64 import b64encode -from cubicweb.server.edition import EditedEntity - -DONT_GET_BACK_ETYPES = set(( # XXX edit as desired - 'State', - 'RecipeStep', 'RecipeStepInput', 'RecipeStepOutput', - 'RecipeTransition', 'RecipeTransitionCondition', - 'NarvalConditionExpression', 'Recipe', - # XXX TestConfig - )) - - -print('******************** backport entity content ***************************') - -from cubicweb.server import debugged -todelete = {} -host = source.config['base-url'].split('://')[1] -for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities(): - etype = entity.cw_etype - if not source.support_entity(etype): - print("source doesn't support %s, delete %s" % (etype, entity.eid)) - elif etype in DONT_GET_BACK_ETYPES: - print('ignore %s, delete %s' % (etype, entity.eid)) - else: - try: - entity.complete() - if not host in entity.cwuri: - print('SKIP foreign entity', entity.cwuri, source.config['base-url']) - continue - except Exception: - print('%s %s much probably deleted, delete it (extid %s)' % ( - etype, entity.eid, entity.cw_metainformation()['extid'])) - else: - print('get back', etype, entity.eid) - entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) - system_source.add_entity(session, entity) - sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s " - "WHERE eid=%(eid)s", {'asource': source_name, - 'extid': b64encode(entity.cwuri), - 'eid': entity.eid}) - continue - todelete.setdefault(etype, []).append(entity) - -# only cleanup entities table, remaining stuff should be cleaned by a c-c -# db-check to be run after this script -for entities in todelete.values(): - system_source.delete_info_multi(session, entities, source_name) - - -print('******************** backport mapping **********************************') -session.disable_hook_categories('cw.sources') -mapping = [] -for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s', - {'s': source.eid}).entities(): - schemaent = mappart.cw_schema[0] - if schemaent.cw_etype != 'CWEType': - assert schemaent.cw_etype == 'CWRType' - sch = schema._eid_index[schemaent.eid] - for rdef in sch.rdefs.values(): - if not source.support_entity(rdef.subject) \ - or not source.support_entity(rdef.object): - continue - if 
rdef.subject in DONT_GET_BACK_ETYPES \ - and rdef.object in DONT_GET_BACK_ETYPES: - print('dont map', rdef) - continue - if rdef.subject in DONT_GET_BACK_ETYPES: - options = u'action=link\nlinkattr=name' - roles = 'object', - elif rdef.object in DONT_GET_BACK_ETYPES: - options = u'action=link\nlinkattr=name' - roles = 'subject', - else: - options = u'action=copy' - if rdef.rtype in ('use_environment',): - roles = 'object', - else: - roles = 'subject', - print('map', rdef, options, roles) - for role in roles: - mapping.append( ( - (str(rdef.subject), str(rdef.rtype), str(rdef.object)), - options + '\nrole=%s' % role) ) - mappart.cw_delete() - -source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0) -source_ent.init_mapping(mapping) - -# change source properties -config = u'''synchronize=yes -synchronization-interval=10min -delete-entities=no -''' -rql('SET X type "datafeed", X parser "cw.entityxml", X url %(url)s, X config %(config)s ' - 'WHERE X eid %(x)s', - {'x': source.eid, 'config': config, - 'url': source.config['base-url']+'/project'}) - - -commit() - -from cubes.apycot import recipes -recipes.create_quick_recipe(session) diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/repair_file_1-9_migration.py --- a/misc/scripts/repair_file_1-9_migration.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,52 +0,0 @@ -"""execute this script if you've migration to file >= 1.9.0 with cubicweb <= 3.9.2 - -FYI, this migration occurred : -* on our intranet on July 07 2010 -* on our extranet on July 16 2010 -""" -from __future__ import print_function - -try: - backupinstance, = __args__ -except ValueError: - print('USAGE: cubicweb-ctl shell repair_file_1-9_migration.py -- ') - print() - print('you should restored the backup on a new instance, accessible through pyro') - -from cubicweb import cwconfig, dbapi -from cubicweb.server.session import hooks_control - -defaultadmin = repo.config.default_admin_config -backupcfg = cwconfig.instance_configuration(backupinstance) -backupcfg.repairing = True -backuprepo, backupcnx = dbapi.in_memory_repo_cnx(backupcfg, defaultadmin['login'], - password=defaultadmin['password'], - host='localhost') -backupcu = backupcnx.cursor() - -with hooks_control(session, session.HOOKS_DENY_ALL): - rql('SET X is Y WHERE X is File, Y name "File", NOT X is Y') - rql('SET X is_instance_of Y WHERE X is File, Y name "File", NOT X is_instance_of Y') - for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' - 'X from_entity Y, Y name "Image", X is CWRelation, ' - 'EXISTS(XX is CWRelation, XX relation_type RT, ' - 'XX from_entity YY, YY name "File")'): - if rtype in ('is', 'is_instance_of'): - continue - print(rtype) - for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype): - print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ') - print(rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype), - {'f': feid, 'x': xeid})) - - for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' - 'X to_entity Y, Y name "Image", X is CWRelation, ' - 'EXISTS(XX is CWRelation, XX relation_type RT, ' - 'XX to_entity YY, YY name "File")'): - print(rtype) - for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype): - print('restoring relation %s between %s and file %s' % (rtype, xeid, feid), end=' ') - print(rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s 
F' % (rtype, rtype), - {'f': feid, 'x': xeid})) - -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 misc/scripts/repair_splitbrain_ldapuser_source.py --- a/misc/scripts/repair_splitbrain_ldapuser_source.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,109 +0,0 @@ -""" -CAUTION: READ THIS CAREFULLY - -Sometimes it happens that ldap (specifically ldapuser type) source -yield "ghost" users. The reasons may vary (server upgrade while some -instances are still running & syncing with the ldap source, unmanaged -updates to the upstream ldap, etc.). - -This script was written and refined enough times that we are confident -in that it does something reasonnable (at least it did for the -target application). - -However you should really REALLY understand what it does before -deciding to apply it for you. And then ADAPT it tou your needs. - -""" -from __future__ import print_function - -import base64 -from collections import defaultdict - -from cubicweb.server.session import hooks_control - -try: - source_name, = __args__ - source = repo.sources_by_uri[source_name] -except ValueError: - print('you should specify the source name as script argument (i.e. after --' - ' on the command line)') - sys.exit(1) -except KeyError: - print('%s is not an active source' % source_name) - sys.exit(1) - -# check source is reachable before doing anything -if not source.get_connection().cnx: - print('%s is not reachable. Fix this before running this script' % source_name) - sys.exit(1) - -def find_dupes(): - # XXX this retrieves entities from a source name "ldap" - # you will want to adjust - rset = sql("SELECT eid, extid FROM entities WHERE source='%s'" % source_name) - extid2eids = defaultdict(list) - for eid, extid in rset: - extid2eids[extid].append(eid) - return dict((base64.b64decode(extid).lower(), eids) - for extid, eids in extid2eids.items() - if len(eids) > 1) - -def merge_dupes(dupes, docommit=False): - gone_eids = [] - CWUser = schema['CWUser'] - for extid, eids in dupes.items(): - newest = eids.pop() # we merge everything on the newest - print('merging ghosts of', extid, 'into', newest) - # now we merge pairwise into the newest - for old in eids: - subst = {'old': old, 'new': newest} - print(' merging', old) - gone_eids.append(old) - for rschema in CWUser.subject_relations(): - if rschema.final or rschema == 'identity': - continue - if CWUser.rdef(rschema, 'subject').composite == 'subject': - # old 'composite' property is wiped ... - # think about email addresses, excel preferences - for eschema in rschema.objects(): - rql('DELETE %s X WHERE U %s X, U eid %%(old)s' % (eschema, rschema), subst) - else: - # relink the new user to its old relations - rql('SET NU %s X WHERE NU eid %%(new)s, NOT NU %s X, OU %s X, OU eid %%(old)s' % - (rschema, rschema, rschema), subst) - # delete the old relations - rql('DELETE U %s X WHERE U eid %%(old)s' % rschema, subst) - # same thing ... 
- for rschema in CWUser.object_relations(): - if rschema.final or rschema == 'identity': - continue - rql('SET X %s NU WHERE NU eid %%(new)s, NOT X %s NU, X %s OU, OU eid %%(old)s' % - (rschema, rschema, rschema), subst) - rql('DELETE X %s U WHERE U eid %%(old)s' % rschema, subst) - if not docommit: - rollback() - return - commit() # XXX flushing operations is wanted rather than really committing - print('clean up entities table') - sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids))) - commit() - -def main(): - dupes = find_dupes() - if not dupes: - print('No duplicate user') - return - - print('Found %s duplicate user instances' % len(dupes)) - - while True: - print('Fix or dry-run? (f/d) ... or Ctrl-C to break out') - answer = raw_input('> ') - if answer.lower() not in 'fd': - continue - print('Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.') - raw_input('') - with hooks_control(session, session.HOOKS_DENY_ALL): - merge_dupes(dupes, docommit=answer=='f') - -main() diff -r 058bb3dc685f -r 0b59724cb3f2 mttransforms.py --- a/mttransforms.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,121 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""mime type transformation engine for cubicweb, based on mtconverter""" - -__docformat__ = "restructuredtext en" - -from logilab import mtconverter - -from logilab.mtconverter.engine import TransformEngine -from logilab.mtconverter.transform import Transform -from logilab.mtconverter import (register_base_transforms, - register_pil_transforms, - register_pygments_transforms) - -from cubicweb.utils import UStringIO -from cubicweb.uilib import rest_publish, markdown_publish, html_publish - -HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml') - -# CubicWeb specific transformations - -class rest_to_html(Transform): - inputs = ('text/rest', 'text/x-rst') - output = 'text/html' - def _convert(self, trdata): - return rest_publish(trdata.appobject, trdata.decode()) - -class markdown_to_html(Transform): - inputs = ('text/markdown', 'text/x-markdown') - output = 'text/html' - def _convert(self, trdata): - return markdown_publish(trdata.appobject, trdata.decode()) - -class html_to_html(Transform): - inputs = HTML_MIMETYPES - output = 'text/html' - def _convert(self, trdata): - return html_publish(trdata.appobject, trdata.data) - - -# Instantiate and configure the transformation engine - -mtconverter.UNICODE_POLICY = 'replace' - -ENGINE = TransformEngine() -ENGINE.add_transform(rest_to_html()) -ENGINE.add_transform(markdown_to_html()) -ENGINE.add_transform(html_to_html()) - -try: - from cubicweb.ext.tal import CubicWebContext, compile_template -except ImportError: - HAS_TAL = False - from cubicweb import schema - schema.NEED_PERM_FORMATS.remove('text/cubicweb-page-template') - -else: - HAS_TAL = True - - class ept_to_html(Transform): - inputs = ('text/cubicweb-page-template',) - output = 'text/html' - output_encoding = 'utf-8' - def _convert(self, trdata): - context = CubicWebContext() - appobject = trdata.appobject - context.update({'self': appobject, 'rset': appobject.cw_rset, - 'req': appobject._cw, - '_' : appobject._cw._, - 'user': appobject._cw.user}) - output = UStringIO() - template = compile_template(trdata.encode(self.output_encoding)) - template.expand(context, output) - return output.getvalue() - - ENGINE.add_transform(ept_to_html()) - -if register_pil_transforms(ENGINE, verb=False): - HAS_PIL_TRANSFORMS = True -else: - HAS_PIL_TRANSFORMS = False - -try: - from logilab.mtconverter.transforms import pygmentstransforms - for mt in ('text/plain',) + HTML_MIMETYPES: - try: - pygmentstransforms.mimetypes.remove(mt) - except ValueError: - continue - register_pygments_transforms(ENGINE, verb=False) - - def patch_convert(cls): - def _convert(self, trdata, origconvert=cls._convert): - add_css = getattr(trdata.appobject._cw, 'add_css', None) - if add_css is not None: - # session has no add_css, only http request - add_css('pygments.css') - return origconvert(self, trdata) - cls._convert = _convert - patch_convert(pygmentstransforms.PygmentsHTMLTransform) - - HAS_PYGMENTS_TRANSFORMS = True -except ImportError: - HAS_PYGMENTS_TRANSFORMS = False - -register_base_transforms(ENGINE, verb=False) diff -r 058bb3dc685f -r 0b59724cb3f2 multipart.py --- a/multipart.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,416 +0,0 @@ -# -*- coding: utf-8 -*- -''' -Parser for multipart/form-data -============================== - -This module provides a parser for the multipart/form-data format. It can read -from a file, a socket or a WSGI environment. The parser can be used to replace -cgi.FieldStorage (without the bugs) and works with Python 2.5+ and 3.x (2to3). 
- -Licence (MIT) -------------- - - Copyright (c) 2010, Marcel Hellkamp. - Inspired by the Werkzeug library: http://werkzeug.pocoo.org/ - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. - -''' - -__author__ = 'Marcel Hellkamp' -__version__ = '0.1' -__license__ = 'MIT' - -from tempfile import TemporaryFile -from wsgiref.headers import Headers -import re, sys -try: - from io import BytesIO -except ImportError: # pragma: no cover (fallback for Python 2.5) - from StringIO import StringIO as BytesIO - -from six import PY3, text_type -from six.moves.urllib.parse import parse_qs - -############################################################################## -################################ Helper & Misc ################################ -############################################################################## -# Some of these were copied from bottle: http://bottle.paws.de/ - -try: - from collections import MutableMapping as DictMixin -except ImportError: # pragma: no cover (fallback for Python 2.5) - from UserDict import DictMixin - -class MultiDict(DictMixin): - """ A dict that remembers old values for each key """ - def __init__(self, *a, **k): - self.dict = dict() - for k, v in dict(*a, **k).items(): - self[k] = v - - def __len__(self): return len(self.dict) - def __iter__(self): return iter(self.dict) - def __contains__(self, key): return key in self.dict - def __delitem__(self, key): del self.dict[key] - def keys(self): return self.dict.keys() - def __getitem__(self, key): return self.get(key, KeyError, -1) - def __setitem__(self, key, value): self.append(key, value) - - def append(self, key, value): self.dict.setdefault(key, []).append(value) - def replace(self, key, value): self.dict[key] = [value] - def getall(self, key): return self.dict.get(key) or [] - - def get(self, key, default=None, index=-1): - if key not in self.dict and default != KeyError: - return [default][index] - return self.dict[key][index] - - def iterallitems(self): - for key, values in self.dict.items(): - for value in values: - yield key, value - -def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3) - return data.encode(enc) if isinstance(data, text_type) else data - -def copy_file(stream, target, maxread=-1, buffer_size=2*16): - ''' Read from :stream and write to :target until :maxread or EOF. 
''' - size, read = 0, stream.read - while 1: - to_read = buffer_size if maxread < 0 else min(buffer_size, maxread-size) - part = read(to_read) - if not part: return size - target.write(part) - size += len(part) - -############################################################################## -################################ Header Parser ################################ -############################################################################## - -_special = re.escape('()<>@,;:\\"/[]?={} \t') -_re_special = re.compile('[%s]' % _special) -_qstr = '"(?:\\\\.|[^"])*"' # Quoted string -_value = '(?:[^%s]+|%s)' % (_special, _qstr) # Save or quoted string -_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value) -_re_option = re.compile(_option) # key=value part of an Content-Type like header - -def header_quote(val): - if not _re_special.search(val): - return val - return '"' + val.replace('\\','\\\\').replace('"','\\"') + '"' - -def header_unquote(val, filename=False): - if val[0] == val[-1] == '"': - val = val[1:-1] - if val[1:3] == ':\\' or val[:2] == '\\\\': - val = val.split('\\')[-1] # fix ie6 bug: full path --> filename - return val.replace('\\\\','\\').replace('\\"','"') - return val - -def parse_options_header(header, options=None): - if ';' not in header: - return header.lower().strip(), {} - ctype, tail = header.split(';', 1) - options = options or {} - for match in _re_option.finditer(tail): - key = match.group(1).lower() - value = header_unquote(match.group(2), key=='filename') - options[key] = value - return ctype, options - -############################################################################## -################################## Multipart ################################## -############################################################################## - - -class MultipartError(ValueError): pass - - -class MultipartParser(object): - - def __init__(self, stream, boundary, content_length=-1, - disk_limit=2**30, mem_limit=2**20, memfile_limit=2**18, - buffer_size=2**16, charset='latin1'): - ''' Parse a multipart/form-data byte stream. This object is an iterator - over the parts of the message. - - :param stream: A file-like stream. Must implement ``.read(size)``. - :param boundary: The multipart boundary as a byte string. - :param content_length: The maximum number of bytes to read. - ''' - self.stream, self.boundary = stream, boundary - self.content_length = content_length - self.disk_limit = disk_limit - self.memfile_limit = memfile_limit - self.mem_limit = min(mem_limit, self.disk_limit) - self.buffer_size = min(buffer_size, self.mem_limit) - self.charset = charset - if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n" - raise MultipartError('Boundary does not fit into buffer_size.') - self._done = [] - self._part_iter = None - - def __iter__(self): - ''' Iterate over the parts of the multipart message. ''' - if not self._part_iter: - self._part_iter = self._iterparse() - for part in self._done: - yield part - for part in self._part_iter: - self._done.append(part) - yield part - - def parts(self): - ''' Returns a list with all parts of the multipart message. ''' - return list(iter(self)) - - def get(self, name, default=None): - ''' Return the first part with that name or a default value (None). ''' - for part in self: - if name == part.name: - return part - return default - - def get_all(self, name): - ''' Return a list of parts with that name. 
''' - return [p for p in self if p.name == name] - - def _lineiter(self): - ''' Iterate over a binary file-like object line by line. Each line is - returned as a (line, line_ending) tuple. If the line does not fit - into self.buffer_size, line_ending is empty and the rest of the line - is returned with the next iteration. - ''' - read = self.stream.read - maxread, maxbuf = self.content_length, self.buffer_size - _bcrnl = tob('\r\n') - _bcr = _bcrnl[:1] - _bnl = _bcrnl[1:] - _bempty = _bcrnl[:0] # b'rn'[:0] -> b'' - buffer = _bempty # buffer for the last (partial) line - while 1: - data = read(maxbuf if maxread < 0 else min(maxbuf, maxread)) - maxread -= len(data) - lines = (buffer+data).splitlines(True) - len_first_line = len(lines[0]) - # be sure that the first line does not become too big - if len_first_line > self.buffer_size: - # at the same time don't split a '\r\n' accidentally - if (len_first_line == self.buffer_size+1 and - lines[0].endswith(_bcrnl)): - splitpos = self.buffer_size - 1 - else: - splitpos = self.buffer_size - lines[:1] = [lines[0][:splitpos], - lines[0][splitpos:]] - if data: - buffer = lines[-1] - lines = lines[:-1] - for line in lines: - if line.endswith(_bcrnl): yield line[:-2], _bcrnl - elif line.endswith(_bnl): yield line[:-1], _bnl - elif line.endswith(_bcr): yield line[:-1], _bcr - else: yield line, _bempty - if not data: - break - - def _iterparse(self): - lines, line = self._lineiter(), '' - separator = tob('--') + tob(self.boundary) - terminator = tob('--') + tob(self.boundary) + tob('--') - # Consume first boundary. Ignore leading blank lines - for line, nl in lines: - if line: break - if line != separator: - raise MultipartError("Stream does not start with boundary") - # For each part in stream... - mem_used, disk_used = 0, 0 # Track used resources to prevent DoS - is_tail = False # True if the last line was incomplete (cutted) - opts = {'buffer_size': self.buffer_size, - 'memfile_limit': self.memfile_limit, - 'charset': self.charset} - part = MultipartPart(**opts) - for line, nl in lines: - if line == terminator and not is_tail: - part.file.seek(0) - yield part - break - elif line == separator and not is_tail: - if part.is_buffered(): mem_used += part.size - else: disk_used += part.size - part.file.seek(0) - yield part - part = MultipartPart(**opts) - else: - is_tail = not nl # The next line continues this one - part.feed(line, nl) - if part.is_buffered(): - if part.size + mem_used > self.mem_limit: - raise MultipartError("Memory limit reached.") - elif part.size + disk_used > self.disk_limit: - raise MultipartError("Disk limit reached.") - if line != terminator: - raise MultipartError("Unexpected end of multipart stream.") - - -class MultipartPart(object): - - def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'): - self.headerlist = [] - self.headers = None - self.file = False - self.size = 0 - self._buf = tob('') - self.disposition, self.name, self.filename = None, None, None - self.content_type, self.charset = None, charset - self.memfile_limit = memfile_limit - self.buffer_size = buffer_size - - def feed(self, line, nl=''): - if self.file: - return self.write_body(line, nl) - return self.write_header(line, nl) - - def write_header(self, line, nl): - line = line.decode(self.charset or 'latin1') - if not nl: raise MultipartError('Unexpected end of line in header.') - if not line.strip(): # blank line -> end of header segment - self.finish_header() - elif line[0] in ' \t' and self.headerlist: - name, value = self.headerlist.pop() 
- self.headerlist.append((name, value+line.strip())) - else: - if ':' not in line: - raise MultipartError("Syntax error in header: No colon.") - name, value = line.split(':', 1) - self.headerlist.append((name.strip(), value.strip())) - - def write_body(self, line, nl): - if not line and not nl: return # This does not even flush the buffer - self.size += len(line) + len(self._buf) - self.file.write(self._buf + line) - self._buf = nl - if self.content_length > 0 and self.size > self.content_length: - raise MultipartError('Size of body exceeds Content-Length header.') - if self.size > self.memfile_limit and isinstance(self.file, BytesIO): - # TODO: What about non-file uploads that exceed the memfile_limit? - self.file, old = TemporaryFile(mode='w+b'), self.file - old.seek(0) - copy_file(old, self.file, self.size, self.buffer_size) - - def finish_header(self): - self.file = BytesIO() - self.headers = Headers(self.headerlist) - cdis = self.headers.get('Content-Disposition','') - ctype = self.headers.get('Content-Type','') - clen = self.headers.get('Content-Length','-1') - if not cdis: - raise MultipartError('Content-Disposition header is missing.') - self.disposition, self.options = parse_options_header(cdis) - self.name = self.options.get('name') - self.filename = self.options.get('filename') - self.content_type, options = parse_options_header(ctype) - self.charset = options.get('charset') or self.charset - self.content_length = int(self.headers.get('Content-Length','-1')) - - def is_buffered(self): - ''' Return true if the data is fully buffered in memory.''' - return isinstance(self.file, BytesIO) - - @property - def value(self): - ''' Data decoded with the specified charset ''' - pos = self.file.tell() - self.file.seek(0) - val = self.file.read() - self.file.seek(pos) - return val.decode(self.charset) - - def save_as(self, path): - fp = open(path, 'wb') - pos = self.file.tell() - try: - self.file.seek(0) - size = copy_file(self.file, fp) - finally: - self.file.seek(pos) - return size - -############################################################################## -#################################### WSGI #################################### -############################################################################## - -def parse_form_data(environ, charset='utf8', strict=False, **kw): - ''' Parse form data from an environ dict and return a (forms, files) tuple. - Both tuple values are dictionaries with the form-field name as a key - (unicode) and lists as values (multiple values per key are possible). - The forms-dictionary contains form-field values as unicode strings. - The files-dictionary contains :class:`MultipartPart` instances, either - because the form-field was a file-upload or the value is to big to fit - into memory limits. - - :param environ: An WSGI environment dict. - :param charset: The charset to use if unsure. (default: utf8) - :param strict: If True, raise :exc:`MultipartError` on any parsing - errors. These are silently ignored by default. 
- ''' - - forms, files = MultiDict(), MultiDict() - try: - if environ.get('REQUEST_METHOD','GET').upper() not in ('POST', 'PUT'): - raise MultipartError("Request method other than POST or PUT.") - content_length = int(environ.get('CONTENT_LENGTH', '-1')) - content_type = environ.get('CONTENT_TYPE', '') - if not content_type: - raise MultipartError("Missing Content-Type header.") - content_type, options = parse_options_header(content_type) - stream = environ.get('wsgi.input') or BytesIO() - kw['charset'] = charset = options.get('charset', charset) - if content_type == 'multipart/form-data': - boundary = options.get('boundary','') - if not boundary: - raise MultipartError("No boundary for multipart/form-data.") - for part in MultipartParser(stream, boundary, content_length, **kw): - if part.filename or not part.is_buffered(): - files[part.name] = part - else: # TODO: Big form-fields are in the files dict. really? - forms[part.name] = part.value - elif content_type in ('application/x-www-form-urlencoded', - 'application/x-url-encoded'): - mem_limit = kw.get('mem_limit', 2**20) - if content_length > mem_limit: - raise MultipartError("Request too big. Increase MAXMEM.") - data = stream.read(mem_limit) - if stream.read(1): # These is more that does not fit mem_limit - raise MultipartError("Request too big. Increase MAXMEM.") - if PY3: - data = data.decode('ascii') - data = parse_qs(data, keep_blank_values=True) - for key, values in data.items(): - for value in values: - if PY3: - forms[key] = value - else: - forms[key.decode(charset)] = value.decode(charset) - else: - raise MultipartError("Unsupported content type.") - except MultipartError: - if strict: raise - return forms, files diff -r 058bb3dc685f -r 0b59724cb3f2 predicates.py --- a/predicates.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1421 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
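To make the multipart API above concrete, here is a minimal sketch of a WSGI application using parse_form_data() (the field names 'name' and 'upload' and the target path are illustrative; the import path assumes the module's new location under the cubicweb package):

from cubicweb.multipart import parse_form_data, MultipartError

def application(environ, start_response):
    """Hypothetical WSGI application parsing a posted form."""
    try:
        # forms maps field names to unicode strings; files maps them to
        # MultipartPart instances (file uploads, or values too big for memory)
        forms, files = parse_form_data(environ, strict=True)
        name = forms.get('name', u'')
        upload = files.get('upload')
        if upload is not None:
            upload.save_as('/tmp/' + (upload.filename or 'upload.bin'))
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [(u'hello %s' % name).encode('utf-8')]
    except MultipartError as exc:
        start_response('400 Bad Request', [('Content-Type', 'text/plain')])
        return [str(exc).encode('utf-8')]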
-"""Predicate classes -""" - -__docformat__ = "restructuredtext en" - -import logging -from warnings import warn -from operator import eq - -from six import string_types, integer_types -from six.moves import range - -from logilab.common.deprecation import deprecated -from logilab.common.registry import Predicate, objectify_predicate, yes - -from yams.schema import BASE_TYPES, role_name -from rql.nodes import Function - -from cubicweb import (Unauthorized, NoSelectableObject, NotAnEntity, - CW_EVENT_MANAGER, role) -from cubicweb.uilib import eid_param -from cubicweb.schema import split_expression - -yes = deprecated('[3.15] import yes() from use logilab.common.registry')(yes) - - -# abstract predicates / mixin helpers ########################################### - -class PartialPredicateMixIn(object): - """convenience mix-in for predicates that will look into the containing - class to find missing information. - - cf. `cubicweb.web.action.LinkToEntityAction` for instance - """ - def __call__(self, cls, *args, **kwargs): - self.complete(cls) - return super(PartialPredicateMixIn, self).__call__(cls, *args, **kwargs) - - -class EClassPredicate(Predicate): - """abstract class for predicates working on *entity class(es)* specified - explicitly or found of the result set. - - Here are entity lookup / scoring rules: - - * if `entity` is specified, return score for this entity's class - - * elif `rset`, `select` and `filtered_variable` are specified, return score - for the possible classes for variable in the given rql :class:`Select` - node - - * elif `rset` and `row` are specified, return score for the class of the - entity found in the specified cell, using column specified by `col` or 0 - - * elif `rset` is specified return score for each entity class found in the - column specified specified by the `col` argument or in column 0 if not - specified - - When there are several classes to be evaluated, return the sum of scores for - each entity class unless: - - - `mode` == 'all' (the default) and some entity class is scored - to 0, in which case 0 is returned - - - `mode` == 'any', in which case the first non-zero score is - returned - - - `accept_none` is False and some cell in the column has a None value - (this may occurs with outer join) - """ - def __init__(self, once_is_enough=None, accept_none=True, mode='all'): - if once_is_enough is not None: - warn("[3.14] once_is_enough is deprecated, use mode='any'", - DeprecationWarning, stacklevel=2) - if once_is_enough: - mode = 'any' - assert mode in ('any', 'all'), 'bad mode %s' % mode - self.once_is_enough = mode == 'any' - self.accept_none = accept_none - - def __call__(self, cls, req, rset=None, row=None, col=0, entity=None, - select=None, filtered_variable=None, - accept_none=None, - **kwargs): - if entity is not None: - return self.score_class(entity.__class__, req) - if not rset: - return 0 - if select is not None and filtered_variable is not None: - etypes = set(sol[filtered_variable.name] for sol in select.solutions) - elif row is None: - if accept_none is None: - accept_none = self.accept_none - if not accept_none and \ - any(row[col] is None for row in rset): - return 0 - etypes = rset.column_types(col) - else: - etype = rset.description[row][col] - # may have None in rset.description on outer join - if etype is None or rset.rows[row][col] is None: - return 0 - etypes = (etype,) - score = 0 - for etype in etypes: - escore = self.score(cls, req, etype) - if not escore and not self.once_is_enough: - return 0 - elif self.once_is_enough: - 
return escore - score += escore - return score - - def score(self, cls, req, etype): - if etype in BASE_TYPES: - return 0 - return self.score_class(req.vreg['etypes'].etype_class(etype), req) - - def score_class(self, eclass, req): - raise NotImplementedError() - - -class EntityPredicate(EClassPredicate): - """abstract class for predicates working on *entity instance(s)* specified - explicitly or found of the result set. - - Here are entity lookup / scoring rules: - - * if `entity` is specified, return score for this entity - - * elif `row` is specified, return score for the entity found in the - specified cell, using column specified by `col` or 0 - - * else return the sum of scores for each entity found in the column - specified specified by the `col` argument or in column 0 if not specified, - unless: - - - `mode` == 'all' (the default) and some entity class is scored - to 0, in which case 0 is returned - - - `mode` == 'any', in which case the first non-zero score is - returned - - - `accept_none` is False and some cell in the column has a None value - (this may occurs with outer join) - - .. Note:: - using :class:`EntityPredicate` or :class:`EClassPredicate` as base predicate - class impacts performance, since when no entity or row is specified the - later works on every different *entity class* found in the result set, - while the former works on each *entity* (eg each row of the result set), - which may be much more costly. - """ - - def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, - entity=None, **kwargs): - if not rset and entity is None: - return 0 - score = 0 - if entity is not None: - score = self.score_entity(entity) - elif row is None: - col = col or 0 - if accept_none is None: - accept_none = self.accept_none - for row, rowvalue in enumerate(rset.rows): - if rowvalue[col] is None: # outer join - if not accept_none: - return 0 - continue - escore = self.score(req, rset, row, col) - if not escore and not self.once_is_enough: - return 0 - elif self.once_is_enough: - return escore - score += escore - else: - col = col or 0 - etype = rset.description[row][col] - if etype is not None: # outer join - score = self.score(req, rset, row, col) - return score - - def score(self, req, rset, row, col): - try: - return self.score_entity(rset.get_entity(row, col)) - except NotAnEntity: - return 0 - - def score_entity(self, entity): - raise NotImplementedError() - - -class ExpectedValuePredicate(Predicate): - """Take a list of expected values as initializer argument and store them - into the :attr:`expected` set attribute. You may also give a set as single - argument, which will then be referenced as set of expected values, - allowing modifications to the given set to be considered. - - You should implement one of :meth:`_values_set(cls, req, **kwargs)` or - :meth:`_get_value(cls, req, **kwargs)` method which should respectively - return the set of values or the unique possible value for the given context. - - You may also specify a `mode` behaviour as argument, as explained below. - - Returned score is: - - - 0 if `mode` == 'all' (the default) and at least one expected - values isn't found - - - 0 if `mode` == 'any' and no expected values isn't found at all - - - else the number of matching values - - Notice `mode` = 'any' with a single expected value has no effect at all. 
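As an illustration of this contract, a hedged sketch of a custom subclass that overrides _get_value() (the predicate name and the request 'lang' attribute lookup are assumptions made for the example):

from cubicweb.predicates import ExpectedValuePredicate

class match_ui_language(ExpectedValuePredicate):
    """Hypothetical predicate scoring when the request's interface language
    is one of the expected values (the req.lang attribute is an assumption)."""

    def _get_value(self, cls, req, **kwargs):
        return getattr(req, 'lang', None)

# illustrative usage on an appobject class:
#     __select__ = SomeView.__select__ & match_ui_language('fr', 'de', mode='any')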
- """ - def __init__(self, *expected, **kwargs): - assert expected, self - if len(expected) == 1 and isinstance(expected[0], (set, dict)): - self.expected = expected[0] - else: - self.expected = frozenset(expected) - mode = kwargs.pop('mode', 'all') - assert mode in ('any', 'all'), 'bad mode %s' % mode - self.once_is_enough = mode == 'any' - assert not kwargs, 'unexpected arguments %s' % kwargs - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, - ','.join(sorted(str(s) for s in self.expected))) - - def __call__(self, cls, req, **kwargs): - values = self._values_set(cls, req, **kwargs) - if isinstance(values, dict): - if isinstance(self.expected, dict): - matching = 0 - for key, expected_value in self.expected.items(): - if key in values: - if (isinstance(expected_value, (list, tuple, frozenset, set)) - and values[key] in expected_value): - matching += 1 - elif values[key] == expected_value: - matching += 1 - if isinstance(self.expected, (set, frozenset)): - values = frozenset(values) - matching = len(values & self.expected) - else: - matching = len(values & self.expected) - if self.once_is_enough: - return matching - if matching == len(self.expected): - return matching - return 0 - - def _values_set(self, cls, req, **kwargs): - return frozenset( (self._get_value(cls, req, **kwargs),) ) - - def _get_value(self, cls, req, **kwargs): - raise NotImplementedError() - - -# bare predicates ############################################################## - -class match_kwargs(ExpectedValuePredicate): - """Return non-zero score if parameter names specified as initializer - arguments are specified in the input context. - - - Return a score corresponding to the number of expected parameters. - - When multiple parameters are expected, all of them should be found in - the input context unless `mode` keyword argument is given to 'any', - in which case a single matching parameter is enough. - """ - - def _values_set(self, cls, req, **kwargs): - return kwargs - - -class appobject_selectable(Predicate): - """Return 1 if another appobject is selectable using the same input context. - - Initializer arguments: - - * `registry`, a registry name - - * `regids`, object identifiers in this registry, one of them should be - selectable. - """ - selectable_score = 1 - def __init__(self, registry, *regids): - self.registry = registry - self.regids = regids - - def __call__(self, cls, req, **kwargs): - for regid in self.regids: - if req.vreg[self.registry].select_or_none(regid, req, **kwargs) is not None: - return self.selectable_score - return 0 - - -class adaptable(appobject_selectable): - """Return 1 if another appobject is selectable using the same input context. - - Initializer arguments: - - * `regids`, adapter identifiers (e.g. interface names) to which the context - (usually entities) should be adaptable. One of them should be selectable - when multiple identifiers are given. 
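For instance, a hypothetical appobject restricted to downloadable entities (assuming the usual cubicweb.view.EntityView base class and the 'IDownloadable' adapter shipped with CubicWeb):

from cubicweb.view import EntityView
from cubicweb.predicates import adaptable

class DownloadBox(EntityView):
    """Hypothetical view selectable only when the entities in the result set
    can be adapted to IDownloadable."""
    __regid__ = 'downloadbox'
    __select__ = EntityView.__select__ & adaptable('IDownloadable')

    def cell_call(self, row, col):
        entity = self.cw_rset.get_entity(row, col)
        adapted = entity.cw_adapt_to('IDownloadable')
        self.w(u'<a href="%s">download</a>' % adapted.download_url())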
- """ - def __init__(self, *regids): - super(adaptable, self).__init__('adapters', *regids) - - def __call__(self, cls, req, **kwargs): - kwargs.setdefault('accept_none', False) - score = super(adaptable, self).__call__(cls, req, **kwargs) - if score == 0 and kwargs.get('rset') and len(kwargs['rset']) > 1 and not 'row' in kwargs: - # on rset containing several entity types, each row may be - # individually adaptable, while the whole rset won't be if the - # same adapter can't be used for each type - for row in range(len(kwargs['rset'])): - kwargs.setdefault('col', 0) - _score = super(adaptable, self).__call__(cls, req, row=row, **kwargs) - if not _score: - return 0 - # adjust score per row as expected by default adjust_score - # implementation - score += self.adjust_score(_score) - else: - score = self.adjust_score(score) - return score - - @staticmethod - def adjust_score(score): - # being adaptable to an interface should takes precedence other - # is_instance('Any'), but not other explicit - # is_instance('SomeEntityType'), and, for **a single entity**: - # * is_instance('Any') score is 1 - # * is_instance('SomeEntityType') score is at least 2 - if score >= 2: - return score - 0.5 - if score == 1: - return score + 0.5 - return score - - -class configuration_values(Predicate): - """Return 1 if the instance has an option set to a given value(s) in its - configuration file. - """ - # XXX this predicate could be evaluated on startup - def __init__(self, key, values): - self._key = key - if not isinstance(values, (tuple, list)): - values = (values,) - self._values = frozenset(values) - - def __call__(self, cls, req, **kwargs): - try: - return self._score - except AttributeError: - if req is None: - config = kwargs['repo'].config - else: - config = req.vreg.config - self._score = config[self._key] in self._values - return self._score - - -# rset predicates ############################################################## - -@objectify_predicate -def none_rset(cls, req, rset=None, **kwargs): - """Return 1 if the result set is None (eg usually not specified).""" - if rset is None: - return 1 - return 0 - - -# XXX == ~ none_rset -@objectify_predicate -def any_rset(cls, req, rset=None, **kwargs): - """Return 1 for any result set, whatever the number of rows in it, even 0.""" - if rset is not None: - return 1 - return 0 - - -@objectify_predicate -def nonempty_rset(cls, req, rset=None, **kwargs): - """Return 1 for result set containing one ore more rows.""" - if rset: - return 1 - return 0 - - -# XXX == ~ nonempty_rset -@objectify_predicate -def empty_rset(cls, req, rset=None, **kwargs): - """Return 1 for result set which doesn't contain any row.""" - if rset is not None and len(rset) == 0: - return 1 - return 0 - - -# XXX == multi_lines_rset(1) -@objectify_predicate -def one_line_rset(cls, req, rset=None, row=None, **kwargs): - """Return 1 if the result set is of size 1, or greater but a specific row in - the result set is specified ('row' argument). - """ - if rset is None and 'entity' in kwargs: - return 1 - if rset is not None and (row is not None or len(rset) == 1): - return 1 - return 0 - - -class multi_lines_rset(Predicate): - """Return 1 if the operator expression matches between `num` elements - in the result set and the `expected` value if defined. 
- - By default, multi_lines_rset(expected) matches equality expression: - `nb` row(s) in result set equals to expected value - But, you can perform richer comparisons by overriding default operator: - multi_lines_rset(expected, operator.gt) - - If `expected` is None, return 1 if the result set contains *at least* - two rows. - If rset is None, return 0. - """ - def __init__(self, expected=None, operator=eq): - self.expected = expected - self.operator = operator - - def match_expected(self, num): - if self.expected is None: - return num > 1 - return self.operator(num, self.expected) - - def __call__(self, cls, req, rset=None, **kwargs): - return int(rset is not None and self.match_expected(len(rset))) - - -class multi_columns_rset(multi_lines_rset): - """If `nb` is specified, return 1 if the result set has exactly `nb` column - per row. Else (`nb` is None), return 1 if the result set contains *at least* - two columns per row. Return 0 for empty result set. - """ - - def __call__(self, cls, req, rset=None, **kwargs): - # 'or 0' since we *must not* return None. Also don't use rset.rows so - # this selector will work if rset is a simple list of list. - return rset and self.match_expected(len(rset[0])) or 0 - - -class paginated_rset(Predicate): - """Return 1 or more for result set with more rows than one or more page - size. You can specify expected number of pages to the initializer (default - to one), and you'll get that number of pages as score if the result set is - big enough. - - Page size is searched in (respecting order): - * a `page_size` argument - * a `page_size` form parameters - * the `navigation.page-size` property (see :ref:`PersistentProperties`) - """ - def __init__(self, nbpages=1): - assert nbpages > 0 - self.nbpages = nbpages - - def __call__(self, cls, req, rset=None, **kwargs): - if rset is None: - return 0 - page_size = kwargs.get('page_size') - if page_size is None: - page_size = req.form.get('page_size') - if page_size is not None: - try: - page_size = int(page_size) - except ValueError: - page_size = None - if page_size is None: - page_size = req.property_value('navigation.page-size') - if len(rset) <= (page_size*self.nbpages): - return 0 - return self.nbpages - - -@objectify_predicate -def sorted_rset(cls, req, rset=None, **kwargs): - """Return 1 for sorted result set (e.g. from an RQL query containing an - ORDERBY clause), with exception that it will return 0 if the rset is - 'ORDERBY FTIRANK(VAR)' (eg sorted by rank value of the has_text index). - """ - if rset is None: - return 0 - selects = rset.syntax_tree().children - if (len(selects) > 1 or - not selects[0].orderby or - (isinstance(selects[0].orderby[0].term, Function) and - selects[0].orderby[0].term.name == 'FTIRANK') - ): - return 0 - return 2 - - -# XXX == multi_etypes_rset(1) -@objectify_predicate -def one_etype_rset(cls, req, rset=None, col=0, **kwargs): - """Return 1 if the result set contains entities which are all of the same - type in the column specified by the `col` argument of the input context, or - in column 0. - """ - if rset is None: - return 0 - if len(rset.column_types(col)) != 1: - return 0 - return 1 - - -class multi_etypes_rset(multi_lines_rset): - """If `nb` is specified, return 1 if the result set contains `nb` different - types of entities in the column specified by the `col` argument of the input - context, or in column 0. If `nb` is None, return 1 if the result set contains - *at least* two different types of entities. 
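These result-set predicates are designed to be composed with & and |; a small illustrative composition (the names are arbitrary):

import operator

from cubicweb.predicates import multi_lines_rset, one_etype_rset, paginated_rset

# Non-zero only for a homogeneous result set with strictly more than three
# rows which also spans at least two result pages; typically used as
#   __select__ = SomeView.__select__ & big_homogeneous_rset
big_homogeneous_rset = (multi_lines_rset(3, operator.gt)
                        & one_etype_rset()
                        & paginated_rset(nbpages=2))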
- """ - - def __call__(self, cls, req, rset=None, col=0, **kwargs): - # 'or 0' since we *must not* return None - return rset and self.match_expected(len(rset.column_types(col))) or 0 - - -@objectify_predicate -def logged_user_in_rset(cls, req, rset=None, row=None, col=0, **kwargs): - """Return positive score if the result set at the specified row / col - contains the eid of the logged user. - """ - if rset is None: - return 0 - return req.user.eid == rset[row or 0][col] - - -# entity predicates ############################################################# - -class composite_etype(Predicate): - """Return 1 for composite entities. - - A composite entity has an etype for which at least one relation - definition points in its direction with the - composite='subject'/'object' notation. - """ - - def __call__(self, cls, req, **kwargs): - entity = kwargs.pop('entity', None) - if entity is None: - return 0 - return entity.e_schema.is_composite - - - -class non_final_entity(EClassPredicate): - """Return 1 for entity of a non final entity type(s). Remember, "final" - entity types are String, Int, etc... This is equivalent to - `is_instance('Any')` but more optimized. - - See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity - class lookup / score rules according to the input context. - """ - def score(self, cls, req, etype): - if etype in BASE_TYPES: - return 0 - return 1 - - def score_class(self, eclass, req): - return 1 # necessarily true if we're there - - - -def _reset_is_instance_cache(vreg): - vreg._is_instance_predicate_cache = {} - -CW_EVENT_MANAGER.bind('before-registry-reset', _reset_is_instance_cache) - -class is_instance(EClassPredicate): - """Return non-zero score for entity that is an instance of the one of given - type(s). If multiple arguments are given, matching one of them is enough. - - Entity types should be given as string, the corresponding class will be - fetched from the registry at selection time. - - See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity - class lookup / score rules according to the input context. - - .. note:: the score will reflect class proximity so the most specific object - will be selected. 
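A typical use, sketched on a hypothetical view ('BlogEntry' is an illustrative entity type):

from cubicweb.view import EntityView
from cubicweb.predicates import is_instance

class BlogEntrySummary(EntityView):
    """Hypothetical view for BlogEntry entities; thanks to class proximity
    scoring it is preferred over a view selected by is_instance('Any')."""
    __regid__ = 'summary'
    __select__ = EntityView.__select__ & is_instance('BlogEntry')

    def cell_call(self, row, col):
        self.w(self.cw_rset.get_entity(row, col).dc_title())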
- """ - - def __init__(self, *expected_etypes, **kwargs): - super(is_instance, self).__init__(**kwargs) - self.expected_etypes = expected_etypes - for etype in self.expected_etypes: - assert isinstance(etype, string_types), etype - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, - ','.join(str(s) for s in self.expected_etypes)) - - def score_class(self, eclass, req): - # cache on vreg to avoid reloading issues - try: - cache = req.vreg._is_instance_predicate_cache - except AttributeError: - # XXX 'before-registry-reset' not called for db-api connections - cache = req.vreg._is_instance_predicate_cache = {} - try: - expected_eclasses = cache[self] - except KeyError: - # turn list of entity types as string into a list of - # (entity class, parent classes) - etypesreg = req.vreg['etypes'] - expected_eclasses = cache[self] = [] - for etype in self.expected_etypes: - try: - expected_eclasses.append(etypesreg.etype_class(etype)) - except KeyError: - continue # entity type not in the schema - parents, any = req.vreg['etypes'].parent_classes(eclass.__regid__) - score = 0 - for expectedcls in expected_eclasses: - # adjust score according to class proximity - if expectedcls is eclass: - score += len(parents) + 4 - elif expectedcls is any: # Any - score += 1 - else: - for index, basecls in enumerate(reversed(parents)): - if expectedcls is basecls: - score += index + 3 - break - return score - - -class score_entity(EntityPredicate): - """Return score according to an arbitrary function given as argument which - will be called with input content entity as argument. - - This is a very useful predicate that will usually interest you since it - allows a lot of things without having to write a specific predicate. - - The function can return arbitrary value which will be casted to an integer - value at the end. - - See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity - lookup / score rules according to the input context. - """ - def __init__(self, scorefunc, once_is_enough=None, mode='all'): - super(score_entity, self).__init__(mode=mode, once_is_enough=once_is_enough) - def intscore(*args, **kwargs): - score = scorefunc(*args, **kwargs) - if not score: - return 0 - if isinstance(score, integer_types): - return score - return 1 - self.score_entity = intscore - - -class has_mimetype(EntityPredicate): - """Return 1 if the entity adapt to IDownloadable and has the given MIME type. - - You can give 'image/' to match any image for instance, or 'image/png' to match - only PNG images. - """ - def __init__(self, mimetype, once_is_enough=None, mode='all'): - super(has_mimetype, self).__init__(mode=mode, once_is_enough=once_is_enough) - self.mimetype = mimetype - - def score_entity(self, entity): - idownloadable = entity.cw_adapt_to('IDownloadable') - if idownloadable is None: - return 0 - mt = idownloadable.download_content_type() - if not (mt and mt.startswith(self.mimetype)): - return 0 - return 1 - - -class relation_possible(EntityPredicate): - """Return 1 for entity that supports the relation, provided that the - request's user may do some `action` on it (see below). - - The relation is specified by the following initializer arguments: - - * `rtype`, the name of the relation - - * `role`, the role of the entity in the relation, either 'subject' or - 'object', default to 'subject' - - * `target_etype`, optional name of an entity type that should be supported - at the other end of the relation - - * `action`, a relation schema action (e.g. 
one of 'read', 'add', 'delete', - default to 'read') which must be granted to the user, else a 0 score will - be returned. Give None if you don't want any permission checking. - - * `strict`, boolean (default to False) telling what to do when the user has - not globally the permission for the action (eg the action is not granted - to one of the user's groups) - - - when strict is False, if there are some local role defined for this - action (e.g. using rql expressions), then the permission will be - considered as granted - - - when strict is True, then the permission will be actually checked for - each entity - - Setting `strict` to True impacts performance for large result set since - you'll then get the :class:`~cubicweb.predicates.EntityPredicate` behaviour - while otherwise you get the :class:`~cubicweb.predicates.EClassPredicate`'s - one. See those classes documentation for entity lookup / score rules - according to the input context. - """ - - def __init__(self, rtype, role='subject', target_etype=None, - action='read', strict=False, **kwargs): - super(relation_possible, self).__init__(**kwargs) - self.rtype = rtype - self.role = role - self.target_etype = target_etype - self.action = action - self.strict = strict - - # hack hack hack - def __call__(self, cls, req, **kwargs): - # hack hack hack - if self.strict: - return EntityPredicate.__call__(self, cls, req, **kwargs) - return EClassPredicate.__call__(self, cls, req, **kwargs) - - def score(self, *args): - if self.strict: - return EntityPredicate.score(self, *args) - return EClassPredicate.score(self, *args) - - def _get_rschema(self, eclass): - eschema = eclass.e_schema - try: - if self.role == 'object': - return eschema.objrels[self.rtype] - else: - return eschema.subjrels[self.rtype] - except KeyError: - return None - - def score_class(self, eclass, req): - rschema = self._get_rschema(eclass) - if rschema is None: - return 0 # relation not supported - eschema = eclass.e_schema - if self.target_etype is not None: - try: - rdef = rschema.role_rdef(eschema, self.target_etype, self.role) - except KeyError: - return 0 - if self.action and not rdef.may_have_permission(self.action, req): - return 0 - teschema = req.vreg.schema.eschema(self.target_etype) - if not teschema.may_have_permission('read', req): - return 0 - elif self.action: - return rschema.may_have_permission(self.action, req, eschema, self.role) - return 1 - - def score_entity(self, entity): - rschema = self._get_rschema(entity) - if rschema is None: - return 0 # relation not supported - if self.action: - if self.target_etype is not None: - try: - rschema = rschema.role_rdef(entity.e_schema, - self.target_etype, self.role) - except KeyError: - return 0 - if self.role == 'subject': - if not rschema.has_perm(entity._cw, self.action, fromeid=entity.eid): - return 0 - elif not rschema.has_perm(entity._cw, self.action, toeid=entity.eid): - return 0 - if self.target_etype is not None: - req = entity._cw - teschema = req.vreg.schema.eschema(self.target_etype) - if not teschema.may_have_permission('read', req): - return 0 - return 1 - - -class partial_relation_possible(PartialPredicateMixIn, relation_possible): - """Same as :class:~`cubicweb.predicates.relation_possible`, but will look for - attributes of the selected class to get information which is otherwise - expected by the initializer, except for `action` and `strict` which are kept - as initializer arguments. - - This is useful to predefine predicate of an abstract class designed to be - customized. 
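A hedged sketch of that customization pattern, loosely modeled on LinkToEntityAction (the Action base class import and the relation/entity type names are assumptions):

from cubicweb.web.action import Action
from cubicweb.predicates import partial_relation_possible

class AddRelatedAction(Action):
    """Abstract action: concrete subclasses only provide rtype/role/target_etype,
    which the partial predicate picks up at selection time."""
    __abstract__ = True
    __select__ = Action.__select__ & partial_relation_possible(action='add')

class AddTicketAction(AddRelatedAction):
    """Hypothetical concrete subclass."""
    __regid__ = 'addticket'
    rtype = 'concerns'       # illustrative relation name
    role = 'object'
    target_etype = 'Ticket'  # illustrative entity type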
- """ - def __init__(self, action='read', **kwargs): - super(partial_relation_possible, self).__init__(None, None, None, - action, **kwargs) - - def complete(self, cls): - self.rtype = cls.rtype - self.role = role(cls) - self.target_etype = getattr(cls, 'target_etype', None) - - -class has_related_entities(EntityPredicate): - """Return 1 if entity support the specified relation and has some linked - entities by this relation , optionally filtered according to the specified - target type. - - The relation is specified by the following initializer arguments: - - * `rtype`, the name of the relation - - * `role`, the role of the entity in the relation, either 'subject' or - 'object', default to 'subject'. - - * `target_etype`, optional name of an entity type that should be found - at the other end of the relation - - See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity - lookup / score rules according to the input context. - """ - def __init__(self, rtype, role='subject', target_etype=None, **kwargs): - super(has_related_entities, self).__init__(**kwargs) - self.rtype = rtype - self.role = role - self.target_etype = target_etype - - def score_entity(self, entity): - relpossel = relation_possible(self.rtype, self.role, self.target_etype) - if not relpossel.score_class(entity.__class__, entity._cw): - return 0 - rset = entity.related(self.rtype, self.role) - if self.target_etype: - return any(r for r in rset.description if r[0] == self.target_etype) - return rset and 1 or 0 - - -class partial_has_related_entities(PartialPredicateMixIn, has_related_entities): - """Same as :class:~`cubicweb.predicates.has_related_entity`, but will look - for attributes of the selected class to get information which is otherwise - expected by the initializer. - - This is useful to predefine predicate of an abstract class designed to be - customized. - """ - def __init__(self, **kwargs): - super(partial_has_related_entities, self).__init__(None, None, None, - **kwargs) - - def complete(self, cls): - self.rtype = cls.rtype - self.role = role(cls) - self.target_etype = getattr(cls, 'target_etype', None) - - -class has_permission(EntityPredicate): - """Return non-zero score if request's user has the permission to do the - requested action on the entity. `action` is an entity schema action (eg one - of 'read', 'add', 'delete', 'update'). 
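For example, a hypothetical action selectable only when the request's user may update the single selected entity:

from cubicweb.web.action import Action
from cubicweb.predicates import has_permission, one_line_rset

class EditAction(Action):
    """Hypothetical action displayed only when the selected entity may be
    updated by the request's user."""
    __regid__ = 'myapp_edit'
    __select__ = Action.__select__ & one_line_rset() & has_permission('update')
    title = 'edit'

    def url(self):
        entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
        return entity.absolute_url(vid='edition')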
- - Here are entity lookup / scoring rules: - - * if `entity` is specified, check permission is granted for this entity - - * elif `row` is specified, check permission is granted for the entity found - in the specified cell - - * else check permission is granted for each entity found in the column - specified specified by the `col` argument or in column 0 - """ - def __init__(self, action): - self.action = action - - # don't use EntityPredicate.__call__ but this optimized implementation to - # avoid considering each entity when it's not necessary - def __call__(self, cls, req, rset=None, row=None, col=0, entity=None, **kwargs): - if entity is not None: - return self.score_entity(entity) - if rset is None: - return 0 - if row is None: - score = 0 - need_local_check = [] - geteschema = req.vreg.schema.eschema - user = req.user - action = self.action - for etype in rset.column_types(0): - if etype in BASE_TYPES: - return 0 - eschema = geteschema(etype) - if not user.matching_groups(eschema.get_groups(action)): - if eschema.has_local_role(action): - # have to ckeck local roles - need_local_check.append(eschema) - continue - else: - # even a local role won't be enough - return 0 - score += 1 - if need_local_check: - # check local role for entities of necessary types - for i, row in enumerate(rset): - if not rset.description[i][col] in need_local_check: - continue - # micro-optimisation instead of calling self.score(req, - # rset, i, col): rset may be large - if not rset.get_entity(i, col).cw_has_perm(action): - return 0 - score += 1 - return score - return self.score(req, rset, row, col) - - def score_entity(self, entity): - if entity.cw_has_perm(self.action): - return 1 - return 0 - - -class has_add_permission(EClassPredicate): - """Return 1 if request's user has the add permission on entity type - specified in the `etype` initializer argument, or according to entity found - in the input content if not specified. - - It also check that then entity type is not a strict subobject (e.g. may only - be used as a composed of another entity). - - See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity - class lookup / score rules according to the input context when `etype` is - not specified. - """ - def __init__(self, etype=None, **kwargs): - super(has_add_permission, self).__init__(**kwargs) - self.etype = etype - - def __call__(self, cls, req, **kwargs): - if self.etype is None: - return super(has_add_permission, self).__call__(cls, req, **kwargs) - return self.score(cls, req, self.etype) - - def score_class(self, eclass, req): - eschema = eclass.e_schema - if eschema.final or eschema.is_subobject(strict=True) \ - or not eschema.has_perm(req, 'add'): - return 0 - return 1 - - -class rql_condition(EntityPredicate): - """Return non-zero score if arbitrary rql specified in `expression` - initializer argument return some results for entity found in the input - context. Returned score is the number of items returned by the rql - condition. - - `expression` is expected to be a string containing an rql expression, which - must use 'X' variable to represent the context entity and may use 'U' to - represent the request's user. - - .. warning:: - If simply testing value of some attribute/relation of context entity (X), - you should rather use the :class:`score_entity` predicate which will - benefit from the ORM's request entities cache. - - See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity - lookup / score rules according to the input context. 
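For instance, a hypothetical view restricted to entities owned by the logged-in user, using the schema's standard owned_by relation:

from cubicweb.view import EntityView
from cubicweb.predicates import is_instance, rql_condition

class MyFilesView(EntityView):
    """Hypothetical view selectable only for File entities owned by the
    request's user ('X' is the context entity, 'U' the user, as documented)."""
    __regid__ = 'myfiles'
    __select__ = (EntityView.__select__
                  & is_instance('File')            # illustrative entity type
                  & rql_condition('X owned_by U'))

    def cell_call(self, row, col):
        self.w(self.cw_rset.get_entity(row, col).dc_title())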
- """ - def __init__(self, expression, once_is_enough=None, mode='all', user_condition=False): - super(rql_condition, self).__init__(mode=mode, once_is_enough=once_is_enough) - self.user_condition = user_condition - if user_condition: - rql = 'Any COUNT(U) WHERE U eid %%(u)s, %s' % expression - elif 'U' in frozenset(split_expression(expression)): - rql = 'Any COUNT(X) WHERE X eid %%(x)s, U eid %%(u)s, %s' % expression - else: - rql = 'Any COUNT(X) WHERE X eid %%(x)s, %s' % expression - self.rql = rql - - def __str__(self): - return '%s(%r)' % (self.__class__.__name__, self.rql) - - def __call__(self, cls, req, **kwargs): - if self.user_condition: - try: - return req.execute(self.rql, {'u': req.user.eid})[0][0] - except Unauthorized: - return 0 - else: - return super(rql_condition, self).__call__(cls, req, **kwargs) - - def _score(self, req, eid): - try: - return req.execute(self.rql, {'x': eid, 'u': req.user.eid})[0][0] - except Unauthorized: - return 0 - - def score(self, req, rset, row, col): - return self._score(req, rset[row][col]) - - def score_entity(self, entity): - return self._score(entity._cw, entity.eid) - - -# workflow predicates ########################################################### - -class is_in_state(score_entity): - """Return 1 if entity is in one of the states given as argument list - - You should use this instead of your own :class:`score_entity` predicate to - avoid some gotchas: - - * possible views gives a fake entity with no state - * you must use the latest tr info thru the workflow adapter for repository - side checking of the current state - - In debug mode, this predicate can raise :exc:`ValueError` for unknown states names - (only checked on entities without a custom workflow) - - :rtype: int - """ - def __init__(self, *expected): - assert expected, self - self.expected = frozenset(expected) - def score(entity, expected=self.expected): - adapted = entity.cw_adapt_to('IWorkflowable') - # in debug mode only (time consuming) - if entity._cw.vreg.config.debugmode: - # validation can only be done for generic etype workflow because - # expected transition list could have been changed for a custom - # workflow (for the current entity) - if not entity.custom_workflow: - self._validate(adapted) - return self._score(adapted) - super(is_in_state, self).__init__(score) - - def _score(self, adapted): - trinfo = adapted.latest_trinfo() - if trinfo is None: # entity is probably in it's initial state - statename = adapted.state - else: - statename = trinfo.new_state.name - return statename in self.expected - - def _validate(self, adapted): - wf = adapted.current_workflow - valid = [n.name for n in wf.reverse_state_of] - unknown = sorted(self.expected.difference(valid)) - if unknown: - raise ValueError("%s: unknown state(s): %s" - % (wf.name, ",".join(unknown))) - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, - ','.join(str(s) for s in self.expected)) - - -def on_fire_transition(etype, tr_names, from_state_name=None): - """Return 1 when entity of the type `etype` is going through transition of - a name included in `tr_names`. - - You should use this predicate on 'after_add_entity' hook, since it's actually - looking for addition of `TrInfo` entities. Hence in the hook, `self.entity` - will reference the matching `TrInfo` entity, allowing to get all the - transition details (including the entity to which is applied the transition - but also its original state, transition, destination state, user...). 
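A hedged sketch of the hook pattern described here (assuming the usual cubicweb.server.hook.Hook base class; the 'Ticket' entity type and 'close' transition are illustrative):

from cubicweb.predicates import on_fire_transition
from cubicweb.server import hook

class TicketClosedHook(hook.Hook):
    """Hypothetical hook fired when a Ticket entity goes through its 'close'
    transition; self.entity is the TrInfo entity, as explained above."""
    __regid__ = 'myapp.ticket-closed'
    __select__ = hook.Hook.__select__ & on_fire_transition('Ticket', 'close')
    events = ('after_add_entity',)

    def __call__(self):
        trinfo = self.entity
        ticket = trinfo.for_entity
        self.info('ticket #%s is now in state %s',
                  ticket.eid, trinfo.new_state.name)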
- - See :class:`cubicweb.entities.wfobjs.TrInfo` for more information. - """ - if from_state_name is not None: - warn("on_fire_transition's from_state_name argument is unused", DeprecationWarning) - if isinstance(tr_names, string_types): - tr_names = set((tr_names,)) - def match_etype_and_transition(trinfo): - # take care trinfo.transition is None when calling change_state - return (trinfo.transition and trinfo.transition.name in tr_names - # is_instance() first two arguments are 'cls' (unused, so giving - # None is fine) and the request/session - and is_instance(etype)(None, trinfo._cw, entity=trinfo.for_entity)) - - return is_instance('TrInfo') & score_entity(match_etype_and_transition) - - -class match_transition(ExpectedValuePredicate): - """Return 1 if `transition` argument is found in the input context which has - a `.name` attribute matching one of the expected names given to the - initializer. - - This predicate is expected to be used to customise the status change form in - the web ui. - """ - def __call__(self, cls, req, transition=None, **kwargs): - # XXX check this is a transition that apply to the object? - if transition is None: - treid = req.form.get('treid', None) - if treid: - transition = req.entity_from_eid(treid) - if transition is not None and getattr(transition, 'name', None) in self.expected: - return 1 - return 0 - - -# logged user predicates ######################################################## - -@objectify_predicate -def no_cnx(cls, req, **kwargs): - """Return 1 if the web session has no connection set. This occurs when - anonymous access is not allowed and user isn't authenticated. - """ - if not req.cnx: - return 1 - return 0 - - -@objectify_predicate -def authenticated_user(cls, req, **kwargs): - """Return 1 if the user is authenticated (i.e. not the anonymous user). - """ - if req.session.anonymous_session: - return 0 - return 1 - - -@objectify_predicate -def anonymous_user(cls, req, **kwargs): - """Return 1 if the user is not authenticated (i.e. is the anonymous user). - """ - if req.session.anonymous_session: - return 1 - return 0 - - -class match_user_groups(ExpectedValuePredicate): - """Return a non-zero score if request's user is in at least one of the - groups given as initializer argument. Returned score is the number of groups - in which the user is. - - If the special 'owners' group is given and `rset` is specified in the input - context: - - * if `row` is specified check the entity at the given `row`/`col` (default - to 0) is owned by the user - - * else check all entities in `col` (default to 0) are owned by the user - """ - - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): - if not getattr(req, 'cnx', True): # default to True for repo session instances - return 0 - user = req.user - if user is None: - return int('guests' in self.expected) - score = user.matching_groups(self.expected) - if not score and 'owners' in self.expected and rset: - if row is not None: - if not user.owns(rset[row][col]): - return 0 - score = 1 - else: - score = all(user.owns(r[col]) for r in rset) - return score - -# Web request predicates ######################################################## - -# XXX deprecate -@objectify_predicate -def primary_view(cls, req, view=None, **kwargs): - """Return 1 if: - - * *no view is specified* in the input context - - * a view is specified and its `.is_primary()` method return True - - This predicate is usually used by contextual components that only want to - appears for the primary view of an entity. 
- """ - if view is not None and not view.is_primary(): - return 0 - return 1 - - -@objectify_predicate -def contextual(cls, req, view=None, **kwargs): - """Return 1 if view's contextual property is true""" - if view is not None and view.contextual: - return 1 - return 0 - - -class match_view(ExpectedValuePredicate): - """Return 1 if a view is specified an as its registry id is in one of the - expected view id given to the initializer. - """ - def __call__(self, cls, req, view=None, **kwargs): - if view is None or not view.__regid__ in self.expected: - return 0 - return 1 - - -class match_context(ExpectedValuePredicate): - - def __call__(self, cls, req, context=None, **kwargs): - if not context in self.expected: - return 0 - return 1 - - -# XXX deprecate -@objectify_predicate -def match_context_prop(cls, req, context=None, **kwargs): - """Return 1 if: - - * no `context` is specified in input context (take care to confusion, here - `context` refers to a string given as an argument to the input context...) - - * specified `context` is matching the context property value for the - appobject using this predicate - - * the appobject's context property value is None - - This predicate is usually used by contextual components that want to appears - in a configurable place. - """ - if context is None: - return 1 - propval = req.property_value('%s.%s.context' % (cls.__registry__, - cls.__regid__)) - if propval and context != propval: - return 0 - return 1 - - -class match_search_state(ExpectedValuePredicate): - """Return 1 if the current request search state is in one of the expected - states given to the initializer. - - Known search states are either 'normal' or 'linksearch' (eg searching for an - object to create a relation with another). - - This predicate is usually used by action that want to appears or not according - to the ui search state. - """ - - def __call__(self, cls, req, **kwargs): - try: - if not req.search_state[0] in self.expected: - return 0 - except AttributeError: - return 1 # class doesn't care about search state, accept it - return 1 - - -class match_form_params(ExpectedValuePredicate): - """Return non-zero score if parameter names specified as initializer - arguments are specified in request's form parameters. - - Return a score corresponding to the number of expected parameters. - - When multiple parameters are expected, all of them should be found in - the input context unless `mode` keyword argument is given to 'any', - in which case a single matching parameter is enough. - """ - - def __init__(self, *expected, **kwargs): - """override default __init__ to allow either named or positional - parameters. - """ - if kwargs and expected: - raise ValueError("match_form_params() can't be called with both " - "positional and named arguments") - if expected: - if len(expected) == 1 and not isinstance(expected[0], string_types): - raise ValueError("match_form_params() positional arguments " - "must be strings") - super(match_form_params, self).__init__(*expected) - else: - super(match_form_params, self).__init__(kwargs) - - def _values_set(self, cls, req, **kwargs): - return req.form - - -class match_http_method(ExpectedValuePredicate): - """Return non-zero score if one of the HTTP methods specified as - initializer arguments is the HTTP method of the request (GET, POST, ...). 
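These request-level predicates typically guard controllers; a minimal sketch (the Controller base class import and the parameter names are assumptions made for illustration):

from cubicweb.web.controller import Controller
from cubicweb.predicates import match_form_params, match_http_method

class ExportController(Controller):
    """Hypothetical controller selectable only for POST requests carrying an
    'eid' form parameter."""
    __regid__ = 'myexport'
    __select__ = match_http_method('POST') & match_form_params('eid')

    def publish(self, rset=None):
        entity = self._cw.entity_from_eid(int(self._cw.form['eid']))
        return entity.dc_title().encode(self._cw.encoding)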
- """ - - def __call__(self, cls, req, **kwargs): - return int(req.http_method() in self.expected) - - -class match_edited_type(ExpectedValuePredicate): - """return non-zero if main edited entity type is the one specified as - initializer argument, or is among initializer arguments if `mode` == 'any'. - """ - - def _values_set(self, cls, req, **kwargs): - try: - return frozenset((req.form['__type:%s' % req.form['__maineid']],)) - except KeyError: - return frozenset() - - -class match_form_id(ExpectedValuePredicate): - """return non-zero if request form identifier is the one specified as - initializer argument, or is among initializer arguments if `mode` == 'any'. - """ - - def _values_set(self, cls, req, **kwargs): - try: - return frozenset((req.form['__form_id'],)) - except KeyError: - return frozenset() - - -class specified_etype_implements(is_instance): - """Return non-zero score if the entity type specified by an 'etype' key - searched in (by priority) input context kwargs and request form parameters - match a known entity type (case insensitivly), and it's associated entity - class is of one of the type(s) given to the initializer. If multiple - arguments are given, matching one of them is enough. - - .. note:: as with :class:`~cubicweb.predicates.is_instance`, entity types - should be given as string and the score will reflect class - proximity so the most specific object will be selected. - - This predicate is usually used by views holding entity creation forms (since - we've no result set to work on). - """ - - def __call__(self, cls, req, **kwargs): - try: - etype = kwargs['etype'] - except KeyError: - try: - etype = req.form['etype'] - except KeyError: - return 0 - else: - # only check this is a known type if etype comes from req.form, - # else we want the error to propagate - try: - etype = req.vreg.case_insensitive_etypes[etype.lower()] - req.form['etype'] = etype - except KeyError: - return 0 - score = self.score_class(req.vreg['etypes'].etype_class(etype), req) - if score: - eschema = req.vreg.schema.eschema(etype) - if eschema.may_have_permission('add', req): - return score - return 0 - - -class attribute_edited(EntityPredicate): - """Scores if the specified attribute has been edited This is useful for - selection of forms by the edit controller. - - The initial use case is on a form, in conjunction with match_transition, - which will not score at edit time:: - - is_instance('Version') & (match_transition('ready') | - attribute_edited('publication_date')) - """ - def __init__(self, attribute, once_is_enough=None, mode='all'): - super(attribute_edited, self).__init__(mode=mode, once_is_enough=once_is_enough) - self._attribute = attribute - - def score_entity(self, entity): - return eid_param(role_name(self._attribute, 'subject'), entity.eid) in entity._cw.form - - -# Other predicates ############################################################## - -class match_exception(ExpectedValuePredicate): - """Return 1 if exception given as `exc` in the input context is an instance - of one of the class given on instanciation of this predicate. 
- """ - def __init__(self, *expected): - assert expected, self - # we want a tuple, not a set as done in the parent class - self.expected = expected - - def __call__(self, cls, req, exc=None, **kwargs): - if exc is not None and isinstance(exc, self.expected): - return 1 - return 0 - - -@objectify_predicate -def debug_mode(cls, req, rset=None, **kwargs): - """Return 1 if running in debug mode.""" - return req.vreg.config.debugmode and 1 or 0 diff -r 058bb3dc685f -r 0b59724cb3f2 pylintext.py --- a/pylintext.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -"""https://pastebin.logilab.fr/show/860/""" - -from astroid import MANAGER, InferenceError, nodes, scoped_nodes -from astroid.builder import AstroidBuilder - -def turn_function_to_class(node): - """turn a Function node into a Class node (in-place)""" - node.__class__ = scoped_nodes.Class - node.bases = () - # remove return nodes so that we don't get warned about 'return outside - # function' by pylint - for rnode in node.nodes_of_class(nodes.Return): - rnode.parent.body.remove(rnode) - # that seems to be enough :) - - -def cubicweb_transform(module): - # handle objectify_predicate decorator (and its former name until bw compat - # is kept). Only look at module level functions, should be enough. - for assnodes in module.locals.values(): - for node in assnodes: - if isinstance(node, scoped_nodes.Function) and node.decorators: - for decorator in node.decorators.nodes: - try: - for infered in decorator.infer(): - if infered.name in ('objectify_predicate', 'objectify_selector'): - turn_function_to_class(node) - break - else: - continue - break - except InferenceError: - continue - # add yams base types into 'yams.buildobjs', astng doesn't grasp globals() - # magic in there - if module.name == 'yams.buildobjs': - from yams import BASE_TYPES - for etype in BASE_TYPES: - module.locals[etype] = [scoped_nodes.Class(etype, None)] - # add data() to uiprops module - if module.name.split('.')[-1] == 'uiprops': - fake = AstroidBuilder(MANAGER).string_build(''' -def data(string): - return u'' -''') - module.locals['data'] = fake.locals['data'] - -def register(linter): - """called when loaded by pylint --load-plugins, nothing to do here""" - MANAGER.register_transform(nodes.Module, cubicweb_transform) diff -r 058bb3dc685f -r 0b59724cb3f2 pytestconf.py --- a/pytestconf.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""pytest configuration file: we need this to properly remove ressources -cached on test classes, at least until we've proper support for teardown_class -""" -import sys -from os.path import split, splitext -from logilab.common.pytest import PyTester - -class CustomPyTester(PyTester): - def testfile(self, filename, batchmode=False): - try: - return super(CustomPyTester, self).testfile(filename, batchmode) - finally: - modname = splitext(split(filename)[1])[0] - try: - module = sys.modules[modname] - except KeyError: - # error during test module import - return - for cls in vars(module).values(): - if getattr(cls, '__module__', None) != modname: - continue - clean_repo_test_cls(cls) - -def clean_repo_test_cls(cls): - if 'repo' in cls.__dict__: - if not cls.repo.shutting_down: - cls.repo.shutdown() - del cls.repo - for clsattr in ('cnx', 'config', '_config', 'vreg', 'schema'): - if clsattr in cls.__dict__: - delattr(cls, clsattr) diff -r 058bb3dc685f -r 0b59724cb3f2 repoapi.py --- a/repoapi.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,71 +0,0 @@ -# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Official API to access the content of a repository -""" -from warnings import warn - -from six import add_metaclass - -from logilab.common.deprecation import class_deprecated - -from cubicweb.utils import parse_repo_uri -from cubicweb import AuthenticationError -from cubicweb.server.session import Connection - - -### public API ###################################################### - -def get_repository(uri=None, config=None, vreg=None): - """get a repository for the given URI or config/vregistry (in case we're - loading the repository for a client, eg web server, configuration). - - The returned repository may be an in-memory repository or a proxy object - using a specific RPC method, depending on the given URI. - """ - if uri is not None: - warn('[3.22] get_repository only wants a config') - - assert config is not None, 'get_repository(config=config)' - return config.repository(vreg) - -def connect(repo, login, **kwargs): - """Take credential and return associated Connection. - - raise AuthenticationError if the credential are invalid.""" - sessionid = repo.connect(login, **kwargs) - session = repo._get_session(sessionid) - # XXX the autoclose_session should probably be handle on the session directly - # this is something to consider once we have proper server side Connection. - return Connection(session) - -def anonymous_cnx(repo): - """return a Connection for Anonymous user. 
- - raises an AuthenticationError if anonymous usage is not allowed - """ - anoninfo = getattr(repo.config, 'anonymous_user', lambda: None)() - if anoninfo is None: # no anonymous user - raise AuthenticationError('anonymous access is not authorized') - anon_login, anon_password = anoninfo - # use vreg's repository cache - return connect(repo, anon_login, password=anon_password) - - -@add_metaclass(class_deprecated) -class ClientConnection(Connection): - __deprecation_warning__ = '[3.20] %(cls)s is deprecated, use Connection instead' diff -r 058bb3dc685f -r 0b59724cb3f2 req.py --- a/req.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,508 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Base class for request/session""" - -__docformat__ = "restructuredtext en" - -from warnings import warn -from datetime import time, datetime, timedelta - -from six import PY2, PY3, text_type -from six.moves.urllib.parse import parse_qs, parse_qsl, quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit - -from logilab.common.decorators import cached -from logilab.common.deprecation import deprecated -from logilab.common.date import ustrftime, strptime, todate, todatetime - -from rql.utils import rqlvar_maker - -from cubicweb import (Unauthorized, NoSelectableObject, NoResultError, - MultipleResultsError, uilib) -from cubicweb.rset import ResultSet - -ONESECOND = timedelta(0, 1, 0) -CACHE_REGISTRY = {} - -class FindEntityError(Exception): - """raised when find_one_entity() can not return one and only one entity""" - -class Cache(dict): - def __init__(self): - super(Cache, self).__init__() - _now = datetime.now() - self.cache_creation_date = _now - self.latest_cache_lookup = _now - - -class RequestSessionBase(object): - """base class containing stuff shared by server session and web request - - request/session is the main resources accessor, mainly through it's vreg - attribute: - - :attribute vreg: the instance's registry - :attribute vreg.schema: the instance's schema - :attribute vreg.config: the instance's configuration - """ - is_request = True # False for repository session - - def __init__(self, vreg): - self.vreg = vreg - try: - encoding = vreg.property_value('ui.encoding') - except Exception: # no vreg or property not registered - encoding = 'utf-8' - self.encoding = encoding - # cache result of execution for (rql expr / eids), - # should be emptied on commit/rollback of the server session / web - # connection - self.user = None - self.local_perm_cache = {} - self._ = text_type - - def _set_user(self, orig_user): - """set the user for this req_session_base - - A special method is needed to ensure the linked user is linked to the - connection too. 
- """ - rset = self.eid_rset(orig_user.eid, 'CWUser') - user_cls = self.vreg['etypes'].etype_class('CWUser') - user = user_cls(self, rset, row=0, groups=orig_user.groups, - properties=orig_user.properties) - user.cw_attr_cache['login'] = orig_user.login # cache login - self.user = user - self.set_entity_cache(user) - self.set_language(user.prefered_language()) - - - def set_language(self, lang): - """install i18n configuration for `lang` translation. - - Raises :exc:`KeyError` if translation doesn't exist. - """ - self.lang = lang - gettext, pgettext = self.vreg.config.translations[lang] - # use _cw.__ to translate a message without registering it to the catalog - self._ = self.__ = gettext - self.pgettext = pgettext - - def get_option_value(self, option): - raise NotImplementedError - - def property_value(self, key): - """return value of the property with the given key, giving priority to - user specific value if any, else using site value - """ - if self.user: - val = self.user.property_value(key) - if val is not None: - return val - return self.vreg.property_value(key) - - def etype_rset(self, etype, size=1): - """return a fake result set for a particular entity type""" - rset = ResultSet([('A',)]*size, '%s X' % etype, - description=[(etype,)]*size) - def get_entity(row, col=0, etype=etype, req=self, rset=rset): - return req.vreg['etypes'].etype_class(etype)(req, rset, row, col) - rset.get_entity = get_entity - rset.req = self - return rset - - def eid_rset(self, eid, etype=None): - """return a result set for the given eid without doing actual query - (we have the eid, we can suppose it exists and user has access to the - entity) - """ - eid = int(eid) - if etype is None: - etype = self.entity_metas(eid)['type'] - rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid}, - [(etype,)]) - rset.req = self - return rset - - def empty_rset(self): - """ return a guaranteed empty result """ - rset = ResultSet([], 'Any X WHERE X eid -1') - rset.req = self - return rset - - def entity_from_eid(self, eid, etype=None): - """return an entity instance for the given eid. No query is done""" - try: - return self.entity_cache(eid) - except KeyError: - rset = self.eid_rset(eid, etype) - entity = rset.get_entity(0, 0) - self.set_entity_cache(entity) - return entity - - def entity_cache(self, eid): - raise KeyError - - def set_entity_cache(self, entity): - pass - - def create_entity(self, etype, **kwargs): - """add a new entity of the given type - - Example (in a shell session): - - >>> c = create_entity('Company', name=u'Logilab') - >>> create_entity('Person', firstname=u'John', surname=u'Doe', - ... works_for=c) - - """ - cls = self.vreg['etypes'].etype_class(etype) - return cls.cw_instantiate(self.execute, **kwargs) - - @deprecated('[3.18] use find(etype, **kwargs).entities()') - def find_entities(self, etype, **kwargs): - """find entities of the given type and attribute values. - - >>> users = find_entities('CWGroup', name=u'users') - >>> groups = find_entities('CWGroup') - """ - return self.find(etype, **kwargs).entities() - - @deprecated('[3.18] use find(etype, **kwargs).one()') - def find_one_entity(self, etype, **kwargs): - """find one entity of the given type and attribute values. - raise :exc:`FindEntityError` if can not return one and only one entity. 
- - >>> users = find_one_entity('CWGroup', name=u'users') - >>> groups = find_one_entity('CWGroup') - Exception() - """ - try: - return self.find(etype, **kwargs).one() - except (NoResultError, MultipleResultsError) as e: - raise FindEntityError("%s: (%s, %s)" % (str(e), etype, kwargs)) - - def find(self, etype, **kwargs): - """find entities of the given type and attribute values. - - :returns: A :class:`ResultSet` - - >>> users = find('CWGroup', name=u"users").one() - >>> groups = find('CWGroup').entities() - """ - parts = ['Any X WHERE X is %s' % etype] - varmaker = rqlvar_maker(defined='X') - eschema = self.vreg.schema.eschema(etype) - for attr, value in kwargs.items(): - if isinstance(value, list) or isinstance(value, tuple): - raise NotImplementedError("List of values are not supported") - if hasattr(value, 'eid'): - kwargs[attr] = value.eid - if attr.startswith('reverse_'): - attr = attr[8:] - assert attr in eschema.objrels, \ - '%s not in %s object relations' % (attr, eschema) - parts.append( - '%(varname)s %(attr)s X, ' - '%(varname)s eid %%(reverse_%(attr)s)s' - % {'attr': attr, 'varname': next(varmaker)}) - else: - assert attr in eschema.subjrels, \ - '%s not in %s subject relations' % (attr, eschema) - parts.append('X %(attr)s %%(%(attr)s)s' % {'attr': attr}) - - rql = ', '.join(parts) - - return self.execute(rql, kwargs) - - def ensure_ro_rql(self, rql): - """raise an exception if the given rql is not a select query""" - first = rql.split(None, 1)[0].lower() - if first in ('insert', 'set', 'delete'): - raise Unauthorized(self._('only select queries are authorized')) - - def get_cache(self, cachename): - """cachename should be dotted names as in : - - - cubicweb.mycache - - cubes.blog.mycache - - etc. - """ - warn.warning('[3.19] .get_cache will disappear soon. ' - 'Distributed caching mechanisms are being introduced instead.' - 'Other caching mechanism can be used more reliably ' - 'to the same effect.', - DeprecationWarning) - if cachename in CACHE_REGISTRY: - cache = CACHE_REGISTRY[cachename] - else: - cache = CACHE_REGISTRY[cachename] = Cache() - _now = datetime.now() - if _now > cache.latest_cache_lookup + ONESECOND: - ecache = self.execute( - 'Any C,T WHERE C is CWCache, C name %(name)s, C timestamp T', - {'name':cachename}).get_entity(0,0) - cache.latest_cache_lookup = _now - if not ecache.valid(cache.cache_creation_date): - cache.clear() - cache.cache_creation_date = _now - return cache - - # url generation methods ################################################## - - def build_url(self, *args, **kwargs): - """return an absolute URL using params dictionary key/values as URL - parameters. Values are automatically URL quoted, and the - publishing method to use may be specified or will be guessed. - - if ``__secure__`` argument is True, the request will try to build a - https url. 
- - raises :exc:`ValueError` if None is found in arguments - """ - # use *args since we don't want first argument to be "anonymous" to - # avoid potential clash with kwargs - method = None - if args: - assert len(args) == 1, 'only 0 or 1 non-named-argument expected' - method = args[0] - if method is None: - method = 'view' - # XXX I (adim) think that if method is passed explicitly, we should - # not try to process it and directly call req.build_url() - base_url = kwargs.pop('base_url', None) - if base_url is None: - secure = kwargs.pop('__secure__', None) - base_url = self.base_url(secure=secure) - if '_restpath' in kwargs: - assert method == 'view', repr(method) - path = kwargs.pop('_restpath') - else: - path = method - if not kwargs: - return u'%s%s' % (base_url, path) - return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs)) - - def build_url_params(self, **kwargs): - """return encoded params to incorporate them in a URL""" - args = [] - for param, values in kwargs.items(): - if not isinstance(values, (list, tuple)): - values = (values,) - for value in values: - assert value is not None - args.append(u'%s=%s' % (param, self.url_quote(value))) - return '&'.join(args) - - def url_quote(self, value, safe=''): - """urllib.quote is not unicode safe, use this method to do the - necessary encoding / decoding. Also it's designed to quote each - part of a url path and so the '/' character will be encoded as well. - """ - if PY2 and isinstance(value, unicode): - quoted = urlquote(value.encode(self.encoding), safe=safe) - return unicode(quoted, self.encoding) - return urlquote(str(value), safe=safe) - - def url_unquote(self, quoted): - """returns a unicode unquoted string - - decoding is based on `self.encoding` which is the encoding - used in `url_quote` - """ - if PY3: - return urlunquote(quoted) - if isinstance(quoted, unicode): - quoted = quoted.encode(self.encoding) - try: - return unicode(urlunquote(quoted), self.encoding) - except UnicodeDecodeError: # might occurs on manually typed URLs - return unicode(urlunquote(quoted), 'iso-8859-1') - - def url_parse_qsl(self, querystring): - """return a list of (key, val) found in the url quoted query string""" - if PY3: - for key, val in parse_qsl(querystring): - yield key, val - return - if isinstance(querystring, unicode): - querystring = querystring.encode(self.encoding) - for key, val in parse_qsl(querystring): - try: - yield unicode(key, self.encoding), unicode(val, self.encoding) - except UnicodeDecodeError: # might occurs on manually typed URLs - yield unicode(key, 'iso-8859-1'), unicode(val, 'iso-8859-1') - - - def rebuild_url(self, url, **newparams): - """return the given url with newparams inserted. If any new params - is already specified in the url, it's overriden by the new value - - newparams may only be mono-valued. - """ - if PY2 and isinstance(url, unicode): - url = url.encode(self.encoding) - schema, netloc, path, query, fragment = urlsplit(url) - query = parse_qs(query) - # sort for testing predictability - for key, val in sorted(newparams.items()): - query[key] = (self.url_quote(val),) - query = '&'.join(u'%s=%s' % (param, value) - for param, values in sorted(query.items()) - for value in values) - return urlunsplit((schema, netloc, path, query, fragment)) - - # bound user related methods ############################################### - - @cached - def user_data(self): - """returns a dictionary with this user's information. 
- - The keys are : - - login - The user login - - name - The user name, returned by user.name() - - email - The user principal email - - """ - userinfo = {} - user = self.user - userinfo['login'] = user.login - userinfo['name'] = user.name() - userinfo['email'] = user.cw_adapt_to('IEmailable').get_email() - return userinfo - - # formating methods ####################################################### - - def view(self, __vid, rset=None, __fallback_oid=None, __registry='views', - initargs=None, w=None, **kwargs): - """Select object with the given id (`__oid`) then render it. If the - object isn't selectable, try to select fallback object if - `__fallback_oid` is specified. - - If specified `initargs` is expected to be a dictionary containing - arguments that should be given to selection (hence to object's __init__ - as well), but not to render(). Other arbitrary keyword arguments will be - given to selection *and* to render(), and so should be handled by - object's call or cell_call method.. - """ - if initargs is None: - initargs = kwargs - else: - initargs.update(kwargs) - try: - view = self.vreg[__registry].select(__vid, self, rset=rset, **initargs) - except NoSelectableObject: - if __fallback_oid is None: - raise - view = self.vreg[__registry].select(__fallback_oid, self, - rset=rset, **initargs) - return view.render(w=w, **kwargs) - - def printable_value(self, attrtype, value, props=None, displaytime=True, - formatters=uilib.PRINTERS): - """return a displayablye value (i.e. unicode string)""" - if value is None: - return u'' - try: - as_string = formatters[attrtype] - except KeyError: - self.error('given bad attrtype %s', attrtype) - return unicode(value) - return as_string(value, self, props, displaytime) - - def format_date(self, date, date_format=None, time=False): - """return a string for a date time according to instance's - configuration - """ - if date is not None: - if date_format is None: - if time: - date_format = self.property_value('ui.datetime-format') - else: - date_format = self.property_value('ui.date-format') - return ustrftime(date, date_format) - return u'' - - def format_time(self, time): - """return a string for a time according to instance's - configuration - """ - if time is not None: - return ustrftime(time, self.property_value('ui.time-format')) - return u'' - - def format_float(self, num): - """return a string for floating point number according to instance's - configuration - """ - if num is not None: - return self.property_value('ui.float-format') % num - return u'' - - def parse_datetime(self, value, etype='Datetime'): - """get a datetime or time from a string (according to etype) - Datetime formatted as Date are accepted - """ - assert etype in ('Datetime', 'Date', 'Time'), etype - # XXX raise proper validation error - if etype == 'Datetime': - format = self.property_value('ui.datetime-format') - try: - return todatetime(strptime(value, format)) - except ValueError: - pass - elif etype == 'Time': - format = self.property_value('ui.time-format') - try: - # (adim) I can't find a way to parse a Time with a custom format - date = strptime(value, format) # this returns a DateTime - return time(date.hour, date.minute, date.second) - except ValueError: - raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') - % {'value': value, 'format': format}) - try: - format = self.property_value('ui.date-format') - dt = strptime(value, format) - if etype == 'Datetime': - return todatetime(dt) - return todate(dt) - except ValueError: - raise 
ValueError(self._('can\'t parse %(value)r (expected %(format)s)') - % {'value': value, 'format': format}) - - def _base_url(self, secure=None): - if secure: - return self.vreg.config.get('https-url') or self.vreg.config['base-url'] - return self.vreg.config['base-url'] - - def base_url(self, secure=None): - """return the root url of the instance - """ - url = self._base_url(secure=secure) - return url if url is None else url.rstrip('/') + '/' - - # abstract methods to override according to the web front-end ############# - - def describe(self, eid, asdict=False): - """return a tuple (type, sourceuri, extid) for the entity with id """ - raise NotImplementedError diff -r 058bb3dc685f -r 0b59724cb3f2 rqlrewrite.py --- a/rqlrewrite.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,933 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""RQL rewriting utilities : insert rql expression snippets into rql syntax -tree. - -This is used for instance for read security checking in the repository. -""" -__docformat__ = "restructuredtext en" - -from six import text_type, string_types - -from rql import nodes as n, stmts, TypeResolverException -from rql.utils import common_parent - -from yams import BadSchemaDefinition - -from logilab.common import tempattr -from logilab.common.graph import has_path - -from cubicweb import Unauthorized -from cubicweb.schema import RRQLExpression - -def cleanup_solutions(rqlst, solutions): - for sol in solutions: - for vname in list(sol): - if not (vname in rqlst.defined_vars or vname in rqlst.aliases): - del sol[vname] - - -def add_types_restriction(schema, rqlst, newroot=None, solutions=None): - if newroot is None: - assert solutions is None - if hasattr(rqlst, '_types_restr_added'): - return - solutions = rqlst.solutions - newroot = rqlst - rqlst._types_restr_added = True - else: - assert solutions is not None - rqlst = rqlst.stmt - eschema = schema.eschema - allpossibletypes = {} - for solution in solutions: - for varname, etype in solution.items(): - # XXX not considering aliases by design, right ? 
- if varname not in newroot.defined_vars or eschema(etype).final: - continue - allpossibletypes.setdefault(varname, set()).add(etype) - # XXX could be factorized with add_etypes_restriction from rql 0.31 - for varname in sorted(allpossibletypes): - var = newroot.defined_vars[varname] - stinfo = var.stinfo - if stinfo.get('uidrel') is not None: - continue # eid specified, no need for additional type specification - try: - typerel = rqlst.defined_vars[varname].stinfo.get('typerel') - except KeyError: - assert varname in rqlst.aliases - continue - if newroot is rqlst and typerel is not None: - mytyperel = typerel - else: - for vref in var.references(): - rel = vref.relation() - if rel and rel.is_types_restriction(): - mytyperel = rel - break - else: - mytyperel = None - possibletypes = allpossibletypes[varname] - if mytyperel is not None: - if mytyperel.r_type == 'is_instance_of': - # turn is_instance_of relation into a is relation since we've - # all possible solutions and don't want to bother with - # potential is_instance_of incompatibility - mytyperel.r_type = 'is' - if len(possibletypes) > 1: - node = n.Function('IN') - for etype in sorted(possibletypes): - node.append(n.Constant(etype, 'etype')) - else: - etype = next(iter(possibletypes)) - node = n.Constant(etype, 'etype') - comp = mytyperel.children[1] - comp.replace(comp.children[0], node) - else: - # variable has already some strict types restriction. new - # possible types can only be a subset of existing ones, so only - # remove no more possible types - for cst in mytyperel.get_nodes(n.Constant): - if not cst.value in possibletypes: - cst.parent.remove(cst) - else: - # we have to add types restriction - if stinfo.get('scope') is not None: - rel = var.scope.add_type_restriction(var, possibletypes) - else: - # tree is not annotated yet, no scope set so add the restriction - # to the root - rel = newroot.add_type_restriction(var, possibletypes) - stinfo['typerel'] = rel - stinfo['possibletypes'] = possibletypes - - -def remove_solutions(origsolutions, solutions, defined): - """when a rqlst has been generated from another by introducing security - assertions, this method returns solutions which are contained in orig - solutions - """ - newsolutions = [] - for origsol in origsolutions: - for newsol in solutions[:]: - for var, etype in origsol.items(): - try: - if newsol[var] != etype: - try: - defined[var].stinfo['possibletypes'].remove(newsol[var]) - except KeyError: - pass - break - except KeyError: - # variable has been rewritten - continue - else: - newsolutions.append(newsol) - solutions.remove(newsol) - return newsolutions - - -def _add_noinvariant(noinvariant, restricted, select, nbtrees): - # a variable can actually be invariant if it has not been restricted for - # security reason or if security assertion hasn't modified the possible - # solutions for the query - for vname in restricted: - try: - var = select.defined_vars[vname] - except KeyError: - # this is an alias - continue - if nbtrees != 1 or len(var.stinfo['possibletypes']) != 1: - noinvariant.add(var) - - -def _expand_selection(terms, selected, aliases, select, newselect): - for term in terms: - for vref in term.iget_nodes(n.VariableRef): - if not vref.name in selected: - select.append_selected(vref) - colalias = newselect.get_variable(vref.name, len(aliases)) - aliases.append(n.VariableRef(colalias)) - selected.add(vref.name) - -def _has_multiple_cardinality(etypes, rdef, ttypes_func, cardindex): - """return True if relation definitions from entity types (`etypes`) 
to - target types returned by the `ttypes_func` function all have single (1 or ?) - cardinality. - """ - for etype in etypes: - for ttype in ttypes_func(etype): - if rdef(etype, ttype).cardinality[cardindex] in '+*': - return True - return False - -def _compatible_relation(relations, stmt, sniprel): - """Search among given rql relation nodes if there is one 'compatible' with the - snippet relation, and return it if any, else None. - - A relation is compatible if it: - * belongs to the currently processed statement, - * isn't negged (i.e. direct parent is a NOT node) - * isn't optional (outer join) or similarly as the snippet relation - """ - for rel in relations: - # don't share if relation's scope is not the current statement - if rel.scope is not stmt: - continue - # don't share neged relation - if rel.neged(strict=True): - continue - # don't share optional relation, unless the snippet relation is - # similarly optional - if rel.optional and rel.optional != sniprel.optional: - continue - return rel - return None - - -def iter_relations(stinfo): - # this is a function so that test may return relation in a predictable order - return stinfo['relations'] - stinfo['rhsrelations'] - - -class Unsupported(Exception): - """raised when an rql expression can't be inserted in some rql query - because it create an unresolvable query (eg no solutions found) - """ - -class VariableFromSubQuery(Exception): - """flow control exception to indicate that a variable is coming from a - subquery, and let parent act accordingly - """ - def __init__(self, variable): - self.variable = variable - - -class RQLRewriter(object): - """Insert some rql snippets into another rql syntax tree, for security / - relation vocabulary. This implies that it should only restrict results of - the original query, not generate new ones. Hence, inserted snippets are - inserted under an EXISTS node. - - This class *isn't thread safe*. 
- """ - - def __init__(self, session): - self.session = session - vreg = session.vreg - self.schema = vreg.schema - self.annotate = vreg.rqlhelper.annotate - self._compute_solutions = vreg.solutions - - def compute_solutions(self): - self.annotate(self.select) - try: - self._compute_solutions(self.session, self.select, self.kwargs) - except TypeResolverException: - raise Unsupported(str(self.select)) - if len(self.select.solutions) < len(self.solutions): - raise Unsupported() - - def insert_local_checks(self, select, kwargs, - localchecks, restricted, noinvariant): - """ - select: the rql syntax tree Select node - kwargs: query arguments - - localchecks: {(('Var name', (rqlexpr1, rqlexpr2)), - ('Var name1', (rqlexpr1, rqlexpr23))): [solution]} - - (see querier._check_permissions docstring for more information) - - restricted: set of variable names to which an rql expression has to be - applied - - noinvariant: set of variable names that can't be considered has - invariant due to security reason (will be filed by this method) - """ - nbtrees = len(localchecks) - myunion = union = select.parent - # transform in subquery when len(localchecks)>1 and groups - if nbtrees > 1 and (select.orderby or select.groupby or - select.having or select.has_aggregat or - select.distinct or - select.limit or select.offset): - newselect = stmts.Select() - # only select variables in subqueries - origselection = select.selection - select.select_only_variables() - select.has_aggregat = False - # create subquery first so correct node are used on copy - # (eg ColumnAlias instead of Variable) - aliases = [n.VariableRef(newselect.get_variable(vref.name, i)) - for i, vref in enumerate(select.selection)] - selected = set(vref.name for vref in aliases) - # now copy original selection and groups - for term in origselection: - newselect.append_selected(term.copy(newselect)) - if select.orderby: - sortterms = [] - for sortterm in select.orderby: - sortterms.append(sortterm.copy(newselect)) - for fnode in sortterm.get_nodes(n.Function): - if fnode.name == 'FTIRANK': - # we've to fetch the has_text relation as well - var = fnode.children[0].variable - rel = next(iter(var.stinfo['ftirels'])) - assert not rel.ored(), 'unsupported' - newselect.add_restriction(rel.copy(newselect)) - # remove relation from the orig select and - # cleanup variable stinfo - rel.parent.remove(rel) - var.stinfo['ftirels'].remove(rel) - var.stinfo['relations'].remove(rel) - # XXX not properly re-annotated after security insertion? - newvar = newselect.get_variable(var.name) - newvar.stinfo.setdefault('ftirels', set()).add(rel) - newvar.stinfo.setdefault('relations', set()).add(rel) - newselect.set_orderby(sortterms) - _expand_selection(select.orderby, selected, aliases, select, newselect) - select.orderby = () # XXX dereference? - if select.groupby: - newselect.set_groupby([g.copy(newselect) for g in select.groupby]) - _expand_selection(select.groupby, selected, aliases, select, newselect) - select.groupby = () # XXX dereference? - if select.having: - newselect.set_having([g.copy(newselect) for g in select.having]) - _expand_selection(select.having, selected, aliases, select, newselect) - select.having = () # XXX dereference? 
- if select.limit: - newselect.limit = select.limit - select.limit = None - if select.offset: - newselect.offset = select.offset - select.offset = 0 - myunion = stmts.Union() - newselect.set_with([n.SubQuery(aliases, myunion)], check=False) - newselect.distinct = select.distinct - solutions = [sol.copy() for sol in select.solutions] - cleanup_solutions(newselect, solutions) - newselect.set_possible_types(solutions) - # if some solutions doesn't need rewriting, insert original - # select as first union subquery - if () in localchecks: - myunion.append(select) - # we're done, replace original select by the new select with - # subqueries (more added in the loop below) - union.replace(select, newselect) - elif not () in localchecks: - union.remove(select) - for lcheckdef, lchecksolutions in localchecks.items(): - if not lcheckdef: - continue - myrqlst = select.copy(solutions=lchecksolutions) - myunion.append(myrqlst) - # in-place rewrite + annotation / simplification - lcheckdef = [({var: 'X'}, rqlexprs) for var, rqlexprs in lcheckdef] - self.rewrite(myrqlst, lcheckdef, kwargs) - _add_noinvariant(noinvariant, restricted, myrqlst, nbtrees) - if () in localchecks: - select.set_possible_types(localchecks[()]) - add_types_restriction(self.schema, select) - _add_noinvariant(noinvariant, restricted, select, nbtrees) - self.annotate(union) - - def rewrite(self, select, snippets, kwargs, existingvars=None): - """ - snippets: (varmap, list of rql expression) - with varmap a *dict* {select var: snippet var} - """ - self.select = select - # remove_solutions used below require a copy - self.solutions = solutions = select.solutions[:] - self.kwargs = kwargs - self.u_varname = None - self.removing_ambiguity = False - self.exists_snippet = {} - self.pending_keys = [] - self.existingvars = existingvars - # we have to annotate the rqlst before inserting snippets, even though - # we'll have to redo it later - self.annotate(select) - self.insert_snippets(snippets) - if not self.exists_snippet and self.u_varname: - # U has been inserted than cancelled, cleanup - select.undefine_variable(select.defined_vars[self.u_varname]) - # clean solutions according to initial solutions - newsolutions = remove_solutions(solutions, select.solutions, - select.defined_vars) - assert len(newsolutions) >= len(solutions), ( - 'rewritten rql %s has lost some solutions, there is probably ' - 'something wrong in your schema permission (for instance using a ' - 'RQLExpression which inserts a relation which doesn\'t exist in ' - 'the schema)\nOrig solutions: %s\nnew solutions: %s' % ( - select, solutions, newsolutions)) - if len(newsolutions) > len(solutions): - newsolutions = self.remove_ambiguities(snippets, newsolutions) - assert newsolutions - select.solutions = newsolutions - add_types_restriction(self.schema, select) - - def insert_snippets(self, snippets, varexistsmap=None): - self.rewritten = {} - for varmap, rqlexprs in snippets: - if isinstance(varmap, dict): - varmap = tuple(sorted(varmap.items())) - else: - assert isinstance(varmap, tuple), varmap - if varexistsmap is not None and not varmap in varexistsmap: - continue - self.insert_varmap_snippets(varmap, rqlexprs, varexistsmap) - - def init_from_varmap(self, varmap, varexistsmap=None): - self.varmap = varmap - self.revvarmap = {} - self.varinfos = [] - for i, (selectvar, snippetvar) in enumerate(varmap): - assert snippetvar in 'SOX' - self.revvarmap[snippetvar] = (selectvar, i) - vi = {} - self.varinfos.append(vi) - try: - vi['const'] = int(selectvar) - vi['rhs_rels'] = 
vi['lhs_rels'] = {} - except ValueError: - try: - vi['stinfo'] = sti = self.select.defined_vars[selectvar].stinfo - except KeyError: - vi['stinfo'] = sti = self._subquery_variable(selectvar) - if varexistsmap is None: - # build an index for quick access to relations - vi['rhs_rels'] = {} - for rel in sti.get('rhsrelations', []): - vi['rhs_rels'].setdefault(rel.r_type, []).append(rel) - vi['lhs_rels'] = {} - for rel in sti.get('relations', []): - if not rel in sti.get('rhsrelations', []): - vi['lhs_rels'].setdefault(rel.r_type, []).append(rel) - else: - vi['rhs_rels'] = vi['lhs_rels'] = {} - - def _subquery_variable(self, selectvar): - raise VariableFromSubQuery(selectvar) - - def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap): - try: - self.init_from_varmap(varmap, varexistsmap) - except VariableFromSubQuery as ex: - # variable may have been moved to a newly inserted subquery - # we should insert snippet in that subquery - subquery = self.select.aliases[ex.variable].query - assert len(subquery.children) == 1, subquery - subselect = subquery.children[0] - RQLRewriter(self.session).rewrite(subselect, [(varmap, rqlexprs)], - self.kwargs) - return - self._insert_scope = None - previous = None - inserted = False - for rqlexpr in rqlexprs: - self.current_expr = rqlexpr - if varexistsmap is None: - try: - new = self.insert_snippet(varmap, rqlexpr.snippet_rqlst, previous) - except Unsupported: - continue - inserted = True - if new is not None and self._insert_scope is None: - self.exists_snippet[rqlexpr] = new - previous = previous or new - else: - # called to reintroduce snippet due to ambiguity creation, - # so skip snippets which are not introducing this ambiguity - exists = varexistsmap[varmap] - if self.exists_snippet.get(rqlexpr) is exists: - self.insert_snippet(varmap, rqlexpr.snippet_rqlst, exists) - if varexistsmap is None and not inserted: - # no rql expression found matching rql solutions. User has no access right - raise Unauthorized() # XXX may also be because of bad constraints in schema definition - - def insert_snippet(self, varmap, snippetrqlst, previous=None): - new = snippetrqlst.where.accept(self) - existing = self.existingvars - self.existingvars = None - try: - return self._insert_snippet(varmap, previous, new) - finally: - self.existingvars = existing - - def _inserted_root(self, new): - if not isinstance(new, (n.Exists, n.Not)): - new = n.Exists(new) - return new - - def _insert_snippet(self, varmap, previous, new): - """insert `new` snippet into the syntax tree, which have been rewritten - using `varmap`. In cases where an action is protected by several rql - expresssion, `previous` will be the first rql expression which has been - inserted, and so should be ORed with the following expressions. 
- """ - if new is not None: - if self._insert_scope is None: - insert_scope = None - for vi in self.varinfos: - scope = vi.get('stinfo', {}).get('scope', self.select) - if insert_scope is None: - insert_scope = scope - else: - insert_scope = common_parent(scope, insert_scope) - else: - insert_scope = self._insert_scope - if self._insert_scope is None and any(vi.get('stinfo', {}).get('optrelations') - for vi in self.varinfos): - assert previous is None - self._insert_scope, new = self.snippet_subquery(varmap, new) - self.insert_pending() - #self._insert_scope = None - return new - new = self._inserted_root(new) - if previous is None: - insert_scope.add_restriction(new) - else: - grandpa = previous.parent - or_ = n.Or(previous, new) - grandpa.replace(previous, or_) - if not self.removing_ambiguity: - try: - self.compute_solutions() - except Unsupported: - # some solutions have been lost, can't apply this rql expr - if previous is None: - self.current_statement().remove_node(new, undefine=True) - else: - grandpa.replace(or_, previous) - self._cleanup_inserted(new) - raise - else: - with tempattr(self, '_insert_scope', new): - self.insert_pending() - return new - self.insert_pending() - - def insert_pending(self): - """pending_keys hold variable referenced by U has__permission X - relation. - - Once the snippet introducing this has been inserted and solutions - recomputed, we have to insert snippet defined for of entity - types taken by X - """ - stmt = self.current_statement() - while self.pending_keys: - key, action = self.pending_keys.pop() - try: - varname = self.rewritten[key] - except KeyError: - try: - varname = self.revvarmap[key[-1]][0] - except KeyError: - # variable isn't used anywhere else, we can't insert security - raise Unauthorized() - ptypes = stmt.defined_vars[varname].stinfo['possibletypes'] - if len(ptypes) > 1: - # XXX dunno how to handle this - self.session.error( - 'cant check security of %s, ambigous type for %s in %s', - stmt, varname, key[0]) # key[0] == the rql expression - raise Unauthorized() - etype = next(iter(ptypes)) - eschema = self.schema.eschema(etype) - if not eschema.has_perm(self.session, action): - rqlexprs = eschema.get_rqlexprs(action) - if not rqlexprs: - raise Unauthorized() - self.insert_snippets([({varname: 'X'}, rqlexprs)]) - - def snippet_subquery(self, varmap, transformedsnippet): - """introduce the given snippet in a subquery""" - subselect = stmts.Select() - snippetrqlst = n.Exists(transformedsnippet.copy(subselect)) - get_rschema = self.schema.rschema - aliases = [] - done = set() - for i, (selectvar, _) in enumerate(varmap): - need_null_test = False - subselectvar = subselect.get_variable(selectvar) - subselect.append_selected(n.VariableRef(subselectvar)) - aliases.append(selectvar) - todo = [(selectvar, self.varinfos[i]['stinfo'])] - while todo: - varname, stinfo = todo.pop() - done.add(varname) - for rel in iter_relations(stinfo): - if rel in done: - continue - done.add(rel) - rschema = get_rschema(rel.r_type) - if rschema.final or rschema.inlined: - rel.children[0].name = varname # XXX explain why - subselect.add_restriction(rel.copy(subselect)) - for vref in rel.children[1].iget_nodes(n.VariableRef): - if isinstance(vref.variable, n.ColumnAlias): - # XXX could probably be handled by generating the - # subquery into the detected subquery - raise BadSchemaDefinition( - "cant insert security because of usage two inlined " - "relations in this query. 
You should probably at " - "least uninline %s" % rel.r_type) - subselect.append_selected(vref.copy(subselect)) - aliases.append(vref.name) - self.select.remove_node(rel) - # when some inlined relation has to be copied in the - # subquery and that relation is optional, we need to - # test that either value is NULL or that the snippet - # condition is satisfied - if varname == selectvar and rel.optional and rschema.inlined: - need_null_test = True - # also, if some attributes or inlined relation of the - # object variable are accessed, we need to get all those - # from the subquery as well - if vref.name not in done and rschema.inlined: - # we can use vref here define in above for loop - ostinfo = vref.variable.stinfo - for orel in iter_relations(ostinfo): - orschema = get_rschema(orel.r_type) - if orschema.final or orschema.inlined: - todo.append( (vref.name, ostinfo) ) - break - if need_null_test: - snippetrqlst = n.Or( - n.make_relation(subselect.get_variable(selectvar), 'is', - (None, None), n.Constant, - operator='='), - snippetrqlst) - subselect.add_restriction(snippetrqlst) - if self.u_varname: - # generate an identifier for the substitution - argname = subselect.allocate_varname() - while argname in self.kwargs: - argname = subselect.allocate_varname() - subselect.add_constant_restriction(subselect.get_variable(self.u_varname), - 'eid', text_type(argname), 'Substitute') - self.kwargs[argname] = self.session.user.eid - add_types_restriction(self.schema, subselect, subselect, - solutions=self.solutions) - myunion = stmts.Union() - myunion.append(subselect) - aliases = [n.VariableRef(self.select.get_variable(name, i)) - for i, name in enumerate(aliases)] - self.select.add_subquery(n.SubQuery(aliases, myunion), check=False) - self._cleanup_inserted(transformedsnippet) - try: - self.compute_solutions() - except Unsupported: - # some solutions have been lost, can't apply this rql expr - self.select.remove_subquery(self.select.with_[-1]) - raise - return subselect, snippetrqlst - - def remove_ambiguities(self, snippets, newsolutions): - # the snippet has introduced some ambiguities, we have to resolve them - # "manually" - variantes = self.build_variantes(newsolutions) - # insert "is" where necessary - varexistsmap = {} - self.removing_ambiguity = True - for (erqlexpr, varmap, oldvarname), etype in variantes[0].items(): - varname = self.rewritten[(erqlexpr, varmap, oldvarname)] - var = self.select.defined_vars[varname] - exists = var.references()[0].scope - exists.add_constant_restriction(var, 'is', etype, 'etype') - varexistsmap[varmap] = exists - # insert ORED exists where necessary - for variante in variantes[1:]: - self.insert_snippets(snippets, varexistsmap) - for key, etype in variante.items(): - varname = self.rewritten[key] - try: - var = self.select.defined_vars[varname] - except KeyError: - # not a newly inserted variable - continue - exists = var.references()[0].scope - exists.add_constant_restriction(var, 'is', etype, 'etype') - # recompute solutions - self.compute_solutions() - # clean solutions according to initial solutions - return remove_solutions(self.solutions, self.select.solutions, - self.select.defined_vars) - - def build_variantes(self, newsolutions): - variantes = set() - for sol in newsolutions: - variante = [] - for key, newvar in self.rewritten.items(): - variante.append( (key, sol[newvar]) ) - variantes.add(tuple(variante)) - # rebuild variantes as dict - variantes = [dict(variante) for variante in variantes] - # remove variable which have always the same type - 
for key in self.rewritten: - it = iter(variantes) - etype = next(it)[key] - for variante in it: - if variante[key] != etype: - break - else: - for variante in variantes: - del variante[key] - return variantes - - def _cleanup_inserted(self, node): - # cleanup inserted variable references - removed = set() - for vref in node.iget_nodes(n.VariableRef): - vref.unregister_reference() - if not vref.variable.stinfo['references']: - # no more references, undefine the variable - del self.select.defined_vars[vref.name] - removed.add(vref.name) - for key, newvar in list(self.rewritten.items()): - if newvar in removed: - del self.rewritten[key] - - - def _may_be_shared_with(self, sniprel, target): - """if the snippet relation can be skipped to use a relation from the - original query, return that relation node - """ - if sniprel.neged(strict=True): - return None # no way - rschema = self.schema.rschema(sniprel.r_type) - stmt = self.current_statement() - for vi in self.varinfos: - try: - if target == 'object': - orels = vi['lhs_rels'][sniprel.r_type] - cardindex = 0 - ttypes_func = rschema.objects - rdef = rschema.rdef - else: # target == 'subject': - orels = vi['rhs_rels'][sniprel.r_type] - cardindex = 1 - ttypes_func = rschema.subjects - rdef = lambda x, y: rschema.rdef(y, x) - except KeyError: - # may be raised by vi['xhs_rels'][sniprel.r_type] - continue - # if cardinality isn't in '?1', we can't ignore the snippet relation - # and use variable from the original query - if _has_multiple_cardinality(vi['stinfo']['possibletypes'], rdef, - ttypes_func, cardindex): - continue - orel = _compatible_relation(orels, stmt, sniprel) - if orel is not None: - return orel - return None - - def _use_orig_term(self, snippet_varname, term): - key = (self.current_expr, self.varmap, snippet_varname) - if key in self.rewritten: - stmt = self.current_statement() - insertedvar = stmt.defined_vars.pop(self.rewritten[key]) - for inserted_vref in insertedvar.references(): - inserted_vref.parent.replace(inserted_vref, term.copy(stmt)) - self.rewritten[key] = term.name - - def _get_varname_or_term(self, vname): - stmt = self.current_statement() - if vname == 'U': - stmt = self.select - if self.u_varname is None: - self.u_varname = stmt.allocate_varname() - # generate an identifier for the substitution - argname = stmt.allocate_varname() - while argname in self.kwargs: - argname = stmt.allocate_varname() - # insert "U eid %(u)s" - stmt.add_constant_restriction( - stmt.get_variable(self.u_varname), - 'eid', text_type(argname), 'Substitute') - self.kwargs[argname] = self.session.user.eid - return self.u_varname - key = (self.current_expr, self.varmap, vname) - try: - return self.rewritten[key] - except KeyError: - self.rewritten[key] = newvname = stmt.allocate_varname() - return newvname - - # visitor methods ########################################################## - - def _visit_binary(self, node, cls): - newnode = cls() - for c in node.children: - new = c.accept(self) - if new is None: - continue - newnode.append(new) - if len(newnode.children) == 0: - return None - if len(newnode.children) == 1: - return newnode.children[0] - return newnode - - def _visit_unary(self, node, cls): - newc = node.children[0].accept(self) - if newc is None: - return None - newnode = cls() - newnode.append(newc) - return newnode - - def visit_and(self, node): - return self._visit_binary(node, n.And) - - def visit_or(self, node): - return self._visit_binary(node, n.Or) - - def visit_not(self, node): - return self._visit_unary(node, n.Not) - - 
def visit_exists(self, node): - return self._visit_unary(node, n.Exists) - - def keep_var(self, varname): - if varname in 'SO': - return varname in self.existingvars - if varname == 'U': - return True - vargraph = self.current_expr.vargraph - for existingvar in self.existingvars: - #path = has_path(vargraph, varname, existingvar) - if not varname in vargraph or has_path(vargraph, varname, existingvar): - return True - # no path from this variable to an existing variable - return False - - def visit_relation(self, node): - lhs, rhs = node.get_variable_parts() - # remove relations where an unexistant variable and or a variable linked - # to an unexistant variable is used. - if self.existingvars: - if not self.keep_var(lhs.name): - return - if node.r_type in ('has_add_permission', 'has_update_permission', - 'has_delete_permission', 'has_read_permission'): - assert lhs.name == 'U' - action = node.r_type.split('_')[1] - key = (self.current_expr, self.varmap, rhs.name) - self.pending_keys.append( (key, action) ) - return - if isinstance(rhs, n.VariableRef): - if self.existingvars and not self.keep_var(rhs.name): - return - if lhs.name in self.revvarmap and rhs.name != 'U': - orel = self._may_be_shared_with(node, 'object') - if orel is not None: - self._use_orig_term(rhs.name, orel.children[1].children[0]) - return - elif rhs.name in self.revvarmap and lhs.name != 'U': - orel = self._may_be_shared_with(node, 'subject') - if orel is not None: - self._use_orig_term(lhs.name, orel.children[0]) - return - rel = n.Relation(node.r_type, node.optional) - for c in node.children: - rel.append(c.accept(self)) - return rel - - def visit_comparison(self, node): - cmp_ = n.Comparison(node.operator) - for c in node.children: - cmp_.append(c.accept(self)) - return cmp_ - - def visit_mathexpression(self, node): - cmp_ = n.MathExpression(node.operator) - for c in node.children: - cmp_.append(c.accept(self)) - return cmp_ - - def visit_function(self, node): - """generate filter name for a function""" - function_ = n.Function(node.name) - for c in node.children: - function_.append(c.accept(self)) - return function_ - - def visit_constant(self, node): - """generate filter name for a constant""" - return n.Constant(node.value, node.type) - - def visit_variableref(self, node): - """get the sql name for a variable reference""" - stmt = self.current_statement() - if node.name in self.revvarmap: - selectvar, index = self.revvarmap[node.name] - vi = self.varinfos[index] - if vi.get('const') is not None: - return n.Constant(vi['const'], 'Int') - return n.VariableRef(stmt.get_variable(selectvar)) - vname_or_term = self._get_varname_or_term(node.name) - if isinstance(vname_or_term, string_types): - return n.VariableRef(stmt.get_variable(vname_or_term)) - # shared term - return vname_or_term.copy(stmt) - - def current_statement(self): - if self._insert_scope is None: - return self.select - return self._insert_scope.stmt - - -class RQLRelationRewriter(RQLRewriter): - """Insert some rql snippets into another rql syntax tree, replacing computed - relations by their associated rule. - - This class *isn't thread safe*. 
- """ - def __init__(self, session): - super(RQLRelationRewriter, self).__init__(session) - self.rules = {} - for rschema in self.schema.iter_computed_relations(): - self.rules[rschema.type] = RRQLExpression(rschema.rule) - - def rewrite(self, union, kwargs=None): - self.kwargs = kwargs - self.removing_ambiguity = False - self.existingvars = None - self.pending_keys = None - for relation in union.iget_nodes(n.Relation): - if relation.r_type in self.rules: - self.select = relation.stmt - self.solutions = solutions = self.select.solutions[:] - self.current_expr = self.rules[relation.r_type] - self._insert_scope = relation.scope - self.rewritten = {} - lhs, rhs = relation.get_variable_parts() - varmap = {lhs.name: 'S', rhs.name: 'O'} - self.init_from_varmap(tuple(sorted(varmap.items()))) - self.insert_snippet(varmap, self.current_expr.snippet_rqlst) - self.select.remove_node(relation) - - def _subquery_variable(self, selectvar): - return self.select.aliases[selectvar].stinfo - - def _inserted_root(self, new): - return new diff -r 058bb3dc685f -r 0b59724cb3f2 rset.py --- a/rset.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,730 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""The `ResultSet` class which is returned as result of an rql query""" -__docformat__ = "restructuredtext en" - -from warnings import warn - -from six import PY3 -from six.moves import range - -from logilab.common import nullobject -from logilab.common.decorators import cached, clear_cache, copy_cache -from rql import nodes, stmts - -from cubicweb import NotAnEntity, NoResultError, MultipleResultsError - - -_MARKER = nullobject() - - -class ResultSet(object): - """A result set wraps a RQL query result. This object implements - partially the list protocol to allow direct use as a list of - result rows. 
- - :type rowcount: int - :param rowcount: number of rows in the result - - :type rows: list - :param rows: list of rows of result - - :type description: list - :param description: - result's description, using the same structure as the result itself - - :type rql: str or unicode - :param rql: the original RQL query string - """ - - def __init__(self, results, rql, args=None, description=None, rqlst=None): - if rqlst is not None: - warn('[3.20] rqlst parameter is deprecated', - DeprecationWarning, stacklevel=2) - self.rows = results - self.rowcount = results and len(results) or 0 - # original query and arguments - self.rql = rql - self.args = args - # entity types for each cell (same shape as rows) - # maybe discarded if specified when the query has been executed - if description is None: - self.description = [] - else: - self.description = description - # set to (limit, offset) when a result set is limited using the - # .limit method - self.limited = None - # set by the cursor which returned this resultset - self.req = None - # actions cache - self._rsetactions = None - - def __str__(self): - if not self.rows: - return '' % self.rql - return '' % (self.rql, len(self.rows)) - - def __repr__(self): - if not self.rows: - return '' % self.rql - rows = self.rows - if len(rows) > 10: - rows = rows[:10] + ['...'] - if len(rows) > 1: - # add a line break before first entity if more that one. - pattern = '' - else: - pattern = '' - - if not self.description: - return pattern % (self.rql, len(self.rows), - '\n'.join(str(r) for r in rows)) - return pattern % (self.rql, len(self.rows), - '\n'.join('%s (%s)' % (r, d) - for r, d in zip(rows, self.description))) - - def possible_actions(self, **kwargs): - if self._rsetactions is None: - self._rsetactions = {} - if kwargs: - key = tuple(sorted(kwargs.items())) - else: - key = None - try: - return self._rsetactions[key] - except KeyError: - actions = self.req.vreg['actions'].poss_visible_objects( - self.req, rset=self, **kwargs) - self._rsetactions[key] = actions - return actions - - def __len__(self): - """returns the result set's size""" - return self.rowcount - - def __getitem__(self, i): - """returns the ith element of the result set""" - return self.rows[i] #ResultSetRow(self.rows[i]) - - def __iter__(self): - """Returns an iterator over rows""" - return iter(self.rows) - - def __add__(self, rset): - # XXX buggy implementation (.rql and .args attributes at least much - # probably differ) - # at least rql could be fixed now that we have union and sub-queries - # but I tend to think that since we have that, we should not need this - # method anymore (syt) - rset = ResultSet(self.rows+rset.rows, self.rql, self.args, - self.description + rset.description) - rset.req = self.req - return rset - - def copy(self, rows=None, descr=None): - if rows is None: - rows = self.rows[:] - descr = self.description[:] - rset = ResultSet(rows, self.rql, self.args, descr) - rset.req = self.req - return rset - - def transformed_rset(self, transformcb): - """ the result set according to a given column types - - :type transormcb: callable(row, desc) - :param transformcb: - a callable which should take a row and its type description as - parameters, and return the transformed row and type description. 
- - - :type col: int - :param col: the column index - - :rtype: `ResultSet` - """ - rows, descr = [], [] - rset = self.copy(rows, descr) - for row, desc in zip(self.rows, self.description): - nrow, ndesc = transformcb(row, desc) - if ndesc: # transformcb returns None for ndesc to skip that row - rows.append(nrow) - descr.append(ndesc) - rset.rowcount = len(rows) - return rset - - def filtered_rset(self, filtercb, col=0): - """filter the result set according to a given filtercb - - :type filtercb: callable(entity) - :param filtercb: - a callable which should take an entity as argument and return - False if it should be skipped, else True - - :type col: int - :param col: the column index - - :rtype: `ResultSet` - """ - rows, descr = [], [] - rset = self.copy(rows, descr) - for i in range(len(self)): - if not filtercb(self.get_entity(i, col)): - continue - rows.append(self.rows[i]) - descr.append(self.description[i]) - rset.rowcount = len(rows) - return rset - - - def sorted_rset(self, keyfunc, reverse=False, col=0): - """sorts the result set according to a given keyfunc - - :type keyfunc: callable(entity) - :param keyfunc: - a callable which should take an entity as argument and return - the value used to compare and sort - - :type reverse: bool - :param reverse: if the result should be reversed - - :type col: int - :param col: the column index. if col = -1, the whole row are used - - :rtype: `ResultSet` - """ - rows, descr = [], [] - rset = self.copy(rows, descr) - if col >= 0: - entities = sorted(enumerate(self.entities(col)), - key=lambda t: keyfunc(t[1]), reverse=reverse) - else: - entities = sorted(enumerate(self), - key=lambda t: keyfunc(t[1]), reverse=reverse) - for index, _ in entities: - rows.append(self.rows[index]) - descr.append(self.description[index]) - rset.rowcount = len(rows) - return rset - - def split_rset(self, keyfunc=None, col=0, return_dict=False): - """splits the result set in multiple result sets according to - a given key - - :type keyfunc: callable(entity or FinalType) - :param keyfunc: - a callable which should take a value of the rset in argument and - return the value used to group the value. If not define, raw value - of the specified columns is used. - - :type col: int - :param col: the column index. 
if col = -1, the whole row are used - - :type return_dict: Boolean - :param return_dict: If true, the function return a mapping - (key -> rset) instead of a list of rset - - :rtype: List of `ResultSet` or mapping of `ResultSet` - - """ - result = [] - mapping = {} - for idx, line in enumerate(self): - if col >= 0: - try: - key = self.get_entity(idx, col) - except NotAnEntity: - key = line[col] - else: - key = line - if keyfunc is not None: - key = keyfunc(key) - - if key not in mapping: - rows, descr = [], [] - rset = self.copy(rows, descr) - mapping[key] = rset - result.append(rset) - else: - rset = mapping[key] - rset.rows.append(self.rows[idx]) - rset.description.append(self.description[idx]) - for rset in result: - rset.rowcount = len(rset.rows) - if return_dict: - return mapping - else: - return result - - def limited_rql(self): - """returns a printable rql for the result set associated to the object, - with limit/offset correctly set according to maximum page size and - currently displayed page when necessary - """ - # try to get page boundaries from the navigation component - # XXX we should probably not have a ref to this component here (eg in - # cubicweb) - nav = self.req.vreg['components'].select_or_none('navigation', self.req, - rset=self) - if nav: - start, stop = nav.page_boundaries() - rql = self._limit_offset_rql(stop - start, start) - # result set may have be limited manually in which case navigation won't - # apply - elif self.limited: - rql = self._limit_offset_rql(*self.limited) - # navigation component doesn't apply and rset has not been limited, no - # need to limit query - else: - rql = self.printable_rql() - return rql - - def _limit_offset_rql(self, limit, offset): - rqlst = self.syntax_tree() - if len(rqlst.children) == 1: - select = rqlst.children[0] - olimit, ooffset = select.limit, select.offset - select.limit, select.offset = limit, offset - rql = rqlst.as_string(kwargs=self.args) - # restore original limit/offset - select.limit, select.offset = olimit, ooffset - else: - newselect = stmts.Select() - newselect.limit = limit - newselect.offset = offset - aliases = [nodes.VariableRef(newselect.get_variable(chr(65+i), i)) - for i in range(len(rqlst.children[0].selection))] - for vref in aliases: - newselect.append_selected(nodes.VariableRef(vref.variable)) - newselect.set_with([nodes.SubQuery(aliases, rqlst)], check=False) - newunion = stmts.Union() - newunion.append(newselect) - rql = newunion.as_string(kwargs=self.args) - rqlst.parent = None - return rql - - def limit(self, limit, offset=0, inplace=False): - """limit the result set to the given number of rows optionally starting - from an index different than 0 - - :type limit: int - :param limit: the maximum number of results - - :type offset: int - :param offset: the offset index - - :type inplace: bool - :param inplace: - if true, the result set is modified in place, else a new result set - is returned and the original is left unmodified - - :rtype: `ResultSet` - """ - stop = limit+offset - rows = self.rows[offset:stop] - descr = self.description[offset:stop] - if inplace: - rset = self - rset.rows, rset.description = rows, descr - rset.rowcount = len(rows) - clear_cache(rset, 'description_struct') - if offset: - clear_cache(rset, 'get_entity') - # we also have to fix/remove from the request entity cache entities - # which get a wrong rset reference by this limit call - for entity in self.req.cached_entities(): - if entity.cw_rset is self: - if offset <= entity.cw_row < stop: - entity.cw_row = entity.cw_row - 
offset - else: - entity.cw_rset = entity.as_rset() - entity.cw_row = entity.cw_col = 0 - else: - rset = self.copy(rows, descr) - if not offset: - # can copy built entity caches - copy_cache(rset, 'get_entity', self) - rset.limited = (limit, offset) - return rset - - def printable_rql(self, encoded=_MARKER): - """return the result set's origin rql as a string, with arguments - substitued - """ - if encoded is not _MARKER: - warn('[3.21] the "encoded" argument is deprecated', DeprecationWarning) - encoding = self.req.encoding - rqlstr = self.syntax_tree().as_string(kwargs=self.args) - if PY3: - return rqlstr - # sounds like we get encoded or unicode string due to a bug in as_string - if not encoded: - if isinstance(rqlstr, unicode): - return rqlstr - return unicode(rqlstr, encoding) - else: - if isinstance(rqlstr, unicode): - return rqlstr.encode(encoding) - return rqlstr - - # client helper methods ################################################### - - def entities(self, col=0): - """iter on entities with eid in the `col` column of the result set""" - for i in range(len(self)): - # may have None values in case of outer join (or aggregat on eid - # hacks) - if self.rows[i][col] is not None: - yield self.get_entity(i, col) - - def iter_rows_with_entities(self): - """ iterates over rows, and for each row - eids are converted to plain entities - """ - for i, row in enumerate(self): - _row = [] - for j, col in enumerate(row): - try: - _row.append(self.get_entity(i, j) if col is not None else col) - except NotAnEntity: - _row.append(col) - yield _row - - def complete_entity(self, row, col=0, skip_bytes=True): - """short cut to get an completed entity instance for a particular - row (all instance's attributes have been fetched) - """ - entity = self.get_entity(row, col) - entity.complete(skip_bytes=skip_bytes) - return entity - - @cached - def get_entity(self, row, col): - """convenience method for query retrieving a single entity, returns a - partially initialized Entity instance. - - .. warning:: - - Due to the cache wrapping this function, you should NEVER give row as - a named parameter (i.e. `rset.get_entity(0, 1)` is OK but - `rset.get_entity(row=0, col=1)` isn't) - - :type row,col: int, int - :param row,col: - row and col numbers localizing the entity among the result's table - - :return: the partially initialized `Entity` instance - """ - etype = self.description[row][col] - try: - eschema = self.req.vreg.schema.eschema(etype) - if eschema.final: - raise NotAnEntity(etype) - except KeyError: - raise NotAnEntity(etype) - return self._build_entity(row, col) - - def one(self, col=0): - """Retrieve exactly one entity from the query. - - If the result set is empty, raises :exc:`NoResultError`. - If the result set has more than one row, raises - :exc:`MultipleResultsError`. - - :type col: int - :param col: The column localising the entity in the unique row - - :return: the partially initialized `Entity` instance - """ - if len(self) == 1: - return self.get_entity(0, col) - elif len(self) == 0: - raise NoResultError("No row was found for one()") - else: - raise MultipleResultsError("Multiple rows were found for one()") - - def _build_entity(self, row, col): - """internal method to get a single entity, returns a partially - initialized Entity instance. - - partially means that only attributes selected in the RQL query will be - directly assigned to the entity. 
- - :type row,col: int, int - :param row,col: - row and col numbers localizing the entity among the result's table - - :return: the partially initialized `Entity` instance - """ - req = self.req - if req is None: - raise AssertionError('dont call get_entity with no req on the result set') - rowvalues = self.rows[row] - eid = rowvalues[col] - assert eid is not None - # return cached entity if exists. This also avoids potential recursion - # XXX should we consider updating a cached entity with possible - # new attributes found in this resultset ? - try: - entity = req.entity_cache(eid) - except KeyError: - pass - else: - if entity.cw_rset is None: - # entity has no rset set, this means entity has been created by - # the querier (req is a repository session) and so jas no rset - # info. Add it. - entity.cw_rset = self - entity.cw_row = row - entity.cw_col = col - return entity - # build entity instance - etype = self.description[row][col] - entity = self.req.vreg['etypes'].etype_class(etype)(req, rset=self, - row=row, col=col) - entity.eid = eid - # cache entity - req.set_entity_cache(entity) - # try to complete the entity if there are some additional columns - if len(rowvalues) > 1: - eschema = entity.e_schema - eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col) - entity.eid = rowvalues[eid_col] - for attr, col_idx in attr_cols.items(): - entity.cw_attr_cache[attr] = rowvalues[col_idx] - for (rtype, role), col_idx in rel_cols.items(): - value = rowvalues[col_idx] - if value is None: - if role == 'subject': - rql = 'Any Y WHERE X %s Y, X eid %s' - else: - rql = 'Any Y WHERE Y %s X, X eid %s' - rrset = ResultSet([], rql % (rtype, entity.eid)) - rrset.req = req - else: - rrset = self._build_entity(row, col_idx).as_rset() - entity.cw_set_relation_cache(rtype, role, rrset) - return entity - - @cached - def _rset_structure(self, eschema, entity_col): - eid_col = col = entity_col - rqlst = self.syntax_tree() - get_rschema = eschema.schema.rschema - attr_cols = {} - rel_cols = {} - if rqlst.TYPE == 'select': - # UNION query, find the subquery from which this entity has been - # found - select, col = rqlst.locate_subquery(entity_col, eschema.type, self.args) - else: - select = rqlst - # take care, due to outer join support, we may find None - # values for non final relation - for i, attr, role in attr_desc_iterator(select, col, entity_col): - rschema = get_rschema(attr) - if rschema.final: - if attr == 'eid': - eid_col = i - else: - attr_cols[attr] = i - else: - # XXX takefirst=True to remove warning triggered by ambiguous relations - rdef = eschema.rdef(attr, role, takefirst=True) - # only keep value if it can't be multivalued - if rdef.role_cardinality(role) in '1?': - rel_cols[(attr, role)] = i - return eid_col, attr_cols, rel_cols - - @cached - def syntax_tree(self): - """return the syntax tree (:class:`rql.stmts.Union`) for the - originating query. You can expect it to have solutions - computed and it will be properly annotated. - """ - return self.req.vreg.parse(self.req, self.rql, self.args) - - @cached - def column_types(self, col): - """return the list of different types in the column with the given col - - :type col: int - :param col: the index of the desired column - - :rtype: list - :return: the different entities type found in the column - """ - return frozenset(struc[-1][col] for struc in self.description_struct()) - - @cached - def description_struct(self): - """return a list describing sequence of results with the same - description, e.g. 
: - [[0, 4, ('Bug',)] - [[0, 4, ('Bug',), [5, 8, ('Story',)] - [[0, 3, ('Project', 'Version',)]] - """ - result = [] - last = None - for i, row in enumerate(self.description): - if row != last: - if last is not None: - result[-1][1] = i - 1 - result.append( [i, None, row] ) - last = row - if last is not None: - result[-1][1] = i - return result - - def _locate_query_params(self, rqlst, row, col): - locate_query_col = col - etype = self.description[row][col] - # final type, find a better one to locate the correct subquery - # (ambiguous if possible) - eschema = self.req.vreg.schema.eschema - if eschema(etype).final: - for select in rqlst.children: - try: - myvar = select.selection[col].variable - except AttributeError: - # not a variable - continue - for i in range(len(select.selection)): - if i == col: - continue - coletype = self.description[row][i] - # None description possible on column resulting from an - # outer join - if coletype is None or eschema(coletype).final: - continue - try: - ivar = select.selection[i].variable - except AttributeError: - # not a variable - continue - # check variables don't comes from a subquery or are both - # coming from the same subquery - if getattr(ivar, 'query', None) is getattr(myvar, 'query', None): - etype = coletype - locate_query_col = i - if len(self.column_types(i)) > 1: - return etype, locate_query_col - return etype, locate_query_col - - @cached - def related_entity(self, row, col): - """given an cell of the result set, try to return a (entity, relation - name) tuple to which this cell is linked. - - This is especially useful when the cell is an attribute of an entity, - to get the entity to which this attribute belongs to. - """ - rqlst = self.syntax_tree() - # UNION query, we've first to find a 'pivot' column to use to get the - # actual query from which the row is coming - etype, locate_query_col = self._locate_query_params(rqlst, row, col) - # now find the query from which this entity has been found. Returned - # select node may be a subquery with different column indexes. - select = rqlst.locate_subquery(locate_query_col, etype, self.args)[0] - # then get the index of root query's col in the subquery - col = rqlst.subquery_selection_index(select, col) - if col is None: - # XXX unexpected, should fix subquery_selection_index ? 
- return None, None - try: - myvar = select.selection[col].variable - except AttributeError: - # not a variable - return None, None - rel = myvar.main_relation() - if rel is not None: - index = rel.children[0].root_selection_index() - if index is not None and self.rows[row][index]: - try: - entity = self.get_entity(row, index) - return entity, rel.r_type - except NotAnEntity as exc: - return None, None - return None, None - - @cached - def searched_text(self): - """returns the searched text in case of full-text search - - :return: searched text or `None` if the query is not - a full-text query - """ - rqlst = self.syntax_tree() - for rel in rqlst.iget_nodes(nodes.Relation): - if rel.r_type == 'has_text': - __, rhs = rel.get_variable_parts() - return rhs.eval(self.args) - return None - -def _get_variable(term): - # XXX rewritten const - # use iget_nodes for (hack) case where we have things like MAX(V) - for vref in term.iget_nodes(nodes.VariableRef): - return vref.variable - -def attr_desc_iterator(select, selectidx, rootidx): - """return an iterator on a list of 2-uple (index, attr_relation) - localizing attribute relations of the main variable in a result's row - - :type rqlst: rql.stmts.Select - :param rqlst: the RQL syntax tree to describe - - :return: - a generator on (index, relation, target) describing column being - attribute of the main variable - """ - rootselect = select - while rootselect.parent.parent is not None: - rootselect = rootselect.parent.parent.parent - rootmain = rootselect.selection[selectidx] - rootmainvar = _get_variable(rootmain) - assert rootmainvar - root = rootselect.parent - selectmain = select.selection[selectidx] - for i, term in enumerate(rootselect.selection): - try: - # don't use _get_variable here: if the term isn't a variable - # (function...), we don't want it to be used as an entity attribute - # or relation's value (XXX beside MAX/MIN trick?) - rootvar = term.variable - except AttributeError: - continue - if rootvar.name == rootmainvar.name: - continue - if select is not rootselect and isinstance(rootvar, nodes.ColumnAlias): - term = select.selection[root.subquery_selection_index(select, i)] - var = _get_variable(term) - if var is None: - continue - for ref in var.references(): - rel = ref.relation() - if rel is None or rel.is_types_restriction(): - continue - lhs, rhs = rel.get_variable_parts() - if selectmain.is_equivalent(lhs): - if rhs.is_equivalent(term): - yield (i, rel.r_type, 'subject') - elif selectmain.is_equivalent(rhs): - if lhs.is_equivalent(term): - yield (i, rel.r_type, 'object') diff -r 058bb3dc685f -r 0b59724cb3f2 rtags.py --- a/rtags.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,270 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" -A RelationTag object is an object which allows to link a configuration -information to a relation definition. For instance, the standard -primary view uses a RelationTag object (uicfg.primaryview_section) to -get the section to display relations. - -.. sourcecode:: python - - # display ``entry_of`` relations in the ``relations`` section in the ``BlogEntry`` primary view - uicfg.primaryview_section.tag_subject_of(('BlogEntry', 'entry_of', '*'), - 'relations') - - # hide every relation ``entry_of`` in the ``Blog`` primary view - uicfg.primaryview_section.tag_object_of(('*', 'entry_of', 'Blog'), 'hidden') - -Three primitives are defined: - * ``tag_subject_of`` tag a relation in the subject's context - * ``tag_object_of`` tag a relation in the object's context - * ``tag_attribute`` shortcut for tag_subject_of -""" -__docformat__ = "restructuredtext en" - -import logging -from warnings import warn - -from six import string_types - -from logilab.common.logging_ext import set_log_methods -from logilab.common.registry import RegistrableInstance, yes - -def _ensure_str_key(key): - return tuple(str(k) for k in key) - -class RegistrableRtags(RegistrableInstance): - __registry__ = 'uicfg' - __select__ = yes() - - -class RelationTags(RegistrableRtags): - """a tag store for full relation definitions : - - (subject type, relation type, object type, tagged) - - allowing to set tags using wildcard (eg '*') as subject type / object type - - This class associates a single tag to each key. - """ - _allowed_values = None - # _init expected to be a method (introduced in 3.17), while _initfunc a - # function given as __init__ argument and kept for bw compat - _init = _initfunc = None - - def __init__(self): - self._tagdefs = {} - - def __repr__(self): - # find a way to have more infos but keep it readable - # (in error messages in case of an ambiguity for instance) - return '%s (%s): %s' % (id(self), self.__regid__, self.__class__) - - # dict compat - def __getitem__(self, key): - return self.get(*key) - __contains__ = __getitem__ - - def clear(self): - self._tagdefs.clear() - - def _get_keys(self, stype, rtype, otype, tagged): - keys = [] - if '*' not in (stype, otype): - keys.append(('*', rtype, '*', tagged)) - if '*' != stype: - keys.append(('*', rtype, otype, tagged)) - if '*' != otype: - keys.append((stype, rtype, '*', tagged)) - keys.append((stype, rtype, otype, tagged)) - return keys - - def init(self, schema, check=True): - # XXX check existing keys against schema - if check: - for (stype, rtype, otype, tagged), value in list(self._tagdefs.items()): - for ertype in (stype, rtype, otype): - if ertype != '*' and not ertype in schema: - self.warning('removing rtag %s: %s, %s undefined in schema', - (stype, rtype, otype, tagged), value, ertype) - self.del_rtag(stype, rtype, otype, tagged) - break - if self._init is not None: - self.apply(schema, self._init) - - def apply(self, schema, func): - for eschema in schema.entities(): - if eschema.final: - continue - for rschema, tschemas, role in eschema.relation_definitions(True): - for tschema in tschemas: - if role == 'subject': - sschema, oschema = eschema, tschema - else: - sschema, oschema = tschema, eschema - func(sschema, rschema, oschema, role) - - # rtag declaration api #################################################### - - def tag_attribute(self, key, *args, **kwargs): - key = list(key) - key.append('*') - key.append('subject') - self.tag_relation(key, *args, **kwargs) - - def tag_subject_of(self, key, *args, **kwargs): - key = list(key) - 
key.append('subject') - self.tag_relation(key, *args, **kwargs) - - def tag_object_of(self, key, *args, **kwargs): - key = list(key) - key.append('object') - self.tag_relation(key, *args, **kwargs) - - def tag_relation(self, key, tag): - assert len(key) == 4, 'bad key: %s' % list(key) - if self._allowed_values is not None: - assert tag in self._allowed_values, \ - '%r is not an allowed tag (should be in %s)' % ( - tag, self._allowed_values) - self._tagdefs[_ensure_str_key(key)] = tag - return tag - - def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs): - if isinstance(attr, string_types): - attr, role = attr, 'subject' - else: - attr, role = attr - if role == 'subject': - self.tag_subject_of((etype, attr, desttype), *args, **kwargs) - else: - self.tag_object_of((desttype, attr, etype), *args, **kwargs) - - - # rtag runtime api ######################################################## - - def del_rtag(self, *key): - del self._tagdefs[key] - - def get(self, *key): - for key in reversed(self._get_keys(*key)): - try: - return self._tagdefs[key] - except KeyError: - continue - return None - - def etype_get(self, etype, rtype, role, ttype='*'): - if role == 'subject': - return self.get(etype, rtype, ttype, role) - return self.get(ttype, rtype, etype, role) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - - -class RelationTagsSet(RelationTags): - """This class associates a set of tags to each key. - """ - tag_container_cls = set - - def tag_relation(self, key, tag): - rtags = self._tagdefs.setdefault(_ensure_str_key(key), - self.tag_container_cls()) - rtags.add(tag) - return rtags - - def get(self, stype, rtype, otype, tagged): - rtags = self.tag_container_cls() - for key in self._get_keys(stype, rtype, otype, tagged): - try: - rtags.update(self._tagdefs[key]) - except KeyError: - continue - return rtags - - -class RelationTagsDict(RelationTagsSet): - """This class associates a set of tags to each key.""" - tag_container_cls = dict - - def tag_relation(self, key, tag): - key = _ensure_str_key(key) - try: - rtags = self._tagdefs[key] - rtags.update(tag) - return rtags - except KeyError: - self._tagdefs[key] = tag - return tag - - def setdefault(self, key, tagkey, tagvalue): - key = _ensure_str_key(key) - try: - rtags = self._tagdefs[key] - rtags.setdefault(tagkey, tagvalue) - return rtags - except KeyError: - self._tagdefs[key] = {tagkey: tagvalue} - return self._tagdefs[key] - - -class RelationTagsBool(RelationTags): - _allowed_values = frozenset((True, False)) - - -class NoTargetRelationTagsDict(RelationTagsDict): - - @property - def name(self): - return self.__class__.name - - # tag_subject_of / tag_object_of issue warning if '*' is not given as target - # type, while tag_relation handle it silently since it may be used during - # initialization - def tag_subject_of(self, key, tag): - subj, rtype, obj = key - if obj != '*': - self.warning('using explict target type in %s.tag_subject_of() ' - 'has no effect, use (%s, %s, "*") instead of (%s, %s, %s)', - self.name, subj, rtype, subj, rtype, obj) - super(NoTargetRelationTagsDict, self).tag_subject_of((subj, rtype, '*'), tag) - - def tag_object_of(self, key, tag): - subj, rtype, obj = key - if subj != '*': - self.warning('using explict subject type in %s.tag_object_of() ' - 'has no effect, use ("*", %s, %s) instead of (%s, %s, %s)', - self.name, rtype, obj, subj, rtype, obj) - 
super(NoTargetRelationTagsDict, self).tag_object_of(('*', rtype, obj), tag) - - def tag_relation(self, key, tag): - if key[-1] == 'subject' and key[-2] != '*': - if isinstance(key, tuple): - key = list(key) - key[-2] = '*' - elif key[-1] == 'object' and key[0] != '*': - if isinstance(key, tuple): - key = list(key) - key[0] = '*' - super(NoTargetRelationTagsDict, self).tag_relation(key, tag) - - -set_log_methods(RelationTags, logging.getLogger('cubicweb.rtags')) diff -r 058bb3dc685f -r 0b59724cb3f2 schema.py --- a/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1458 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""classes to define schemas for CubicWeb""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import re -from os.path import join, basename -from logging import getLogger -from warnings import warn - -from six import PY2, text_type, string_types, add_metaclass -from six.moves import range - -from logilab.common import tempattr -from logilab.common.decorators import cached, clear_cache, monkeypatch, cachedproperty -from logilab.common.logging_ext import set_log_methods -from logilab.common.deprecation import deprecated, class_moved, moved -from logilab.common.textutils import splitstrip -from logilab.common.graph import get_cycles - -import yams -from yams import BadSchemaDefinition, buildobjs as ybo -from yams.schema import Schema, ERSchema, EntitySchema, RelationSchema, \ - RelationDefinitionSchema, PermissionMixIn, role_name -from yams.constraints import (BaseConstraint, FormatConstraint, BoundaryConstraint, - IntervalBoundConstraint, StaticVocabularyConstraint) -from yams.reader import (CONSTRAINTS, PyFileReader, SchemaLoader, - cleanup_sys_modules, fill_schema_from_namespace) - -from rql import parse, nodes, RQLSyntaxError, TypeResolverException -from rql.analyze import ETypeResolver - -import cubicweb -from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized, _ - -try: - from cubicweb import server -except ImportError: - # We need to lookup DEBUG from there, - # however a pure dbapi client may not have it. 
- class server(object): pass - server.DEBUG = False - - -PURE_VIRTUAL_RTYPES = set(('identity', 'has_text',)) -VIRTUAL_RTYPES = set(('eid', 'identity', 'has_text',)) - -# set of meta-relations available for every entity types -META_RTYPES = set(( - 'owned_by', 'created_by', 'is', 'is_instance_of', 'identity', - 'eid', 'creation_date', 'cw_source', 'modification_date', 'has_text', 'cwuri', - )) -WORKFLOW_RTYPES = set(('custom_workflow', 'in_state', 'wf_info_for')) -WORKFLOW_DEF_RTYPES = set(('workflow_of', 'state_of', 'transition_of', - 'initial_state', 'default_workflow', - 'allowed_transition', 'destination_state', - 'from_state', 'to_state', 'condition', - 'subworkflow', 'subworkflow_state', 'subworkflow_exit', - 'by_transition', - )) -SYSTEM_RTYPES = set(('in_group', 'require_group', - # cwproperty - 'for_user', - 'cw_schema', 'cw_import_of', 'cw_for_source', - 'cw_host_config_of', - )) | WORKFLOW_RTYPES -NO_I18NCONTEXT = META_RTYPES | WORKFLOW_RTYPES - -SKIP_COMPOSITE_RELS = [('cw_source', 'subject')] - -# set of entity and relation types used to build the schema -SCHEMA_TYPES = set(( - 'CWEType', 'CWRType', 'CWComputedRType', 'CWAttribute', 'CWRelation', - 'CWConstraint', 'CWConstraintType', 'CWUniqueTogetherConstraint', - 'RQLExpression', - 'specializes', - 'relation_type', 'from_entity', 'to_entity', - 'constrained_by', 'cstrtype', - 'constraint_of', 'relations', - 'read_permission', 'add_permission', - 'delete_permission', 'update_permission', - )) - -WORKFLOW_TYPES = set(('Transition', 'State', 'TrInfo', 'Workflow', - 'WorkflowTransition', 'BaseTransition', - 'SubWorkflowExitPoint')) - -INTERNAL_TYPES = set(('CWProperty', 'CWCache', 'ExternalUri', 'CWDataImport', - 'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig')) - -UNIQUE_CONSTRAINTS = ('SizeConstraint', 'FormatConstraint', - 'StaticVocabularyConstraint', - 'RQLVocabularyConstraint') - -_LOGGER = getLogger('cubicweb.schemaloader') - -# entity and relation schema created from serialized schema have an eid -ybo.ETYPE_PROPERTIES += ('eid',) -ybo.RTYPE_PROPERTIES += ('eid',) - -def build_schema_from_namespace(items): - schema = CubicWebSchema('noname') - fill_schema_from_namespace(schema, items, register_base_types=False) - return schema - -# Bases for manipulating RQL in schema ######################################### - -def guess_rrqlexpr_mainvars(expression): - defined = set(split_expression(expression)) - mainvars = set() - if 'S' in defined: - mainvars.add('S') - if 'O' in defined: - mainvars.add('O') - if 'U' in defined: - mainvars.add('U') - if not mainvars: - raise BadSchemaDefinition('unable to guess selection variables in %r' - % expression) - return mainvars - -def split_expression(rqlstring): - for expr in rqlstring.split(','): - for noparen1 in expr.split('('): - for noparen2 in noparen1.split(')'): - for word in noparen2.split(): - yield word - -def normalize_expression(rqlstring): - """normalize an rql expression to ease schema synchronization (avoid - suppressing and reinserting an expression if only a space has been - added/removed for instance) - """ - union = parse(u'Any 1 WHERE %s' % rqlstring).as_string() - if PY2 and isinstance(union, str): - union = union.decode('utf-8') - return union.split(' WHERE ', 1)[1] - - -def _check_valid_formula(rdef, formula_rqlst): - """Check the formula is a valid RQL query with some restriction (no union, - single selected node, etc.), raise BadSchemaDefinition if not - """ - if len(formula_rqlst.children) != 1: - raise BadSchemaDefinition('computed attribute %(attr)s on 
%(etype)s: ' - 'can not use UNION in formula %(form)r' % - {'attr' : rdef.rtype, - 'etype' : rdef.subject.type, - 'form' : rdef.formula}) - select = formula_rqlst.children[0] - if len(select.selection) != 1: - raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: ' - 'can only select one term in formula %(form)r' % - {'attr' : rdef.rtype, - 'etype' : rdef.subject.type, - 'form' : rdef.formula}) - term = select.selection[0] - types = set(term.get_type(sol) for sol in select.solutions) - if len(types) != 1: - raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: ' - 'multiple possible types (%(types)s) for formula %(form)r' % - {'attr' : rdef.rtype, - 'etype' : rdef.subject.type, - 'types' : list(types), - 'form' : rdef.formula}) - computed_type = types.pop() - expected_type = rdef.object.type - if computed_type != expected_type: - raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: ' - 'computed attribute type (%(comp_type)s) mismatch with ' - 'specified type (%(attr_type)s)' % - {'attr' : rdef.rtype, - 'etype' : rdef.subject.type, - 'comp_type' : computed_type, - 'attr_type' : expected_type}) - - -class RQLExpression(object): - """Base class for RQL expression used in schema (constraints and - permissions) - """ - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - # to be defined in concrete classes - rqlst = None - predefined_variables = None - full_rql = None - - def __init__(self, expression, mainvars, eid): - """ - :type mainvars: sequence of RQL variables' names. Can be provided as a - comma separated string. - :param mainvars: names of the variables being selected. - - """ - self.eid = eid # eid of the entity representing this rql expression - assert mainvars, 'bad mainvars %s' % mainvars - if isinstance(mainvars, string_types): - mainvars = set(splitstrip(mainvars)) - elif not isinstance(mainvars, set): - mainvars = set(mainvars) - self.mainvars = mainvars - self.expression = normalize_expression(expression) - try: - self.full_rql = self.rqlst.as_string() - except RQLSyntaxError: - raise RQLSyntaxError(expression) - for mainvar in mainvars: - # if variable is predefined, an extra reference is inserted - # automatically (`VAR eid %(v)s`) - if mainvar in self.predefined_variables: - min_refs = 3 - else: - min_refs = 2 - if len(self.rqlst.defined_vars[mainvar].references()) < min_refs: - _LOGGER.warn('You did not use the %s variable in your RQL ' - 'expression %s', mainvar, self) - # syntax tree used by read security (inserted in queries when necessary) - self.snippet_rqlst = parse(self.minimal_rql, print_errors=False).children[0] - # graph of links between variables, used by rql rewriter - self.vargraph = vargraph(self.rqlst) - # useful for some instrumentation, e.g. 
localperms permcheck command - self.package = ybo.PACKAGE - - def __str__(self): - return self.full_rql - def __repr__(self): - return '%s(%s)' % (self.__class__.__name__, self.full_rql) - - def __lt__(self, other): - if hasattr(other, 'expression'): - return self.expression < other.expression - return True - - def __eq__(self, other): - if hasattr(other, 'expression'): - return self.expression == other.expression - return False - - def __ne__(self, other): - return not (self == other) - - def __hash__(self): - return hash(self.expression) - - def __deepcopy__(self, memo): - return self.__class__(self.expression, self.mainvars) - def __getstate__(self): - return (self.expression, self.mainvars) - def __setstate__(self, state): - self.__init__(*state) - - @cachedproperty - def rqlst(self): - select = parse(self.minimal_rql, print_errors=False).children[0] - defined = set(split_expression(self.expression)) - for varname in self.predefined_variables: - if varname in defined: - select.add_eid_restriction(select.get_variable(varname), varname.lower(), 'Substitute') - return select - - # permission rql expression specific stuff ################################# - - @cached - def transform_has_permission(self): - found = None - rqlst = self.rqlst - for var in rqlst.defined_vars.values(): - for varref in var.references(): - rel = varref.relation() - if rel is None: - continue - try: - prefix, action, suffix = rel.r_type.split('_') - except ValueError: - continue - if prefix != 'has' or suffix != 'permission' or \ - not action in ('add', 'delete', 'update', 'read'): - continue - if found is None: - found = [] - rqlst.save_state() - assert rel.children[0].name == 'U' - objvar = rel.children[1].children[0].variable - rqlst.remove_node(rel) - selected = [v.name for v in rqlst.get_selected_variables()] - if objvar.name not in selected: - colindex = len(selected) - rqlst.add_selected(objvar) - else: - colindex = selected.index(objvar.name) - found.append((action, colindex)) - # remove U eid %(u)s if U is not used in any other relation - uvrefs = rqlst.defined_vars['U'].references() - if len(uvrefs) == 1: - rqlst.remove_node(uvrefs[0].relation()) - if found is not None: - rql = rqlst.as_string() - if len(rqlst.selection) == 1 and isinstance(rqlst.where, nodes.Relation): - # only "Any X WHERE X eid %(x)s" remaining, no need to execute the rql - keyarg = rqlst.selection[0].name.lower() - else: - keyarg = None - rqlst.recover() - return rql, found, keyarg - return rqlst.as_string(), None, None - - def _check(self, _cw, **kwargs): - """return True if the rql expression is matching the given relation - between fromeid and toeid - - _cw may be a request or a server side transaction - """ - creating = kwargs.get('creating') - if not creating and self.eid is not None: - key = (self.eid, tuple(sorted(kwargs.items()))) - try: - return _cw.local_perm_cache[key] - except KeyError: - pass - rql, has_perm_defs, keyarg = self.transform_has_permission() - # when creating an entity, expression related to X satisfied - if creating and 'X' in self.rqlst.defined_vars: - return True - if keyarg is None: - kwargs.setdefault('u', _cw.user.eid) - try: - rset = _cw.execute(rql, kwargs, build_descr=True) - except NotImplementedError: - self.critical('cant check rql expression, unsupported rql %s', rql) - if self.eid is not None: - _cw.local_perm_cache[key] = False - return False - except TypeResolverException as ex: - # some expression may not be resolvable with current kwargs - # (type conflict) - self.warning('%s: %s', rql, 
str(ex)) - if self.eid is not None: - _cw.local_perm_cache[key] = False - return False - except Unauthorized as ex: - self.debug('unauthorized %s: %s', rql, str(ex)) - if self.eid is not None: - _cw.local_perm_cache[key] = False - return False - else: - rset = _cw.eid_rset(kwargs[keyarg]) - # if no special has_*_permission relation in the rql expression, just - # check the result set contains something - if has_perm_defs is None: - if rset: - if self.eid is not None: - _cw.local_perm_cache[key] = True - return True - elif rset: - # check every special has_*_permission relation is satisfied - get_eschema = _cw.vreg.schema.eschema - try: - for eaction, col in has_perm_defs: - for i in range(len(rset)): - eschema = get_eschema(rset.description[i][col]) - eschema.check_perm(_cw, eaction, eid=rset[i][col]) - if self.eid is not None: - _cw.local_perm_cache[key] = True - return True - except Unauthorized: - pass - if self.eid is not None: - _cw.local_perm_cache[key] = False - return False - - @property - def minimal_rql(self): - return 'Any %s WHERE %s' % (','.join(sorted(self.mainvars)), - self.expression) - - - -# rql expressions for use in permission definition ############################# - -class ERQLExpression(RQLExpression): - predefined_variables = 'XU' - - def __init__(self, expression, mainvars=None, eid=None): - RQLExpression.__init__(self, expression, mainvars or 'X', eid) - - def check(self, _cw, eid=None, creating=False, **kwargs): - if 'X' in self.rqlst.defined_vars: - if eid is None: - if creating: - return self._check(_cw, creating=True, **kwargs) - return False - assert creating == False - return self._check(_cw, x=eid, **kwargs) - return self._check(_cw, **kwargs) - - -class CubicWebRelationDefinitionSchema(RelationDefinitionSchema): - def constraint_by_eid(self, eid): - for cstr in self.constraints: - if cstr.eid == eid: - return cstr - raise ValueError('No constraint with eid %d' % eid) - - def rql_expression(self, expression, mainvars=None, eid=None): - """rql expression factory""" - if self.rtype.final: - return ERQLExpression(expression, mainvars, eid) - return RRQLExpression(expression, mainvars, eid) - - def check_permission_definitions(self): - super(CubicWebRelationDefinitionSchema, self).check_permission_definitions() - schema = self.subject.schema - for action, groups in self.permissions.items(): - for group_or_rqlexpr in groups: - if action == 'read' and \ - isinstance(group_or_rqlexpr, RQLExpression): - msg = "can't use rql expression for read permission of %s" - raise BadSchemaDefinition(msg % self) - if self.final and isinstance(group_or_rqlexpr, RRQLExpression): - msg = "can't use RRQLExpression on %s, use an ERQLExpression" - raise BadSchemaDefinition(msg % self) - if not self.final and isinstance(group_or_rqlexpr, ERQLExpression): - msg = "can't use ERQLExpression on %s, use a RRQLExpression" - raise BadSchemaDefinition(msg % self) - -def vargraph(rqlst): - """ builds an adjacency graph of variables from the rql syntax tree, e.g: - Any O,S WHERE T subworkflow_exit S, T subworkflow WF, O state_of WF - => {'WF': ['O', 'T'], 'S': ['T'], 'T': ['WF', 'S'], 'O': ['WF']} - """ - vargraph = {} - for relation in rqlst.get_nodes(nodes.Relation): - try: - rhsvarname = relation.children[1].children[0].variable.name - lhsvarname = relation.children[0].name - except AttributeError: - pass - else: - vargraph.setdefault(lhsvarname, []).append(rhsvarname) - vargraph.setdefault(rhsvarname, []).append(lhsvarname) - #vargraph[(lhsvarname, rhsvarname)] = relation.r_type - return 
vargraph - - -class GeneratedConstraint(object): - def __init__(self, rqlst, mainvars): - self.snippet_rqlst = rqlst - self.mainvars = mainvars - self.vargraph = vargraph(rqlst) - - -class RRQLExpression(RQLExpression): - predefined_variables = 'SOU' - - def __init__(self, expression, mainvars=None, eid=None): - if mainvars is None: - mainvars = guess_rrqlexpr_mainvars(expression) - RQLExpression.__init__(self, expression, mainvars, eid) - - def check(self, _cw, fromeid=None, toeid=None): - kwargs = {} - if 'S' in self.rqlst.defined_vars: - if fromeid is None: - return False - kwargs['s'] = fromeid - if 'O' in self.rqlst.defined_vars: - if toeid is None: - return False - kwargs['o'] = toeid - return self._check(_cw, **kwargs) - - -# In yams, default 'update' perm for attributes granted to managers and owners. -# Within cw, we want to default to users who may edit the entity holding the -# attribute. -# These default permissions won't be checked by the security hooks: -# since they delegate checking to the entity, we can skip actual checks. -ybo.DEFAULT_ATTRPERMS['update'] = ('managers', ERQLExpression('U has_update_permission X')) -ybo.DEFAULT_ATTRPERMS['add'] = ('managers', ERQLExpression('U has_add_permission X')) - -# we don't want 'add' or 'delete' permissions on computed relation types -# (they're hardcoded to '()' on computed relation definitions) -if 'add' in yams.DEFAULT_COMPUTED_RELPERMS: - del yams.DEFAULT_COMPUTED_RELPERMS['add'] -if 'delete' in yams.DEFAULT_COMPUTED_RELPERMS: - del yams.DEFAULT_COMPUTED_RELPERMS['delete'] - - -PUB_SYSTEM_ENTITY_PERMS = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'delete': ('managers',), - 'update': ('managers',), - } -PUB_SYSTEM_REL_PERMS = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'delete': ('managers',), - } -PUB_SYSTEM_ATTR_PERMS = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'update': ('managers',), - } -RO_REL_PERMS = { - 'read': ('managers', 'users', 'guests',), - 'add': (), - 'delete': (), - } -RO_ATTR_PERMS = { - 'read': ('managers', 'users', 'guests',), - 'add': ybo.DEFAULT_ATTRPERMS['add'], - 'update': (), - } - -# XXX same algorithm as in reorder_cubes and probably other place, -# may probably extract a generic function -def order_eschemas(eschemas): - """return entity schemas ordered such that entity types which specializes an - other one appears after that one - """ - graph = {} - for eschema in eschemas: - if eschema.specializes(): - graph[eschema] = set((eschema.specializes(),)) - else: - graph[eschema] = set() - cycles = get_cycles(graph) - if cycles: - cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles) - raise Exception('cycles in entity schema specialization: %s' - % cycles) - eschemas = [] - while graph: - # sorted to get predictable results - for eschema, deps in sorted(graph.items()): - if not deps: - eschemas.append(eschema) - del graph[eschema] - for deps in graph.values(): - try: - deps.remove(eschema) - except KeyError: - continue - return eschemas - -def bw_normalize_etype(etype): - if etype in ETYPE_NAME_MAP: - msg = '%s has been renamed to %s, please update your code' % ( - etype, ETYPE_NAME_MAP[etype]) - warn(msg, DeprecationWarning, stacklevel=4) - etype = ETYPE_NAME_MAP[etype] - return etype - -def display_name(req, key, form='', context=None): - """return a internationalized string for the key (schema entity or relation - name) in a given form - """ - assert form in ('', 'plural', 'subject', 'object') - if form == 
'subject': - form = '' - if form: - key = key + '_' + form - # ensure unicode - if context is not None: - return text_type(req.pgettext(context, key)) - else: - return text_type(req._(key)) - - -# Schema objects definition ################################################### - -def ERSchema_display_name(self, req, form='', context=None): - """return a internationalized string for the entity/relation type name in - a given form - """ - return display_name(req, self.type, form, context) -ERSchema.display_name = ERSchema_display_name - -@cached -def get_groups(self, action): - """return the groups authorized to perform on entities of - this type - - :type action: str - :param action: the name of a permission - - :rtype: tuple - :return: names of the groups with the given permission - """ - assert action in self.ACTIONS, action - #assert action in self._groups, '%s %s' % (self, action) - try: - return frozenset(g for g in self.permissions[action] if isinstance(g, string_types)) - except KeyError: - return () -PermissionMixIn.get_groups = get_groups - -@cached -def get_rqlexprs(self, action): - """return the rql expressions representing queries to check the user is allowed - to perform on entities of this type - - :type action: str - :param action: the name of a permission - - :rtype: tuple - :return: the rql expressions with the given permission - """ - assert action in self.ACTIONS, action - #assert action in self._rqlexprs, '%s %s' % (self, action) - try: - return tuple(g for g in self.permissions[action] if not isinstance(g, string_types)) - except KeyError: - return () -PermissionMixIn.get_rqlexprs = get_rqlexprs - -orig_set_action_permissions = PermissionMixIn.set_action_permissions -def set_action_permissions(self, action, permissions): - """set the groups and rql expressions allowing to perform on - entities of this type - - :type action: str - :param action: the name of a permission - - :type permissions: tuple - :param permissions: the groups and rql expressions allowing the given action - """ - orig_set_action_permissions(self, action, tuple(permissions)) - clear_cache(self, 'get_rqlexprs') - clear_cache(self, 'get_groups') -PermissionMixIn.set_action_permissions = set_action_permissions - -def has_local_role(self, action): - """return true if the action *may* be granted locally (i.e. either rql - expressions or the owners group are used in security definition) - - XXX this method is only there since we don't know well how to deal with - 'add' action checking. Also find a better name would be nice. - """ - assert action in self.ACTIONS, action - if self.get_rqlexprs(action): - return True - if action in ('update', 'delete'): - return 'owners' in self.get_groups(action) - return False -PermissionMixIn.has_local_role = has_local_role - -def may_have_permission(self, action, req): - if action != 'read' and not (self.has_local_role('read') or - self.has_perm(req, 'read')): - return False - return self.has_local_role(action) or self.has_perm(req, action) -PermissionMixIn.may_have_permission = may_have_permission - -def has_perm(self, _cw, action, **kwargs): - """return true if the action is granted globally or locally""" - try: - self.check_perm(_cw, action, **kwargs) - return True - except Unauthorized: - return False -PermissionMixIn.has_perm = has_perm - - -def check_perm(self, _cw, action, **kwargs): - # NB: _cw may be a server transaction or a request object. 
- # - # check user is in an allowed group, if so that's enough internal - # transactions should always stop there - DBG = False - if server.DEBUG & server.DBG_SEC: - if action in server._SECURITY_CAPS: - _self_str = str(self) - if server._SECURITY_ITEMS: - if any(item in _self_str for item in server._SECURITY_ITEMS): - DBG = True - else: - DBG = True - groups = self.get_groups(action) - if _cw.user.matching_groups(groups): - if DBG: - print('check_perm: %r %r: user matches %s' % (action, _self_str, groups)) - return - # if 'owners' in allowed groups, check if the user actually owns this - # object, if so that's enough - # - # NB: give _cw to user.owns since user is not be bound to a transaction on - # the repository side - if 'owners' in groups and ( - kwargs.get('creating') - or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))): - if DBG: - print('check_perm: %r %r: user is owner or creation time' % - (action, _self_str)) - return - # else if there is some rql expressions, check them - if DBG: - print('check_perm: %r %r %s' % - (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs)) - for rqlexpr in self.get_rqlexprs(action)])) - if any(rqlexpr.check(_cw, **kwargs) - for rqlexpr in self.get_rqlexprs(action)): - return - raise Unauthorized(action, str(self)) -PermissionMixIn.check_perm = check_perm - - -CubicWebRelationDefinitionSchema._RPROPERTIES['eid'] = None -# remember rproperties defined at this point. Others will have to be serialized in -# CWAttribute.extra_props -KNOWN_RPROPERTIES = CubicWebRelationDefinitionSchema.ALL_PROPERTIES() - - -class CubicWebEntitySchema(EntitySchema): - """a entity has a type, a set of subject and or object relations - the entity schema defines the possible relations for a given type and some - constraints on those relations - """ - def __init__(self, schema=None, edef=None, eid=None, **kwargs): - super(CubicWebEntitySchema, self).__init__(schema, edef, **kwargs) - if eid is None and edef is not None: - eid = getattr(edef, 'eid', None) - self.eid = eid - - def targets(self, role): - assert role in ('subject', 'object') - if role == 'subject': - return self.subjrels.values() - return self.objrels.values() - - @cachedproperty - def composite_rdef_roles(self): - """Return all relation definitions that define the current entity - type as a composite. 
- """ - rdef_roles = [] - for role in ('subject', 'object'): - for rschema in self.targets(role): - if rschema.final: - continue - for rdef in rschema.rdefs.values(): - if (role == 'subject' and rdef.subject == self) or \ - (role == 'object' and rdef.object == self): - crole = rdef.composite - if crole == role: - rdef_roles.append((rdef, role)) - return rdef_roles - - @cachedproperty - def is_composite(self): - return bool(len(self.composite_rdef_roles)) - - def check_permission_definitions(self): - super(CubicWebEntitySchema, self).check_permission_definitions() - for groups in self.permissions.values(): - for group_or_rqlexpr in groups: - if isinstance(group_or_rqlexpr, RRQLExpression): - msg = "can't use RRQLExpression on %s, use an ERQLExpression" - raise BadSchemaDefinition(msg % self.type) - - def is_subobject(self, strict=False, skiprels=None): - if skiprels is None: - skiprels = SKIP_COMPOSITE_RELS - else: - skiprels += SKIP_COMPOSITE_RELS - return super(CubicWebEntitySchema, self).is_subobject(strict, - skiprels=skiprels) - - def attribute_definitions(self): - """return an iterator on attribute definitions - - attribute relations are a subset of subject relations where the - object's type is a final entity - - an attribute definition is a 2-uple : - * name of the relation - * schema of the destination entity type - """ - iter = super(CubicWebEntitySchema, self).attribute_definitions() - for rschema, attrschema in iter: - if rschema.type == 'has_text': - continue - yield rschema, attrschema - - def main_attribute(self): - """convenience method that returns the *main* (i.e. the first non meta) - attribute defined in the entity schema - """ - for rschema, _ in self.attribute_definitions(): - if not (rschema in META_RTYPES - or self.is_metadata(rschema)): - return rschema - - def add_subject_relation(self, rschema): - """register the relation schema as possible subject relation""" - super(CubicWebEntitySchema, self).add_subject_relation(rschema) - if rschema.final: - if self.rdef(rschema).get('fulltextindexed'): - self._update_has_text() - elif rschema.fulltext_container: - self._update_has_text() - - def add_object_relation(self, rschema): - """register the relation schema as possible object relation""" - super(CubicWebEntitySchema, self).add_object_relation(rschema) - if rschema.fulltext_container: - self._update_has_text() - - def del_subject_relation(self, rtype): - super(CubicWebEntitySchema, self).del_subject_relation(rtype) - if 'has_text' in self.subjrels: - self._update_has_text(deletion=True) - - def del_object_relation(self, rtype): - super(CubicWebEntitySchema, self).del_object_relation(rtype) - if 'has_text' in self.subjrels: - self._update_has_text(deletion=True) - - def _update_has_text(self, deletion=False): - may_need_has_text, has_has_text = False, False - need_has_text = None - for rschema in self.subject_relations(): - if rschema.final: - if rschema == 'has_text': - has_has_text = True - elif self.rdef(rschema).get('fulltextindexed'): - may_need_has_text = True - elif rschema.fulltext_container: - if rschema.fulltext_container == 'subject': - may_need_has_text = True - else: - need_has_text = False - for rschema in self.object_relations(): - if rschema.fulltext_container: - if rschema.fulltext_container == 'object': - may_need_has_text = True - else: - need_has_text = False - if need_has_text is None: - need_has_text = may_need_has_text - if need_has_text and not has_has_text and not deletion: - rdef = ybo.RelationDefinition(self.type, 'has_text', 'String', - 
__permissions__=RO_ATTR_PERMS) - self.schema.add_relation_def(rdef) - elif not need_has_text and has_has_text: - # use rschema.del_relation_def and not schema.del_relation_def to - # avoid deleting the relation type accidentally... - self.schema['has_text'].del_relation_def(self, self.schema['String']) - - def schema_entity(self): # XXX @property for consistency with meta - """return True if this entity type is used to build the schema""" - return self.type in SCHEMA_TYPES - - def rql_expression(self, expression, mainvars=None, eid=None): - """rql expression factory""" - return ERQLExpression(expression, mainvars, eid) - - -class CubicWebRelationSchema(PermissionMixIn, RelationSchema): - permissions = {} - ACTIONS = () - rdef_class = CubicWebRelationDefinitionSchema - - def __init__(self, schema=None, rdef=None, eid=None, **kwargs): - if rdef is not None: - # if this relation is inlined - self.inlined = rdef.inlined - super(CubicWebRelationSchema, self).__init__(schema, rdef, **kwargs) - if eid is None and rdef is not None: - eid = getattr(rdef, 'eid', None) - self.eid = eid - - def init_computed_relation(self, rdef): - self.ACTIONS = ('read',) - super(CubicWebRelationSchema, self).init_computed_relation(rdef) - - def advertise_new_add_permission(self): - pass - - def check_permission_definitions(self): - RelationSchema.check_permission_definitions(self) - PermissionMixIn.check_permission_definitions(self) - - @property - def meta(self): - return self.type in META_RTYPES - - def schema_relation(self): # XXX @property for consistency with meta - """return True if this relation type is used to build the schema""" - return self.type in SCHEMA_TYPES - - def may_have_permission(self, action, req, eschema=None, role=None): - if eschema is not None: - for tschema in self.targets(eschema, role): - rdef = self.role_rdef(eschema, tschema, role) - if rdef.may_have_permission(action, req): - return True - else: - for rdef in self.rdefs.values(): - if rdef.may_have_permission(action, req): - return True - return False - - def has_perm(self, _cw, action, **kwargs): - """return true if the action is granted globally or locally""" - if self.final: - assert not ('fromeid' in kwargs or 'toeid' in kwargs), kwargs - assert action in ('read', 'update') - if 'eid' in kwargs: - subjtype = _cw.entity_metas(kwargs['eid'])['type'] - else: - subjtype = objtype = None - else: - assert not 'eid' in kwargs, kwargs - assert action in ('read', 'add', 'delete') - if 'fromeid' in kwargs: - subjtype = _cw.entity_metas(kwargs['fromeid'])['type'] - elif 'frometype' in kwargs: - subjtype = kwargs.pop('frometype') - else: - subjtype = None - if 'toeid' in kwargs: - objtype = _cw.entity_metas(kwargs['toeid'])['type'] - elif 'toetype' in kwargs: - objtype = kwargs.pop('toetype') - else: - objtype = None - if objtype and subjtype: - return self.rdef(subjtype, objtype).has_perm(_cw, action, **kwargs) - elif subjtype: - for tschema in self.targets(subjtype, 'subject'): - rdef = self.rdef(subjtype, tschema) - if not rdef.has_perm(_cw, action, **kwargs): - return False - elif objtype: - for tschema in self.targets(objtype, 'object'): - rdef = self.rdef(tschema, objtype) - if not rdef.has_perm(_cw, action, **kwargs): - return False - else: - for rdef in self.rdefs.values(): - if not rdef.has_perm(_cw, action, **kwargs): - return False - return True - - @deprecated('use .rdef(subjtype, objtype).role_cardinality(role)') - def cardinality(self, subjtype, objtype, target): - return self.rdef(subjtype, objtype).role_cardinality(target) - - 
-class CubicWebSchema(Schema): - """set of entities and relations schema defining the possible data sets - used in an application - - :type name: str - :ivar name: name of the schema, usually the instance identifier - - :type base: str - :ivar base: path of the directory where the schema is defined - """ - reading_from_database = False - entity_class = CubicWebEntitySchema - relation_class = CubicWebRelationSchema - no_specialization_inference = ('identity',) - - def __init__(self, *args, **kwargs): - self._eid_index = {} - super(CubicWebSchema, self).__init__(*args, **kwargs) - ybo.register_base_types(self) - rschema = self.add_relation_type(ybo.RelationType('eid')) - rschema.final = True - rschema = self.add_relation_type(ybo.RelationType('has_text')) - rschema.final = True - rschema = self.add_relation_type(ybo.RelationType('identity')) - rschema.final = False - - etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$' - def add_entity_type(self, edef): - edef.name = str(edef.name) - edef.name = bw_normalize_etype(edef.name) - if not re.match(self.etype_name_re, edef.name): - raise BadSchemaDefinition( - '%r is not a valid name for an entity type. It should start ' - 'with an upper cased letter and be followed by at least a ' - 'lower cased letter' % edef.name) - eschema = super(CubicWebSchema, self).add_entity_type(edef) - if not eschema.final: - # automatically add the eid relation to non final entity types - rdef = ybo.RelationDefinition(eschema.type, 'eid', 'Int', - cardinality='11', uid=True, - __permissions__=RO_ATTR_PERMS) - self.add_relation_def(rdef) - rdef = ybo.RelationDefinition(eschema.type, 'identity', eschema.type, - __permissions__=RO_REL_PERMS) - self.add_relation_def(rdef) - self._eid_index[eschema.eid] = eschema - return eschema - - def add_relation_type(self, rdef): - if not rdef.name.islower(): - raise BadSchemaDefinition( - '%r is not a valid name for a relation type. It should be ' - 'lower cased' % rdef.name) - rdef.name = str(rdef.name) - rschema = super(CubicWebSchema, self).add_relation_type(rdef) - self._eid_index[rschema.eid] = rschema - return rschema - - def add_relation_def(self, rdef): - """build a part of a relation schema - (i.e. add a relation between two specific entity's types) - - :type subject: str - :param subject: entity's type that is subject of the relation - - :type rtype: str - :param rtype: the relation's type (i.e. 
the name of the relation) - - :type obj: str - :param obj: entity's type that is object of the relation - - :rtype: RelationSchema - :param: the newly created or just completed relation schema - """ - rdef.name = rdef.name.lower() - rdef.subject = bw_normalize_etype(rdef.subject) - rdef.object = bw_normalize_etype(rdef.object) - rdefs = super(CubicWebSchema, self).add_relation_def(rdef) - if rdefs: - try: - self._eid_index[rdef.eid] = rdefs - except AttributeError: - pass # not a serialized schema - return rdefs - - def del_relation_type(self, rtype): - rschema = self.rschema(rtype) - self._eid_index.pop(rschema.eid, None) - super(CubicWebSchema, self).del_relation_type(rtype) - - def del_relation_def(self, subjtype, rtype, objtype): - for k, v in self._eid_index.items(): - if not isinstance(v, RelationDefinitionSchema): - continue - if v.subject == subjtype and v.rtype == rtype and v.object == objtype: - del self._eid_index[k] - break - super(CubicWebSchema, self).del_relation_def(subjtype, rtype, objtype) - - def del_entity_type(self, etype): - eschema = self.eschema(etype) - self._eid_index.pop(eschema.eid, None) - # deal with has_text first, else its automatic deletion (see above) - # may trigger an error in ancestor's del_entity_type method - if 'has_text' in eschema.subject_relations(): - self.del_relation_def(etype, 'has_text', 'String') - super(CubicWebSchema, self).del_entity_type(etype) - - def schema_by_eid(self, eid): - return self._eid_index[eid] - - def iter_computed_attributes(self): - for relation in self.relations(): - for rdef in relation.rdefs.values(): - if rdef.final and rdef.formula is not None: - yield rdef - - def iter_computed_relations(self): - for relation in self.relations(): - if relation.rule: - yield relation - - def finalize(self): - super(CubicWebSchema, self).finalize() - self.finalize_computed_attributes() - self.finalize_computed_relations() - - def finalize_computed_attributes(self): - """Check computed attributes validity (if any), else raise - `BadSchemaDefinition` - """ - analyzer = ETypeResolver(self) - for rdef in self.iter_computed_attributes(): - rqlst = parse(rdef.formula) - select = rqlst.children[0] - select.add_type_restriction(select.defined_vars['X'], str(rdef.subject)) - analyzer.visit(select) - _check_valid_formula(rdef, rqlst) - rdef.formula_select = select # avoid later recomputation - - - def finalize_computed_relations(self): - """Build relation definitions for computed relations - - The subject and object types are infered using rql analyzer. 
- """ - analyzer = ETypeResolver(self) - for rschema in self.iter_computed_relations(): - # XXX rule is valid if both S and O are defined and not in an exists - rqlexpr = RRQLExpression(rschema.rule) - rqlst = rqlexpr.snippet_rqlst - analyzer.visit(rqlst) - couples = set((sol['S'], sol['O']) for sol in rqlst.solutions) - for subjtype, objtype in couples: - if self[objtype].final: - raise BadSchemaDefinition('computed relations cannot be final') - rdef = ybo.RelationDefinition( - subjtype, rschema.type, objtype, - __permissions__={'add': (), - 'delete': (), - 'read': rschema.permissions['read']}) - rdef.infered = True - self.add_relation_def(rdef) - - def rebuild_infered_relations(self): - super(CubicWebSchema, self).rebuild_infered_relations() - self.finalize_computed_attributes() - self.finalize_computed_relations() - - -# additional cw specific constraints ########################################### - -# these are implemented as CHECK constraints in sql, don't do the work -# twice -StaticVocabularyConstraint.check = lambda *args: True -IntervalBoundConstraint.check = lambda *args: True -BoundaryConstraint.check = lambda *args: True - -class BaseRQLConstraint(RRQLExpression, BaseConstraint): - """base class for rql constraints""" - distinct_query = None - - def serialize(self): - # start with a semicolon for bw compat, see below - return ';' + ','.join(sorted(self.mainvars)) + ';' + self.expression - - @classmethod - def deserialize(cls, value): - _, mainvars, expression = value.split(';', 2) - return cls(expression, mainvars) - - def check(self, entity, rtype, value): - """return true if the value satisfy the constraint, else false""" - # implemented as a hook in the repository - return 1 - - def __str__(self): - if self.distinct_query: - selop = 'Any' - else: - selop = 'DISTINCT Any' - return '%s(%s %s WHERE %s)' % (self.__class__.__name__, selop, - ','.join(sorted(self.mainvars)), - self.expression) - - def __repr__(self): - return '<%s @%#x>' % (self.__str__(), id(self)) - - -class RQLVocabularyConstraint(BaseRQLConstraint): - """the rql vocabulary constraint: - - limits the proposed values to a set of entities returned by an rql query, - but this is not enforced at the repository level - - `expression` is an additional rql restriction that will be added to - a predefined query, where the S and O variables respectively represent - the subject and the object of the relation - - `mainvars` is a set of variables that should be used as selection variables - (i.e. `'Any %s WHERE ...' % mainvars`). If not specified, an attempt will be - made to guess it based on the variables used in the expression. 
- """ - - def repo_check(self, session, eidfrom, rtype, eidto): - """raise ValidationError if the relation doesn't satisfy the constraint - """ - pass # this is a vocabulary constraint, not enforced - - -class RepoEnforcedRQLConstraintMixIn(object): - - def __init__(self, expression, mainvars=None, msg=None): - super(RepoEnforcedRQLConstraintMixIn, self).__init__(expression, mainvars) - self.msg = msg - - def serialize(self): - # start with a semicolon for bw compat, see below - return ';%s;%s\n%s' % (','.join(sorted(self.mainvars)), self.expression, - self.msg or '') - - @classmethod - def deserialize(cls, value): - value, msg = value.split('\n', 1) - _, mainvars, expression = value.split(';', 2) - return cls(expression, mainvars, msg) - - def repo_check(self, session, eidfrom, rtype, eidto=None): - """raise ValidationError if the relation doesn't satisfy the constraint - """ - if not self.match_condition(session, eidfrom, eidto): - # XXX at this point if both or neither of S and O are in mainvar we - # dunno if the validation error `occurred` on eidfrom or eidto (from - # user interface point of view) - # - # possible enhancement: check entity being created, it's probably - # the main eid unless this is a composite relation - if eidto is None or 'S' in self.mainvars or not 'O' in self.mainvars: - maineid = eidfrom - qname = role_name(rtype, 'subject') - else: - maineid = eidto - qname = role_name(rtype, 'object') - if self.msg: - msg = session._(self.msg) - else: - msg = '%(constraint)s %(expression)s failed' % { - 'constraint': session._(self.type()), - 'expression': self.expression} - raise ValidationError(maineid, {qname: msg}) - - def exec_query(self, _cw, eidfrom, eidto): - if eidto is None: - # checking constraint for an attribute relation - expression = 'S eid %(s)s, ' + self.expression - args = {'s': eidfrom} - else: - expression = 'S eid %(s)s, O eid %(o)s, ' + self.expression - args = {'s': eidfrom, 'o': eidto} - if 'U' in self.rqlst.defined_vars: - expression = 'U eid %(u)s, ' + expression - args['u'] = _cw.user.eid - rql = 'Any %s WHERE %s' % (','.join(sorted(self.mainvars)), expression) - if self.distinct_query: - rql = 'DISTINCT ' + rql - return _cw.execute(rql, args, build_descr=False) - - -class RQLConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint): - """the rql constraint is similar to the RQLVocabularyConstraint but - are also enforced at the repository level - """ - distinct_query = False - - def match_condition(self, session, eidfrom, eidto): - return self.exec_query(session, eidfrom, eidto) - - -class RQLUniqueConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint): - """the unique rql constraint check that the result of the query isn't - greater than one. - - You *must* specify `mainvars` when instantiating the constraint since there - is no way to guess it correctly (e.g. if using S,O or U the constraint will - always be satisfied because we've to use a DISTINCT query). - """ - # XXX turns mainvars into a required argument in __init__ - distinct_query = True - - def match_condition(self, session, eidfrom, eidto): - return len(self.exec_query(session, eidfrom, eidto)) <= 1 - - -# workflow extensions ######################################################### - -from yams.buildobjs import _add_relation as yams_add_relation - -class workflowable_definition(ybo.metadefinition): - """extends default EntityType's metaclass to add workflow relations - (i.e. in_state, wf_info_for and custom_workflow). 
This is the default - metaclass for WorkflowableEntityType. - """ - def __new__(mcs, name, bases, classdict): - abstract = classdict.pop('__abstract__', False) - cls = super(workflowable_definition, mcs).__new__(mcs, name, bases, - classdict) - if not abstract: - make_workflowable(cls) - return cls - - -@add_metaclass(workflowable_definition) -class WorkflowableEntityType(ybo.EntityType): - """Use this base class instead of :class:`EntityType` to have workflow - relations (i.e. `in_state`, `wf_info_for` and `custom_workflow`) on your - entity type. - """ - __abstract__ = True - - -def make_workflowable(cls, in_state_descr=None): - """Adds workflow relations as :class:`WorkflowableEntityType`, but usable on - existing classes which are not using that base class. - """ - existing_rels = set(rdef.name for rdef in cls.__relations__) - # let relation types defined in cw.schemas.workflow carrying - # cardinality, constraints and other relation definition properties - etype = getattr(cls, 'name', cls.__name__) - if 'custom_workflow' not in existing_rels: - rdef = ybo.RelationDefinition(etype, 'custom_workflow', 'Workflow') - yams_add_relation(cls.__relations__, rdef) - if 'in_state' not in existing_rels: - rdef = ybo.RelationDefinition(etype, 'in_state', 'State', - description=in_state_descr) - yams_add_relation(cls.__relations__, rdef) - if 'wf_info_for' not in existing_rels: - rdef = ybo.RelationDefinition('TrInfo', 'wf_info_for', etype) - yams_add_relation(cls.__relations__, rdef) - - -# schema loading ############################################################## - -CONSTRAINTS['RQLConstraint'] = RQLConstraint -CONSTRAINTS['RQLUniqueConstraint'] = RQLUniqueConstraint -CONSTRAINTS['RQLVocabularyConstraint'] = RQLVocabularyConstraint -CONSTRAINTS.pop('MultipleStaticVocabularyConstraint', None) # don't want this in cw yams schema -PyFileReader.context.update(CONSTRAINTS) - - -class BootstrapSchemaLoader(SchemaLoader): - """cubicweb specific schema loader, loading only schema necessary to read - the persistent schema - """ - schemacls = CubicWebSchema - - def load(self, config, path=(), **kwargs): - """return a Schema instance from the schema definition read - from - """ - return super(BootstrapSchemaLoader, self).load( - path, config.appid, register_base_types=False, **kwargs) - - def _load_definition_files(self, cubes=None): - # bootstraping, ignore cubes - filepath = join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'bootstrap.py') - self.info('loading %s', filepath) - with tempattr(ybo, 'PACKAGE', 'cubicweb'): # though we don't care here - self.handle_file(filepath) - - def unhandled_file(self, filepath): - """called when a file without handler associated has been found""" - self.warning('ignoring file %r', filepath) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -class CubicWebSchemaLoader(BootstrapSchemaLoader): - """cubicweb specific schema loader, automatically adding metadata to the - instance's schema - """ - - def load(self, config, **kwargs): - """return a Schema instance from the schema definition read - from - """ - self.info('loading %s schemas', ', '.join(config.cubes())) - self.extrapath = {} - for cubesdir in config.cubes_search_path(): - if cubesdir != config.CUBES_DIR: - self.extrapath[cubesdir] = 'cubes' - if config.apphome: - path = tuple(reversed([config.apphome] + config.cubes_path())) - else: - path = 
tuple(reversed(config.cubes_path())) - try: - return super(CubicWebSchemaLoader, self).load(config, path=path, **kwargs) - finally: - # we've to cleanup modules imported from cubicweb.schemas as well - cleanup_sys_modules([join(cubicweb.CW_SOFTWARE_ROOT, 'schemas')]) - - def _load_definition_files(self, cubes): - for filepath in (join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'bootstrap.py'), - join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'base.py'), - join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'workflow.py'), - join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'Bookmark.py')): - self.info('loading %s', filepath) - with tempattr(ybo, 'PACKAGE', 'cubicweb'): - self.handle_file(filepath) - for cube in cubes: - for filepath in self.get_schema_files(cube): - with tempattr(ybo, 'PACKAGE', basename(cube)): - self.handle_file(filepath) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - - -set_log_methods(CubicWebSchemaLoader, getLogger('cubicweb.schemaloader')) -set_log_methods(BootstrapSchemaLoader, getLogger('cubicweb.bootstrapschemaloader')) -set_log_methods(RQLExpression, getLogger('cubicweb.schema')) - -# _() is just there to add messages to the catalog, don't care about actual -# translation -MAY_USE_TEMPLATE_FORMAT = set(('managers',)) -NEED_PERM_FORMATS = [_('text/cubicweb-page-template')] - -@monkeypatch(FormatConstraint) -def vocabulary(self, entity=None, form=None): - cw = None - if form is None and entity is not None: - cw = entity._cw - elif form is not None: - cw = form._cw - if cw is not None: - if hasattr(cw, 'write_security'): # test it's a session and not a request - # cw is a server session - hasperm = not cw.write_security or \ - not cw.is_hook_category_activated('integrity') or \ - cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT) - else: - hasperm = cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT) - if hasperm: - return self.regular_formats + tuple(NEED_PERM_FORMATS) - return self.regular_formats - -# XXX itou for some Statement methods -from rql import stmts -orig_get_etype = stmts.ScopeNode.get_etype -def bw_get_etype(self, name): - return orig_get_etype(self, bw_normalize_etype(name)) -stmts.ScopeNode.get_etype = bw_get_etype - -orig_add_main_variable_delete = stmts.Delete.add_main_variable -def bw_add_main_variable_delete(self, etype, vref): - return orig_add_main_variable_delete(self, bw_normalize_etype(etype), vref) -stmts.Delete.add_main_variable = bw_add_main_variable_delete - -orig_add_main_variable_insert = stmts.Insert.add_main_variable -def bw_add_main_variable_insert(self, etype, vref): - return orig_add_main_variable_insert(self, bw_normalize_etype(etype), vref) -stmts.Insert.add_main_variable = bw_add_main_variable_insert - -orig_set_statement_type = stmts.Select.set_statement_type -def bw_set_statement_type(self, etype): - return orig_set_statement_type(self, bw_normalize_etype(etype)) -stmts.Select.set_statement_type = bw_set_statement_type diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/Bookmark.py --- a/schemas/Bookmark.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,49 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""the Bookmark entity type for internal links - -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from yams.buildobjs import EntityType, RelationType, SubjectRelation, String -from cubicweb.schema import RRQLExpression - -class Bookmark(EntityType): - """bookmarks are used to have user's specific internal links""" - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', 'users',), - 'delete': ('managers', 'owners',), - 'update': ('managers', 'owners',), - } - - title = String(required=True, maxsize=128, internationalizable=True) - path = String(maxsize=2048, required=True, - description=_("relative url of the bookmarked page")) - - bookmarked_by = SubjectRelation('CWUser', - description=_("users using this bookmark")) - - -class bookmarked_by(RelationType): - __permissions__ = {'read': ('managers', 'users', 'guests',), - # test user in users group to avoid granting permission to anonymous user - 'add': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')), - 'delete': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')), - } diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/__init__.py --- a/schemas/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,51 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""some constants and classes to define schema permissions""" - -__docformat__ = "restructuredtext en" - -from cubicweb.schema import RO_REL_PERMS, RO_ATTR_PERMS, \ - PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, \ - ERQLExpression, RRQLExpression - -# permissions for "meta" entity type (readable by anyone, can only be -# added/deleted by managers) -META_ETYPE_PERMS = PUB_SYSTEM_ENTITY_PERMS # XXX deprecates -# permissions for "meta" relation type (readable by anyone, can only be -# added/deleted by managers) -META_RTYPE_PERMS = PUB_SYSTEM_REL_PERMS # XXX deprecates -# permissions for relation type that should only set by hooks using unsafe -# execute, readable by anyone -HOOKS_RTYPE_PERMS = RO_REL_PERMS # XXX deprecates - - -from logilab.common.modutils import LazyObject -from logilab.common.deprecation import deprecated -class MyLazyObject(LazyObject): - - def _getobj(self): - try: - return super(MyLazyObject, self)._getobj() - except ImportError: - raise ImportError('In cubicweb 3.14, function %s has been moved to ' - 'cube localperms. Install it first.' % self.obj) - -for name in ('xperm', 'xexpr', 'xrexpr', 'xorexpr', 'sexpr', 'restricted_sexpr', - 'restricted_oexpr', 'oexpr', 'relxperm', 'relxexpr', '_perm'): - msg = '[3.14] import %s from cubes.localperms' % name - globals()[name] = deprecated(msg, name=name, doc='deprecated')(MyLazyObject('cubes.localperms', name)) diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/_regproc.mysql.sql --- a/schemas/_regproc.mysql.sql Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -/* -*- sql -*- - - mysql specific registered procedures, - -*/ - -/* XXX limit_size version dealing with format as postgres version does. - XXX mysql doesn't support overloading, each function should have a different name - - NOTE: fulltext renamed since it cause a mysql name conflict - */ - -CREATE FUNCTION text_limit_size(vfulltext TEXT, maxsize INT) -RETURNS TEXT -NO SQL -BEGIN - IF LENGTH(vfulltext) < maxsize THEN - RETURN vfulltext; - ELSE - RETURN SUBSTRING(vfulltext from 1 for maxsize) || '...'; - END IF; -END ;; diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/_regproc.postgres.sql --- a/schemas/_regproc.postgres.sql Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,55 +0,0 @@ -/* -*- sql -*- - - postgres specific registered procedures, - require the plpgsql language installed - -*/ - -DROP FUNCTION IF EXISTS comma_join (anyarray) CASCADE; -CREATE FUNCTION comma_join (anyarray) RETURNS text AS $$ - SELECT array_to_string($1, ', ') -$$ LANGUAGE SQL;; - - -DROP FUNCTION IF EXISTS cw_array_append_unique (anyarray, anyelement) CASCADE; -CREATE FUNCTION cw_array_append_unique (anyarray, anyelement) RETURNS anyarray AS $$ - SELECT array_append($1, (SELECT $2 WHERE $2 <> ALL($1))) -$$ LANGUAGE SQL;; - -DROP AGGREGATE IF EXISTS group_concat (anyelement) CASCADE; -CREATE AGGREGATE group_concat ( - basetype = anyelement, - sfunc = cw_array_append_unique, - stype = anyarray, - finalfunc = comma_join, - initcond = '{}' -);; - - -DROP FUNCTION IF EXISTS limit_size (fulltext text, format text, maxsize integer); -CREATE FUNCTION limit_size (fulltext text, format text, maxsize integer) RETURNS text AS $$ -DECLARE - plaintext text; -BEGIN - IF char_length(fulltext) < maxsize THEN - RETURN fulltext; - END IF; - IF format = 'text/html' OR format = 'text/xhtml' OR format = 'text/xml' THEN - plaintext := regexp_replace(fulltext, '<[a-zA-Z/][^>]*>', '', 'g'); - ELSE - plaintext := fulltext; - END IF; - IF 
char_length(plaintext) < maxsize THEN - RETURN plaintext; - ELSE - RETURN substring(plaintext from 1 for maxsize) || '...'; - END IF; -END -$$ LANGUAGE plpgsql;; - -DROP FUNCTION IF EXISTS text_limit_size (fulltext text, maxsize integer); -CREATE FUNCTION text_limit_size (fulltext text, maxsize integer) RETURNS text AS $$ -BEGIN - RETURN limit_size(fulltext, 'text/plain', maxsize); -END -$$ LANGUAGE plpgsql;; diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/base.py --- a/schemas/base.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,383 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""core CubicWeb schema, but not necessary at bootstrap time""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, - SubjectRelation, - String, TZDatetime, Datetime, Password, Interval, - Boolean, UniqueConstraint) -from cubicweb.schema import ( - RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression, - PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS, - RO_ATTR_PERMS) - -class CWUser(WorkflowableEntityType): - """define a CubicWeb user""" - __permissions__ = { - 'read': ('managers', 'users', ERQLExpression('X identity U')), - 'add': ('managers',), - 'delete': ('managers',), - 'update': ('managers', ERQLExpression('X identity U, NOT U in_group G, G name "guests"'),), - } - - login = String(required=True, unique=True, maxsize=64, - description=_('unique identifier used to connect to the application')) - upassword = Password(required=True) # password is a reserved word for mysql - firstname = String(maxsize=64) - surname = String(maxsize=64) - last_login_time = TZDatetime(description=_('last connection date')) - in_group = SubjectRelation('CWGroup', cardinality='+*', - constraints=[RQLConstraint('NOT O name "owners"')], - description=_('groups grant permissions to the user')) - - -class EmailAddress(EntityType): - """an electronic mail address associated to a short alias""" - __permissions__ = { - # application that wishes public email, or use it for something else - # than users (eg Company, Person), should explicitly change permissions - 'read': ('managers', ERQLExpression('U use_email X')), - 'add': ('managers', 'users',), - 'delete': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')), - 'update': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')), - } - - alias = String(fulltextindexed=True, maxsize=56) - address = String(required=True, fulltextindexed=True, - indexed=True, unique=True, maxsize=128) - prefered_form = SubjectRelation('EmailAddress', cardinality='?*', - description=_('when multiple addresses are equivalent \ -(such as 
python-projects@logilab.org and python-projects@lists.logilab.org), set this \ -to indicate which is the preferred form.')) - -class use_email(RelationType): - fulltext_container = 'subject' - - -class use_email_relation(RelationDefinition): - """user's email account""" - name = "use_email" - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', RRQLExpression('U has_update_permission S'),), - 'delete': ('managers', RRQLExpression('U has_update_permission S'),), - } - subject = "CWUser" - object = "EmailAddress" - cardinality = '*?' - composite = 'subject' - - -class primary_email(RelationDefinition): - """the prefered email""" - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', RRQLExpression('U has_update_permission S'),), - 'delete': ('managers', RRQLExpression('U has_update_permission S'),), - } - subject = "CWUser" - object = "EmailAddress" - cardinality = '??' - constraints= [RQLConstraint('S use_email O')] - - -class prefered_form(RelationType): - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - # XXX should have update __permissions__ on both subject and object, - # though by doing this we will probably have no way to add - # this relation in the web ui. The easiest way to acheive this - # is probably to be able to have "U has_update_permission O" as - # RQLConstraint of the relation definition, though this is not yet - # possible - 'add': ('managers', RRQLExpression('U has_update_permission S'),), - 'delete': ('managers', RRQLExpression('U has_update_permission S'),), - } - -class in_group(RelationType): - """core relation indicating a user's groups""" - __permissions__ = PUB_SYSTEM_REL_PERMS - -class owned_by(RelationType): - """core relation indicating owners of an entity. This relation - implicitly put the owner into the owners group for the entity - """ - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('S owned_by U'),), - 'delete': ('managers', RRQLExpression('S owned_by U'),), - } - # 0..n cardinality for entities created by internal session (no attached user) - # and to support later deletion of a user which has created some entities - cardinality = '**' - subject = '*' - object = 'CWUser' - -class created_by(RelationType): - """core relation indicating the original creator of an entity""" - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'delete': ('managers',), - } - # 0..1 cardinality for entities created by internal session (no attached user) - # and to support later deletion of a user which has created some entities - cardinality = '?*' - subject = '*' - object = 'CWUser' - - -class creation_date(RelationType): - """creation time of an entity""" - __permissions__ = PUB_SYSTEM_ATTR_PERMS - cardinality = '11' - subject = '*' - object = 'TZDatetime' - - -class modification_date(RelationType): - """latest modification time of an entity""" - __permissions__ = PUB_SYSTEM_ATTR_PERMS - cardinality = '11' - subject = '*' - object = 'TZDatetime' - - -class cwuri(RelationType): - """internal entity uri""" - __permissions__ = RO_ATTR_PERMS - cardinality = '11' - subject = '*' - object = 'String' - - -# XXX find a better relation name -class for_user(RelationType): - """link a property to the user which want this property customization. Unless - you're a site manager, this relation will be handled automatically. 
- """ - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'delete': ('managers',), - } - inlined = True - subject = 'CWProperty' - object = 'CWUser' - composite = 'object' - cardinality = '?*' - - -class ExternalUri(EntityType): - """a URI representing an object in external data store""" - uri = String(required=True, unique=True, maxsize=256, - description=_('the URI of the object')) - - -class same_as(RelationType): - """generic relation to specify that an external entity represent the same - object as a local one: - http://www.w3.org/TR/owl-ref/#sameAs-def - """ - #NOTE: You'll have to explicitly declare which entity types can have a - #same_as relation - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', 'users'), - 'delete': ('managers', 'owners'), - } - cardinality = '**' - symmetric = True - # NOTE: the 'object = ExternalUri' declaration will still be mandatory - # in the cube's schema. - object = 'ExternalUri' - - -class CWCache(EntityType): - """a simple cache entity characterized by a name and - a validity date. - - The target application is responsible for updating timestamp - when necessary to invalidate the cache (typically in hooks). - - Also, checkout the AppObject.get_cache() method. - """ - # XXX only handle by hooks, shouldn't be readable/editable at all through - # the ui and so no permissions should be granted, no? - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'update': ('managers', 'users',), # XXX - 'delete': ('managers',), - } - - name = String(required=True, unique=True, maxsize=128, - description=_('name of the cache')) - timestamp = TZDatetime(default='NOW') - - -class CWSource(EntityType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - name = String(required=True, unique=True, maxsize=128, - description=_('name of the source')) - type = String(required=True, maxsize=20, description=_('type of the source')) - config = String(description=_('source\'s configuration. One key=value per ' - 'line, authorized keys depending on the ' - 'source\'s type'), - __permissions__={ - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - }) - # put this here and not in a subclass even if it's only for some sources - # since having subclasses on generic relation (cw_source) double the number - # of rdef in the schema, and make ms planning harder since queries solutions - # may changes when sources are specified - url = String(description=_('URLs from which content will be imported. 
You can put one url per line')) - parser = String(description=_('parser to use to extract entities from content retrieved at given URLs.')) - latest_retrieval = TZDatetime(description=_('latest synchronization time')) - in_synchronization = TZDatetime(description=_('start timestamp of the currently in synchronization, or NULL when no synchronization in progress.')) - - -ENTITY_MANAGERS_PERMISSIONS = { - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } -RELATION_MANAGERS_PERMISSIONS = { - 'read': ('managers',), - 'add': ('managers',), - 'delete': ('managers',), - } - - -class CWSourceHostConfig(EntityType): - __permissions__ = ENTITY_MANAGERS_PERMISSIONS - __unique_together__ = [('match_host', 'cw_host_config_of')] - match_host = String(required=True, maxsize=128, - description=_('regexp matching host(s) to which this config applies')) - config = String(required=True, - description=_('Source\'s configuration for a particular host. ' - 'One key=value per line, authorized keys ' - 'depending on the source\'s type, overriding ' - 'values defined on the source.'), - __permissions__={ - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - }) - - -class cw_host_config_of(RelationDefinition): - __permissions__ = RELATION_MANAGERS_PERMISSIONS - subject = 'CWSourceHostConfig' - object = 'CWSource' - cardinality = '1*' - composite = 'object' - inlined = True - -class cw_source(RelationDefinition): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'delete': ('managers',), - } - subject = '*' - object = 'CWSource' - cardinality = '1*' - composite = 'object' - - -class CWDataImport(EntityType): - __permissions__ = ENTITY_MANAGERS_PERMISSIONS - start_timestamp = TZDatetime() - end_timestamp = TZDatetime() - log = String() - status = String(required=True, internationalizable=True, indexed=True, - default='in progress', - vocabulary=[_('in progress'), _('success'), _('failed')]) - -class cw_import_of(RelationDefinition): - __permissions__ = RELATION_MANAGERS_PERMISSIONS - subject = 'CWDataImport' - object = 'CWSource' - cardinality = '1*' - composite = 'object' - - -class CWSourceSchemaConfig(EntityType): - __permissions__ = ENTITY_MANAGERS_PERMISSIONS - cw_for_source = SubjectRelation( - 'CWSource', inlined=True, cardinality='1*', composite='object', - __permissions__=RELATION_MANAGERS_PERMISSIONS) - options = String(description=_('allowed options depends on the source type')) - - -class rtype_cw_schema(RelationDefinition): - __permissions__ = RELATION_MANAGERS_PERMISSIONS - name = 'cw_schema' - subject = 'CWSourceSchemaConfig' - object = ('CWEType', 'CWRType') - inlined = True - cardinality = '1*' - composite = 'object' - constraints = [RQLConstraint('NOT O final TRUE')] - -class rdef_cw_schema(RelationDefinition): - __permissions__ = RELATION_MANAGERS_PERMISSIONS - name = 'cw_schema' - subject = 'CWSourceSchemaConfig' - object = 'CWRelation' - inlined = True - cardinality = '1*' - composite = 'object' - -# "abtract" relation types, no definition in cubicweb itself ################### - -class identical_to(RelationType): - """identical to""" - symmetric = True - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - # XXX should have update __permissions__ on both subject and object, - # though by doing this we will probably have no way to add - # this relation in the web ui. 
The easiest way to acheive this - # is probably to be able to have "U has_update_permission O" as - # RQLConstraint of the relation definition, though this is not yet - # possible - 'add': ('managers', RRQLExpression('U has_update_permission S'),), - 'delete': ('managers', RRQLExpression('U has_update_permission S'),), - } - -class see_also(RelationType): - """generic relation to link one entity to another""" - symmetric = True - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', RRQLExpression('U has_update_permission S'),), - 'delete': ('managers', RRQLExpression('U has_update_permission S'),), - } diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/bootstrap.py --- a/schemas/bootstrap.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,357 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""core CubicWeb schema necessary for bootstrapping the actual instance's schema -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, Bytes, - SubjectRelation, RichString, String, Boolean, Int) -from cubicweb.schema import ( - RQLConstraint, - PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS - ) - -# not restricted since as "is" is handled as other relations, guests need -# access to this -class CWEType(EntityType): - """define an entity type, used to build the instance schema""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - name = String(required=True, indexed=True, internationalizable=True, - unique=True, maxsize=64) - description = RichString(internationalizable=True, - description=_('semantic description of this entity type')) - # necessary to filter using RQL - final = Boolean(default=False, description=_('automatic')) - - -class CWRType(EntityType): - """define a relation type, used to build the instance schema""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - name = String(required=True, indexed=True, internationalizable=True, - unique=True, maxsize=64) - description = RichString(internationalizable=True, - description=_('semantic description of this relation type')) - symmetric = Boolean(description=_('is this relation equivalent in both direction ?')) - inlined = Boolean(description=_('is this relation physically inlined? 
you should know what you\'re doing if you are changing this!')) - fulltext_container = String(description=_('if full text content of subject/object entity ' - 'should be added to other side entity (the container).'), - vocabulary=('', _('subject'), _('object')), - maxsize=8, default=None) - final = Boolean(description=_('automatic')) - - -class CWComputedRType(EntityType): - """define a virtual relation type, used to build the instance schema""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - name = String(required=True, indexed=True, internationalizable=True, - unique=True, maxsize=64) - description = RichString(internationalizable=True, - description=_('semantic description of this relation type')) - rule = String(required=True) - - -class CWAttribute(EntityType): - """define a final relation: link a final relation type from a non final - entity to a final entity type. - - used to build the instance schema - """ - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - relation_type = SubjectRelation('CWRType', cardinality='1*', - constraints=[RQLConstraint('O final TRUE')], - composite='object') - from_entity = SubjectRelation('CWEType', cardinality='1*', - constraints=[RQLConstraint('O final FALSE')], - composite='object') - to_entity = SubjectRelation('CWEType', cardinality='1*', - constraints=[RQLConstraint('O final TRUE')], - composite='object') - constrained_by = SubjectRelation('CWConstraint', cardinality='*1', composite='subject') - - cardinality = String(maxsize=2, internationalizable=True, - vocabulary=[_('?1'), _('11')], - description=_('subject/object cardinality')) - ordernum = Int(description=('control subject entity\'s relations order'), default=0) - - formula = String(maxsize=2048) - indexed = Boolean(description=_('create an index for quick search on this attribute')) - fulltextindexed = Boolean(description=_('index this attribute\'s value in the plain text index')) - internationalizable = Boolean(description=_('is this attribute\'s value translatable')) - defaultval = Bytes(description=_('default value as gziped pickled python object')) - extra_props = Bytes(description=_('additional type specific properties')) - - description = RichString(internationalizable=True, - description=_('semantic description of this attribute')) - - -CARDINALITY_VOCAB = [_('?*'), _('1*'), _('+*'), _('**'), - _('?+'), _('1+'), _('++'), _('*+'), - _('?1'), _('11'), _('+1'), _('*1'), - _('??'), _('1?'), _('+?'), _('*?')] - -class CWRelation(EntityType): - """define a non final relation: link a non final relation type from a non - final entity to a non final entity type. - - used to build the instance schema - """ - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - relation_type = SubjectRelation('CWRType', cardinality='1*', - constraints=[RQLConstraint('O final FALSE')], - composite='object') - from_entity = SubjectRelation('CWEType', cardinality='1*', - constraints=[RQLConstraint('O final FALSE')], - composite='object') - to_entity = SubjectRelation('CWEType', cardinality='1*', - constraints=[RQLConstraint('O final FALSE')], - composite='object') - constrained_by = SubjectRelation('CWConstraint', cardinality='*1', composite='subject') - - cardinality = String(maxsize=2, internationalizable=True, - vocabulary=CARDINALITY_VOCAB, - description=_('subject/object cardinality')) - ordernum = Int(description=_('control subject entity\'s relations order'), - default=0) - composite = String(description=_('is the subject/object entity of the relation ' - 'composed of the other ? 
This implies that when ' - 'the composite is deleted, composants are also ' - 'deleted.'), - vocabulary=('', _('subject'), _('object')), - maxsize=8, default=None) - - description = RichString(internationalizable=True, - description=_('semantic description of this relation')) - - -# not restricted since it has to be read when checking allowed transitions -class RQLExpression(EntityType): - """define a rql expression used to define permissions""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - exprtype = String(required=True, vocabulary=['ERQLExpression', 'RRQLExpression']) - mainvars = String(maxsize=8, - description=_('name of the main variables which should be ' - 'used in the selection if necessary (comma ' - 'separated)')) - expression = String(required=True, - description=_('restriction part of a rql query. ' - 'For entity rql expression, X and U are ' - 'predefined respectivly to the current object and to ' - 'the request user. For relation rql expression, ' - 'S, O and U are predefined respectivly to the current ' - 'relation\'subject, object and to ' - 'the request user. ')) - - -class CWConstraint(EntityType): - """define a schema constraint""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - cstrtype = SubjectRelation('CWConstraintType', cardinality='1*') - value = String(description=_('depends on the constraint type')) - - -class CWUniqueTogetherConstraint(EntityType): - """defines a sql-level multicolumn unique index""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - name = String(required=True, unique=True, maxsize=64) - constraint_of = SubjectRelation('CWEType', cardinality='1*', composite='object', - inlined=True) - relations = SubjectRelation('CWRType', cardinality='+*', - constraints=[RQLConstraint( - 'S constraint_of ET, RDEF relation_type O, RDEF from_entity ET, ' - 'O final TRUE OR (O final FALSE AND O inlined TRUE)')]) - - -class CWConstraintType(EntityType): - """define a schema constraint type""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - name = String(required=True, indexed=True, internationalizable=True, - unique=True, maxsize=64) - - -# not restricted since it has to be read when checking allowed transitions -class CWGroup(EntityType): - """define a CubicWeb users group""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - name = String(required=True, indexed=True, internationalizable=True, - unique=True, maxsize=64) - - -class CWProperty(EntityType): - """used for cubicweb configuration. Once a property has been created you - can't change the key. - """ - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', 'users',), - 'update': ('managers', 'owners',), - 'delete': ('managers', 'owners',), - } - # key is a reserved word for mysql - pkey = String(required=True, internationalizable=True, maxsize=256, - description=_('defines what\'s the property is applied for. 
' - 'You must select this first to be able to set ' - 'value')) - value = String(internationalizable=True, maxsize=256) - -class relation_type(RelationType): - """link a relation definition to its relation type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class from_entity(RelationType): - """link a relation definition to its subject entity type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class to_entity(RelationType): - """link a relation definition to its object entity type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class constrained_by(RelationType): - """constraints applying on this relation""" - __permissions__ = PUB_SYSTEM_REL_PERMS - -class cstrtype(RelationType): - """constraint factory""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - - -class read_permission_cwgroup(RelationDefinition): - """groups allowed to read entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'read_permission' - subject = ('CWEType', 'CWAttribute', 'CWRelation', 'CWComputedRType') - object = 'CWGroup' - cardinality = '**' - -class add_permission_cwgroup(RelationDefinition): - """groups allowed to add entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'add_permission' - subject = ('CWEType', 'CWRelation', 'CWAttribute') - object = 'CWGroup' - cardinality = '**' - -class delete_permission_cwgroup(RelationDefinition): - """groups allowed to delete entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'delete_permission' - subject = ('CWEType', 'CWRelation') - object = 'CWGroup' - cardinality = '**' - -class update_permission_cwgroup(RelationDefinition): - """groups allowed to update entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'update_permission' - subject = ('CWEType', 'CWAttribute') - object = 'CWGroup' - cardinality = '**' - -class read_permission_rqlexpr(RelationDefinition): - """rql expression allowing to read entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'read_permission' - subject = ('CWEType', 'CWAttribute', 'CWRelation', 'CWComputedRType') - object = 'RQLExpression' - cardinality = '*?' - composite = 'subject' - -class add_permission_rqlexpr(RelationDefinition): - """rql expression allowing to add entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'add_permission' - subject = ('CWEType', 'CWRelation', 'CWAttribute') - object = 'RQLExpression' - cardinality = '*?' - composite = 'subject' - -class delete_permission_rqlexpr(RelationDefinition): - """rql expression allowing to delete entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'delete_permission' - subject = ('CWEType', 'CWRelation') - object = 'RQLExpression' - cardinality = '*?' - composite = 'subject' - -class update_permission_rqlexpr(RelationDefinition): - """rql expression allowing to update entities/relations of this type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - name = 'update_permission' - subject = ('CWEType', 'CWAttribute') - object = 'RQLExpression' - cardinality = '*?' 
- composite = 'subject' - - -class is_(RelationType): - """core relation indicating the type of an entity - """ - name = 'is' - # don't explicitly set composite here, this is handled anyway - #composite = 'object' - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': (), - 'delete': (), - } - cardinality = '1*' - subject = '*' - object = 'CWEType' - -class is_instance_of(RelationType): - """core relation indicating the types (including specialized types) - of an entity - """ - # don't explicitly set composite here, this is handled anyway - #composite = 'object' - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': (), - 'delete': (), - } - cardinality = '+*' - subject = '*' - object = 'CWEType' - -class specializes(RelationType): - name = 'specializes' - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'delete': ('managers',), - } - cardinality = '?*' - subject = 'CWEType' - object = 'CWEType' - -def post_build_callback(schema): - """set attributes permissions for schema/workflow entities""" - from cubicweb.schema import SCHEMA_TYPES, WORKFLOW_TYPES, META_RTYPES - wftypes = WORKFLOW_TYPES - set(('TrInfo',)) - for eschema in schema.entities(): - if eschema in SCHEMA_TYPES or eschema in wftypes: - for rschema in eschema.subject_relations(): - if rschema.final and not rschema in META_RTYPES: - rdef = eschema.rdef(rschema) - rdef.permissions = PUB_SYSTEM_ATTR_PERMS diff -r 058bb3dc685f -r 0b59724cb3f2 schemas/workflow.py --- a/schemas/workflow.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,283 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""workflow related schemas - -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, - SubjectRelation, - RichString, String, Int) -from cubicweb.schema import RQLConstraint -from cubicweb.schemas import (PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, - RO_REL_PERMS) - -class Workflow(EntityType): - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - - name = String(required=True, indexed=True, internationalizable=True, - maxsize=256) - description = RichString(default_format='text/rest', - description=_('semantic description of this workflow')) - - workflow_of = SubjectRelation('CWEType', cardinality='+*', - description=_('entity types which may use this workflow'), - constraints=[RQLConstraint('O final FALSE')]) - - initial_state = SubjectRelation('State', cardinality='?*', - constraints=[RQLConstraint('O state_of S', - msg=_('state doesn\'t belong to this workflow'))], - description=_('initial state for this workflow')) - - -class default_workflow(RelationType): - """default workflow for an entity type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - - subject = 'CWEType' - object = 'Workflow' - cardinality = '?*' - constraints = [RQLConstraint('S final FALSE, O workflow_of S', - msg=_('workflow isn\'t a workflow for this type'))] - - -class State(EntityType): - """used to associate simple states to an entity type and/or to define - workflows - """ - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - __unique_together__ = [('name', 'state_of')] - name = String(required=True, indexed=True, internationalizable=True, maxsize=256) - description = RichString(default_format='text/rest', - description=_('semantic description of this state')) - - # XXX should be on BaseTransition w/ AND/OR selectors when we will - # implements #345274 - allowed_transition = SubjectRelation('BaseTransition', cardinality='**', - constraints=[RQLConstraint('S state_of WF, O transition_of WF', - msg=_('state and transition don\'t belong the the same workflow'))], - description=_('allowed transitions from this state')) - state_of = SubjectRelation('Workflow', cardinality='1*', composite='object', inlined=True, - description=_('workflow to which this state belongs')) - - -class BaseTransition(EntityType): - """abstract base class for transitions""" - __permissions__ = PUB_SYSTEM_ENTITY_PERMS - __unique_together__ = [('name', 'transition_of')] - - name = String(required=True, indexed=True, internationalizable=True, maxsize=256) - type = String(vocabulary=(_('normal'), _('auto')), default='normal') - description = RichString(description=_('semantic description of this transition')) - - transition_of = SubjectRelation('Workflow', cardinality='1*', composite='object', inlined=True, - description=_('workflow to which this transition belongs')) - - -class require_group(RelationDefinition): - """group in which a user should be to be allowed to pass this transition""" - __permissions__ = PUB_SYSTEM_REL_PERMS - subject = 'BaseTransition' - object = 'CWGroup' - - -class condition(RelationDefinition): - """a RQL expression which should return some results, else the transition - won't be available. - - This query may use X and U variables that will respectivly represents the - current entity and the current user. - """ - __permissions__ = PUB_SYSTEM_REL_PERMS - subject = 'BaseTransition' - object = 'RQLExpression' - cardinality = '*?' 
- composite = 'subject' - - -class Transition(BaseTransition): - """use to define a transition from one or multiple states to a destination - states in workflow's definitions. Transition without destination state will - go back to the state from which we arrived to the current state. - """ - __specializes_schema__ = True - - destination_state = SubjectRelation( - 'State', cardinality='?*', - constraints=[RQLConstraint('S transition_of WF, O state_of WF', - msg=_('state and transition don\'t belong the the same workflow'))], - description=_('destination state for this transition')) - - -class WorkflowTransition(BaseTransition): - """special transition allowing to go through a sub-workflow""" - __specializes_schema__ = True - - subworkflow = SubjectRelation('Workflow', cardinality='1*', - constraints=[RQLConstraint('S transition_of WF, WF workflow_of ET, O workflow_of ET', - msg=_('subworkflow isn\'t a workflow for the same types as the transition\'s workflow'))] - ) - # XXX use exit_of and inline it - subworkflow_exit = SubjectRelation('SubWorkflowExitPoint', cardinality='*1', - composite='subject') - - -class SubWorkflowExitPoint(EntityType): - """define how we get out from a sub-workflow""" - subworkflow_state = SubjectRelation( - 'State', cardinality='1*', - constraints=[RQLConstraint('T subworkflow_exit S, T subworkflow WF, O state_of WF', - msg=_('exit state must be a subworkflow state'))], - description=_('subworkflow state')) - destination_state = SubjectRelation( - 'State', cardinality='?*', - constraints=[RQLConstraint('T subworkflow_exit S, T transition_of WF, O state_of WF', - msg=_('destination state must be in the same workflow as our parent transition'))], - description=_('destination state. No destination state means that transition ' - 'should go back to the state from which we\'ve entered the ' - 'subworkflow.')) - - -class TrInfo(EntityType): - """workflow history item""" - # 'add' security actually done by hooks - __permissions__ = { - 'read': ('managers', 'users', 'guests',), # XXX U has_read_permission O ? - 'add': ('managers', 'users', 'guests',), - 'delete': (), # XXX should we allow managers to delete TrInfo? - 'update': ('managers', 'owners',), - } - # The unique_together constraint ensures that 2 repositories - # sharing the db won't be able to fire a transition simultaneously - # on the same entity tr_count is filled in the FireTransitionHook - # to the number of TrInfo attached to the entity on which we - # attempt to fire a transition. In other word, it contains the - # rank of the TrInfo for that entity, and the constraint says we - # cannot have 2 TrInfo with the same rank. 
- __unique_together__ = [('tr_count', 'wf_info_for')] - from_state = SubjectRelation('State', cardinality='1*', inlined=True) - to_state = SubjectRelation('State', cardinality='1*', inlined=True) - # make by_transition optional because we want to allow managers to set - # entity into an arbitrary state without having to respect wf transition - by_transition = SubjectRelation('BaseTransition', cardinality='?*') - comment = RichString(fulltextindexed=True, default_format='text/plain') - tr_count = Int(description='autocomputed attribute used to ensure transition coherency') - # get actor and date time using owned_by and creation_date - -class from_state(RelationType): - __permissions__ = RO_REL_PERMS.copy() - inlined = True - -class to_state(RelationType): - __permissions__ = RO_REL_PERMS.copy() - inlined = True - -class by_transition(RelationType): - # 'add' security actually done by hooks - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', 'users', 'guests',), - 'delete': (), - } - inlined = True - - -class workflow_of(RelationType): - """link a workflow to one or more entity type""" - __permissions__ = PUB_SYSTEM_REL_PERMS - -class state_of(RelationType): - """link a state to one or more workflow""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class transition_of(RelationType): - """link a transition to one or more workflow""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class destination_state(RelationType): - """destination state of a transition""" - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class allowed_transition(RelationType): - """allowed transitions from this state""" - __permissions__ = PUB_SYSTEM_REL_PERMS - -class initial_state(RelationType): - """indicate which state should be used by default when an entity using - states is created - """ - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - - -class subworkflow(RelationType): - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - -class exit_point(RelationType): - __permissions__ = PUB_SYSTEM_REL_PERMS - -class subworkflow_state(RelationType): - __permissions__ = PUB_SYSTEM_REL_PERMS - inlined = True - - -# "abstract" relations, set by WorkflowableEntityType ########################## - -class custom_workflow(RelationType): - """allow to set a specific workflow for an entity""" - __permissions__ = PUB_SYSTEM_REL_PERMS - - cardinality = '?*' - constraints = [RQLConstraint('S is ET, O workflow_of ET', - msg=_('workflow isn\'t a workflow for this type'))] - object = 'Workflow' - - -class wf_info_for(RelationType): - """link a transition information to its object""" - # 'add' security actually done by hooks - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', 'users', 'guests',), - 'delete': (), - } - inlined = True - - cardinality = '1*' - composite = 'object' - fulltext_container = composite - subject = 'TrInfo' - - -class in_state(RelationType): - """indicate the current state of an entity""" - __permissions__ = RO_REL_PERMS - - # not inlined intentionnally since when using ldap sources, user'state - # has to be stored outside the CWUser table - inlined = False - - cardinality = '1*' - constraints = [RQLConstraint('S is ET, O state_of WF, WF workflow_of ET', - msg=_('state doesn\'t apply to this entity\'s type'))] - object = 'State' diff -r 058bb3dc685f -r 0b59724cb3f2 selectors.py --- a/selectors.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,107 +0,0 @@ -# 
copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from warnings import warn - -from six import string_types - -from logilab.common.deprecation import deprecated, class_renamed - -from cubicweb.predicates import * - - -warn('[3.15] cubicweb.selectors renamed into cubicweb.predicates', - DeprecationWarning, stacklevel=2) - -# XXX pre 3.15 bw compat -from cubicweb.appobject import (objectify_selector, traced_selection, - lltrace, yes) - -ExpectedValueSelector = class_renamed('ExpectedValueSelector', - ExpectedValuePredicate) -EClassSelector = class_renamed('EClassSelector', EClassPredicate) -EntitySelector = class_renamed('EntitySelector', EntityPredicate) - - -class on_transition(is_in_state): - """Return 1 if entity is in one of the transitions given as argument list - - Especially useful to match passed transition to enable notifications when - your workflow allows several transition to the same states. - - Note that if workflow `change_state` adapter method is used, this predicate - will not be triggered. - - You should use this instead of your own :class:`score_entity` predicate to - avoid some gotchas: - - * possible views gives a fake entity with no state - * you must use the latest tr info thru the workflow adapter for repository - side checking of the current state - - In debug mode, this predicate can raise: - :raises: :exc:`ValueError` for unknown transition names - (etype workflow only not checked in custom workflow) - - :rtype: int - """ - @deprecated('[3.12] on_transition is deprecated, you should rather use ' - 'on_fire_transition(etype, trname)') - def __init__(self, *expected): - super(on_transition, self).__init__(*expected) - - def _score(self, adapted): - trinfo = adapted.latest_trinfo() - if trinfo and trinfo.by_transition: - return trinfo.by_transition[0].name in self.expected - - def _validate(self, adapted): - wf = adapted.current_workflow - valid = [n.name for n in wf.reverse_transition_of] - unknown = sorted(self.expected.difference(valid)) - if unknown: - raise ValueError("%s: unknown transition(s): %s" - % (wf.name, ",".join(unknown))) - - -entity_implements = class_renamed('entity_implements', is_instance) - -class _but_etype(EntityPredicate): - """accept if the given entity types are not found in the result set. - - See `EntityPredicate` documentation for behaviour when row is not specified. 
- - :param *etypes: entity types (`string_types`) which should be refused - """ - def __init__(self, *etypes): - super(_but_etype, self).__init__() - self.but_etypes = etypes - - def score(self, req, rset, row, col): - if rset.description[row][col] in self.but_etypes: - return 0 - return 1 - -but_etype = class_renamed('but_etype', _but_etype, 'use ~is_instance(*etypes) instead') - -# XXX deprecated the one_* variants of predicates below w/ multi_xxx(nb=1)? -# take care at the implementation though (looking for the 'row' argument's -# value) -two_lines_rset = class_renamed('two_lines_rset', multi_lines_rset) -two_cols_rset = class_renamed('two_cols_rset', multi_columns_rset) -two_etypes_rset = class_renamed('two_etypes_rset', multi_etypes_rset) diff -r 058bb3dc685f -r 0b59724cb3f2 server/__init__.py --- a/server/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,364 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Server subcube of cubicweb : defines objects used only on the server -(repository) side - -The server module contains functions to initialize a new repository. -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -from os.path import join, exists -from glob import glob -from contextlib import contextmanager - -from six import text_type, string_types -from six.moves import filter - -from logilab.common.modutils import LazyObject -from logilab.common.textutils import splitstrip -from logilab.common.registry import yes -from logilab import database - -from yams import BASE_GROUPS - -from cubicweb import CW_SOFTWARE_ROOT -from cubicweb.appobject import AppObject - -class ShuttingDown(BaseException): - """raised when trying to access some resources while the repository is - shutting down. Inherit from BaseException so that `except Exception` won't - catch it. - """ - -# server-side services ######################################################### - -class Service(AppObject): - """Base class for services. - - A service is a selectable object that performs an action server-side. - Use :class:`cubicweb.dbapi.Connection.call_service` to call them from - the web-side. - - When inheriting this class, do not forget to define at least the __regid__ - attribute (and probably __select__ too). - """ - __registry__ = 'services' - __select__ = yes() - - def call(self, **kwargs): - raise NotImplementedError - - -# server-side debugging ######################################################## - -# server debugging flags. They may be combined using binary operators. 
- -#:no debug information -DBG_NONE = 0 #: no debug information -#: rql execution information -DBG_RQL = 1 -#: executed sql -DBG_SQL = 2 -#: repository events -DBG_REPO = 4 -#: multi-sources -DBG_MS = 8 -#: hooks -DBG_HOOKS = 16 -#: operations -DBG_OPS = 32 -#: security -DBG_SEC = 64 -#: more verbosity -DBG_MORE = 128 -#: all level enabled -DBG_ALL = DBG_RQL + DBG_SQL + DBG_REPO + DBG_MS + DBG_HOOKS + DBG_OPS + DBG_SEC + DBG_MORE - -_SECURITY_ITEMS = [] -_SECURITY_CAPS = ['read', 'add', 'update', 'delete', 'transition'] - -#: current debug mode -DEBUG = 0 - -@contextmanager -def tunesecurity(items=(), capabilities=()): - """Context manager to use in conjunction with DBG_SEC. - - This allows some tuning of: - * the monitored capabilities ('read', 'add', ....) - * the object being checked by the security checkers - - When no item is given, all of them will be watched. - By default all capabilities are monitored, unless specified. - - Example use:: - - from cubicweb.server import debugged, DBG_SEC, tunesecurity - with debugged(DBG_SEC): - with tunesecurity(items=('Elephant', 'trumps'), - capabilities=('update', 'delete')): - babar.cw_set(trumps=celeste) - flore.cw_delete() - - ==> - - check_perm: 'update' 'relation Elephant.trumps.Elephant' - [(ERQLExpression(Any X WHERE U has_update_permission X, X eid %(x)s, U eid %(u)s), - {'eid': 2167}, True)] - check_perm: 'delete' 'Elephant' - [(ERQLExpression(Any X WHERE U has_delete_permission X, X eid %(x)s, U eid %(u)s), - {'eid': 2168}, True)] - - """ - olditems = _SECURITY_ITEMS[:] - _SECURITY_ITEMS.extend(list(items)) - oldactions = _SECURITY_CAPS[:] - _SECURITY_CAPS[:] = capabilities - yield - _SECURITY_ITEMS[:] = olditems - _SECURITY_CAPS[:] = oldactions - -def set_debug(debugmode): - """change the repository debugging mode""" - global DEBUG - if not debugmode: - DEBUG = 0 - return - if isinstance(debugmode, string_types): - for mode in splitstrip(debugmode, sep='|'): - DEBUG |= globals()[mode] - else: - DEBUG |= debugmode - -class debugged(object): - """Context manager and decorator to help debug the repository. - - It can be used either as a context manager: - - >>> with debugged('DBG_RQL | DBG_REPO'): - ... # some code in which you want to debug repository activity, - ... # seing information about RQL being executed an repository events. - - or as a function decorator: - - >>> @debugged('DBG_RQL | DBG_REPO') - ... def some_function(): - ... # some code in which you want to debug repository activity, - ... # seing information about RQL being executed an repository events - - The debug mode will be reset to its original value when leaving the "with" - block or the decorated function. 
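A minimal sketch combining the flags above with `set_debug` and `debugged`, assuming the names are imported from `cubicweb.server` in an interactive shell or a maintenance script::

    from cubicweb.server import set_debug, debugged, DBG_RQL, DBG_SQL

    set_debug(DBG_RQL | DBG_SQL)    # numeric form: flags OR-ed together
    set_debug('DBG_RQL|DBG_SQL')    # string form: names split on '|'
    set_debug(0)                    # disable debugging again

    @debugged(DBG_RQL)
    def import_data(cnx):
        # RQL executed in here is traced, then the previous mode is restored
        ...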
- """ - def __init__(self, debugmode): - self.debugmode = debugmode - self._clevel = None - - def __enter__(self): - """enter with block""" - self._clevel = DEBUG - set_debug(self.debugmode) - - def __exit__(self, exctype, exc, traceback): - """leave with block""" - set_debug(self._clevel) - return traceback is None - - def __call__(self, func): - """decorate function""" - def wrapped(*args, **kwargs): - _clevel = DEBUG - set_debug(self.debugmode) - try: - return func(*args, **kwargs) - finally: - set_debug(self._clevel) - return wrapped - -# database initialization ###################################################### - -def create_user(session, login, pwd, *groups): - # monkey patch this method if you want to customize admin/anon creation - # (that maybe necessary if you change CWUser's schema) - user = session.create_entity('CWUser', login=login, upassword=pwd) - for group in groups: - session.execute('SET U in_group G WHERE U eid %(u)s, G name %(group)s', - {'u': user.eid, 'group': text_type(group)}) - return user - -def init_repository(config, interactive=True, drop=False, vreg=None, - init_config=None): - """initialise a repository database by creating tables add filling them - with the minimal set of entities (ie at least the schema, base groups and - a initial user) - """ - from cubicweb.repoapi import get_repository, connect - from cubicweb.server.repository import Repository - from cubicweb.server.utils import manager_userpasswd - from cubicweb.server.sqlutils import sqlexec, sqlschema, sql_drop_all_user_tables - from cubicweb.server.sqlutils import _SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION as drop_filter - # configuration to avoid db schema loading and user'state checking - # on connection - config.creating = True - config.consider_user_state = False - config.cubicweb_appobject_path = set(('hooks', 'entities')) - config.cube_appobject_path = set(('hooks', 'entities')) - # only enable the system source at initialization time - repo = Repository(config, vreg=vreg) - if init_config is not None: - # further config initialization once it has been bootstrapped - init_config(config) - schema = repo.schema - sourcescfg = config.read_sources_file() - source = sourcescfg['system'] - driver = source['db-driver'] - with repo.internal_cnx() as cnx: - sqlcnx = cnx.cnxset.cnx - sqlcursor = cnx.cnxset.cu - execute = sqlcursor.execute - if drop: - helper = database.get_db_helper(driver) - dropsql = sql_drop_all_user_tables(helper, sqlcursor) - # We may fail dropping some tables because of table dependencies, in a first pass. - # So, we try a second drop sequence to drop remaining tables if needed. - # Note that 2 passes is an arbitrary choice as it seems enough for our usecases - # (looping may induce infinite recursion when user have no rights for example). - # Here we try to keep code simple and backend independent. That's why we don't try to - # distinguish remaining tables (missing privileges, dependencies, ...). 
- failed = sqlexec(dropsql, execute, cnx=sqlcnx, - pbtitle='-> dropping tables (first pass)') - if failed: - failed = sqlexec(failed, execute, cnx=sqlcnx, - pbtitle='-> dropping tables (second pass)') - remainings = list(filter(drop_filter, helper.list_tables(sqlcursor))) - assert not remainings, 'Remaining tables: %s' % ', '.join(remainings) - handler = config.migration_handler(schema, interactive=False, repo=repo, cnx=cnx) - # install additional driver specific sql files - handler.cmd_install_custom_sql_scripts() - for cube in reversed(config.cubes()): - handler.cmd_install_custom_sql_scripts(cube) - _title = '-> creating tables ' - print(_title, end=' ') - # schema entities and relations tables - # can't skip entities table even if system source doesn't support them, - # they are used sometimes by generated sql. Keeping them empty is much - # simpler than fixing this... - schemasql = sqlschema(schema, driver) - #skip_entities=[str(e) for e in schema.entities() - # if not repo.system_source.support_entity(str(e))]) - failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;') - if failed: - print('The following SQL statements failed. You should check your schema.') - print(failed) - raise Exception('execution of the sql schema failed, you should check your schema') - sqlcursor.close() - sqlcnx.commit() - with repo.internal_cnx() as cnx: - # insert entity representing the system source - ssource = cnx.create_entity('CWSource', type=u'native', name=u'system') - repo.system_source.eid = ssource.eid - cnx.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid}) - # insert base groups and default admin - print('-> inserting default user and default groups.') - try: - login = text_type(sourcescfg['admin']['login']) - pwd = sourcescfg['admin']['password'] - except KeyError: - if interactive: - msg = 'enter login and password of the initial manager account' - login, pwd = manager_userpasswd(msg=msg, confirm=True) - else: - login, pwd = text_type(source['db-user']), source['db-password'] - # sort for eid predicatability as expected in some server tests - for group in sorted(BASE_GROUPS): - cnx.create_entity('CWGroup', name=text_type(group)) - admin = create_user(cnx, login, pwd, u'managers') - cnx.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s', - {'u': admin.eid}) - cnx.commit() - repo.shutdown() - # re-login using the admin user - config._cubes = None # avoid assertion error - repo = get_repository(config=config) - with connect(repo, login, password=pwd) as cnx: - with cnx.security_enabled(False, False): - repo.system_source.eid = ssource.eid # redo this manually - handler = config.migration_handler(schema, interactive=False, - cnx=cnx, repo=repo) - # serialize the schema - initialize_schema(config, schema, handler) - # yoo ! - cnx.commit() - repo.system_source.init_creating() - cnx.commit() - repo.shutdown() - # restore initial configuration - config.creating = False - config.consider_user_state = True - # (drop instance attribute to get back to class attribute) - del config.cubicweb_appobject_path - del config.cube_appobject_path - print('-> database for instance %s initialized.' % config.appid) - - -def initialize_schema(config, schema, mhandler, event='create'): - from cubicweb.server.schemaserial import serialize_schema - cnx = mhandler.cnx - cubes = config.cubes() - # deactivate every hooks but those responsible to set metadata - # so, NO INTEGRITY CHECKS are done, to have quicker db creation. 
- # Active integrity is kept else we may pb such as two default - # workflows for one entity type. - with cnx.deny_all_hooks_but('metadata', 'activeintegrity'): - # execute cubicweb's pre script - mhandler.cmd_exec_event_script('pre%s' % event) - # execute cubes pre script if any - for cube in reversed(cubes): - mhandler.cmd_exec_event_script('pre%s' % event, cube) - # execute instance's pre script (useful in tests) - mhandler.cmd_exec_event_script('pre%s' % event, apphome=True) - # enter instance'schema into the database - serialize_schema(cnx, schema) - cnx.commit() - # execute cubicweb's post script - mhandler.cmd_exec_event_script('post%s' % event) - # execute cubes'post script if any - for cube in reversed(cubes): - mhandler.cmd_exec_event_script('post%s' % event, cube) - # execute instance's post script (useful in tests) - mhandler.cmd_exec_event_script('post%s' % event, apphome=True) - - -# sqlite'stored procedures have to be registered at connection opening time -from logilab.database import SQL_CONNECT_HOOKS - -# add to this set relations which should have their add security checking done -# *BEFORE* adding the actual relation (done after by default) -BEFORE_ADD_RELATIONS = set(('owned_by',)) - -# add to this set relations which should have their add security checking done -# *at COMMIT TIME* (done after by default) -ON_COMMIT_ADD_RELATIONS = set(()) - -# available sources registry -SOURCE_TYPES = {'native': LazyObject('cubicweb.server.sources.native', 'NativeSQLSource'), - 'datafeed': LazyObject('cubicweb.server.sources.datafeed', 'DataFeedSource'), - 'ldapfeed': LazyObject('cubicweb.server.sources.ldapfeed', 'LDAPFeedSource'), - } diff -r 058bb3dc685f -r 0b59724cb3f2 server/checkintegrity.py --- a/server/checkintegrity.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,410 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Integrity checking tool for instances: - -* integrity of a CubicWeb repository. Hum actually only the system database is - checked. 
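The checkers in this module are driven by the `check()` entry point defined at the end of the file, typically invoked via the `cubicweb-ctl db-check` command. A minimal sketch of calling it directly, assuming a repository `repo` and an open internal connection `cnx` obtained elsewhere::

    from cubicweb.server.checkintegrity import check

    # diagnostic run: report inconsistencies without fixing anything
    check(repo, cnx,
          checks=('entities', 'relations', 'mandatory_relations',
                  'mandatory_attributes', 'metadata'),
          reindex=False, fix=False)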
-""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -from datetime import datetime - -from logilab.common.shellutils import ProgressBar - -from cubicweb.schema import PURE_VIRTUAL_RTYPES, VIRTUAL_RTYPES, UNIQUE_CONSTRAINTS -from cubicweb.server.sqlutils import SQL_PREFIX - -def notify_fixed(fix): - if fix: - sys.stderr.write(' [FIXED]') - sys.stderr.write('\n') - -def has_eid(cnx, sqlcursor, eid, eids): - """return true if the eid is a valid eid""" - if eid in eids: - return eids[eid] - sqlcursor.execute('SELECT type FROM entities WHERE eid=%s' % eid) - try: - etype = sqlcursor.fetchone()[0] - except Exception: - eids[eid] = False - return False - if etype not in cnx.vreg.schema: - eids[eid] = False - return False - sqlcursor.execute('SELECT * FROM %s%s WHERE %seid=%s' % (SQL_PREFIX, etype, - SQL_PREFIX, eid)) - result = sqlcursor.fetchall() - if len(result) == 0: - eids[eid] = False - return False - elif len(result) > 1: - msg = (' More than one entity with eid %s exists in source!\n' - ' WARNING : Unable to fix this, do it yourself!\n') - sys.stderr.write(msg % eid) - eids[eid] = True - return True - -# XXX move to yams? -def etype_fti_containers(eschema, _done=None): - if _done is None: - _done = set() - _done.add(eschema) - containers = tuple(eschema.fulltext_containers()) - if containers: - for rschema, target in containers: - if target == 'object': - targets = rschema.objects(eschema) - else: - targets = rschema.subjects(eschema) - for targeteschema in targets: - if targeteschema in _done: - continue - _done.add(targeteschema) - for container in etype_fti_containers(targeteschema, _done): - yield container - else: - yield eschema - -def reindex_entities(schema, cnx, withpb=True, etypes=None): - """reindex all entities in the repository""" - # deactivate modification_date hook since we don't want them - # to be updated due to the reindexation - repo = cnx.repo - dbhelper = repo.system_source.dbhelper - cursor = cnx.cnxset.cu - if not dbhelper.has_fti_table(cursor): - print('no text index table') - dbhelper.init_fti(cursor) - repo.system_source.do_fti = True # ensure full-text indexation is activated - if etypes is None: - print('Reindexing entities') - etypes = set() - for eschema in schema.entities(): - if eschema.final: - continue - indexable_attrs = tuple(eschema.indexable_attributes()) # generator - if not indexable_attrs: - continue - for container in etype_fti_containers(eschema): - etypes.add(container) - # clear fti table first - cnx.system_sql('DELETE FROM %s' % dbhelper.fti_table) - else: - print('Reindexing entities of type %s' % \ - ', '.join(sorted(str(e) for e in etypes))) - # clear fti table first. 
Use subquery for sql compatibility - cnx.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES " - "WHERE eid=%s AND type IN (%s))" % ( - dbhelper.fti_table, dbhelper.fti_uid_attr, - ','.join("'%s'" % etype for etype in etypes))) - if withpb: - pb = ProgressBar(len(etypes) + 1) - pb.update() - # reindex entities by generating rql queries which set all indexable - # attribute to their current value - source = repo.system_source - for eschema in etypes: - etype_class = cnx.vreg['etypes'].etype_class(str(eschema)) - for rset in etype_class.cw_fti_index_rql_limit(cnx): - source.fti_index_entities(cnx, rset.entities()) - # clear entity cache to avoid high memory consumption on big tables - cnx.drop_entity_cache() - if withpb: - pb.update() - if withpb: - pb.finish() - - -def check_schema(schema, cnx, eids, fix=1): - """check serialized schema""" - print('Checking serialized schema') - rql = ('Any COUNT(X),RN,SN,ON,CTN GROUPBY RN,SN,ON,CTN ORDERBY 1 ' - 'WHERE X is CWConstraint, R constrained_by X, ' - 'R relation_type RT, RT name RN, R from_entity ST, ST name SN, ' - 'R to_entity OT, OT name ON, X cstrtype CT, CT name CTN') - for count, rn, sn, on, cstrname in cnx.execute(rql): - if count == 1: - continue - if cstrname in UNIQUE_CONSTRAINTS: - print("ERROR: got %s %r constraints on relation %s.%s.%s" % ( - count, cstrname, sn, rn, on)) - if fix: - print('dunno how to fix, do it yourself') - - - -def check_text_index(schema, cnx, eids, fix=1): - """check all entities registered in the text index""" - print('Checking text index') - msg = ' Entity with eid %s exists in the text index but in no source (autofix will remove from text index)' - cursor = cnx.system_sql('SELECT uid FROM appears;') - for row in cursor.fetchall(): - eid = row[0] - if not has_eid(cnx, cursor, eid, eids): - sys.stderr.write(msg % eid) - if fix: - cnx.system_sql('DELETE FROM appears WHERE uid=%s;' % eid) - notify_fixed(fix) - - -def check_entities(schema, cnx, eids, fix=1): - """check all entities registered in the repo system table""" - print('Checking entities system table') - # system table but no source - msg = ' Entity %s with eid %s exists in the system table but in no source (autofix will delete the entity)' - cursor = cnx.system_sql('SELECT eid,type FROM entities;') - for row in cursor.fetchall(): - eid, etype = row - if not has_eid(cnx, cursor, eid, eids): - sys.stderr.write(msg % (etype, eid)) - if fix: - cnx.system_sql('DELETE FROM entities WHERE eid=%s;' % eid) - notify_fixed(fix) - # source in entities, but no relation cw_source - # XXX this (get_versions) requires a second connection to the db when we already have one open - applcwversion = cnx.repo.get_versions().get('cubicweb') - if applcwversion >= (3, 13, 1): # entities.asource appeared in 3.13.1 - cursor = cnx.system_sql('SELECT e.eid FROM entities as e, cw_CWSource as s ' - 'WHERE s.cw_name=e.asource AND ' - 'NOT EXISTS(SELECT 1 FROM cw_source_relation as cs ' - ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' - 'ORDER BY e.eid') - msg = (' Entity with eid %s refers to source in entities table, ' - 'but is missing relation cw_source (autofix will create the relation)\n') - for row in cursor.fetchall(): - sys.stderr.write(msg % row[0]) - if fix: - cnx.system_sql('INSERT INTO cw_source_relation (eid_from, eid_to) ' - 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWSource as s ' - 'WHERE s.cw_name=e.asource AND NOT EXISTS(SELECT 1 FROM cw_source_relation as cs ' - ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') - notify_fixed(True) - # 
inconsistencies for 'is' - msg = ' %s #%s is missing relation "is" (autofix will create the relation)\n' - cursor = cnx.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' - 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs ' - ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' - 'ORDER BY e.eid') - for row in cursor.fetchall(): - sys.stderr.write(msg % tuple(row)) - if fix: - cnx.system_sql('INSERT INTO is_relation (eid_from, eid_to) ' - 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s ' - 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs ' - ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') - notify_fixed(True) - # inconsistencies for 'is_instance_of' - msg = ' %s #%s is missing relation "is_instance_of" (autofix will create the relation)\n' - cursor = cnx.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' - 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs ' - ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' - 'ORDER BY e.eid') - for row in cursor.fetchall(): - sys.stderr.write(msg % tuple(row)) - if fix: - cnx.system_sql('INSERT INTO is_instance_of_relation (eid_from, eid_to) ' - 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s ' - 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs ' - ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') - notify_fixed(True) - print('Checking entities tables') - msg = ' Entity with eid %s exists in the %s table but not in the system table (autofix will delete the entity)' - for eschema in schema.entities(): - if eschema.final: - continue - table = SQL_PREFIX + eschema.type - column = SQL_PREFIX + 'eid' - cursor = cnx.system_sql('SELECT %s FROM %s;' % (column, table)) - for row in cursor.fetchall(): - eid = row[0] - # eids is full since we have fetched everything from the entities table, - # no need to call has_eid - if not eid in eids or not eids[eid]: - sys.stderr.write(msg % (eid, eschema.type)) - if fix: - cnx.system_sql('DELETE FROM %s WHERE %s=%s;' % (table, column, eid)) - notify_fixed(fix) - - -def bad_related_msg(rtype, target, eid, fix): - msg = ' A relation %s with %s eid %s exists but no such entity in sources' - sys.stderr.write(msg % (rtype, target, eid)) - notify_fixed(fix) - -def bad_inlined_msg(rtype, parent_eid, eid, fix): - msg = (' An inlined relation %s from %s to %s exists but the latter ' - 'entity does not exist') - sys.stderr.write(msg % (rtype, parent_eid, eid)) - notify_fixed(fix) - - -def check_relations(schema, cnx, eids, fix=1): - """check that eids referenced by relations are registered in the repo system - table - """ - print('Checking relations') - for rschema in schema.relations(): - if rschema.final or rschema.type in PURE_VIRTUAL_RTYPES: - continue - if rschema.inlined: - for subjtype in rschema.subjects(): - table = SQL_PREFIX + str(subjtype) - column = SQL_PREFIX + str(rschema) - sql = 'SELECT cw_eid,%s FROM %s WHERE %s IS NOT NULL;' % ( - column, table, column) - cursor = cnx.system_sql(sql) - for row in cursor.fetchall(): - parent_eid, eid = row - if not has_eid(cnx, cursor, eid, eids): - bad_inlined_msg(rschema, parent_eid, eid, fix) - if fix: - sql = 'UPDATE %s SET %s=NULL WHERE %s=%s;' % ( - table, column, column, eid) - cnx.system_sql(sql) - continue - try: - cursor = cnx.system_sql('SELECT eid_from FROM %s_relation;' % rschema) - except Exception as ex: - # usually because table doesn't exist - print('ERROR', ex) - continue - for row in 
cursor.fetchall(): - eid = row[0] - if not has_eid(cnx, cursor, eid, eids): - bad_related_msg(rschema, 'subject', eid, fix) - if fix: - sql = 'DELETE FROM %s_relation WHERE eid_from=%s;' % ( - rschema, eid) - cnx.system_sql(sql) - cursor = cnx.system_sql('SELECT eid_to FROM %s_relation;' % rschema) - for row in cursor.fetchall(): - eid = row[0] - if not has_eid(cnx, cursor, eid, eids): - bad_related_msg(rschema, 'object', eid, fix) - if fix: - sql = 'DELETE FROM %s_relation WHERE eid_to=%s;' % ( - rschema, eid) - cnx.system_sql(sql) - - -def check_mandatory_relations(schema, cnx, eids, fix=1): - """check entities missing some mandatory relation""" - print('Checking mandatory relations') - msg = '%s #%s is missing mandatory %s relation %s (autofix will delete the entity)' - for rschema in schema.relations(): - if rschema.final or rschema in PURE_VIRTUAL_RTYPES or rschema in ('is', 'is_instance_of'): - continue - smandatory = set() - omandatory = set() - for rdef in rschema.rdefs.values(): - if rdef.cardinality[0] in '1+': - smandatory.add(rdef.subject) - if rdef.cardinality[1] in '1+': - omandatory.add(rdef.object) - for role, etypes in (('subject', smandatory), ('object', omandatory)): - for etype in etypes: - if role == 'subject': - rql = 'Any X WHERE NOT X %s Y, X is %s' % (rschema, etype) - else: - rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype) - for entity in cnx.execute(rql).entities(): - sys.stderr.write(msg % (entity.cw_etype, entity.eid, role, rschema)) - if fix: - #if entity.cw_describe()['source']['uri'] == 'system': XXX - entity.cw_delete() # XXX this is BRUTAL! - notify_fixed(fix) - - -def check_mandatory_attributes(schema, cnx, eids, fix=1): - """check for entities stored in the system source missing some mandatory - attribute - """ - print('Checking mandatory attributes') - msg = '%s #%s is missing mandatory attribute %s (autofix will delete the entity)' - for rschema in schema.relations(): - if not rschema.final or rschema in VIRTUAL_RTYPES: - continue - for rdef in rschema.rdefs.values(): - if rdef.cardinality[0] in '1+': - rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % ( - rschema, rdef.subject) - for entity in cnx.execute(rql).entities(): - sys.stderr.write(msg % (entity.cw_etype, entity.eid, rschema)) - if fix: - entity.cw_delete() - notify_fixed(fix) - - -def check_metadata(schema, cnx, eids, fix=1): - """check entities has required metadata - - FIXME: rewrite using RQL queries ? 
- """ - print('Checking metadata') - cursor = cnx.system_sql("SELECT DISTINCT type FROM entities;") - eidcolumn = SQL_PREFIX + 'eid' - msg = ' %s with eid %s has no %s (autofix will set it to now)' - for etype, in cursor.fetchall(): - if etype not in cnx.vreg.schema: - sys.stderr.write('entities table references unknown type %s\n' % - etype) - if fix: - cnx.system_sql("DELETE FROM entities WHERE type = %(type)s", - {'type': etype}) - continue - table = SQL_PREFIX + etype - for rel, default in ( ('creation_date', datetime.utcnow()), - ('modification_date', datetime.utcnow()), ): - column = SQL_PREFIX + rel - cursor = cnx.system_sql("SELECT %s FROM %s WHERE %s is NULL" - % (eidcolumn, table, column)) - for eid, in cursor.fetchall(): - sys.stderr.write(msg % (etype, eid, rel)) - if fix: - cnx.system_sql("UPDATE %s SET %s=%%(v)s WHERE %s=%s ;" - % (table, column, eidcolumn, eid), - {'v': default}) - notify_fixed(fix) - - -def check(repo, cnx, checks, reindex, fix, withpb=True): - """check integrity of instance's repository, - using given user and password to locally connect to the repository - (no running cubicweb server needed) - """ - # yo, launch checks - if checks: - eids_cache = {} - with cnx.security_enabled(read=False, write=False): # ensure no read security - for check in checks: - check_func = globals()['check_%s' % check] - check_func(repo.schema, cnx, eids_cache, fix=fix) - if fix: - cnx.commit() - else: - print() - if not fix: - print('WARNING: Diagnostic run, nothing has been corrected') - if reindex: - cnx.rollback() - reindex_entities(repo.schema, cnx, withpb=withpb) - cnx.commit() diff -r 058bb3dc685f -r 0b59724cb3f2 server/cwzmq.py --- a/server/cwzmq.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2012-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from threading import Thread -from logging import getLogger - -import zmq -from zmq.eventloop import ioloop -import zmq.eventloop.zmqstream - -from cubicweb import set_log_methods - - -ctx = zmq.Context() - - -class ZMQComm(object): - """ - A simple ZMQ-based notification bus. - - There should at most one instance of this class attached to a - Repository. A typical usage may be something like:: - - def callback(msg): - self.info('received message: %s', ' '.join(msg)) - repo.app_instances_bus.subscribe('hello', callback) - - to subsribe to the 'hello' kind of message. On the other side, to - emit a notification, call:: - - repo.app_instances_bus.publish(['hello', 'world']) - - See http://docs.cubicweb.org for more details. 
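A minimal sketch of wiring such a bus by hand, outside of a repository, assuming both ends agree on a ZMQ address (the address and topic below are placeholders; depending on the Python/pyzmq versions, topics and message frames may need to be byte strings)::

    def on_hello(msg):
        print('received message: %s' % ' '.join(msg))

    bus = ZMQComm()
    bus.add_publisher('tcp://127.0.0.1:41415')
    bus.add_subscriber('tcp://127.0.0.1:41415')
    bus.add_subscription('hello', on_hello)
    bus.start()
    bus.publish(['hello', 'world'])
    # ... later, shut the ioloop thread down
    bus.stop()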
- """ - def __init__(self): - self.ioloop = ioloop.IOLoop() - self._topics = {} - self._subscribers = [] - self.publisher = None - - def add_publisher(self, address): - assert self.publisher is None, "more than one publisher is not supported" - self.publisher = Publisher(self.ioloop, address) - - def add_subscription(self, topic, callback): - for subscriber in self._subscribers: - subscriber.subscribe(topic, callback) - self._topics[topic] = callback - - def add_subscriber(self, address): - subscriber = Subscriber(self.ioloop, address) - for topic, callback in self._topics.items(): - subscriber.subscribe(topic, callback) - self._subscribers.append(subscriber) - - def publish(self, msg): - if self.publisher is None: - return - self.publisher.send(msg) - - def start(self): - Thread(target=self.ioloop.start).start() - - def stop(self): - self.ioloop.add_callback(self.ioloop.stop) - - def __del__(self): - self.ioloop.close() - - -class Publisher(object): - def __init__(self, ioloop, address): - self.address = address - self._topics = {} - self._subscribers = [] - self.ioloop = ioloop - def callback(): - s = ctx.socket(zmq.PUB) - self.stream = zmq.eventloop.zmqstream.ZMQStream(s, io_loop=ioloop) - self.stream.bind(self.address) - self.debug('start publisher on %s', self.address) - ioloop.add_callback(callback) - - def send(self, msg): - self.ioloop.add_callback(lambda:self.stream.send_multipart(msg)) - - -class Subscriber(object): - def __init__(self, ioloop, address): - self.address = address - self.dispatch_table = {} - self.ioloop = ioloop - def callback(): - s = ctx.socket(zmq.SUB) - self.stream = zmq.eventloop.zmqstream.ZMQStream(s, io_loop=ioloop) - self.stream.on_recv(self.dispatch) - self.stream.connect(self.address) - self.debug('start subscriber on %s', self.address) - ioloop.add_callback(callback) - - def dispatch(self, msg): - try: - f = self.dispatch_table[msg[0]] - except KeyError: - return - f(msg) - - def subscribe(self, topic, callback): - self.dispatch_table[topic] = callback - self.ioloop.add_callback(lambda: self.stream.setsockopt(zmq.SUBSCRIBE, topic)) - - -set_log_methods(Publisher, getLogger('cubicweb.zmq.pub')) -set_log_methods(Subscriber, getLogger('cubicweb.zmq.sub')) diff -r 058bb3dc685f -r 0b59724cb3f2 server/edition.py --- a/server/edition.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,159 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""helper classes to handle server-side edition of entities""" -__docformat__ = "restructuredtext en" - -from copy import copy -from yams import ValidationError - - -_MARKER = object() - -class dict_protocol_catcher(object): - def __init__(self, entity): - self.__entity = entity - def __getitem__(self, attr): - return self.__entity.cw_edited[attr] - def __setitem__(self, attr, value): - self.__entity.cw_edited[attr] = value - def __getattr__(self, attr): - return getattr(self.__entity, attr) - - -class EditedEntity(dict): - """encapsulate entities attributes being written by an RQL query""" - def __init__(self, entity, **kwargs): - super(EditedEntity, self).__init__(**kwargs) - self.entity = entity - self.skip_security = set() - self.querier_pending_relations = {} - self.saved = False - - def __hash__(self): - # dict|set keyable - return hash(id(self)) - - def __lt__(self, other): - # we don't want comparison by value inherited from dict - raise NotImplementedError - - def __eq__(self, other): - return self is other - - def __ne__(self, other): - return not (self == other) - - def __setitem__(self, attr, value): - assert attr != 'eid' - # don't add attribute into skip_security if already in edited - # attributes, else we may accidentally skip a desired security check - if attr not in self: - self.skip_security.add(attr) - self.edited_attribute(attr, value) - - def __delitem__(self, attr): - assert not self.saved, 'too late to modify edited attributes' - super(EditedEntity, self).__delitem__(attr) - self.entity.cw_attr_cache.pop(attr, None) - - def __copy__(self): - # default copy protocol fails in EditedEntity.__setitem__ because - # copied entity has no skip_security attribute at this point - return EditedEntity(self.entity, **self) - - def pop(self, attr, *args): - # don't update skip_security by design (think to storage api) - assert not self.saved, 'too late to modify edited attributes' - value = super(EditedEntity, self).pop(attr, *args) - self.entity.cw_attr_cache.pop(attr, *args) - return value - - def setdefault(self, attr, default): - assert attr != 'eid' - # don't add attribute into skip_security if already in edited - # attributes, else we may accidentally skip a desired security check - if attr not in self: - self[attr] = default - return self[attr] - - def update(self, values, skipsec=True): - if skipsec: - setitem = self.__setitem__ - else: - setitem = self.edited_attribute - for attr, value in values.items(): - setitem(attr, value) - - def edited_attribute(self, attr, value): - """attribute being edited by a rql query: should'nt be added to - skip_security - """ - assert not self.saved, 'too late to modify edited attributes' - super(EditedEntity, self).__setitem__(attr, value) - self.entity.cw_attr_cache[attr] = value - if self.entity._cw.vreg.schema.rschema(attr).final: - self.entity._cw_dont_cache_attribute(attr) - - def oldnewvalue(self, attr): - """returns the couple (old attr value, new attr value) - - NOTE: will only work in a before_update_entity hook - """ - assert not self.saved, 'too late to get the old value' - # get new value and remove from local dict to force a db query to - # fetch old value - newvalue = self.entity.cw_attr_cache.pop(attr, _MARKER) - oldvalue = getattr(self.entity, attr) - if newvalue is not _MARKER: - self.entity.cw_attr_cache[attr] = newvalue - else: - newvalue = oldvalue - return oldvalue, newvalue - - def set_defaults(self): - """set default values according to the schema""" - for attr, value in self.entity.e_schema.defaults(): - if not 
attr in self: - self[str(attr)] = value - - def check(self, creation=False): - """check the entity edition against its schema. Only final relation - are checked here, constraint on actual relations are checked in hooks - """ - entity = self.entity - if creation: - # on creations, we want to check all relations, especially - # required attributes - relations = [rschema for rschema in entity.e_schema.subject_relations() - if rschema.final and rschema.type != 'eid'] - else: - relations = [entity._cw.vreg.schema.rschema(rtype) - for rtype in self] - try: - entity.e_schema.check(dict_protocol_catcher(entity), - creation=creation, relations=relations) - except ValidationError as ex: - ex.entity = self.entity.eid - raise - - def clone(self): - thecopy = EditedEntity(copy(self.entity)) - thecopy.entity.cw_attr_cache = copy(self.entity.cw_attr_cache) - thecopy.entity._cw_related_cache = {} - thecopy.update(self, skipsec=False) - return thecopy diff -r 058bb3dc685f -r 0b59724cb3f2 server/hook.py --- a/server/hook.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1024 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -Generalities ------------- - -Paraphrasing the `emacs`_ documentation, let us say that hooks are an important -mechanism for customizing an application. A hook is basically a list of -functions to be called on some well-defined occasion (this is called `running -the hook`). - -.. _`emacs`: http://www.gnu.org/software/emacs/manual/html_node/emacs/Hooks.html - -Hooks -~~~~~ - -In |cubicweb|, hooks are subclasses of the :class:`~cubicweb.server.hook.Hook` -class. They are selected over a set of pre-defined `events` (and possibly more -conditions, hooks being selectable appobjects like views and components). They -should implement a :meth:`~cubicweb.server.hook.Hook.__call__` method that will -be called when the hook is triggered. - -There are two families of events: data events (before / after any individual -update of an entity / or a relation in the repository) and server events (such -as server startup or shutdown). In a typical application, most of the hooks are -defined over data events. - -Also, some :class:`~cubicweb.server.hook.Operation` may be registered by hooks, -which will be fired when the transaction is commited or rolled back. - -The purpose of data event hooks is usually to complement the data model as -defined in the schema, which is static by nature and only provide a restricted -builtin set of dynamic constraints, with dynamic or value driven behaviours. -For instance they can serve the following purposes: - -* enforcing constraints that the static schema cannot express (spanning several - entities/relations, exotic value ranges and cardinalities, etc.) 
- -* implement computed attributes - -It is functionally equivalent to a `database trigger`_, except that database -triggers definition languages are not standardized, hence not portable (for -instance, PL/SQL works with Oracle and PostgreSQL but not SqlServer nor Sqlite). - -.. _`database trigger`: http://en.wikipedia.org/wiki/Database_trigger - - -.. hint:: - - It is a good practice to write unit tests for each hook. See an example in - :ref:`hook_test` - -Operations -~~~~~~~~~~ - -Operations are subclasses of the :class:`~cubicweb.server.hook.Operation` class -that may be created by hooks and scheduled to happen on `precommit`, -`postcommit` or `rollback` event (i.e. respectivly before/after a commit or -before a rollback of a transaction). - -Hooks are being fired immediately on data operations, and it is sometime -necessary to delay the actual work down to a time where we can expect all -information to be there, or when all other hooks have run (though take case -since operations may themselves trigger hooks). Also while the order of -execution of hooks is data dependant (and thus hard to predict), it is possible -to force an order on operations. - -So, for such case where you may miss some information that may be set later in -the transaction, you should instantiate an operation in the hook. - -Operations may be used to: - -* implements a validation check which needs that all relations be already set on - an entity - -* process various side effects associated with a transaction such as filesystem - udpates, mail notifications, etc. - - -Events ------- - -Hooks are mostly defined and used to handle `dataflow`_ operations. It -means as data gets in (entities added, updated, relations set or -unset), specific events are issued and the Hooks matching these events -are called. - -You can get the event that triggered a hook by accessing its `event` -attribute. - -.. _`dataflow`: http://en.wikipedia.org/wiki/Dataflow - - -Entity modification related events -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When called for one of these events, hook will have an `entity` attribute -containing the entity instance. - -- `before_add_entity`, `before_update_entity`: - - On those events, you can access the modified attributes of the entity using - the `entity.cw_edited` dictionary. The values can be modified and the old - values can be retrieved. - - If you modify the `entity.cw_edited` dictionary in the hook, that is before - the database operations take place, you will avoid the need to process a whole - new rql query and the underlying backend query (eg usually sql) will contain - the modified data. For example: - - .. sourcecode:: python - - self.entity.cw_edited['age'] = 42 - - will modify the age before it is written to the backend storage. - - Similarly, removing an attribute from `cw_edited` will cancel its - modification: - - .. sourcecode:: python - - del self.entity.cw_edited['age'] - - On a `before_update_entity` event, you can access the old and new values: - - .. sourcecode:: python - - old, new = entity.cw_edited.oldnewvalue('age') - -- `after_add_entity`, `after_update_entity` - - On those events, you can get the list of attributes that were modified using - the `entity.cw_edited` dictionary, but you can not modify it or get the old - value of an attribute. - -- `before_delete_entity`, `after_delete_entity` - - On those events, the entity has no `cw_edited` dictionary. - -.. note:: `self.entity.cw_set(age=42)` will set the `age` attribute to - 42. 
But to do so, it will generate a rql query that will have to be processed, - hence may trigger some hooks, etc. This could lead to infinitely looping hooks. - -Relation modification related events -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When called for one of these events, hook will have `eidfrom`, `rtype`, `eidto` -attributes containing respectively the eid of the subject entity, the relation -type and the eid of the object entity. - -* `before_add_relation`, `before_delete_relation` - - On those events, you can still get the original relation by issuing a rql query. - -* `after_add_relation`, `after_delete_relation` - -Specific selectors are shipped for these kinds of events, see in particular -:class:`~cubicweb.server.hook.match_rtype`. - -Also note that relations can be added or deleted, but not updated. - -Non data events -~~~~~~~~~~~~~~~ - -Hooks called on server start/maintenance/stop event (e.g. -`server_startup`, `server_maintenance`, `before_server_shutdown`, -`server_shutdown`) have a `repo` attribute, but *their `_cw` attribute -is None*. The `server_startup` is called on regular startup, while -`server_maintenance` is called on cubicweb-ctl upgrade or shell -commands. `server_shutdown` is called anyway but connections to the -native source is impossible; `before_server_shutdown` handles that. - -Hooks called on backup/restore event (eg `server_backup`, -`server_restore`) have a `repo` and a `timestamp` attributes, but -*their `_cw` attribute is None*. - -Hooks called on session event (eg `session_open`, `session_close`) have no -special attribute. - - -API ---- - -Hooks control -~~~~~~~~~~~~~ - -It is sometimes convenient to explicitly enable or disable some hooks. For -instance if you want to disable some integrity checking hook. This can be -controlled more finely through the `category` class attribute, which is a string -giving a category name. One can then uses the -:meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` and -:meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` context managers to -explicitly enable or disable some categories. - -The existing categories are: - -* ``security``, security checking hooks - -* ``worfklow``, workflow handling hooks - -* ``metadata``, hooks setting meta-data on newly created entities - -* ``notification``, email notification hooks - -* ``integrity``, data integrity checking hooks - -* ``activeintegrity``, data integrity consistency hooks, that you should **never** - want to disable - -* ``syncsession``, hooks synchronizing existing sessions - -* ``syncschema``, hooks synchronizing instance schema (including the physical database) - -* ``email``, email address handling hooks - -* ``bookmark``, bookmark entities handling hooks - - -Nothing precludes one to invent new categories and use existing mechanisms to -filter them in or out. - - -Hooks specific predicates -~~~~~~~~~~~~~~~~~~~~~~~~~ -.. autoclass:: cubicweb.server.hook.match_rtype -.. autoclass:: cubicweb.server.hook.match_rtype_sets - - -Hooks and operations classes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. autoclass:: cubicweb.server.hook.Hook -.. autoclass:: cubicweb.server.hook.Operation -.. autoclass:: cubicweb.server.hook.LateOperation -.. 
autoclass:: cubicweb.server.hook.DataOperationMixIn -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from warnings import warn -from logging import getLogger -from itertools import chain - -from logilab.common.decorators import classproperty, cached -from logilab.common.deprecation import deprecated, class_renamed -from logilab.common.logging_ext import set_log_methods -from logilab.common.registry import (NotPredicate, OrPredicate, - objectify_predicate) - -from cubicweb import RegistryNotFound, server -from cubicweb.cwvreg import CWRegistry, CWRegistryStore -from cubicweb.predicates import ExpectedValuePredicate, is_instance -from cubicweb.appobject import AppObject - -ENTITIES_HOOKS = set(('before_add_entity', 'after_add_entity', - 'before_update_entity', 'after_update_entity', - 'before_delete_entity', 'after_delete_entity')) -RELATIONS_HOOKS = set(('before_add_relation', 'after_add_relation' , - 'before_delete_relation','after_delete_relation')) -SYSTEM_HOOKS = set(('server_backup', 'server_restore', - 'server_startup', 'server_maintenance', - 'server_shutdown', 'before_server_shutdown', - 'session_open', 'session_close')) -ALL_HOOKS = ENTITIES_HOOKS | RELATIONS_HOOKS | SYSTEM_HOOKS - -def _iter_kwargs(entities, eids_from_to, kwargs): - if not entities and not eids_from_to: - yield kwargs - elif entities: - for entity in entities: - kwargs['entity'] = entity - yield kwargs - else: - for subject, object in eids_from_to: - kwargs.update({'eidfrom': subject, 'eidto': object}) - yield kwargs - - -class HooksRegistry(CWRegistry): - - def register(self, obj, **kwargs): - obj.check_events() - super(HooksRegistry, self).register(obj, **kwargs) - - def call_hooks(self, event, cnx=None, **kwargs): - """call `event` hooks for an entity or a list of entities (passed - respectively as the `entity` or ``entities`` keyword argument). - """ - kwargs['event'] = event - if cnx is None: # True for events such as server_start - for hook in sorted(self.possible_objects(cnx, **kwargs), - key=lambda x: x.order): - hook() - else: - if 'entities' in kwargs: - assert 'entity' not in kwargs, \ - 'can\'t pass "entities" and "entity" arguments simultaneously' - assert 'eids_from_to' not in kwargs, \ - 'can\'t pass "entities" and "eids_from_to" arguments simultaneously' - entities = kwargs.pop('entities') - eids_from_to = [] - elif 'eids_from_to' in kwargs: - entities = [] - eids_from_to = kwargs.pop('eids_from_to') - else: - entities = [] - eids_from_to = [] - pruned = self.get_pruned_hooks(cnx, event, - entities, eids_from_to, kwargs) - - # by default, hooks are executed with security turned off - with cnx.security_enabled(read=False): - for _kwargs in _iter_kwargs(entities, eids_from_to, kwargs): - hooks = sorted(self.filtered_possible_objects(pruned, cnx, **_kwargs), - key=lambda x: x.order) - debug = server.DEBUG & server.DBG_HOOKS - with cnx.security_enabled(write=False): - with cnx.running_hooks_ops(): - for hook in hooks: - if debug: - print(event, _kwargs, hook) - hook() - - def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs): - """return a set of hooks that should not be considered by filtered_possible objects - - the idea is to make a first pass over all the hooks in the - registry and to mark put some of them in a pruned list. 
The - pruned hooks are the one which: - - * are disabled at the connection level - - * have a selector containing a :class:`match_rtype` or an - :class:`is_instance` predicate which does not match the rtype / etype - of the relations / entities for which we are calling the hooks. This - works because the repository calls the hooks grouped by rtype or by - etype when using the entities or eids_to_from keyword arguments - - Only hooks with a simple predicate or an AndPredicate of simple - predicates are considered for disabling. - - """ - if 'entity' in kwargs: - entities = [kwargs['entity']] - if len(entities): - look_for_selector = is_instance - etype = entities[0].__regid__ - elif 'rtype' in kwargs: - look_for_selector = match_rtype - etype = None - else: # nothing to prune, how did we get there ??? - return set() - cache_key = (event, kwargs.get('rtype'), etype) - pruned = cnx.pruned_hooks_cache.get(cache_key) - if pruned is not None: - return pruned - pruned = set() - cnx.pruned_hooks_cache[cache_key] = pruned - if look_for_selector is not None: - for id, hooks in self.items(): - for hook in hooks: - enabled_cat, main_filter = hook.filterable_selectors() - if enabled_cat is not None: - if not enabled_cat(hook, cnx): - pruned.add(hook) - continue - if main_filter is not None: - if isinstance(main_filter, match_rtype) and \ - (main_filter.frometypes is not None or \ - main_filter.toetypes is not None): - continue - first_kwargs = next(_iter_kwargs(entities, eids_from_to, kwargs)) - if not main_filter(hook, cnx, **first_kwargs): - pruned.add(hook) - return pruned - - - def filtered_possible_objects(self, pruned, *args, **kwargs): - for appobjects in self.values(): - if pruned: - filtered_objects = [obj for obj in appobjects if obj not in pruned] - if not filtered_objects: - continue - else: - filtered_objects = appobjects - obj = self._select_best(filtered_objects, - *args, **kwargs) - if obj is None: - continue - yield obj - -class HooksManager(object): - def __init__(self, vreg): - self.vreg = vreg - - def call_hooks(self, event, cnx=None, **kwargs): - try: - registry = self.vreg['%s_hooks' % event] - except RegistryNotFound: - return # no hooks for this event - registry.call_hooks(event, cnx, **kwargs) - - -for event in ALL_HOOKS: - CWRegistryStore.REGISTRY_FACTORY['%s_hooks' % event] = HooksRegistry - - -# some hook specific predicates ################################################# - -@objectify_predicate -def enabled_category(cls, req, **kwargs): - if req is None: - return True # XXX how to deactivate server startup / shutdown event - return req.is_hook_activated(cls) - -@objectify_predicate -def issued_from_user_query(cls, req, **kwargs): - return 0 if req.hooks_in_progress else 1 - -from_dbapi_query = class_renamed('from_dbapi_query', - issued_from_user_query, - message='[3.21] ') - - -class rechain(object): - def __init__(self, *iterators): - self.iterators = iterators - def __iter__(self): - return iter(chain(*self.iterators)) - - -class match_rtype(ExpectedValuePredicate): - """accept if the relation type is found in expected ones. Optional - named parameters `frometypes` and `toetypes` can be used to restrict - target subject and/or object entity types of the relation. 
- - :param \*expected: possible relation types - :param frometypes: candidate entity types as subject of relation - :param toetypes: candidate entity types as object of relation - """ - def __init__(self, *expected, **more): - self.expected = expected - self.frometypes = more.pop('frometypes', None) - self.toetypes = more.pop('toetypes', None) - assert not more, "unexpected kwargs in match_rtype: %s" % more - - def __call__(self, cls, req, *args, **kwargs): - if kwargs.get('rtype') not in self.expected: - return 0 - if self.frometypes is not None and \ - req.entity_metas(kwargs['eidfrom'])['type'] not in self.frometypes: - return 0 - if self.toetypes is not None and \ - req.entity_metas(kwargs['eidto'])['type'] not in self.toetypes: - return 0 - return 1 - - -class match_rtype_sets(ExpectedValuePredicate): - """accept if the relation type is in one of the sets given as initializer - argument. The goal of this predicate is that it keeps reference to original sets, - so modification to thoses sets are considered by the predicate. For instance - - .. sourcecode:: python - - MYSET = set() - - class Hook1(Hook): - __regid__ = 'hook1' - __select__ = Hook.__select__ & match_rtype_sets(MYSET) - ... - - class Hook2(Hook): - __regid__ = 'hook2' - __select__ = Hook.__select__ & match_rtype_sets(MYSET) - - Client code can now change `MYSET`, this will changes the selection criteria - of :class:`Hook1` and :class:`Hook1`. - """ - - def __init__(self, *expected): - self.expected = expected - - def __call__(self, cls, req, *args, **kwargs): - for rel_set in self.expected: - if kwargs.get('rtype') in rel_set: - return 1 - return 0 - - -# base class for hook ########################################################## - -class Hook(AppObject): - """Base class for hook. - - Hooks being appobjects like views, they have a `__regid__` and a `__select__` - class attribute. Like all appobjects, hooks have the `self._cw` attribute which - represents the current connection. In entity hooks, a `self.entity` attribute is - also present. - - The `events` tuple is used by the base class selector to dispatch the hook - on the right events. It is possible to dispatch on multiple events at once - if needed (though take care as hook attribute may vary as described above). - - .. Note:: - - Do not forget to extend the base class selectors as in: - - .. sourcecode:: python - - class MyHook(Hook): - __regid__ = 'whatever' - __select__ = Hook.__select__ & is_instance('Person') - - else your hooks will be called madly, whatever the event. 
- """ - __select__ = enabled_category() - # set this in derivated classes - events = None - category = None - order = 0 - # stop pylint from complaining about missing attributes in Hooks classes - eidfrom = eidto = entity = rtype = repo = None - - @classmethod - @cached - def filterable_selectors(cls): - search = cls.__select__.search_selector - if search((NotPredicate, OrPredicate)): - return None, None - enabled_cat = search(enabled_category) - main_filter = search((is_instance, match_rtype)) - return enabled_cat, main_filter - - @classmethod - def check_events(cls): - try: - for event in cls.events: - if event not in ALL_HOOKS: - raise Exception('bad event %s on %s.%s' % ( - event, cls.__module__, cls.__name__)) - except AttributeError: - raise - except TypeError: - raise Exception('bad .events attribute %s on %s.%s' % ( - cls.events, cls.__module__, cls.__name__)) - - @classmethod - def __registered__(cls, reg): - cls.check_events() - - @classproperty - def __registries__(cls): - if cls.events is None: - return [] - return ['%s_hooks' % ev for ev in cls.events] - - known_args = set(('entity', 'rtype', 'eidfrom', 'eidto', 'repo', 'timestamp')) - def __init__(self, req, event, **kwargs): - for arg in self.known_args: - if arg in kwargs: - setattr(self, arg, kwargs.pop(arg)) - super(Hook, self).__init__(req, **kwargs) - self.event = event - -set_log_methods(Hook, getLogger('cubicweb.hook')) - - -# abtract hooks for relation propagation ####################################### -# See example usage in hooks of the nosylist cube - -class PropagateRelationHook(Hook): - """propagate some `main_rtype` relation on entities linked as object of - `subject_relations` or as subject of `object_relations` (the watched - relations). - - This hook ensure that when one of the watched relation is added, the - `main_rtype` relation is added to the target entity of the relation. - Notice there are no default behaviour defined when a watched relation is - deleted, you'll have to handle this by yourself. - - You usually want to use the :class:`match_rtype_sets` predicate on concrete - classes. - """ - events = ('after_add_relation',) - - # to set in concrete class - main_rtype = None - subject_relations = None - object_relations = None - - def __call__(self): - assert self.main_rtype - for eid in (self.eidfrom, self.eidto): - etype = self._cw.entity_metas(eid)['type'] - if self.main_rtype not in self._cw.vreg.schema.eschema(etype).subjrels: - return - if self.rtype in self.subject_relations: - meid, seid = self.eidfrom, self.eidto - else: - assert self.rtype in self.object_relations - meid, seid = self.eidto, self.eidfrom - self._cw.execute( - 'SET E %s P WHERE X %s P, X eid %%(x)s, E eid %%(e)s, NOT E %s P' - % (self.main_rtype, self.main_rtype, self.main_rtype), - {'x': meid, 'e': seid}) - - -class PropagateRelationAddHook(Hook): - """Propagate to entities at the end of watched relations when a `main_rtype` - relation is added. - - `subject_relations` and `object_relations` attributes should be specified on - subclasses and are usually shared references with attributes of the same - name on :class:`PropagateRelationHook`. - - Because of those shared references, you can use `skip_subject_relations` and - `skip_object_relations` attributes when you don't want to propagate to - entities linked through some particular relations. 
- """ - events = ('after_add_relation',) - - # to set in concrete class (mandatory) - subject_relations = None - object_relations = None - # to set in concrete class (optionally) - skip_subject_relations = () - skip_object_relations = () - - def __call__(self): - eschema = self._cw.vreg.schema.eschema(self._cw.entity_metas(self.eidfrom)['type']) - execute = self._cw.execute - for rel in self.subject_relations: - if rel in eschema.subjrels and not rel in self.skip_subject_relations: - execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'X %s R, NOT R %s P' % (self.rtype, rel, self.rtype), - {'x': self.eidfrom, 'p': self.eidto}) - for rel in self.object_relations: - if rel in eschema.objrels and not rel in self.skip_object_relations: - execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'R %s X, NOT R %s P' % (self.rtype, rel, self.rtype), - {'x': self.eidfrom, 'p': self.eidto}) - - -class PropagateRelationDelHook(PropagateRelationAddHook): - """Propagate to entities at the end of watched relations when a `main_rtype` - relation is deleted. - - This is the opposite of the :class:`PropagateRelationAddHook`, see its - documentation for how to use this class. - """ - events = ('after_delete_relation',) - - def __call__(self): - eschema = self._cw.vreg.schema.eschema(self._cw.entity_metas(self.eidfrom)['type']) - execute = self._cw.execute - for rel in self.subject_relations: - if rel in eschema.subjrels and not rel in self.skip_subject_relations: - execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'X %s R' % (self.rtype, rel), - {'x': self.eidfrom, 'p': self.eidto}) - for rel in self.object_relations: - if rel in eschema.objrels and not rel in self.skip_object_relations: - execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'R %s X' % (self.rtype, rel), - {'x': self.eidfrom, 'p': self.eidto}) - - - -# abstract classes for operation ############################################### - -class Operation(object): - """Base class for operations. - - Operation may be instantiated in the hooks' `__call__` method. It always - takes a connection object as first argument (accessible as `.cnx` from the - operation instance), and optionally all keyword arguments needed by the - operation. These keyword arguments will be accessible as attributes from the - operation instance. - - An operation is triggered on connections set events related to commit / - rollback transations. Possible events are: - - * `precommit`: - - the transaction is being prepared for commit. You can freely do any heavy - computation, raise an exception if the commit can't go. or even add some - new operations during this phase. If you do anything which has to be - reverted if the commit fails afterwards (eg altering the file system for - instance), you'll have to support the 'revertprecommit' event to revert - things by yourself - - * `revertprecommit`: - - if an operation failed while being pre-commited, this event is triggered - for all operations which had their 'precommit' event already fired to let - them revert things (including the operation which made the commit fail) - - * `rollback`: - - the transaction has been either rolled back either: - - * intentionally - * a 'precommit' event failed, in which case all operations are rolled back - once 'revertprecommit'' has been called - - * `postcommit`: - - the transaction is over. All the ORM entities accessed by the earlier - transaction are invalid. 
If you need to work on the database, you need to - start a new transaction, for instance using a new internal connection, - which you will need to commit. - - For an operation to support an event, one has to implement the `_event` method with no arguments. - - The order of operations may be important, and is controlled according to - the insert_index's method output (whose implementation vary according to the - base hook class used). - """ - - def __init__(self, cnx, **kwargs): - self.cnx = cnx - self.__dict__.update(kwargs) - self.register(cnx) - # execution information - self.processed = None # 'precommit', 'commit' - self.failed = False - - @property - @deprecated('[3.19] Operation.session is deprecated, use Operation.cnx instead') - def session(self): - return self.cnx - - def register(self, cnx): - cnx.add_operation(self, self.insert_index()) - - def insert_index(self): - """return the index of the latest instance which is not a - LateOperation instance - """ - # faster by inspecting operation in reverse order for heavy transactions - i = None - for i, op in enumerate(reversed(self.cnx.pending_operations)): - if isinstance(op, (LateOperation, SingleLastOperation)): - continue - return -i or None - if i is None: - return None - return -(i + 1) - - def handle_event(self, event): - """delegate event handling to the opertaion""" - getattr(self, event)() - - def precommit_event(self): - """the observed connections set is preparing a commit""" - - def revertprecommit_event(self): - """an error went when pre-commiting this operation or a later one - - should revert pre-commit's changes but take care, they may have not - been all considered if it's this operation which failed - """ - - def rollback_event(self): - """the observed connections set has been rolled back - - do nothing by default - """ - - def postcommit_event(self): - """the observed connections set has committed""" - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -set_log_methods(Operation, getLogger('cubicweb.session')) - -def _container_add(container, value): - {set: set.add, list: list.append}[container.__class__](container, value) - - -class DataOperationMixIn(object): - """Mix-in class to ease applying a single operation on a set of data, - avoiding to create as many as operation as they are individual modification. - The body of the operation must then iterate over the values that have been - stored in a single operation instance. - - You should try to use this instead of creating on operation for each - `value`, since handling operations becomes costly on massive data import. - - Usage looks like: - - .. sourcecode:: python - - class MyEntityHook(Hook): - __regid__ = 'my.entity.hook' - __select__ = Hook.__select__ & is_instance('MyEntity') - events = ('after_add_entity',) - - def __call__(self): - MyOperation.get_instance(self._cw).add_data(self.entity) - - - class MyOperation(DataOperationMixIn, Operation): - def precommit_event(self): - for bucket in self.get_data(): - process(bucket) - - You can modify the `containercls` class attribute, which defines the - container class that should be instantiated to hold payloads. An instance is - created on instantiation, and then the :meth:`add_data` method will add the - given data to the existing container. Default to a `set`. Give `list` if you - want to keep arrival ordering. 
You can also use another kind of container - by redefining :meth:`_build_container` and :meth:`add_data` - - More optional parameters can be given to the `get_instance` operation, that - will be given to the operation constructor (for obvious reasons those - parameters should not vary accross different calls to this method for a - given operation). - - .. Note:: - For sanity reason `get_data` will reset the operation, so that once - the operation has started its treatment, if some hook want to push - additional data to this same operation, a new instance will be created - (else that data has a great chance to be never treated). This implies: - - * you should **always** call `get_data` when starting treatment - - * you should **never** call `get_data` for another reason. - """ - containercls = set - - @classproperty - def data_key(cls): - return ('cw.dataops', cls.__name__) - - @classmethod - def get_instance(cls, cnx, **kwargs): - # no need to lock: transaction_data already comes from thread's local storage - try: - return cnx.transaction_data[cls.data_key] - except KeyError: - op = cnx.transaction_data[cls.data_key] = cls(cnx, **kwargs) - return op - - def __init__(self, *args, **kwargs): - super(DataOperationMixIn, self).__init__(*args, **kwargs) - self._container = self._build_container() - self._processed = False - - def __contains__(self, value): - return value in self._container - - def _build_container(self): - return self.containercls() - - def union(self, data): - """only when container is a set""" - assert not self._processed, """Trying to add data to a closed operation. -Iterating over operation data closed it and should be reserved to precommit / -postcommit method of the operation.""" - self._container |= data - - def add_data(self, data): - assert not self._processed, """Trying to add data to a closed operation. -Iterating over operation data closed it and should be reserved to precommit / -postcommit method of the operation.""" - _container_add(self._container, data) - - def remove_data(self, data): - assert not self._processed, """Trying to add data to a closed operation. -Iterating over operation data closed it and should be reserved to precommit / -postcommit method of the operation.""" - self._container.remove(data) - - def get_data(self): - assert not self._processed, """Trying to get data from a closed operation. 
-Iterating over operation data closed it and should be reserved to precommit / -postcommit method of the operation.""" - self._processed = True - op = self.cnx.transaction_data.pop(self.data_key) - assert op is self, "Bad handling of operation data, found %s instead of %s for key %s" % ( - op, self, self.data_key) - return self._container - - - -class LateOperation(Operation): - """special operation which should be called after all possible (ie non late) - operations - """ - def insert_index(self): - """return the index of the lastest instance which is not a - SingleLastOperation instance - """ - # faster by inspecting operation in reverse order for heavy transactions - i = None - for i, op in enumerate(reversed(self.cnx.pending_operations)): - if isinstance(op, SingleLastOperation): - continue - return -i or None - if i is None: - return None - return -(i + 1) - - - -class SingleLastOperation(Operation): - """special operation which should be called once and after all other - operations - """ - - def register(self, cnx): - """override register to handle cases where this operation has already - been added - """ - operations = cnx.pending_operations - index = self.equivalent_index(operations) - if index is not None: - equivalent = operations.pop(index) - else: - equivalent = None - cnx.add_operation(self, self.insert_index()) - return equivalent - - def equivalent_index(self, operations): - """return the index of the equivalent operation if any""" - for i, op in enumerate(reversed(operations)): - if op.__class__ is self.__class__: - return -(i+1) - return None - - def insert_index(self): - return None - - -class SendMailOp(SingleLastOperation): - def __init__(self, cnx, msg=None, recipients=None, **kwargs): - # may not specify msg yet, as - # `cubicweb.sobjects.supervision.SupervisionMailOp` - if msg is not None: - assert recipients - self.to_send = [(msg, recipients)] - else: - assert recipients is None - self.to_send = [] - super(SendMailOp, self).__init__(cnx, **kwargs) - - def register(self, cnx): - previous = super(SendMailOp, self).register(cnx) - if previous: - self.to_send = previous.to_send + self.to_send - - def postcommit_event(self): - self.cnx.repo.threaded_task(self.sendmails) - - def sendmails(self): - self.cnx.vreg.config.sendmails(self.to_send) - - -class RQLPrecommitOperation(Operation): - # to be defined in concrete classes - rqls = None - - def precommit_event(self): - execute = self.cnx.execute - for rql in self.rqls: - execute(*rql) - - -class CleanupNewEidsCacheOp(DataOperationMixIn, SingleLastOperation): - """on rollback of a insert query we have to remove from repository's - type/source cache eids of entities added in that transaction. - - NOTE: querier's rqlst/solutions cache may have been polluted too with - queries such as Any X WHERE X eid 32 if 32 has been rolled back however - generated queries are unpredictable and analysing all the cache probably - too expensive. Notice that there is no pb when using args to specify eids - instead of giving them into the rql string. - """ - data_key = 'neweids' - - def rollback_event(self): - """the observed connections set has been rolled back, - remove inserted eid from repository type/source cache - """ - try: - self.cnx.repo.clear_caches(self.get_data()) - except KeyError: - pass - -class CleanupDeletedEidsCacheOp(DataOperationMixIn, SingleLastOperation): - """on commit of delete query, we have to remove from repository's - type/source cache eids of entities deleted in that transaction. 
- """ - data_key = 'pendingeids' - def postcommit_event(self): - """the observed connections set has been rolled back, - remove inserted eid from repository type/source cache - """ - try: - eids = self.get_data() - self.cnx.repo.clear_caches(eids) - self.cnx.repo.app_instances_bus.publish(['delete'] + list(str(eid) for eid in eids)) - except KeyError: - pass diff -r 058bb3dc685f -r 0b59724cb3f2 server/migractions.py --- a/server/migractions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1603 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""a class implementing basic actions used in migration scripts. - -The following schema actions are supported for now: -* add/drop/rename attribute -* add/drop entity/relation type -* rename entity type - -The following data actions are supported for now: -* add an entity -* execute raw RQL queries -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import os -import tarfile -import tempfile -import shutil -import os.path as osp -from datetime import datetime -from glob import glob -from copy import copy -from warnings import warn -from contextlib import contextmanager - -from six import PY2, text_type - -from logilab.common.deprecation import deprecated -from logilab.common.decorators import cached, clear_cache - -from yams.buildobjs import EntityType -from yams.constraints import SizeConstraint -from yams.schema import RelationDefinitionSchema - -from cubicweb import CW_SOFTWARE_ROOT, AuthenticationError, ExecutionError -from cubicweb.predicates import is_instance -from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES, - PURE_VIRTUAL_RTYPES, - CubicWebRelationSchema, order_eschemas) -from cubicweb.cwvreg import CW_EVENT_MANAGER -from cubicweb import repoapi -from cubicweb.migration import MigrationHelper, yes -from cubicweb.server import hook, schemaserial as ss -from cubicweb.server.schema2sql import eschema2sql, rschema2sql, unique_index_name, sql_type -from cubicweb.server.utils import manager_userpasswd -from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX - - -class ClearGroupMap(hook.Hook): - __regid__ = 'cw.migration.clear_group_mapping' - __select__ = hook.Hook.__select__ & is_instance('CWGroup') - events = ('after_add_entity', 'after_update_entity',) - def __call__(self): - clear_cache(self.mih, 'group_mapping') - self.mih._synchronized.clear() - - @classmethod - def mih_register(cls, repo): - # may be already registered in tests (e.g. 
unittest_migractions at - # least) - if not cls.__regid__ in repo.vreg['after_add_entity_hooks']: - repo.vreg.register(ClearGroupMap) - - -class ServerMigrationHelper(MigrationHelper): - """specific migration helper for server side migration scripts, - providing actions related to schema/data migration - """ - - def __init__(self, config, schema, interactive=True, - repo=None, cnx=None, verbosity=1, connect=True): - MigrationHelper.__init__(self, config, interactive, verbosity) - if not interactive: - assert cnx - assert repo - if cnx is not None: - assert repo - self.cnx = cnx - self.repo = repo - self.session = cnx.session - elif connect: - self.repo = config.repository() - self.set_cnx() - else: - self.session = None - # no config on shell to a remote instance - if config is not None and (cnx or connect): - repo = self.repo - # register a hook to clear our group_mapping cache and the - # self._synchronized set when some group is added or updated - ClearGroupMap.mih = self - ClearGroupMap.mih_register(repo) - CW_EVENT_MANAGER.bind('after-registry-reload', - ClearGroupMap.mih_register, repo) - # notify we're starting maintenance (called instead of server_start - # which is called on regular start - repo.hm.call_hooks('server_maintenance', repo=repo) - if not schema and not config.quick_start: - insert_lperms = self.repo.get_versions()['cubicweb'] < (3, 14, 0) and 'localperms' in config.available_cubes() - if insert_lperms: - cubes = config._cubes - config._cubes += ('localperms',) - try: - schema = config.load_schema(expand_cubes=True) - finally: - if insert_lperms: - config._cubes = cubes - self.fs_schema = schema - self._synchronized = set() - - # overriden from base MigrationHelper ###################################### - - def set_cnx(self): - try: - login = self.repo.config.default_admin_config['login'] - pwd = self.repo.config.default_admin_config['password'] - except KeyError: - login, pwd = manager_userpasswd() - while True: - try: - self.cnx = repoapi.connect(self.repo, login, password=pwd) - if not 'managers' in self.cnx.user.groups: - print('migration need an account in the managers group') - else: - break - except AuthenticationError: - print('wrong user/password') - except (KeyboardInterrupt, EOFError): - print('aborting...') - sys.exit(0) - try: - login, pwd = manager_userpasswd() - except (KeyboardInterrupt, EOFError): - print('aborting...') - sys.exit(0) - self.session = self.repo._get_session(self.cnx.sessionid) - - def cube_upgraded(self, cube, version): - self.cmd_set_property('system.version.%s' % cube.lower(), - text_type(version)) - self.commit() - - def shutdown(self): - if self.repo is not None: - self.repo.shutdown() - - def migrate(self, vcconf, toupgrade, options): - if not options.fs_only: - if options.backup_db is None: - self.backup_database() - elif options.backup_db: - self.backup_database(askconfirm=False) - # disable notification during migration - with self.cnx.allow_all_hooks_but('notification'): - super(ServerMigrationHelper, self).migrate(vcconf, toupgrade, options) - - def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - try: - return super(ServerMigrationHelper, self).cmd_process_script( - migrscript, funcname, *args, **kwargs) - except ExecutionError as err: - sys.stderr.write("-> %s\n" % err) - except BaseException: - self.rollback() - raise - - # Adjust docstring - cmd_process_script.__doc__ = MigrationHelper.cmd_process_script.__doc__ - - # server specific migration methods ######################################## - - def 
backup_database(self, backupfile=None, askconfirm=True, format='native'): - config = self.config - repo = self.repo - # paths - timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') - instbkdir = osp.join(config.appdatahome, 'backup') - if not osp.exists(instbkdir): - os.makedirs(instbkdir) - backupfile = backupfile or osp.join(instbkdir, '%s-%s.tar.gz' - % (config.appid, timestamp)) - # check backup has to be done - if osp.exists(backupfile) and not \ - self.confirm('Backup file %s exists, overwrite it?' % backupfile): - print('-> no backup done.') - return - elif askconfirm and not self.confirm('Backup %s database?' % config.appid): - print('-> no backup done.') - return - open(backupfile,'w').close() # kinda lock - os.chmod(backupfile, 0o600) - # backup - source = repo.system_source - tmpdir = tempfile.mkdtemp() - try: - failed = False - try: - source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format) - except Exception as ex: - print('-> error trying to backup %s [%s]' % (source.uri, ex)) - if not self.confirm('Continue anyway?', default='n'): - raise SystemExit(1) - else: - failed = True - with open(osp.join(tmpdir, 'format.txt'), 'w') as format_file: - format_file.write('%s\n' % format) - with open(osp.join(tmpdir, 'versions.txt'), 'w') as version_file: - versions = repo.get_versions() - for cube, version in versions.items(): - version_file.write('%s %s\n' % (cube, version)) - if not failed: - bkup = tarfile.open(backupfile, 'w|gz') - for filename in os.listdir(tmpdir): - bkup.add(osp.join(tmpdir, filename), filename) - bkup.close() - # call hooks - repo.hm.call_hooks('server_backup', repo=repo, timestamp=timestamp) - # done - print('-> backup file', backupfile) - finally: - shutil.rmtree(tmpdir) - - def restore_database(self, backupfile, drop=True, askconfirm=True, format='native'): - # check - if not osp.exists(backupfile): - raise ExecutionError("Backup file %s doesn't exist" % backupfile) - if askconfirm and not self.confirm('Restore %s database from %s ?' - % (self.config.appid, backupfile)): - return - # unpack backup - tmpdir = tempfile.mkdtemp() - try: - bkup = tarfile.open(backupfile, 'r|gz') - except tarfile.ReadError: - # assume restoring old backup - shutil.copy(backupfile, osp.join(tmpdir, 'system')) - else: - for name in bkup.getnames(): - if name[0] in '/.': - raise ExecutionError('Security check failed, path starts with "/" or "."') - bkup.close() # XXX seek error if not close+open !?! 
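The member-name check above is the usual guard against archive entries escaping
the extraction directory. As a standalone illustration (the helper name is ours,
not part of CubicWeb), the same guard can be written with the seekable 'r:gz'
mode, which avoids the close/reopen dance noted in the comment above:

.. sourcecode:: python

    import tarfile

    def safe_extract_backup(backupfile, tmpdir):
        """extract a gzipped tar backup after checking member names
        (illustrative helper, not part of CubicWeb)
        """
        # 'r:gz' gives random access, so the archive does not have to be
        # closed and reopened between getnames() and extractall()
        with tarfile.open(backupfile, 'r:gz') as bkup:
            for name in bkup.getnames():
                if name[0] in '/.':
                    raise ValueError('suspicious archive member %r' % name)
            bkup.extractall(path=tmpdir)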
- bkup = tarfile.open(backupfile, 'r|gz') - bkup.extractall(path=tmpdir) - bkup.close() - if osp.isfile(osp.join(tmpdir, 'format.txt')): - with open(osp.join(tmpdir, 'format.txt')) as format_file: - written_format = format_file.readline().strip() - if written_format in ('portable', 'native'): - format = written_format - self.config.init_cnxset_pool = False - repo = self.repo = self.config.repository() - source = repo.system_source - try: - source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format) - except Exception as exc: - print('-> error trying to restore %s [%s]' % (source.uri, exc)) - if not self.confirm('Continue anyway?', default='n'): - raise SystemExit(1) - shutil.rmtree(tmpdir) - # call hooks - repo.init_cnxset_pool() - repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile) - print('-> database restored.') - - def commit(self): - self.cnx.commit() - - def rollback(self): - self.cnx.rollback() - - def rqlexecall(self, rqliter, ask_confirm=False): - for rql, kwargs in rqliter: - self.rqlexec(rql, kwargs, ask_confirm=ask_confirm) - - @cached - def _create_context(self): - """return a dictionary to use as migration script execution context""" - context = super(ServerMigrationHelper, self)._create_context() - context.update({'commit': self.checkpoint, - 'rollback': self.rollback, - 'sql': self.sqlexec, - 'rql': self.rqlexec, - 'rqliter': self.rqliter, - 'schema': self.repo.get_schema(), - 'cnx': self.cnx, - 'fsschema': self.fs_schema, - 'session' : self.cnx, - 'repo' : self.repo, - }) - return context - - @cached - def group_mapping(self): - """cached group mapping""" - return ss.group_mapping(self.cnx) - - def cstrtype_mapping(self): - """cached constraint types mapping""" - return ss.cstrtype_mapping(self.cnx) - - def cmd_exec_event_script(self, event, cube=None, funcname=None, - *args, **kwargs): - """execute a cube event scripts `migration/.py` where event - is one of 'precreate', 'postcreate', 'preremove' and 'postremove'. - """ - assert event in ('precreate', 'postcreate', 'preremove', 'postremove') - if cube: - cubepath = self.config.cube_dir(cube) - apc = osp.join(cubepath, 'migration', '%s.py' % event) - elif kwargs.pop('apphome', False): - apc = osp.join(self.config.apphome, 'migration', '%s.py' % event) - else: - apc = osp.join(self.config.migration_scripts_dir(), '%s.py' % event) - if osp.exists(apc): - if self.config.free_wheel: - self.cmd_deactivate_verification_hooks() - self.info('executing %s', apc) - confirm = self.confirm - execscript_confirm = self.execscript_confirm - self.confirm = yes - self.execscript_confirm = yes - try: - if event == 'postcreate': - with self.cnx.allow_all_hooks_but(): - return self.cmd_process_script(apc, funcname, *args, **kwargs) - return self.cmd_process_script(apc, funcname, *args, **kwargs) - finally: - self.confirm = confirm - self.execscript_confirm = execscript_confirm - if self.config.free_wheel: - self.cmd_reactivate_verification_hooks() - - def cmd_install_custom_sql_scripts(self, cube=None): - """install a cube custom sql scripts `schema/*..sql` where - depends on the instance main database backend (eg 'postgres', - 'mysql'...) 
- """ - driver = self.repo.system_source.dbdriver - if cube is None: - directory = osp.join(CW_SOFTWARE_ROOT, 'schemas') - else: - directory = osp.join(self.config.cube_dir(cube), 'schema') - sql_scripts = glob(osp.join(directory, '*.%s.sql' % driver)) - for fpath in sql_scripts: - print('-> installing', fpath) - failed = sqlexec(open(fpath).read(), self.cnx.system_sql, False, - delimiter=';;') - if failed: - print('-> ERROR, skipping', fpath) - - # schema synchronization internals ######################################## - - def _synchronize_permissions(self, erschema, teid): - """permission synchronization for an entity or relation type""" - assert teid, erschema - if 'update' in erschema.ACTIONS or erschema.final: - # entity type - exprtype = u'ERQLExpression' - else: - # relation type - exprtype = u'RRQLExpression' - gm = self.group_mapping() - confirm = self.verbosity >= 2 - # * remove possibly deprecated permission (eg in the persistent schema - # but not in the new schema) - # * synchronize existing expressions - # * add new groups/expressions - for action in erschema.ACTIONS: - perm = '%s_permission' % action - # handle groups - newgroups = list(erschema.get_groups(action)) - for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, ' - 'T eid %%(x)s' % perm, {'x': teid}, - ask_confirm=False): - if not gname in newgroups: - if not confirm or self.confirm('Remove %s permission of %s to %s?' - % (action, erschema, gname)): - self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s' - % (perm, teid), - {'x': geid}, ask_confirm=False) - else: - newgroups.remove(gname) - for gname in newgroups: - if not confirm or self.confirm('Grant %s permission of %s to %s?' - % (action, erschema, gname)): - try: - self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' - % (perm, teid), - {'x': gm[gname]}, ask_confirm=False) - except KeyError: - self.error('can grant %s perm to unexistant group %s', - action, gname) - # handle rql expressions - newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action)) - for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, ' - 'T eid %s' % (perm, teid), - ask_confirm=False): - if not expression in newexprs: - if not confirm or self.confirm('Remove %s expression for %s permission of %s?' - % (expression, action, erschema)): - # deleting the relation will delete the expression entity - self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s' - % (perm, teid), - {'x': expreid}, ask_confirm=False) - else: - newexprs.pop(expression) - for expression in newexprs.values(): - expr = expression.expression - if not confirm or self.confirm('Add %s expression for %s permission of %s?' 
- % (expr, action, erschema)): - self.rqlexec('INSERT RQLExpression X: X exprtype %%(exprtype)s, ' - 'X expression %%(expr)s, X mainvars %%(vars)s, T %s X ' - 'WHERE T eid %%(x)s' % perm, - {'expr': expr, 'exprtype': exprtype, - 'vars': u','.join(sorted(expression.mainvars)), - 'x': teid}, - ask_confirm=False) - - def _synchronize_rschema(self, rtype, syncrdefs=True, - syncperms=True, syncprops=True): - """synchronize properties of the persistent relation schema against its - current definition: - - * description - * symmetric, meta - * inlined - * relation definitions if `syncrdefs` - * permissions if `syncperms` - - physical schema changes should be handled by repository's schema hooks - """ - rtype = str(rtype) - if rtype in self._synchronized: - return - if syncrdefs and syncperms and syncprops: - self._synchronized.add(rtype) - rschema = self.fs_schema.rschema(rtype) - reporschema = self.repo.schema.rschema(rtype) - if syncprops: - assert reporschema.eid, reporschema - self.rqlexecall(ss.updaterschema2rql(rschema, reporschema.eid), - ask_confirm=self.verbosity>=2) - if rschema.rule: - if syncperms: - self._synchronize_permissions(rschema, reporschema.eid) - elif syncrdefs: - for subj, obj in rschema.rdefs: - if (subj, obj) not in reporschema.rdefs: - continue - if rschema in VIRTUAL_RTYPES: - continue - self._synchronize_rdef_schema(subj, rschema, obj, - syncprops=syncprops, - syncperms=syncperms) - - def _synchronize_eschema(self, etype, syncrdefs=True, - syncperms=True, syncprops=True): - """synchronize properties of the persistent entity schema against - its current definition: - - * description - * internationalizable, fulltextindexed, indexed, meta - * relations from/to this entity - * __unique_together__ - * permissions if `syncperms` - """ - etype = str(etype) - if etype in self._synchronized: - return - if syncrdefs and syncperms and syncprops: - self._synchronized.add(etype) - repoeschema = self.repo.schema.eschema(etype) - try: - eschema = self.fs_schema.eschema(etype) - except KeyError: - return # XXX somewhat unexpected, no?... 
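For reference, the group names and RQL expressions synchronized by
`_synchronize_permissions` come from the `__permissions__` dictionaries declared
in the filesystem schema. A typical declaration looks like the following sketch
(the `Ticket` entity type and its expression are illustrative, not part of
CubicWeb):

.. sourcecode:: python

    from yams.buildobjs import EntityType, String
    from cubicweb.schema import ERQLExpression

    class Ticket(EntityType):
        """illustrative entity type"""
        __permissions__ = {
            'read':   ('managers', 'users', 'guests'),
            'add':    ('managers', 'users'),
            # owners may update their own tickets, via an ERQLExpression
            'update': ('managers', ERQLExpression('X owned_by U')),
            'delete': ('managers',),
        }
        title = String(required=True, maxsize=128)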
- if syncprops: - repospschema = repoeschema.specializes() - espschema = eschema.specializes() - if repospschema and not espschema: - self.rqlexec('DELETE X specializes Y WHERE X is CWEType, X name %(x)s', - {'x': str(repoeschema)}, ask_confirm=False) - elif not repospschema and espschema: - self.rqlexec('SET X specializes Y WHERE X is CWEType, X name %(x)s, ' - 'Y is CWEType, Y name %(y)s', - {'x': str(repoeschema), 'y': str(espschema)}, - ask_confirm=False) - self.rqlexecall(ss.updateeschema2rql(eschema, repoeschema.eid), - ask_confirm=self.verbosity >= 2) - if syncperms: - self._synchronize_permissions(eschema, repoeschema.eid) - if syncrdefs: - for rschema, targettypes, role in eschema.relation_definitions(True): - if rschema in VIRTUAL_RTYPES: - continue - if role == 'subject': - if not rschema in repoeschema.subject_relations(): - continue - subjtypes, objtypes = [etype], targettypes - else: # role == 'object' - if not rschema in repoeschema.object_relations(): - continue - subjtypes, objtypes = targettypes, [etype] - self._synchronize_rschema(rschema, syncrdefs=False, - syncprops=syncprops, syncperms=syncperms) - if rschema.rule: # rdef for computed rtype are infered hence should not be - # synchronized - continue - reporschema = self.repo.schema.rschema(rschema) - for subj in subjtypes: - for obj in objtypes: - if (subj, obj) not in reporschema.rdefs: - continue - self._synchronize_rdef_schema(subj, rschema, obj, - syncprops=syncprops, syncperms=syncperms) - if syncprops: # need to process __unique_together__ after rdefs were processed - # mappings from constraint name to columns - # filesystem (fs) and repository (repo) wise - fs = {} - repo = {} - for cols in eschema._unique_together or (): - fs[unique_index_name(repoeschema, cols)] = sorted(cols) - schemaentity = self.cnx.entity_from_eid(repoeschema.eid) - for entity in schemaentity.related('constraint_of', 'object', - targettypes=('CWUniqueTogetherConstraint',)).entities(): - repo[entity.name] = sorted(rel.name for rel in entity.relations) - added = set(fs) - set(repo) - removed = set(repo) - set(fs) - - for name in removed: - self.rqlexec('DELETE CWUniqueTogetherConstraint C WHERE C name %(name)s', - {'name': name}) - - def possible_unique_constraint(cols): - for name in cols: - rschema = repoeschema.subjrels.get(name) - if rschema is None: - print('dont add %s unique constraint on %s, missing %s' % ( - ','.join(cols), eschema, name)) - return False - if not (rschema.final or rschema.inlined): - print('dont add %s unique constraint on %s, %s is neither final nor inlined' % ( - ','.join(cols), eschema, name)) - return False - return True - - for name in added: - if possible_unique_constraint(fs[name]): - rql, substs = ss._uniquetogether2rql(eschema, fs[name]) - substs['x'] = repoeschema.eid - substs['name'] = name - self.rqlexec(rql, substs) - - def _synchronize_rdef_schema(self, subjtype, rtype, objtype, - syncperms=True, syncprops=True): - """synchronize properties of the persistent relation definition schema - against its current definition: - * order and other properties - * constraints - * permissions - """ - subjtype, objtype = str(subjtype), str(objtype) - rschema = self.fs_schema.rschema(rtype) - if rschema.rule: - raise ExecutionError('Cannot synchronize a relation definition for a ' - 'computed relation (%s)' % rschema) - reporschema = self.repo.schema.rschema(rschema) - if (subjtype, rschema, objtype) in self._synchronized: - return - if syncperms and syncprops: - self._synchronized.add((subjtype, rschema, objtype)) 
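The `__unique_together__` handling above maps declarations found in the
filesystem schema onto `CWUniqueTogetherConstraint` entities. On the schema
side, such a declaration looks roughly like this sketch (the `Person` entity
type and its attributes are illustrative):

.. sourcecode:: python

    from yams.buildobjs import EntityType, String

    class Person(EntityType):
        """illustrative entity type"""
        firstname = String(maxsize=64)
        surname = String(maxsize=64)
        # multi-column unique constraint, stored as a
        # CWUniqueTogetherConstraint in the instance schema
        __unique_together__ = [('firstname', 'surname')]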
- if rschema.symmetric: - self._synchronized.add((objtype, rschema, subjtype)) - rdef = rschema.rdef(subjtype, objtype) - if rdef.infered: - return # don't try to synchronize infered relation defs - repordef = reporschema.rdef(subjtype, objtype) - confirm = self.verbosity >= 2 - if syncprops: - # properties - self.rqlexecall(ss.updaterdef2rql(rdef, repordef.eid), - ask_confirm=confirm) - # constraints - # 0. eliminate the set of unmodified constraints from the sets of - # old/new constraints - newconstraints = set(rdef.constraints) - oldconstraints = set(repordef.constraints) - unchanged_constraints = newconstraints & oldconstraints - newconstraints -= unchanged_constraints - oldconstraints -= unchanged_constraints - # 1. remove old constraints and update constraints of the same type - # NOTE: don't use rschema.constraint_by_type because it may be - # out of sync with newconstraints when multiple - # constraints of the same type are used - for cstr in oldconstraints: - self.rqlexec('DELETE CWConstraint C WHERE C eid %(x)s', - {'x': cstr.eid}, ask_confirm=confirm) - # 2. add new constraints - cstrtype_map = self.cstrtype_mapping() - self.rqlexecall(ss.constraints2rql(cstrtype_map, newconstraints, - repordef.eid), - ask_confirm=confirm) - if syncperms and not rschema in VIRTUAL_RTYPES: - self._synchronize_permissions(rdef, repordef.eid) - - # base actions ############################################################ - - def checkpoint(self, ask_confirm=True): - """checkpoint action""" - if not ask_confirm or self.confirm('Commit now ?', shell=False): - self.commit() - - def cmd_add_cube(self, cube, update_database=True): - self.cmd_add_cubes( (cube,), update_database) - - def cmd_add_cubes(self, cubes, update_database=True): - """update_database is telling if the database schema should be updated - or if only the relevant eproperty should be inserted (for the case where - a cube has been extracted from an existing instance, so the - cube schema is already in there) - """ - newcubes = super(ServerMigrationHelper, self).cmd_add_cubes(cubes) - if not newcubes: - return - for cube in newcubes: - self.cmd_set_property('system.version.'+cube, - self.config.cube_version(cube)) - # ensure added cube is in config cubes - # XXX worth restoring on error? - if not cube in self.config._cubes: - self.config._cubes += (cube,) - if not update_database: - self.commit() - return - newcubes_schema = self.config.load_schema(construction_mode='non-strict') - # XXX we have to replace fs_schema, used in cmd_add_relation_type - # etc. 
and fsschema of migration script contexts - self.fs_schema = newcubes_schema - self.update_context('fsschema', self.fs_schema) - new = set() - # execute pre-create files - driver = self.repo.system_source.dbdriver - for cube in reversed(newcubes): - self.cmd_install_custom_sql_scripts(cube) - self.cmd_exec_event_script('precreate', cube) - # add new entity and relation types - for rschema in newcubes_schema.relations(): - if not rschema in self.repo.schema: - self.cmd_add_relation_type(rschema.type) - new.add(rschema.type) - toadd = [eschema for eschema in newcubes_schema.entities() - if not eschema in self.repo.schema] - for eschema in order_eschemas(toadd): - self.cmd_add_entity_type(eschema.type) - new.add(eschema.type) - # check if attributes has been added to existing entities - for rschema in newcubes_schema.relations(): - existingschema = self.repo.schema.rschema(rschema.type) - for (fromtype, totype) in rschema.rdefs: - # if rdef already exists or is infered from inheritance, - # don't add it - if (fromtype, totype) in existingschema.rdefs \ - or rschema.rdefs[(fromtype, totype)].infered: - continue - # check we should actually add the relation definition - if not (fromtype in new or totype in new or rschema in new): - continue - self.cmd_add_relation_definition(str(fromtype), rschema.type, - str(totype)) - # execute post-create files - for cube in reversed(newcubes): - with self.cnx.allow_all_hooks_but(): - self.cmd_exec_event_script('postcreate', cube) - self.commit() - - def cmd_drop_cube(self, cube, removedeps=False): - removedcubes = super(ServerMigrationHelper, self).cmd_drop_cube( - cube, removedeps) - if not removedcubes: - return - fsschema = self.fs_schema - removedcubes_schema = self.config.load_schema(construction_mode='non-strict') - reposchema = self.repo.schema - # execute pre-remove files - for cube in reversed(removedcubes): - self.cmd_exec_event_script('preremove', cube) - # remove cubes'entity and relation types - for rschema in fsschema.relations(): - if not rschema in removedcubes_schema and rschema in reposchema: - self.cmd_drop_relation_type(rschema.type) - toremove = [eschema for eschema in fsschema.entities() - if not eschema in removedcubes_schema - and eschema in reposchema] - for eschema in reversed(order_eschemas(toremove)): - self.cmd_drop_entity_type(eschema.type) - for rschema in fsschema.relations(): - if rschema in removedcubes_schema and rschema in reposchema: - # check if attributes/relations has been added to entities from - # other cubes - for fromtype, totype in rschema.rdefs: - if (fromtype, totype) not in removedcubes_schema[rschema.type].rdefs and \ - (fromtype, totype) in reposchema[rschema.type].rdefs: - self.cmd_drop_relation_definition( - str(fromtype), rschema.type, str(totype)) - # execute post-remove files - for cube in reversed(removedcubes): - self.cmd_exec_event_script('postremove', cube) - self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s', - {'pk': u'system.version.'+cube}, ask_confirm=False) - self.commit() - - # schema migration actions ################################################ - - def cmd_add_attribute(self, etype, attrname, attrtype=None, commit=True): - """add a new attribute on the given entity type""" - if attrtype is None: - rschema = self.fs_schema.rschema(attrname) - attrtype = rschema.objects(etype)[0] - self.cmd_add_relation_definition(etype, attrname, attrtype, commit=commit) - - def cmd_drop_attribute(self, etype, attrname, commit=True): - """drop an existing attribute from the given entity type - - 
`attrname` is a string giving the name of the attribute to drop - """ - try: - rschema = self.repo.schema.rschema(attrname) - attrtype = rschema.objects(etype)[0] - except KeyError: - print('warning: attribute %s %s is not known, skip deletion' % ( - etype, attrname)) - else: - self.cmd_drop_relation_definition(etype, attrname, attrtype, - commit=commit) - - def cmd_rename_attribute(self, etype, oldname, newname, commit=True): - """rename an existing attribute of the given entity type - - `oldname` is a string giving the name of the existing attribute - `newname` is a string giving the name of the renamed attribute - """ - eschema = self.fs_schema.eschema(etype) - attrtype = eschema.destination(newname) - # have to commit this first step anyway to get the definition - # actually in the schema - self.cmd_add_attribute(etype, newname, attrtype, commit=True) - # skipp NULL values if the attribute is required - rql = 'SET X %s VAL WHERE X is %s, X %s VAL' % (newname, etype, oldname) - card = eschema.rdef(newname).cardinality[0] - if card == '1': - rql += ', NOT X %s NULL' % oldname - self.rqlexec(rql, ask_confirm=self.verbosity>=2) - # XXX if both attributes fulltext indexed, should skip fti rebuild - # XXX if old attribute was fti indexed but not the new one old value - # won't be removed from the index (this occurs on other kind of - # fulltextindexed change...) - self.cmd_drop_attribute(etype, oldname, commit=commit) - - def cmd_add_entity_type(self, etype, auto=True, commit=True): - """register a new entity type - - in auto mode, automatically register entity's relation where the - targeted type is known - """ - instschema = self.repo.schema - eschema = self.fs_schema.eschema(etype) - if etype in instschema and not (eschema.final and eschema.eid is None): - print('warning: %s already known, skip addition' % etype) - return - confirm = self.verbosity >= 2 - groupmap = self.group_mapping() - cstrtypemap = self.cstrtype_mapping() - # register the entity into CWEType - execute = self.cnx.execute - if eschema.final and eschema not in instschema: - # final types are expected to be in the living schema by default, but they are not if - # the type is defined in a cube that is being added - edef = EntityType(eschema.type, __permissions__=eschema.permissions) - instschema.add_entity_type(edef) - ss.execschemarql(execute, eschema, ss.eschema2rql(eschema, groupmap)) - # add specializes relation if needed - specialized = eschema.specializes() - if specialized: - try: - specialized.eid = instschema[specialized].eid - except KeyError: - raise ExecutionError('trying to add entity type but parent type is ' - 'not yet in the database schema') - self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm) - # register entity's attributes - for rschema, attrschema in eschema.attribute_definitions(): - # ignore those meta relations, they will be automatically added - if rschema.type in META_RTYPES: - continue - if not attrschema.type in instschema: - self.cmd_add_entity_type(attrschema.type, False, False) - if not rschema.type in instschema: - # need to add the relation type and to commit to get it - # actually in the schema - self.cmd_add_relation_type(rschema.type, False, commit=True) - # register relation definition - rdef = self._get_rdef(rschema, eschema, eschema.destination(rschema)) - ss.execschemarql(execute, rdef, ss.rdef2rql(rdef, cstrtypemap, groupmap),) - # take care to newly introduced base class - # XXX some part of this should probably be under the "if auto" block - for spschema in 
eschema.specialized_by(recursive=False): - try: - instspschema = instschema[spschema] - except KeyError: - # specialized entity type not in schema, ignore - continue - if instspschema.specializes() != eschema: - self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s', - {'d': instspschema.eid, 'pn': eschema.type}, - ask_confirm=confirm) - for rschema, tschemas, role in spschema.relation_definitions(True): - for tschema in tschemas: - if not tschema in instschema: - continue - if role == 'subject': - subjschema = spschema - objschema = tschema - if rschema.final and rschema in instspschema.subjrels: - # attribute already set, has_rdef would check if - # it's of the same type, we don't want this so - # simply skip here - continue - elif role == 'object': - subjschema = tschema - objschema = spschema - if (rschema.rdef(subjschema, objschema).infered - or (instschema.has_relation(rschema) and - (subjschema, objschema) in instschema[rschema].rdefs)): - continue - self.cmd_add_relation_definition( - subjschema.type, rschema.type, objschema.type) - if auto: - # we have commit here to get relation types actually in the schema - self.commit() - added = [] - for rschema in eschema.subject_relations(): - # attribute relation have already been processed and - # 'owned_by'/'created_by' will be automatically added - if rschema.final or rschema.type in META_RTYPES: - continue - rtypeadded = rschema.type in instschema - for targetschema in rschema.objects(etype): - # ignore relations where the targeted type is not in the - # current instance schema - targettype = targetschema.type - if not targettype in instschema and targettype != etype: - continue - if not rtypeadded: - # need to add the relation type and to commit to get it - # actually in the schema - added.append(rschema.type) - self.cmd_add_relation_type(rschema.type, False, commit=True) - rtypeadded = True - # register relation definition - # remember this two avoid adding twice non symmetric relation - # such as "Emailthread forked_from Emailthread" - added.append((etype, rschema.type, targettype)) - rdef = self._get_rdef(rschema, eschema, targetschema) - ss.execschemarql(execute, rdef, - ss.rdef2rql(rdef, cstrtypemap, groupmap)) - for rschema in eschema.object_relations(): - if rschema.type in META_RTYPES: - continue - rtypeadded = rschema.type in instschema or rschema.type in added - for targetschema in rschema.subjects(etype): - # ignore relations where the targeted type is not in the - # current instance schema - targettype = targetschema.type - # don't check targettype != etype since in this case the - # relation has already been added as a subject relation - if not targettype in instschema: - continue - if not rtypeadded: - # need to add the relation type and to commit to get it - # actually in the schema - self.cmd_add_relation_type(rschema.type, False, commit=True) - rtypeadded = True - elif (targettype, rschema.type, etype) in added: - continue - # register relation definition - rdef = self._get_rdef(rschema, targetschema, eschema) - ss.execschemarql(execute, rdef, - ss.rdef2rql(rdef, cstrtypemap, groupmap)) - if commit: - self.commit() - - def cmd_drop_entity_type(self, etype, commit=True): - """Drop an existing entity type. - - This will trigger deletion of necessary relation types and definitions. - Note that existing entities of the given type will be deleted without - any hooks called. 
- """ - # XXX what if we delete an entity type which is specialized by other types - # unregister the entity from CWEType - self.rqlexec('DELETE CWEType X WHERE X name %(etype)s', {'etype': etype}, - ask_confirm=self.verbosity>=2) - if commit: - self.commit() - - def cmd_rename_entity_type(self, oldname, newname, attrs=None, commit=True): - """rename an existing entity type in the persistent schema - - `oldname` is a string giving the name of the existing entity type - `newname` is a string giving the name of the renamed entity type - """ - schema = self.repo.schema - if oldname not in schema: - print('warning: entity type %s is unknown, skip renaming' % oldname) - return - # if merging two existing entity types - if newname in schema: - assert oldname in ETYPE_NAME_MAP, \ - '%s should be mapped to %s in ETYPE_NAME_MAP' % (oldname, - newname) - if attrs is None: - attrs = ','.join(SQL_PREFIX + rschema.type - for rschema in schema[newname].subject_relations() - if (rschema.final or rschema.inlined) - and not rschema in PURE_VIRTUAL_RTYPES) - else: - attrs += ('eid', 'creation_date', 'modification_date', 'cwuri') - attrs = ','.join(SQL_PREFIX + attr for attr in attrs) - self.sqlexec('INSERT INTO %s%s(%s) SELECT %s FROM %s%s' % ( - SQL_PREFIX, newname, attrs, attrs, SQL_PREFIX, oldname), - ask_confirm=False) - # old entity type has not been added to the schema, can't gather it - new = schema.eschema(newname) - oldeid = self.rqlexec('CWEType ET WHERE ET name %(on)s', - {'on': oldname}, ask_confirm=False)[0][0] - # backport old type relations to new type - # XXX workflows, other relations? - for r1, rr1 in [('from_entity', 'to_entity'), - ('to_entity', 'from_entity')]: - self.rqlexec('SET X %(r1)s NET WHERE X %(r1)s OET, ' - 'NOT EXISTS(X2 %(r1)s NET, X relation_type XRT, ' - 'X2 relation_type XRT, X %(rr1)s XTE, X2 %(rr1)s XTE), ' - 'OET eid %%(o)s, NET eid %%(n)s' % locals(), - {'o': oldeid, 'n': new.eid}, ask_confirm=False) - # backport is / is_instance_of relation to new type - for rtype in ('is', 'is_instance_of'): - self.sqlexec('UPDATE %s_relation SET eid_to=%s WHERE eid_to=%s' - % (rtype, new.eid, oldeid), ask_confirm=False) - # delete relations using SQL to avoid relations content removal - # triggered by schema synchronization hooks. - for rdeftype in ('CWRelation', 'CWAttribute'): - thispending = set( (eid for eid, in self.sqlexec( - 'SELECT cw_eid FROM cw_%s WHERE cw_from_entity=%%(eid)s OR ' - ' cw_to_entity=%%(eid)s' % rdeftype, - {'eid': oldeid}, ask_confirm=False)) ) - # we should add deleted eids into pending eids else we may - # get some validation error on commit since integrity hooks - # may think some required relation is missing... 
This also ensure - # repository caches are properly cleanup - hook.CleanupDeletedEidsCacheOp.get_instance(self.cnx).union(thispending) - # and don't forget to remove record from system tables - entities = [self.cnx.entity_from_eid(eid, rdeftype) for eid in thispending] - self.repo.system_source.delete_info_multi(self.cnx, entities) - self.sqlexec('DELETE FROM cw_%s WHERE cw_from_entity=%%(eid)s OR ' - 'cw_to_entity=%%(eid)s' % rdeftype, - {'eid': oldeid}, ask_confirm=False) - # now we have to manually cleanup relations pointing to deleted - # entities - thiseids = ','.join(str(eid) for eid in thispending) - for rschema, ttypes, role in schema[rdeftype].relation_definitions(): - if rschema.type in VIRTUAL_RTYPES: - continue - sqls = [] - if role == 'object': - if rschema.inlined: - for eschema in ttypes: - sqls.append('DELETE FROM cw_%s WHERE cw_%s IN(%%s)' - % (eschema, rschema)) - else: - sqls.append('DELETE FROM %s_relation WHERE eid_to IN(%%s)' - % rschema) - elif not rschema.inlined: - sqls.append('DELETE FROM %s_relation WHERE eid_from IN(%%s)' - % rschema) - for sql in sqls: - self.sqlexec(sql % thiseids, ask_confirm=False) - # remove the old type: use rql to propagate deletion - self.rqlexec('DELETE CWEType ET WHERE ET name %(on)s', {'on': oldname}, - ask_confirm=False) - # elif simply renaming an entity type - else: - self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(on)s', - {'newname' : text_type(newname), 'on' : oldname}, - ask_confirm=False) - if commit: - self.commit() - - def cmd_add_relation_type(self, rtype, addrdef=True, commit=True): - """register a new relation type named `rtype`, as described in the - schema description file. - - `addrdef` is a boolean value; when True, it will also add all relations - of the type just added found in the schema definition file. Note that it - implies an intermediate "commit" which commits the relation type - creation (but not the relation definitions themselves, for which - committing depends on the `commit` argument value). 
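The relation type commands follow the same pattern; a sketch with hypothetical names (`Personne`, `concerns` and `todo_by` come from an imaginary cube schema)::

    rename_entity_type('Personne', 'Person')        # plain rename in the persistent schema
    add_relation_type('concerns')                   # also adds the definitions found in the fs schema
    add_relation_type('todo_by', addrdef=False)     # register the type only...
    add_relation_definition('Ticket', 'todo_by', 'CWUser')   # ...then add definitions explicitly
    commit()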
- - """ - reposchema = self.repo.schema - rschema = self.fs_schema.rschema(rtype) - execute = self.cnx.execute - if rtype in reposchema: - print('warning: relation type %s is already known, skip addition' % ( - rtype)) - elif rschema.rule: - gmap = self.group_mapping() - ss.execschemarql(execute, rschema, ss.crschema2rql(rschema, gmap)) - else: - # register the relation into CWRType and insert necessary relation - # definitions - ss.execschemarql(execute, rschema, ss.rschema2rql(rschema, addrdef=False)) - if not rschema.rule and addrdef: - self.commit() - gmap = self.group_mapping() - cmap = self.cstrtype_mapping() - done = set() - for subj, obj in rschema.rdefs: - if not (reposchema.has_entity(subj) - and reposchema.has_entity(obj)): - continue - # symmetric relations appears twice - if (subj, obj) in done: - continue - done.add( (subj, obj) ) - self.cmd_add_relation_definition(subj, rtype, obj) - if rtype in META_RTYPES: - # if the relation is in META_RTYPES, ensure we're adding it for - # all entity types *in the persistent schema*, not only those in - # the fs schema - for etype in self.repo.schema.entities(): - if not etype in self.fs_schema: - # get sample object type and rproperties - objtypes = rschema.objects() - assert len(objtypes) == 1, objtypes - objtype = objtypes[0] - rdef = copy(rschema.rdef(rschema.subjects(objtype)[0], objtype)) - rdef.subject = etype - rdef.rtype = self.repo.schema.rschema(rschema) - rdef.object = self.repo.schema.eschema(objtype) - ss.execschemarql(execute, rdef, - ss.rdef2rql(rdef, cmap, gmap)) - if commit: - self.commit() - - def cmd_drop_relation_type(self, rtype, commit=True): - """Drop an existing relation type. - - Note that existing relations of the given type will be deleted without - any hooks called. - """ - self.rqlexec('DELETE CWRType X WHERE X name %r' % rtype, - ask_confirm=self.verbosity>=2) - self.rqlexec('DELETE CWComputedRType X WHERE X name %r' % rtype, - ask_confirm=self.verbosity>=2) - if commit: - self.commit() - - def cmd_rename_relation_type(self, oldname, newname, commit=True, force=False): - """rename an existing relation - - `oldname` is a string giving the name of the existing relation - `newname` is a string giving the name of the renamed relation - - If `force` is True, proceed even if `oldname` still appears in the fs schema - """ - if oldname in self.fs_schema and not force: - if not self.confirm('Relation %s is still present in the filesystem schema,' - ' do you really want to drop it?' 
% oldname, - default='n'): - return - self.cmd_add_relation_type(newname, commit=True) - if not self.repo.schema[oldname].rule: - self.rqlexec('SET X %s Y WHERE X %s Y' % (newname, oldname), - ask_confirm=self.verbosity>=2) - self.cmd_drop_relation_type(oldname, commit=commit) - - def cmd_add_relation_definition(self, subjtype, rtype, objtype, commit=True): - """register a new relation definition, from its definition found in the - schema definition file - """ - rschema = self.fs_schema.rschema(rtype) - if rschema.rule: - raise ExecutionError('Cannot add a relation definition for a ' - 'computed relation (%s)' % rschema) - if not rtype in self.repo.schema: - self.cmd_add_relation_type(rtype, addrdef=False, commit=True) - if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs: - print('warning: relation %s %s %s is already known, skip addition' % ( - subjtype, rtype, objtype)) - return - rdef = self._get_rdef(rschema, subjtype, objtype) - ss.execschemarql(self.cnx.execute, rdef, - ss.rdef2rql(rdef, self.cstrtype_mapping(), - self.group_mapping())) - if commit: - self.commit() - - def _get_rdef(self, rschema, subjtype, objtype): - return self._set_rdef_eid(rschema.rdefs[(subjtype, objtype)]) - - def _set_rdef_eid(self, rdef): - for attr in ('rtype', 'subject', 'object'): - schemaobj = getattr(rdef, attr) - if getattr(schemaobj, 'eid', None) is None: - schemaobj.eid = self.repo.schema[schemaobj].eid - assert schemaobj.eid is not None, schemaobj - return rdef - - def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True): - """Drop an existing relation definition. - - Note that existing relations of the given definition will be deleted - without any hooks called. - """ - rschema = self.repo.schema.rschema(rtype) - if rschema.rule: - raise ExecutionError('Cannot drop a relation definition for a ' - 'computed relation (%s)' % rschema) - # unregister the definition from CWAttribute or CWRelation - if rschema.final: - etype = 'CWAttribute' - else: - etype = 'CWRelation' - rql = ('DELETE %s X WHERE X from_entity FE, FE name "%s",' - 'X relation_type RT, RT name "%s", X to_entity TE, TE name "%s"') - self.rqlexec(rql % (etype, subjtype, rtype, objtype), - ask_confirm=self.verbosity>=2) - if commit: - self.commit() - - def cmd_sync_schema_props_perms(self, ertype=None, syncperms=True, - syncprops=True, syncrdefs=True, commit=True): - """synchronize the persistent schema against the current definition - schema. - - `ertype` can be : - - None, in that case everything will be synced ; - - a string, it should be an entity type or - a relation type. In that case, only the corresponding - entities / relations will be synced ; - - an rdef object to synchronize only this specific relation definition - - It will synch common stuff between the definition schema and the - actual persistent schema, it won't add/remove any entity or relation. 
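The accepted values for `ertype` give three levels of granularity; for instance::

    sync_schema_props_perms()                                 # everything
    sync_schema_props_perms('CWUser')                         # one entity (or relation) type
    sync_schema_props_perms(('CWUser', 'login', 'String'),
                            syncperms=False)                  # one relation definition, properties only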
- """ - assert syncperms or syncprops, 'nothing to do' - if ertype is not None: - if isinstance(ertype, RelationDefinitionSchema): - ertype = ertype.as_triple() - if isinstance(ertype, (tuple, list)): - assert len(ertype) == 3, 'not a relation definition' - self._synchronize_rdef_schema(ertype[0], ertype[1], ertype[2], - syncperms=syncperms, - syncprops=syncprops) - else: - erschema = self.repo.schema[ertype] - if isinstance(erschema, CubicWebRelationSchema): - self._synchronize_rschema(erschema, syncrdefs=syncrdefs, - syncperms=syncperms, - syncprops=syncprops) - else: - self._synchronize_eschema(erschema, syncrdefs=syncrdefs, - syncperms=syncperms, - syncprops=syncprops) - else: - for etype in self.repo.schema.entities(): - if etype.eid is None: - # not yet added final etype (thing to BigInt defined in - # yams though 3.13 migration not done yet) - continue - self._synchronize_eschema(etype, syncrdefs=syncrdefs, - syncprops=syncprops, syncperms=syncperms) - if commit: - self.commit() - - def cmd_change_relation_props(self, subjtype, rtype, objtype, - commit=True, **kwargs): - """change some properties of a relation definition - - you usually want to use sync_schema_props_perms instead. - """ - assert kwargs - restriction = [] - if subjtype and subjtype != 'Any': - restriction.append('X from_entity FE, FE name "%s"' % subjtype) - if objtype and objtype != 'Any': - restriction.append('X to_entity TE, TE name "%s"' % objtype) - if rtype and rtype != 'Any': - restriction.append('X relation_type RT, RT name "%s"' % rtype) - assert restriction - values = [] - for k, v in kwargs.items(): - values.append('X %s %%(%s)s' % (k, k)) - if PY2 and isinstance(v, str): - kwargs[k] = unicode(v) - rql = 'SET %s WHERE %s' % (','.join(values), ','.join(restriction)) - self.rqlexec(rql, kwargs, ask_confirm=self.verbosity>=2) - if commit: - self.commit() - - def cmd_set_size_constraint(self, etype, rtype, size, commit=True): - """set change size constraint of a string attribute - - if size is None any size constraint will be removed. - - you usually want to use sync_schema_props_perms instead. 
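For instance, assuming a String attribute `title` on a hypothetical `Ticket` type::

    set_size_constraint('Ticket', 'title', 256)    # add or update the SizeConstraint
    set_size_constraint('Ticket', 'title', None)   # remove it entirely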
- """ - oldvalue = None - for constr in self.repo.schema.eschema(etype).rdef(rtype).constraints: - if isinstance(constr, SizeConstraint): - oldvalue = constr.max - if oldvalue == size: - return - if oldvalue is None and not size is None: - ceid = self.rqlexec('INSERT CWConstraint C: C value %(v)s, C cstrtype CT ' - 'WHERE CT name "SizeConstraint"', - {'v': SizeConstraint(size).serialize()}, - ask_confirm=self.verbosity>=2)[0][0] - self.rqlexec('SET X constrained_by C WHERE X from_entity S, X relation_type R, ' - 'S name "%s", R name "%s", C eid %s' % (etype, rtype, ceid), - ask_confirm=self.verbosity>=2) - elif not oldvalue is None: - if not size is None: - self.rqlexec('SET C value %%(v)s WHERE X from_entity S, X relation_type R,' - 'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",' - 'S name "%s", R name "%s"' % (etype, rtype), - {'v': text_type(SizeConstraint(size).serialize())}, - ask_confirm=self.verbosity>=2) - else: - self.rqlexec('DELETE X constrained_by C WHERE X from_entity S, X relation_type R,' - 'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",' - 'S name "%s", R name "%s"' % (etype, rtype), - ask_confirm=self.verbosity>=2) - # cleanup unused constraints - self.rqlexec('DELETE CWConstraint C WHERE NOT X constrained_by C') - if commit: - self.commit() - - # Workflows handling ###################################################### - - def cmd_make_workflowable(self, etype): - """add workflow relations to an entity type to make it workflowable""" - self.cmd_add_relation_definition(etype, 'in_state', 'State') - self.cmd_add_relation_definition(etype, 'custom_workflow', 'Workflow') - self.cmd_add_relation_definition('TrInfo', 'wf_info_for', etype) - - def cmd_add_workflow(self, name, wfof, default=True, commit=False, - ensure_workflowable=True, **kwargs): - """ - create a new workflow and links it to entity types - :type name: unicode - :param name: name of the workflow - - :type wfof: string or list/tuple of strings - :param wfof: entity type(s) having this workflow - - :type default: bool - :param default: tells wether this is the default workflow - for the specified entity type(s); set it to false in - the case of a subworkflow - - :rtype: `Workflow` - """ - wf = self.cmd_create_entity('Workflow', name=text_type(name), - **kwargs) - if not isinstance(wfof, (list, tuple)): - wfof = (wfof,) - def _missing_wf_rel(etype): - return 'missing workflow relations, see make_workflowable(%s)' % etype - for etype in wfof: - eschema = self.repo.schema[etype] - etype = text_type(etype) - if ensure_workflowable: - assert 'in_state' in eschema.subjrels, _missing_wf_rel(etype) - assert 'custom_workflow' in eschema.subjrels, _missing_wf_rel(etype) - assert 'wf_info_for' in eschema.objrels, _missing_wf_rel(etype) - rset = self.rqlexec( - 'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False) - assert rset, 'unexistant entity type %s' % etype - if default: - self.rqlexec( - 'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False) - if commit: - self.commit() - return wf - - def cmd_get_workflow_for(self, etype): - """return default workflow for the given entity type""" - rset = self.rqlexec('Workflow X WHERE ET default_workflow X, ET name %(et)s', - {'et': etype}) - return rset.get_entity(0, 0) - - # CWProperty handling ###################################################### - - def cmd_property_value(self, pkey): - """retreive the site-wide 
persistent property value for the given key. - - To get a user specific property value, use appropriate method on CWUser - instance. - """ - rset = self.rqlexec( - 'Any V WHERE X is CWProperty, X pkey %(k)s, X value V, NOT X for_user U', - {'k': pkey}, ask_confirm=False) - return rset[0][0] - - def cmd_set_property(self, pkey, value): - """set the site-wide persistent property value for the given key to the - given value. - - To set a user specific property value, use appropriate method on CWUser - instance. - """ - value = text_type(value) - try: - prop = self.rqlexec( - 'CWProperty X WHERE X pkey %(k)s, NOT X for_user U', - {'k': text_type(pkey)}, ask_confirm=False).get_entity(0, 0) - except Exception: - self.cmd_create_entity('CWProperty', pkey=text_type(pkey), value=value) - else: - prop.cw_set(value=value) - - # other data migration commands ########################################### - - def cmd_storage_changed(self, etype, attribute): - """migrate entities to a custom storage. The new storage is expected to - be set, it will be temporarily removed for the migration. - """ - from logilab.common.shellutils import ProgressBar - source = self.repo.system_source - storage = source.storage(etype, attribute) - source.unset_storage(etype, attribute) - rset = self.rqlexec('Any X WHERE X is %s' % etype, ask_confirm=False) - pb = ProgressBar(len(rset)) - for entity in rset.entities(): - # fill cache. Do not fetch that attribute using the global rql query - # since we may exhaust memory doing that.... - getattr(entity, attribute) - storage.migrate_entity(entity, attribute) - # remove from entity cache to avoid memory exhaustion - del entity.cw_attr_cache[attribute] - pb.update() - print() - source.set_storage(etype, attribute, storage) - - def cmd_create_entity(self, etype, commit=False, **kwargs): - """add a new entity of the given type""" - entity = self.cnx.create_entity(etype, **kwargs) - if commit: - self.commit() - return entity - - def cmd_find(self, etype, **kwargs): - """find entities of the given type and attribute values""" - return self.cnx.find(etype, **kwargs) - - @deprecated("[3.19] use find(*args, **kwargs).entities() instead") - def cmd_find_entities(self, etype, **kwargs): - """find entities of the given type and attribute values""" - return self.cnx.find(etype, **kwargs).entities() - - @deprecated("[3.19] use find(*args, **kwargs).one() instead") - def cmd_find_one_entity(self, etype, **kwargs): - """find one entity of the given type and attribute values. - - raise :exc:`cubicweb.req.FindEntityError` if can not return one and only - one entity. 
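These commands are also what plain data migrations rely on; a sketch using a hypothetical `Person` entity type (`ui.site-title` being an existing property key)::

    set_property('ui.site-title', u'My site')       # site-wide CWProperty
    p = create_entity('Person', name=u'Alice')
    assert find('Person', name=u'Alice').one().eid == p.eid
    commit()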
- """ - return self.cnx.find(etype, **kwargs).one() - - def cmd_update_etype_fti_weight(self, etype, weight): - if self.repo.system_source.dbdriver == 'postgres': - self.sqlexec('UPDATE appears SET weight=%(weight)s ' - 'FROM entities as X ' - 'WHERE X.eid=appears.uid AND X.type=%(type)s', - {'type': etype, 'weight': weight}, ask_confirm=False) - - def cmd_reindex_entities(self, etypes=None): - """force reindexaction of entities of the given types or of all - indexable entity types - """ - from cubicweb.server.checkintegrity import reindex_entities - reindex_entities(self.repo.schema, self.cnx, etypes=etypes) - - @contextmanager - def cmd_dropped_constraints(self, etype, attrname, cstrtype=None, - droprequired=False): - """context manager to drop constraints temporarily on fs_schema - - `cstrtype` should be a constraint class (or a tuple of classes) - and will be passed to isinstance directly - - For instance:: - - >>> with dropped_constraints('MyType', 'myattr', - ... UniqueConstraint, droprequired=True): - ... add_attribute('MyType', 'myattr') - ... # + instructions to fill MyType.myattr column - ... - >>> - - """ - rdef = self.fs_schema.eschema(etype).rdef(attrname) - original_constraints = rdef.constraints - # remove constraints - if cstrtype: - rdef.constraints = [cstr for cstr in original_constraints - if not (cstrtype and isinstance(cstr, cstrtype))] - if droprequired: - original_cardinality = rdef.cardinality - rdef.cardinality = '?' + rdef.cardinality[1] - yield - # restore original constraints - rdef.constraints = original_constraints - if droprequired: - rdef.cardinality = original_cardinality - # update repository schema - self.cmd_sync_schema_props_perms(rdef, syncperms=False) - - def sqlexec(self, sql, args=None, ask_confirm=True): - """execute the given sql if confirmed - - should only be used for low level stuff undoable with existing higher - level actions - """ - if not ask_confirm or self.confirm('Execute sql: %s ?' % sql): - try: - cu = self.cnx.system_sql(sql, args) - except Exception: - ex = sys.exc_info()[1] - if self.confirm('Error: %s\nabort?' % ex, pdb=True): - raise - return - try: - return cu.fetchall() - except Exception: - # no result to fetch - return - - def rqlexec(self, rql, kwargs=None, build_descr=True, - ask_confirm=False): - """rql action""" - if not isinstance(rql, (tuple, list)): - rql = ( (rql, kwargs), ) - res = None - execute = self.cnx.execute - for rql, kwargs in rql: - if kwargs: - msg = '%s (%s)' % (rql, kwargs) - else: - msg = rql - if not ask_confirm or self.confirm('Execute rql: %s ?' % msg): - try: - res = execute(rql, kwargs, build_descr=build_descr) - except Exception as ex: - if self.confirm('Error: %s\nabort?' % ex, pdb=True): - raise - return res - - def rqliter(self, rql, kwargs=None, ask_confirm=True): - return ForRqlIterator(self, rql, kwargs, ask_confirm) - - # low-level commands to repair broken system database ###################### - - def cmd_change_attribute_type(self, etype, attr, newtype, commit=True): - """low level method to change the type of an entity attribute. 
This is - a quick hack which has some drawback: - * only works when the old type can be changed to the new type by the - underlying rdbms (eg using ALTER TABLE) - * the actual schema won't be updated until next startup - """ - rschema = self.repo.schema.rschema(attr) - oldschema = rschema.objects(etype)[0] - rdef = rschema.rdef(etype, oldschema) - sql = ("UPDATE cw_CWAttribute " - "SET cw_to_entity=(SELECT cw_eid FROM cw_CWEType WHERE cw_name='%s')" - "WHERE cw_eid=%s") % (newtype, rdef.eid) - self.sqlexec(sql, ask_confirm=False) - dbhelper = self.repo.system_source.dbhelper - newrdef = self.fs_schema.rschema(attr).rdef(etype, newtype) - sqltype = sql_type(dbhelper, newrdef) - cursor = self.cnx.cnxset.cu - # consider former cardinality by design, since cardinality change is not handled here - allownull = rdef.cardinality[0] != '1' - dbhelper.change_col_type(cursor, 'cw_%s' % etype, 'cw_%s' % attr, sqltype, allownull) - if commit: - self.commit() - # manually update live schema - eschema = self.repo.schema[etype] - rschema._subj_schemas[eschema].remove(oldschema) - rschema._obj_schemas[oldschema].remove(eschema) - newschema = self.repo.schema[newtype] - rschema._update(eschema, newschema) - rdef.object = newschema - del rschema.rdefs[(eschema, oldschema)] - rschema.rdefs[(eschema, newschema)] = rdef - - def cmd_add_entity_type_table(self, etype, commit=True): - """low level method to create the sql table for an existing entity. - This may be useful on accidental desync between the repository schema - and a sql database - """ - dbhelper = self.repo.system_source.dbhelper - tablesql = eschema2sql(dbhelper, self.repo.schema.eschema(etype), - prefix=SQL_PREFIX) - for sql in tablesql.split(';'): - if sql.strip(): - self.sqlexec(sql) - if commit: - self.commit() - - def cmd_add_relation_type_table(self, rtype, commit=True): - """low level method to create the sql table for an existing relation. - This may be useful on accidental desync between the repository schema - and a sql database - """ - tablesql = rschema2sql(self.repo.schema.rschema(rtype)) - for sql in tablesql.split(';'): - if sql.strip(): - self.sqlexec(sql) - if commit: - self.commit() - - @deprecated("[3.15] use rename_relation_type(oldname, newname)") - def cmd_rename_relation(self, oldname, newname, commit=True): - self.cmd_rename_relation_type(oldname, newname, commit) - - -class ForRqlIterator: - """specific rql iterator to make the loop skipable""" - def __init__(self, helper, rql, kwargs, ask_confirm): - self._h = helper - self.rql = rql - self.kwargs = kwargs - self.ask_confirm = ask_confirm - self._rsetit = None - - def __iter__(self): - return self - - def _get_rset(self): - rql, kwargs = self.rql, self.kwargs - if kwargs: - msg = '%s (%s)' % (rql, kwargs) - else: - msg = rql - if self.ask_confirm: - if not self._h.confirm('Execute rql: %s ?' % msg): - raise StopIteration - try: - return self._h._cw.execute(rql, kwargs) - except Exception as ex: - if self._h.confirm('Error: %s\nabort?' 
% ex): - raise - else: - raise StopIteration - - def __next__(self): - if self._rsetit is not None: - return next(self._rsetit) - rset = self._get_rset() - self._rsetit = iter(rset) - return next(self._rsetit) - - next = __next__ - - def entities(self): - try: - rset = self._get_rset() - except StopIteration: - return [] - return rset.entities() diff -r 058bb3dc685f -r 0b59724cb3f2 server/querier.py --- a/server/querier.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,737 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Helper classes to execute RQL queries on a set of sources, performing -security checking and data aggregation. -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from itertools import repeat - -from six import text_type, string_types, integer_types -from six.moves import range - -from rql import RQLSyntaxError, CoercionError -from rql.stmts import Union -from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj, Relation, Exists, Not -from yams import BASE_TYPES - -from cubicweb import ValidationError, Unauthorized, UnknownEid -from cubicweb.rqlrewrite import RQLRelationRewriter -from cubicweb import Binary, server -from cubicweb.rset import ResultSet - -from cubicweb.utils import QueryCache, RepeatList -from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata -from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction -from cubicweb.server.edition import EditedEntity -from cubicweb.server.ssplanner import SSPlanner -from cubicweb.statsd_logger import statsd_timeit, statsd_c - -ETYPE_PYOBJ_MAP[Binary] = 'Bytes' - - -def empty_rset(rql, args, rqlst=None): - """build an empty result set object""" - return ResultSet([], rql, args, rqlst=rqlst) - -def update_varmap(varmap, selected, table): - """return a sql schema to store RQL query result""" - for i, term in enumerate(selected): - key = term.as_string() - value = '%s.C%s' % (table, i) - if varmap.get(key, value) != value: - raise Exception('variable name conflict on %s: got %s / %s' - % (key, value, varmap)) - varmap[key] = value - -# permission utilities ######################################################## - -def check_no_password_selected(rqlst): - """check that Password entities are not selected""" - for solution in rqlst.solutions: - for var, etype in solution.items(): - if etype == 'Password': - raise Unauthorized('Password selection is not allowed (%s)' % var) - -def term_etype(cnx, term, solution, args): - """return the entity type for the given term (a VariableRef or a Constant - node) - """ - try: - return solution[term.name] - except AttributeError: - return cnx.entity_metas(term.eval(args))['type'] - -def check_relations_read_access(cnx, select, args): - 
"""Raise :exc:`Unauthorized` if the given user doesn't have credentials to - read relations used in the given syntax tree - """ - # use `term_etype` since we've to deal with rewritten constants here, - # when used as an external source by another repository. - # XXX what about local read security w/ those rewritten constants... - # XXX constants can also happen in some queries generated by req.find() - DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS - schema = cnx.repo.schema - user = cnx.user - if select.where is not None: - for rel in select.where.iget_nodes(Relation): - for solution in select.solutions: - # XXX has_text may have specific perm ? - if rel.r_type in READ_ONLY_RTYPES: - continue - rschema = schema.rschema(rel.r_type) - if rschema.final: - eschema = schema.eschema(term_etype(cnx, rel.children[0], - solution, args)) - rdef = eschema.rdef(rschema) - else: - rdef = rschema.rdef(term_etype(cnx, rel.children[0], - solution, args), - term_etype(cnx, rel.children[1].children[0], - solution, args)) - if not user.matching_groups(rdef.get_groups('read')): - if DBG: - print('check_read_access: %s %s does not match %s' % - (rdef, user.groups, rdef.get_groups('read'))) - # XXX rqlexpr not allowed - raise Unauthorized('read', rel.r_type) - if DBG: - print('check_read_access: %s %s matches %s' % - (rdef, user.groups, rdef.get_groups('read'))) - -def get_local_checks(cnx, rqlst, solution): - """Check that the given user has credentials to access data read by the - query and return a dict defining necessary "local checks" (i.e. rql - expression in read permission defined in the schema) where no group grants - him the permission. - - Returned dictionary's keys are variable names and values the rql expressions - for this variable (with the given solution). - - Raise :exc:`Unauthorized` if access is known to be defined, i.e. if there is - no matching group and no local permissions. 
- """ - DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS - schema = cnx.repo.schema - user = cnx.user - localchecks = {} - # iterate on defined_vars and not on solutions to ignore column aliases - for varname in rqlst.defined_vars: - eschema = schema.eschema(solution[varname]) - if eschema.final: - continue - if not user.matching_groups(eschema.get_groups('read')): - erqlexprs = eschema.get_rqlexprs('read') - if not erqlexprs: - ex = Unauthorized('read', solution[varname]) - ex.var = varname - if DBG: - print('check_read_access: %s %s %s %s' % - (varname, eschema, user.groups, eschema.get_groups('read'))) - raise ex - # don't insert security on variable only referenced by 'NOT X relation Y' or - # 'NOT EXISTS(X relation Y)' - varinfo = rqlst.defined_vars[varname].stinfo - if varinfo['selected'] or ( - len([r for r in varinfo['relations'] - if (not schema.rschema(r.r_type).final - and ((isinstance(r.parent, Exists) and r.parent.neged(strict=True)) - or isinstance(r.parent, Not)))]) - != - len(varinfo['relations'])): - localchecks[varname] = erqlexprs - return localchecks - - -# Plans ####################################################################### - -class ExecutionPlan(object): - """the execution model of a rql query, composed of querier steps""" - - def __init__(self, querier, rqlst, args, cnx): - # original rql syntax tree - self.rqlst = rqlst - self.args = args or {} - # cnx executing the query - self.cnx = cnx - # quick reference to the system source - self.syssource = cnx.repo.system_source - # execution steps - self.steps = [] - # various resource accesors - self.querier = querier - self.schema = querier.schema - self.sqlannotate = querier.sqlgen_annotate - self.rqlhelper = cnx.vreg.rqlhelper - - def annotate_rqlst(self): - if not self.rqlst.annotated: - self.rqlhelper.annotate(self.rqlst) - - def add_step(self, step): - """add a step to the plan""" - self.steps.append(step) - - def sqlexec(self, sql, args=None): - return self.syssource.sqlexec(self.cnx, sql, args) - - def execute(self): - """execute a plan and return resulting rows""" - for step in self.steps: - result = step.execute() - # the latest executed step contains the full query result - return result - - def preprocess(self, union, security=True): - """insert security when necessary then annotate rql st for sql generation - - return rqlst to actually execute - """ - cached = None - if security and self.cnx.read_security: - # ensure security is turned of when security is inserted, - # else we may loop for ever... 
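From application code, the checks implemented above simply surface as :exc:`Unauthorized`. For instance, with read security enabled, selecting a `Password` typed variable is always refused (sketch)::

    from cubicweb import Unauthorized
    try:
        cnx.execute('Any P WHERE X is CWUser, X upassword P')
    except Unauthorized:
        pass   # Password selection is not allowed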
- if self.cnx.transaction_data.get('security-rqlst-cache'): - key = self.cache_key - else: - key = None - if key is not None and key in self.cnx.transaction_data: - cachedunion, args = self.cnx.transaction_data[key] - union.children[:] = [] - for select in cachedunion.children: - union.append(select) - union.has_text_query = cachedunion.has_text_query - args.update(self.args) - self.args = args - cached = True - else: - with self.cnx.security_enabled(read=False): - noinvariant = self._insert_security(union) - if key is not None: - self.cnx.transaction_data[key] = (union, self.args) - else: - noinvariant = () - if cached is None: - self.rqlhelper.simplify(union) - self.sqlannotate(union) - set_qdata(self.schema.rschema, union, noinvariant) - if union.has_text_query: - self.cache_key = None - - def _insert_security(self, union): - noinvariant = set() - for select in union.children[:]: - for subquery in select.with_: - self._insert_security(subquery.query) - localchecks, restricted = self._check_permissions(select) - if any(localchecks): - self.cnx.rql_rewriter.insert_local_checks( - select, self.args, localchecks, restricted, noinvariant) - return noinvariant - - def _check_permissions(self, rqlst): - """Return a dict defining "local checks", i.e. RQLExpression defined in - the schema that should be inserted in the original query, together with - a set of variable names which requires some security to be inserted. - - Solutions where a variable has a type which the user can't definitly - read are removed, else if the user *may* read it (i.e. if an rql - expression is defined for the "read" permission of the related type), - the local checks dict is updated. - - The local checks dict has entries for each different local check - necessary, with associated solutions as value, a local check being - defined by a list of 2-uple (variable name, rql expressions) for each - variable which has to be checked. Solutions which don't require local - checks will be associated to the empty tuple key. - - Note rqlst should not have been simplified at this point. - """ - cnx = self.cnx - msgs = [] - # dict(varname: eid), allowing to check rql expression for variables - # which have a known eid - varkwargs = {} - if not cnx.transaction_data.get('security-rqlst-cache'): - for var in rqlst.defined_vars.values(): - if var.stinfo['constnode'] is not None: - eid = var.stinfo['constnode'].eval(self.args) - varkwargs[var.name] = int(eid) - # dictionary of variables restricted for security reason - localchecks = {} - restricted_vars = set() - newsolutions = [] - for solution in rqlst.solutions: - try: - localcheck = get_local_checks(cnx, rqlst, solution) - except Unauthorized as ex: - msg = 'remove %s from solutions since %s has no %s access to %s' - msg %= (solution, cnx.user.login, ex.args[0], ex.args[1]) - msgs.append(msg) - LOGGER.info(msg) - else: - newsolutions.append(solution) - # try to benefit of rqlexpr.check cache for entities which - # are specified by eid in query'args - for varname, eid in varkwargs.items(): - try: - rqlexprs = localcheck.pop(varname) - except KeyError: - continue - # if entity has been added in the current transaction, the - # user can read it whatever rql expressions are associated - # to its type - if cnx.added_in_transaction(eid): - continue - for rqlexpr in rqlexprs: - if rqlexpr.check(cnx, eid): - break - else: - raise Unauthorized('No read acces on %r with eid %i.' 
% (var, eid)) - # mark variables protected by an rql expression - restricted_vars.update(localcheck) - # turn local check into a dict key - localcheck = tuple(sorted(localcheck.items())) - localchecks.setdefault(localcheck, []).append(solution) - # raise Unautorized exception if the user can't access to any solution - if not newsolutions: - raise Unauthorized('\n'.join(msgs)) - # if there is some message, solutions have been modified and must be - # reconsidered by the syntax treee - if msgs: - rqlst.set_possible_types(newsolutions) - return localchecks, restricted_vars - - def finalize(self, select, solutions, insertedvars): - rqlst = Union() - rqlst.append(select) - for mainvarname, rschema, newvarname in insertedvars: - nvartype = str(rschema.objects(solutions[0][mainvarname])[0]) - for sol in solutions: - sol[newvarname] = nvartype - select.clean_solutions(solutions) - add_types_restriction(self.schema, select) - self.rqlhelper.annotate(rqlst) - self.preprocess(rqlst, security=False) - return rqlst - - -class InsertPlan(ExecutionPlan): - """an execution model specific to the INSERT rql query - """ - - def __init__(self, querier, rqlst, args, cnx): - ExecutionPlan.__init__(self, querier, rqlst, args, cnx) - # save originally selected variable, we may modify this - # dictionary for substitution (query parameters) - self.selected = rqlst.selection - # list of rows of entities definition (ssplanner.EditedEntity) - self.e_defs = [[]] - # list of new relation definition (3-uple (from_eid, r_type, to_eid) - self.r_defs = set() - # indexes to track entity definitions bound to relation definitions - self._r_subj_index = {} - self._r_obj_index = {} - self._expanded_r_defs = {} - - def add_entity_def(self, edef): - """add an entity definition to build""" - self.e_defs[-1].append(edef) - - def add_relation_def(self, rdef): - """add an relation definition to build""" - self.r_defs.add(rdef) - if not isinstance(rdef[0], int): - self._r_subj_index.setdefault(rdef[0], []).append(rdef) - if not isinstance(rdef[2], int): - self._r_obj_index.setdefault(rdef[2], []).append(rdef) - - def substitute_entity_def(self, edef, edefs): - """substitute an incomplete entity definition by a list of complete - equivalents - - e.g. 
on queries such as :: - INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y - WHERE U login 'admin', U login N - - X will be inserted as many times as U exists, and so the X travaille Y - relations as to be added as many time as X is inserted - """ - if not edefs or not self.e_defs: - # no result, no entity will be created - self.e_defs = () - return - # first remove the incomplete entity definition - colidx = self.e_defs[0].index(edef) - for i, row in enumerate(self.e_defs[:]): - self.e_defs[i][colidx] = edefs[0] - samplerow = self.e_defs[i] - for edef_ in edefs[1:]: - row = [ed.clone() for i, ed in enumerate(samplerow) - if i != colidx] - row.insert(colidx, edef_) - self.e_defs.append(row) - # now, see if this entity def is referenced as subject in some relation - # definition - if edef in self._r_subj_index: - for rdef in self._r_subj_index[edef]: - expanded = self._expanded(rdef) - result = [] - for exp_rdef in expanded: - for edef_ in edefs: - result.append( (edef_, exp_rdef[1], exp_rdef[2]) ) - self._expanded_r_defs[rdef] = result - # and finally, see if this entity def is referenced as object in some - # relation definition - if edef in self._r_obj_index: - for rdef in self._r_obj_index[edef]: - expanded = self._expanded(rdef) - result = [] - for exp_rdef in expanded: - for edef_ in edefs: - result.append( (exp_rdef[0], exp_rdef[1], edef_) ) - self._expanded_r_defs[rdef] = result - - def _expanded(self, rdef): - """return expanded value for the given relation definition""" - try: - return self._expanded_r_defs[rdef] - except KeyError: - self.r_defs.remove(rdef) - return [rdef] - - def relation_defs(self): - """return the list for relation definitions to insert""" - for rdefs in self._expanded_r_defs.values(): - for rdef in rdefs: - yield rdef - for rdef in self.r_defs: - yield rdef - - def insert_entity_defs(self): - """return eids of inserted entities in a suitable form for the resulting - result set, e.g.: - - e.g. 
on queries such as :: - INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y - WHERE U login 'admin', U login N - - if there is two entities matching U, the result set will look like - [(eidX1, eidY1), (eidX2, eidY2)] - """ - cnx = self.cnx - repo = cnx.repo - results = [] - for row in self.e_defs: - results.append([repo.glob_add_entity(cnx, edef) - for edef in row]) - return results - - def insert_relation_defs(self): - cnx = self.cnx - repo = cnx.repo - edited_entities = {} - relations = {} - for subj, rtype, obj in self.relation_defs(): - # if a string is given into args instead of an int, we get it here - if isinstance(subj, string_types): - subj = int(subj) - elif not isinstance(subj, integer_types): - subj = subj.entity.eid - if isinstance(obj, string_types): - obj = int(obj) - elif not isinstance(obj, integer_types): - obj = obj.entity.eid - if repo.schema.rschema(rtype).inlined: - if subj not in edited_entities: - entity = cnx.entity_from_eid(subj) - edited = EditedEntity(entity) - edited_entities[subj] = edited - else: - edited = edited_entities[subj] - edited.edited_attribute(rtype, obj) - else: - if rtype in relations: - relations[rtype].append((subj, obj)) - else: - relations[rtype] = [(subj, obj)] - repo.glob_add_relations(cnx, relations) - for edited in edited_entities.values(): - repo.glob_update_entity(cnx, edited) - - -class QuerierHelper(object): - """helper class to execute rql queries, putting all things together""" - - def __init__(self, repo, schema): - # system info helper - self._repo = repo - # instance schema - self.set_schema(schema) - - def set_schema(self, schema): - self.schema = schema - repo = self._repo - # rql st and solution cache. - self._rql_cache = QueryCache(repo.config['rql-cache-size']) - # rql cache key cache. 
Don't bother using a Cache instance: we should - # have a limited number of queries in there, since there are no entries - # in this cache for user queries (which have no args) - self._rql_ck_cache = {} - # some cache usage stats - self.cache_hit, self.cache_miss = 0, 0 - # rql parsing / analysing helper - self.solutions = repo.vreg.solutions - rqlhelper = repo.vreg.rqlhelper - # set backend on the rql helper, will be used for function checking - rqlhelper.backend = repo.config.system_source_config['db-driver'] - self._parse = rqlhelper.parse - self._annotate = rqlhelper.annotate - # rql planner - self._planner = SSPlanner(schema, rqlhelper) - # sql generation annotator - self.sqlgen_annotate = SQLGenAnnotator(schema).annotate - - def parse(self, rql, annotate=False): - """return a rql syntax tree for the given rql""" - try: - return self._parse(text_type(rql), annotate=annotate) - except UnicodeError: - raise RQLSyntaxError(rql) - - def plan_factory(self, rqlst, args, cnx): - """create an execution plan for an INSERT RQL query""" - if rqlst.TYPE == 'insert': - return InsertPlan(self, rqlst, args, cnx) - return ExecutionPlan(self, rqlst, args, cnx) - - @statsd_timeit - def execute(self, cnx, rql, args=None, build_descr=True): - """execute a rql query, return resulting rows and their description in - a `ResultSet` object - - * `rql` should be a Unicode string or a plain ASCII string - * `args` the optional parameters dictionary associated to the query - * `build_descr` is a boolean flag indicating if the description should - be built on select queries (if false, the description will be en empty - list) - - on INSERT queries, there will be one row with the eid of each inserted - entity - - result for DELETE and SET queries is undefined yet - - to maximize the rql parsing/analyzing cache performance, you should - always use substitute arguments in queries (i.e. avoid query such as - 'Any X WHERE X eid 123'!) - """ - if server.DEBUG & (server.DBG_RQL | server.DBG_SQL): - if server.DEBUG & (server.DBG_MORE | server.DBG_SQL): - print('*'*80) - print('querier input', repr(rql), repr(args)) - # parse the query and binds variables - cachekey = (rql,) - try: - if args: - # search for named args in query which are eids (hence - # influencing query's solutions) - eidkeys = self._rql_ck_cache[rql] - if eidkeys: - # if there are some, we need a better cache key, eg (rql + - # entity type of each eid) - try: - cachekey = self._repo.querier_cache_key(cnx, rql, - args, eidkeys) - except UnknownEid: - # we want queries such as "Any X WHERE X eid 9999" - # return an empty result instead of raising UnknownEid - return empty_rset(rql, args) - rqlst = self._rql_cache[cachekey] - self.cache_hit += 1 - statsd_c('cache_hit') - except KeyError: - self.cache_miss += 1 - statsd_c('cache_miss') - rqlst = self.parse(rql) - try: - # compute solutions for rqlst and return named args in query - # which are eids. 
Notice that if you may not need `eidkeys`, we - # have to compute solutions anyway (kept as annotation on the - # tree) - eidkeys = self.solutions(cnx, rqlst, args) - except UnknownEid: - # we want queries such as "Any X WHERE X eid 9999" return an - # empty result instead of raising UnknownEid - return empty_rset(rql, args) - if args and rql not in self._rql_ck_cache: - self._rql_ck_cache[rql] = eidkeys - if eidkeys: - cachekey = self._repo.querier_cache_key(cnx, rql, args, - eidkeys) - self._rql_cache[cachekey] = rqlst - if rqlst.TYPE != 'select': - if cnx.read_security: - check_no_password_selected(rqlst) - cachekey = None - else: - if cnx.read_security: - for select in rqlst.children: - check_no_password_selected(select) - check_relations_read_access(cnx, select, args) - # on select query, always copy the cached rqlst so we don't have to - # bother modifying it. This is not necessary on write queries since - # a new syntax tree is built from them. - rqlst = rqlst.copy() - # Rewrite computed relations - rewriter = RQLRelationRewriter(cnx) - rewriter.rewrite(rqlst, args) - self._annotate(rqlst) - if args: - # different SQL generated when some argument is None or not (IS - # NULL). This should be considered when computing sql cache key - cachekey += tuple(sorted([k for k, v in args.items() - if v is None])) - # make an execution plan - plan = self.plan_factory(rqlst, args, cnx) - plan.cache_key = cachekey - self._planner.build_plan(plan) - # execute the plan - try: - results = plan.execute() - except (Unauthorized, ValidationError): - # getting an Unauthorized/ValidationError exception means the - # transaction must be rolled back - # - # notes: - # * we should not reset the connections set here, since we don't want the - # connection to loose it during processing - # * don't rollback if we're in the commit process, will be handled - # by the connection - if cnx.commit_state is None: - cnx.commit_state = 'uncommitable' - raise - # build a description for the results if necessary - descr = () - if build_descr: - if rqlst.TYPE == 'select': - # sample selection - if len(rqlst.children) == 1 and len(rqlst.children[0].solutions) == 1: - # easy, all lines are identical - selected = rqlst.children[0].selection - solution = rqlst.children[0].solutions[0] - description = _make_description(selected, args, solution) - descr = RepeatList(len(results), tuple(description)) - else: - # hard, delegate the work :o) - descr = manual_build_descr(cnx, rqlst, args, results) - elif rqlst.TYPE == 'insert': - # on insert plan, some entities may have been auto-casted, - # so compute description manually even if there is only - # one solution - basedescr = [None] * len(plan.selected) - todetermine = list(zip(range(len(plan.selected)), repeat(False))) - descr = _build_descr(cnx, results, basedescr, todetermine) - # FIXME: get number of affected entities / relations on non - # selection queries ? 
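As the docstring above insists, substitute arguments keep the parsing/analysis cache effective; a sketch::

    # good: one cache entry for the query, the eid is passed as an argument
    rset = cnx.execute('Any X WHERE X eid %(x)s', {'x': 1234})
    # bad: every distinct eid yields a new query string, hence a new cache entry
    rset = cnx.execute('Any X WHERE X eid 1234')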
- # return a result set object - return ResultSet(results, rql, args, descr) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -from logging import getLogger -from cubicweb import set_log_methods -LOGGER = getLogger('cubicweb.querier') -set_log_methods(QuerierHelper, LOGGER) - - -def manual_build_descr(cnx, rqlst, args, result): - """build a description for a given result by analysing each row - - XXX could probably be done more efficiently during execution of query - """ - # not so easy, looks for variable which changes from one solution - # to another - unstables = rqlst.get_variable_indices() - basedescr = [] - todetermine = [] - for i in range(len(rqlst.children[0].selection)): - ttype = _selection_idx_type(i, rqlst, args) - if ttype is None or ttype == 'Any': - ttype = None - isfinal = True - else: - isfinal = ttype in BASE_TYPES - if ttype is None or i in unstables: - basedescr.append(None) - todetermine.append( (i, isfinal) ) - else: - basedescr.append(ttype) - if not todetermine: - return RepeatList(len(result), tuple(basedescr)) - return _build_descr(cnx, result, basedescr, todetermine) - -def _build_descr(cnx, result, basedescription, todetermine): - description = [] - entity_metas = cnx.entity_metas - todel = [] - for i, row in enumerate(result): - row_descr = basedescription[:] - for index, isfinal in todetermine: - value = row[index] - if value is None: - # None value inserted by an outer join, no type - row_descr[index] = None - continue - if isfinal: - row_descr[index] = etype_from_pyobj(value) - else: - try: - row_descr[index] = entity_metas(value)['type'] - except UnknownEid: - cnx.error('wrong eid %s in repository, you should ' - 'db-check the database' % value) - todel.append(i) - break - else: - description.append(tuple(row_descr)) - for i in reversed(todel): - del result[i] - return description - -def _make_description(selected, args, solution): - """return a description for a result set""" - description = [] - for term in selected: - description.append(term.get_type(solution, args)) - return description - -def _selection_idx_type(i, rqlst, args): - """try to return type of term at index `i` of the rqlst's selection""" - for select in rqlst.children: - term = select.selection[i] - for solution in select.solutions: - try: - ttype = term.get_type(solution, args) - if ttype is not None: - return ttype - except CoercionError: - return None diff -r 058bb3dc685f -r 0b59724cb3f2 server/repository.py --- a/server/repository.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1133 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Defines the central class for the CubicWeb RQL server: the repository. - -The repository is an abstraction allowing execution of rql queries against -data sources. Most of the work is actually done in helper classes. The -repository mainly: - -* brings these classes all together to provide a single access - point to a cubicweb instance. -* handles session management -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import threading -from warnings import warn -from itertools import chain -from time import time, localtime, strftime -from contextlib import contextmanager - -from six.moves import range, queue - -from logilab.common.decorators import cached, clear_cache -from logilab.common.deprecation import deprecated - -from yams import BadSchemaDefinition -from rql.utils import rqlvar_maker - -from cubicweb import (CW_MIGRATION_MAP, QueryError, - UnknownEid, AuthenticationError, ExecutionError, - BadConnectionId, ValidationError, Unauthorized, - UniqueTogetherError, onevent, ViolatedConstraint) -from cubicweb import cwvreg, schema, server -from cubicweb.server import ShuttingDown, utils, hook, querier, sources -from cubicweb.server.session import Session, InternalManager - -NO_CACHE_RELATIONS = set( [('owned_by', 'object'), - ('created_by', 'object'), - ('cw_source', 'object'), - ]) - -def prefill_entity_caches(entity): - cnx = entity._cw - # prefill entity relation caches - for rschema in entity.e_schema.subject_relations(): - rtype = str(rschema) - if rtype in schema.VIRTUAL_RTYPES or (rtype, 'subject') in NO_CACHE_RELATIONS: - continue - if rschema.final: - entity.cw_attr_cache.setdefault(rtype, None) - else: - entity.cw_set_relation_cache(rtype, 'subject', - cnx.empty_rset()) - for rschema in entity.e_schema.object_relations(): - rtype = str(rschema) - if rtype in schema.VIRTUAL_RTYPES or (rtype, 'object') in NO_CACHE_RELATIONS: - continue - entity.cw_set_relation_cache(rtype, 'object', cnx.empty_rset()) - -def del_existing_rel_if_needed(cnx, eidfrom, rtype, eidto): - """delete existing relation when adding a new one if card is 1 or ? - - have to be done once the new relation has been inserted to avoid having - an entity without a relation for some time - - this kind of behaviour has to be done in the repository so we don't have - hooks order hazardness - """ - # skip that if integrity explicitly disabled - if not cnx.is_hook_category_activated('activeintegrity'): - return - rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto) - card = rdef.cardinality - # one may be tented to check for neweids but this may cause more than one - # relation even with '1?' cardinality if thoses relations are added in the - # same transaction where the entity is being created. This never occurs from - # the web interface but may occurs during test or dbapi connection (though - # not expected for this). So: don't do it, we pretend to ensure repository - # consistency. 
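This is what makes setting a relation with '1' or '?' cardinality behave as a replacement. A sketch, assuming a workflowable type whose `in_state` relation has subject cardinality '1', with `ticket_eid` and `done_eid` standing for pre-existing eids::

    # the previous in_state relation of X is removed automatically when the new one is added
    cnx.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
                {'x': ticket_eid, 's': done_eid})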
- # - # notes: - # * inlined relations will be implicitly deleted for the subject entity - # * we don't want read permissions to be applied but we want delete - # permission to be checked - if card[0] in '1?': - with cnx.security_enabled(read=False): - cnx.execute('DELETE X %s Y WHERE X eid %%(x)s, ' - 'NOT Y eid %%(y)s' % rtype, - {'x': eidfrom, 'y': eidto}) - if card[1] in '1?': - with cnx.security_enabled(read=False): - cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s, ' - 'NOT X eid %%(x)s' % rtype, - {'x': eidfrom, 'y': eidto}) - - -def preprocess_inlined_relations(cnx, entity): - """when an entity is added, check if it has some inlined relation which - requires to be extrated for proper call hooks - """ - relations = [] - activeintegrity = cnx.is_hook_category_activated('activeintegrity') - eschema = entity.e_schema - for attr in entity.cw_edited: - rschema = eschema.subjrels[attr] - if not rschema.final: # inlined relation - value = entity.cw_edited[attr] - relations.append((attr, value)) - cnx.update_rel_cache_add(entity.eid, attr, value) - rdef = cnx.rtype_eids_rdef(attr, entity.eid, value) - if rdef.cardinality[1] in '1?' and activeintegrity: - with cnx.security_enabled(read=False): - cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s' % attr, - {'x': entity.eid, 'y': value}) - return relations - - -class NullEventBus(object): - def publish(self, msg): - pass - - def add_subscription(self, topic, callback): - pass - - def start(self): - pass - - def stop(self): - pass - - -class Repository(object): - """a repository provides access to a set of persistent storages for - entities and relations - """ - - def __init__(self, config, tasks_manager=None, vreg=None): - self.config = config - if vreg is None: - vreg = cwvreg.CWRegistryStore(config) - self.vreg = vreg - self._tasks_manager = tasks_manager - - self.app_instances_bus = NullEventBus() - self.info('starting repository from %s', self.config.apphome) - # dictionary of opened sessions - self._sessions = {} - - # list of functions to be called at regular interval - # list of running threads - self._running_threads = [] - # initial schema, should be build or replaced latter - self.schema = schema.CubicWebSchema(config.appid) - self.vreg.schema = self.schema # until actual schema is loaded... - # shutdown flag - self.shutting_down = False - # sources (additional sources info in the system database) - self.system_source = self.get_source('native', 'system', - config.system_source_config.copy()) - self.sources_by_uri = {'system': self.system_source} - # querier helper, need to be created after sources initialization - self.querier = querier.QuerierHelper(self, self.schema) - # cache eid -> (type, extid, actual source) - self._type_source_cache = {} - # cache extid -> eid - self._extid_cache = {} - # open some connection sets - if config.init_cnxset_pool: - self.init_cnxset_pool() - # the hooks manager - self.hm = hook.HooksManager(self.vreg) - # registry hook to fix user class on registry reload - @onevent('after-registry-reload', self) - def fix_user_classes(self): - # After registry reload the 'CWUser' class used for CWEtype - # changed. So any existing user object have a different class than - # the new loaded one. We are hot fixing this. 
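`NullEventBus` above shows the minimal interface expected from `app_instances_bus`: `publish`, `add_subscription`, `start` and `stop`. A purely illustrative stand-in that records published messages (how a custom bus gets installed is not shown here)::

    class RecordingEventBus(object):
        # collect published messages, e.g. to assert on them in tests (sketch)
        def __init__(self):
            self.published = []
        def publish(self, msg):
            self.published.append(msg)
        def add_subscription(self, topic, callback):
            pass
        def start(self):
            pass
        def stop(self):
            pass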
- usercls = self.vreg['etypes'].etype_class('CWUser') - for session in self._sessions.values(): - if not isinstance(session.user, InternalManager): - session.user.__class__ = usercls - - def init_cnxset_pool(self): - """should be called bootstrap_repository, as this is what it does""" - config = self.config - self._cnxsets_pool = queue.Queue() - # 0. init a cnxset that will be used to fetch bootstrap information from - # the database - self._cnxsets_pool.put_nowait(self.system_source.wrapped_connection()) - # 1. set used cubes - if config.creating or not config.read_instance_schema: - config.bootstrap_cubes() - else: - self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False) - config.init_cubes(self.get_cubes()) - # 2. load schema - if config.quick_start: - # quick start: only to get a minimal repository to get cubes - # information (eg dump/restore/...) - # - # restrict appobject_path to only load hooks and entity classes in - # the registry - config.cube_appobject_path = set(('hooks', 'entities')) - config.cubicweb_appobject_path = set(('hooks', 'entities')) - # limit connections pool to 1 - config['connections-pool-size'] = 1 - if config.quick_start or config.creating or not config.read_instance_schema: - # load schema from the file system - if not config.creating: - self.info("set fs instance'schema") - self.set_schema(config.load_schema(expand_cubes=True)) - else: - # normal start: load the instance schema from the database - self.info('loading schema from the repository') - self.set_schema(self.deserialize_schema()) - # 3. initialize data sources - if config.creating: - # call init_creating so that for instance native source can - # configurate tsearch according to postgres version - self.system_source.init_creating() - else: - self.init_sources_from_database() - if 'CWProperty' in self.schema: - self.vreg.init_properties(self.properties()) - # 4. 
close initialization connection set and reopen fresh ones for - # proper initialization - self._get_cnxset().close(True) - self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue) - for i in range(config['connections-pool-size']): - self.cnxsets.append(self.system_source.wrapped_connection()) - self._cnxsets_pool.put_nowait(self.cnxsets[-1]) - - # internals ############################################################### - - def init_sources_from_database(self): - self.sources_by_eid = {} - if self.config.quick_start \ - or not 'CWSource' in self.schema: # # 3.10 migration - self.system_source.init_creating() - return - with self.internal_cnx() as cnx: - # FIXME: sources should be ordered (add_entity priority) - for sourceent in cnx.execute( - 'Any S, SN, SA, SC WHERE S is_instance_of CWSource, ' - 'S name SN, S type SA, S config SC').entities(): - if sourceent.name == 'system': - self.system_source.eid = sourceent.eid - self.sources_by_eid[sourceent.eid] = self.system_source - self.system_source.init(True, sourceent) - continue - self.add_source(sourceent) - - def _clear_planning_caches(self): - clear_cache(self, 'source_defs') - - def add_source(self, sourceent): - try: - source = self.get_source(sourceent.type, sourceent.name, - sourceent.host_config, sourceent.eid) - except RuntimeError: - if self.config.repairing: - self.exception('cant setup source %s, skipped', sourceent.name) - return - raise - self.sources_by_eid[sourceent.eid] = source - self.sources_by_uri[sourceent.name] = source - if self.config.source_enabled(source): - # call source's init method to complete their initialisation if - # needed (for instance looking for persistent configuration using an - # internal session, which is not possible until connections sets have been - # initialized) - source.init(True, sourceent) - else: - source.init(False, sourceent) - self._clear_planning_caches() - - def remove_source(self, uri): - source = self.sources_by_uri.pop(uri) - del self.sources_by_eid[source.eid] - self._clear_planning_caches() - - def get_source(self, type, uri, source_config, eid=None): - # set uri and type in source config so it's available through - # source_defs() - source_config['uri'] = uri - source_config['type'] = type - return sources.get_source(type, source_config, self, eid) - - def set_schema(self, schema, resetvreg=True): - self.info('set schema %s %#x', schema.name, id(schema)) - if resetvreg: - # trigger full reload of all appobjects - self.vreg.set_schema(schema) - else: - self.vreg._set_schema(schema) - self.querier.set_schema(schema) - for source in self.sources_by_uri.values(): - source.set_schema(schema) - self.schema = schema - - def deserialize_schema(self): - """load schema from the database""" - from cubicweb.server.schemaserial import deserialize_schema - appschema = schema.CubicWebSchema(self.config.appid) - self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema)) - with self.internal_cnx() as cnx: - try: - deserialize_schema(appschema, cnx) - except BadSchemaDefinition: - raise - except Exception as ex: - import traceback - traceback.print_exc() - raise Exception('Is the database initialised ? (cause: %s)' % ex) - return appschema - - def _prepare_startup(self): - """Prepare "Repository as a server" for startup. - - * trigger server startup hook, - * register session clean up task. 
- """ - if not (self.config.creating or self.config.repairing - or self.config.quick_start): - # call instance level initialisation hooks - self.hm.call_hooks('server_startup', repo=self) - # register a task to cleanup expired session - self.cleanup_session_time = self.config['cleanup-session-time'] or 60 * 60 * 24 - assert self.cleanup_session_time > 0 - cleanup_session_interval = min(60*60, self.cleanup_session_time / 3) - assert self._tasks_manager is not None, "This Repository is not intended to be used as a server" - self._tasks_manager.add_looping_task(cleanup_session_interval, - self.clean_sessions) - - def start_looping_tasks(self): - """Actual "Repository as a server" startup. - - * trigger server startup hook, - * register session clean up task, - * start all tasks. - - XXX Other startup related stuffs are done elsewhere. In Repository - XXX __init__ or in external codes (various server managers). - """ - self._prepare_startup() - assert self._tasks_manager is not None, "This Repository is not intended to be used as a server" - self._tasks_manager.start() - - def looping_task(self, interval, func, *args): - """register a function to be called every `interval` seconds. - - looping tasks can only be registered during repository initialization, - once done this method will fail. - """ - assert self._tasks_manager is not None, "This Repository is not intended to be used as a server" - self._tasks_manager.add_looping_task(interval, func, *args) - - def threaded_task(self, func): - """start function in a separated thread""" - utils.RepoThread(func, self._running_threads).start() - - #@locked - def _get_cnxset(self): - try: - return self._cnxsets_pool.get(True, timeout=5) - except queue.Empty: - raise Exception('no connections set available after 5 secs, probably either a ' - 'bug in code (too many uncommited/rolled back ' - 'connections) or too much load on the server (in ' - 'which case you can try to set a bigger ' - 'connections pool size)') - - def _free_cnxset(self, cnxset): - self._cnxsets_pool.put_nowait(cnxset) - - def shutdown(self): - """called on server stop event to properly close opened sessions and - connections - """ - assert not self.shutting_down, 'already shutting down' - if not (self.config.creating or self.config.repairing - or self.config.quick_start): - # then, the system source is still available - self.hm.call_hooks('before_server_shutdown', repo=self) - self.shutting_down = True - self.system_source.shutdown() - if self._tasks_manager is not None: - self._tasks_manager.stop() - if not (self.config.creating or self.config.repairing - or self.config.quick_start): - self.hm.call_hooks('server_shutdown', repo=self) - for thread in self._running_threads: - self.info('waiting thread %s...', thread.getName()) - thread.join() - self.info('thread %s finished', thread.getName()) - self.close_sessions() - while not self._cnxsets_pool.empty(): - cnxset = self._cnxsets_pool.get_nowait() - try: - cnxset.close(True) - except Exception: - self.exception('error while closing %s' % cnxset) - continue - hits, misses = self.querier.cache_hit, self.querier.cache_miss - try: - self.info('rql st cache hit/miss: %s/%s (%s%% hits)', hits, misses, - (hits * 100) / (hits + misses)) - hits, misses = self.system_source.cache_hit, self.system_source.cache_miss - self.info('sql cache hit/miss: %s/%s (%s%% hits)', hits, misses, - (hits * 100) / (hits + misses)) - nocache = self.system_source.no_cache - self.info('sql cache usage: %s/%s (%s%%)', hits+ misses, nocache, - ((hits + misses) * 100) 
/ (hits + misses + nocache)) - except ZeroDivisionError: - pass - - def check_auth_info(self, cnx, login, authinfo): - """validate authentication, raise AuthenticationError on failure, return - associated CWUser's eid on success. - """ - # iter on sources_by_uri then check enabled source since sources doesn't - # contain copy based sources - for source in self.sources_by_uri.values(): - if self.config.source_enabled(source) and source.support_entity('CWUser'): - try: - return source.authenticate(cnx, login, **authinfo) - except AuthenticationError: - continue - else: - raise AuthenticationError('authentication failed with all sources') - - def authenticate_user(self, cnx, login, **authinfo): - """validate login / password, raise AuthenticationError on failure - return associated CWUser instance on success - """ - eid = self.check_auth_info(cnx, login, authinfo) - cwuser = self._build_user(cnx, eid) - if self.config.consider_user_state and \ - not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES: - raise AuthenticationError('user is not in authenticable state') - return cwuser - - def _build_user(self, cnx, eid): - """return a CWUser entity for user with the given eid""" - cls = self.vreg['etypes'].etype_class('CWUser') - st = cls.fetch_rqlst(cnx.user, ordermethod=None) - st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute') - rset = cnx.execute(st.as_string(), {'x': eid}) - assert len(rset) == 1, rset - cwuser = rset.get_entity(0, 0) - # pylint: disable=W0104 - # prefetch / cache cwuser's groups and properties. This is especially - # useful for internal sessions to avoid security insertions - cwuser.groups - cwuser.properties - return cwuser - - # public (dbapi) interface ################################################ - - @deprecated("[3.19] use _cw.call_service('repo_stats')") - def stats(self): # XXX restrict to managers session? - """Return a dictionary containing some statistics about the repository - resources usage. - - This is a public method, not requiring a session id. - - This method is deprecated in favor of using _cw.call_service('repo_stats') - """ - with self.internal_cnx() as cnx: - return cnx.call_service('repo_stats') - - @deprecated("[3.19] use _cw.call_service('repo_gc_stats')") - def gc_stats(self, nmax=20): - """Return a dictionary containing some statistics about the repository - memory usage. - - This is a public method, not requiring a session id. - - nmax is the max number of (most) referenced object returned as - the 'referenced' result - """ - with self.internal_cnx() as cnx: - return cnx.call_service('repo_gc_stats', nmax=nmax) - - def get_schema(self): - """Return the instance schema. - - This is a public method, not requiring a session id. - """ - return self.schema - - def get_cubes(self): - """Return the list of cubes used by this instance. - - This is a public method, not requiring a session id. - """ - versions = self.get_versions(not (self.config.creating - or self.config.repairing - or self.config.quick_start - or self.config.mode == 'test')) - cubes = list(versions) - cubes.remove('cubicweb') - return cubes - - def get_option_value(self, option, foreid=None): - """Return the value for `option` in the configuration. - - This is a public method, not requiring a session id. - - `foreid` argument is deprecated and now useless (as of 3.19). 
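    A minimal usage sketch (``repo`` stands for a Repository instance; the
    option name is one defined by the server configuration, e.g. the
    connections pool size used elsewhere in this module)::

        pool_size = repo.get_option_value('connections-pool-size')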
- """ - if foreid is not None: - warn('[3.19] foreid argument is deprecated', DeprecationWarning, - stacklevel=2) - # XXX we may want to check we don't give sensible information - return self.config[option] - - @cached - def get_versions(self, checkversions=False): - """Return the a dictionary containing cubes used by this instance - as key with their version as value, including cubicweb version. - - This is a public method, not requiring a session id. - """ - from logilab.common.changelog import Version - vcconf = {} - with self.internal_cnx() as cnx: - for pk, version in cnx.execute( - 'Any K,V WHERE P is CWProperty, P value V, P pkey K, ' - 'P pkey ~="system.version.%"', build_descr=False): - cube = pk.split('.')[-1] - # XXX cubicweb migration - if cube in CW_MIGRATION_MAP: - cube = CW_MIGRATION_MAP[cube] - version = Version(version) - vcconf[cube] = version - if checkversions: - if cube != 'cubicweb': - fsversion = self.config.cube_version(cube) - else: - fsversion = self.config.cubicweb_version() - if version < fsversion: - msg = ('instance has %s version %s but %s ' - 'is installed. Run "cubicweb-ctl upgrade".') - raise ExecutionError(msg % (cube, version, fsversion)) - return vcconf - - @cached - def source_defs(self): - """Return the a dictionary containing source uris as value and a - dictionary describing each source as value. - - This is a public method, not requiring a session id. - """ - sources = {} - # remove sensitive information - for uri, source in self.sources_by_uri.items(): - sources[uri] = source.public_config - return sources - - def properties(self): - """Return a result set containing system wide properties. - - This is a public method, not requiring a session id. - """ - with self.internal_cnx() as cnx: - # don't use cnx.execute, we don't want rset.req set - return self.querier.execute(cnx, 'Any K,V WHERE P is CWProperty,' - 'P pkey K, P value V, NOT P for_user U', - build_descr=False) - - @deprecated("[3.19] Use session.call_service('register_user') instead'") - def register_user(self, login, password, email=None, **kwargs): - """check a user with the given login exists, if not create it with the - given password. This method is designed to be used for anonymous - registration on public web site. - """ - with self.internal_cnx() as cnx: - cnx.call_service('register_user', login=login, password=password, - email=email, **kwargs) - cnx.commit() - - def find_users(self, fetch_attrs, **query_attrs): - """yield user attributes for cwusers matching the given query_attrs - (the result set cannot survive this method call) - - This can be used by low-privileges account (anonymous comes to - mind). 
- - `fetch_attrs`: tuple of attributes to be fetched - `query_attrs`: dict of attr/values to restrict the query - """ - assert query_attrs - if not hasattr(self, '_cwuser_attrs'): - cwuser = self.schema['CWUser'] - self._cwuser_attrs = set(str(rschema) - for rschema, _eschema in cwuser.attribute_definitions() - if not rschema.meta) - cwuserattrs = self._cwuser_attrs - for k in chain(fetch_attrs, query_attrs): - if k not in cwuserattrs: - raise Exception('bad input for find_user') - with self.internal_cnx() as cnx: - varmaker = rqlvar_maker() - vars = [(attr, next(varmaker)) for attr in fetch_attrs] - rql = 'Any %s WHERE X is CWUser, ' % ','.join(var[1] for var in vars) - rql += ','.join('X %s %s' % (var[0], var[1]) for var in vars) + ',' - rset = cnx.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr) - for attr in query_attrs), - query_attrs) - return rset.rows - - def new_session(self, login, **kwargs): - """open a new session for a given user - - raise `AuthenticationError` if the authentication failed - raise `ConnectionError` if we can't open a connection - """ - cnxprops = kwargs.pop('cnxprops', None) - # use an internal connection - with self.internal_cnx() as cnx: - # try to get a user object - user = self.authenticate_user(cnx, login, **kwargs) - session = Session(user, self, cnxprops) - user._cw = user.cw_rset.req = session - user.cw_clear_relation_cache() - self._sessions[session.sessionid] = session - self.info('opened session %s for user %s', session.sessionid, login) - with session.new_cnx() as cnx: - self.hm.call_hooks('session_open', cnx) - # commit connection at this point in case write operation has been - # done during `session_open` hooks - cnx.commit() - return session - - def connect(self, login, **kwargs): - """open a new session for a given user and return its sessionid """ - return self.new_session(login, **kwargs).sessionid - - def close(self, sessionid, txid=None, checkshuttingdown=True): - """close the session with the given id""" - session = self._get_session(sessionid, txid=txid, - checkshuttingdown=checkshuttingdown) - # operation uncommited before close are rolled back before hook is called - with session.new_cnx() as cnx: - self.hm.call_hooks('session_close', cnx) - # commit connection at this point in case write operation has been - # done during `session_close` hooks - cnx.commit() - session.close() - del self._sessions[sessionid] - self.info('closed session %s for user %s', sessionid, session.user.login) - - # session handling ######################################################## - - def close_sessions(self): - """close every opened sessions""" - for sessionid in list(self._sessions): - try: - self.close(sessionid, checkshuttingdown=False) - except Exception: # XXX BaseException? - self.exception('error while closing session %s' % sessionid) - - def clean_sessions(self): - """close sessions not used since an amount of time specified in the - configuration - """ - mintime = time() - self.cleanup_session_time - self.debug('cleaning session unused since %s', - strftime('%H:%M:%S', localtime(mintime))) - nbclosed = 0 - for session in self._sessions.values(): - if session.timestamp < mintime: - self.close(session.sessionid) - nbclosed += 1 - return nbclosed - - @contextmanager - def internal_cnx(self): - """Context manager returning a Connection using internal user which have - every access rights on the repository. - - Beware that unlike the older :meth:`internal_session`, internal - connections have all hooks beside security enabled. 
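        A minimal usage sketch (``repo`` stands for an initialised
        Repository; illustrative only)::

            with repo.internal_cnx() as cnx:
                # security checks are disabled, hooks are still called
                rset = cnx.execute('Any X WHERE X is CWUser')
                cnx.commit()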
- """ - with Session(InternalManager(), self) as session: - with session.new_cnx() as cnx: - cnx.user._cw = cnx # XXX remove when "vreg = user._cw.vreg" - # hack in entity.py is gone - with cnx.security_enabled(read=False, write=False): - yield cnx - - def _get_session(self, sessionid, txid=None, checkshuttingdown=True): - """return the session associated with the given session identifier""" - if checkshuttingdown and self.shutting_down: - raise ShuttingDown('Repository is shutting down') - try: - session = self._sessions[sessionid] - except KeyError: - raise BadConnectionId('No such session %s' % sessionid) - return session - - # data sources handling ################################################### - # * correspondance between eid and (type, source) - # * correspondance between eid and local id (i.e. specific to a given source) - - def type_and_source_from_eid(self, eid, cnx): - """return a tuple `(type, extid, actual source uri)` for the entity of - the given `eid` - """ - try: - eid = int(eid) - except ValueError: - raise UnknownEid(eid) - try: - return self._type_source_cache[eid] - except KeyError: - etype, extid, auri = self.system_source.eid_type_source(cnx, eid) - self._type_source_cache[eid] = (etype, extid, auri) - return etype, extid, auri - - def clear_caches(self, eids): - etcache = self._type_source_cache - extidcache = self._extid_cache - rqlcache = self.querier._rql_cache - for eid in eids: - try: - etype, extid, auri = etcache.pop(int(eid)) # may be a string in some cases - rqlcache.pop( ('%s X WHERE X eid %s' % (etype, eid),), None) - extidcache.pop(extid, None) - except KeyError: - etype = None - rqlcache.pop( ('Any X WHERE X eid %s' % eid,), None) - self.system_source.clear_eid_cache(eid, etype) - - def type_from_eid(self, eid, cnx): - """return the type of the entity with id """ - return self.type_and_source_from_eid(eid, cnx)[0] - - def querier_cache_key(self, cnx, rql, args, eidkeys): - cachekey = [rql] - for key in sorted(eidkeys): - try: - etype = self.type_from_eid(args[key], cnx) - except KeyError: - raise QueryError('bad cache key %s (no value)' % key) - except TypeError: - raise QueryError('bad cache key %s (value: %r)' % ( - key, args[key])) - cachekey.append(etype) - # ensure eid is correctly typed in args - args[key] = int(args[key]) - return tuple(cachekey) - - @deprecated('[3.22] use the new store API') - def extid2eid(self, source, extid, etype, cnx, insert=True, - sourceparams=None): - """Return eid from a local id. If the eid is a negative integer, that - means the entity is known but has been copied back to the system source - hence should be ignored. - - If no record is found, ie the entity is not known yet: - - 1. an eid is attributed - - 2. the source's :meth:`before_entity_insertion` method is called to - build the entity instance - - 3. unless source's :attr:`should_call_hooks` tell otherwise, - 'before_add_entity' hooks are called - - 4. record is added into the system source - - 5. the source's :meth:`after_entity_insertion` method is called to - complete building of the entity instance - - 6. 
unless source's :attr:`should_call_hooks` tell otherwise, - 'before_add_entity' hooks are called - """ - try: - return self._extid_cache[extid] - except KeyError: - pass - eid = self.system_source.extid2eid(cnx, extid) - if eid is not None: - self._extid_cache[extid] = eid - self._type_source_cache[eid] = (etype, extid, source.uri) - return eid - if not insert: - return - # no link between extid and eid, create one - # write query, ensure connection's mode is 'write' so connections - # won't be released until commit/rollback - try: - eid = self.system_source.create_eid(cnx) - self._extid_cache[extid] = eid - self._type_source_cache[eid] = (etype, extid, source.uri) - entity = source.before_entity_insertion( - cnx, extid, etype, eid, sourceparams) - if source.should_call_hooks: - # get back a copy of operation for later restore if - # necessary, see below - pending_operations = cnx.pending_operations[:] - self.hm.call_hooks('before_add_entity', cnx, entity=entity) - self.add_info(cnx, entity, source, extid) - source.after_entity_insertion(cnx, extid, entity, sourceparams) - if source.should_call_hooks: - self.hm.call_hooks('after_add_entity', cnx, entity=entity) - return eid - except Exception: - # XXX do some cleanup manually so that the transaction has a - # chance to be commited, with simply this entity discarded - self._extid_cache.pop(extid, None) - self._type_source_cache.pop(eid, None) - if 'entity' in locals(): - hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid) - self.system_source.delete_info_multi(cnx, [entity]) - if source.should_call_hooks: - cnx.pending_operations = pending_operations - raise - - def add_info(self, cnx, entity, source, extid=None): - """add type and source info for an eid into the system table, - and index the entity with the full text index - """ - # begin by inserting eid/type/source/extid into the entities table - hook.CleanupNewEidsCacheOp.get_instance(cnx).add_data(entity.eid) - self.system_source.add_info(cnx, entity, source, extid) - - def _delete_cascade_multi(self, cnx, entities): - """same as _delete_cascade but accepts a list of entities with - the same etype and belonging to the same source. - """ - pendingrtypes = cnx.transaction_data.get('pendingrtypes', ()) - # delete remaining relations: if user can delete the entity, he can - # delete all its relations without security checking - with cnx.security_enabled(read=False, write=False): - in_eids = ','.join([str(_e.eid) for _e in entities]) - with cnx.running_hooks_ops(): - for rschema, _, role in entities[0].e_schema.relation_definitions(): - if rschema.rule: - continue # computed relation - rtype = rschema.type - if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes: - continue - if role == 'subject': - # don't skip inlined relation so they are regularly - # deleted and so hooks are correctly called - rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids) - else: - rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids) - try: - cnx.execute(rql, build_descr=False) - except ValidationError: - raise - except Unauthorized: - self.exception('Unauthorized exception while cascading delete for entity %s. ' - 'RQL: %s.\nThis should not happen since security is disabled here.', - entities, rql) - raise - except Exception: - if self.config.mode == 'test': - raise - self.exception('error while cascading delete for entity %s. 
RQL: %s', - entities, rql) - - def init_entity_caches(self, cnx, entity, source): - """add entity to connection entities cache and repo's extid cache. - Return entity's ext id if the source isn't the system source. - """ - cnx.set_entity_cache(entity) - if source.uri == 'system': - extid = None - else: - extid = source.get_extid(entity) - self._extid_cache[str(extid)] = entity.eid - self._type_source_cache[entity.eid] = (entity.cw_etype, extid, source.uri) - return extid - - def glob_add_entity(self, cnx, edited): - """add an entity to the repository - - the entity eid should originally be None and a unique eid is assigned to - the entity instance - """ - entity = edited.entity - entity._cw_is_saved = False # entity has an eid but is not yet saved - # init edited_attributes before calling before_add_entity hooks - entity.cw_edited = edited - source = self.system_source - # allocate an eid to the entity before calling hooks - entity.eid = self.system_source.create_eid(cnx) - # set caches asap - extid = self.init_entity_caches(cnx, entity, source) - if server.DEBUG & server.DBG_REPO: - print('ADD entity', self, entity.cw_etype, entity.eid, edited) - prefill_entity_caches(entity) - self.hm.call_hooks('before_add_entity', cnx, entity=entity) - relations = preprocess_inlined_relations(cnx, entity) - edited.set_defaults() - if cnx.is_hook_category_activated('integrity'): - edited.check(creation=True) - self.add_info(cnx, entity, source, extid) - try: - source.add_entity(cnx, entity) - except (UniqueTogetherError, ViolatedConstraint) as exc: - userhdlr = cnx.vreg['adapters'].select( - 'IUserFriendlyError', cnx, entity=entity, exc=exc) - userhdlr.raise_user_exception() - edited.saved = entity._cw_is_saved = True - # trigger after_add_entity after after_add_relation - self.hm.call_hooks('after_add_entity', cnx, entity=entity) - # call hooks for inlined relations - for attr, value in relations: - self.hm.call_hooks('before_add_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=value) - self.hm.call_hooks('after_add_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=value) - return entity.eid - - def glob_update_entity(self, cnx, edited): - """replace an entity in the repository - the type and the eid of an entity must not be changed - """ - entity = edited.entity - if server.DEBUG & server.DBG_REPO: - print('UPDATE entity', entity.cw_etype, entity.eid, - entity.cw_attr_cache, edited) - hm = self.hm - eschema = entity.e_schema - cnx.set_entity_cache(entity) - orig_edited = getattr(entity, 'cw_edited', None) - entity.cw_edited = edited - source = self.system_source - try: - only_inline_rels, need_fti_update = True, False - relations = [] - for attr in list(edited): - if attr == 'eid': - continue - rschema = eschema.subjrels[attr] - if rschema.final: - if getattr(eschema.rdef(attr), 'fulltextindexed', False): - need_fti_update = True - only_inline_rels = False - else: - # inlined relation - previous_value = entity.related(attr) or None - if previous_value is not None: - previous_value = previous_value[0][0] # got a result set - if previous_value == entity.cw_attr_cache[attr]: - previous_value = None - else: - hm.call_hooks('before_delete_relation', cnx, - eidfrom=entity.eid, rtype=attr, - eidto=previous_value) - relations.append((attr, edited[attr], previous_value)) - # call hooks for inlined relations - for attr, value, _t in relations: - hm.call_hooks('before_add_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=value) - if not only_inline_rels: - hm.call_hooks('before_update_entity', 
cnx, entity=entity) - if cnx.is_hook_category_activated('integrity'): - edited.check() - try: - source.update_entity(cnx, entity) - edited.saved = True - except (UniqueTogetherError, ViolatedConstraint) as exc: - userhdlr = cnx.vreg['adapters'].select( - 'IUserFriendlyError', cnx, entity=entity, exc=exc) - userhdlr.raise_user_exception() - self.system_source.update_info(cnx, entity, need_fti_update) - if not only_inline_rels: - hm.call_hooks('after_update_entity', cnx, entity=entity) - for attr, value, prevvalue in relations: - # if the relation is already cached, update existant cache - relcache = entity.cw_relation_cached(attr, 'subject') - if prevvalue is not None: - hm.call_hooks('after_delete_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=prevvalue) - if relcache is not None: - cnx.update_rel_cache_del(entity.eid, attr, prevvalue) - del_existing_rel_if_needed(cnx, entity.eid, attr, value) - cnx.update_rel_cache_add(entity.eid, attr, value) - hm.call_hooks('after_add_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=value) - finally: - if orig_edited is not None: - entity.cw_edited = orig_edited - - - def glob_delete_entities(self, cnx, eids): - """delete a list of entities and all related entities from the repository""" - # mark eids as being deleted in cnx info and setup cache update - # operation (register pending eids before actual deletion to avoid - # multiple call to glob_delete_entities) - op = hook.CleanupDeletedEidsCacheOp.get_instance(cnx) - if not isinstance(eids, (set, frozenset)): - warn('[3.13] eids should be given as a set', DeprecationWarning, - stacklevel=2) - eids = frozenset(eids) - eids = eids - op._container - op._container |= eids - data_by_etype = {} # values are [list of entities] - # - # WARNING: the way this dictionary is populated is heavily optimized - # and does not use setdefault on purpose. Unless a new release - # of the Python interpreter advertises large perf improvements - # in setdefault, this should not be changed without profiling. - for eid in eids: - etype = self.type_from_eid(eid, cnx) - # XXX should cache entity's cw_metainformation - entity = cnx.entity_from_eid(eid, etype) - try: - data_by_etype[etype].append(entity) - except KeyError: - data_by_etype[etype] = [entity] - source = self.system_source - for etype, entities in data_by_etype.items(): - if server.DEBUG & server.DBG_REPO: - print('DELETE entities', etype, [entity.eid for entity in entities]) - self.hm.call_hooks('before_delete_entity', cnx, entities=entities) - self._delete_cascade_multi(cnx, entities) - source.delete_entities(cnx, entities) - source.delete_info_multi(cnx, entities) - self.hm.call_hooks('after_delete_entity', cnx, entities=entities) - # don't clear cache here, it is done in a hook on commit - - def glob_add_relation(self, cnx, subject, rtype, object): - """add a relation to the repository""" - self.glob_add_relations(cnx, {rtype: [(subject, object)]}) - - def glob_add_relations(self, cnx, relations): - """add several relations to the repository - - relations is a dictionary rtype: [(subj_eid, obj_eid), ...] 
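        For instance (hypothetical eids and relation type name), two 'tags'
        relations would be added for subject 123 with::

            repo.glob_add_relations(cnx, {'tags': [(123, 456), (123, 789)]})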
- """ - source = self.system_source - relations_by_rtype = {} - subjects_by_types = {} - objects_by_types = {} - activintegrity = cnx.is_hook_category_activated('activeintegrity') - for rtype, eids_subj_obj in relations.items(): - if server.DEBUG & server.DBG_REPO: - for subjeid, objeid in eids_subj_obj: - print('ADD relation', subjeid, rtype, objeid) - for subjeid, objeid in eids_subj_obj: - if rtype in relations_by_rtype: - relations_by_rtype[rtype].append((subjeid, objeid)) - else: - relations_by_rtype[rtype] = [(subjeid, objeid)] - if not activintegrity: - continue - # take care to relation of cardinality '?1', as all eids will - # be inserted later, we've remove duplicated eids since they - # won't be caught by `del_existing_rel_if_needed` - rdef = cnx.rtype_eids_rdef(rtype, subjeid, objeid) - card = rdef.cardinality - if card[0] in '?1': - with cnx.security_enabled(read=False): - cnx.execute('DELETE X %s Y WHERE X eid %%(x)s, ' - 'NOT Y eid %%(y)s' % rtype, - {'x': subjeid, 'y': objeid}) - subjects = subjects_by_types.setdefault(rdef, {}) - if subjeid in subjects: - del relations_by_rtype[rtype][subjects[subjeid]] - subjects[subjeid] = len(relations_by_rtype[rtype]) - 1 - continue - subjects[subjeid] = len(relations_by_rtype[rtype]) - 1 - if card[1] in '?1': - with cnx.security_enabled(read=False): - cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s, ' - 'NOT X eid %%(x)s' % rtype, - {'x': subjeid, 'y': objeid}) - objects = objects_by_types.setdefault(rdef, {}) - if objeid in objects: - del relations_by_rtype[rtype][objects[objeid]] - objects[objeid] = len(relations_by_rtype[rtype]) - continue - objects[objeid] = len(relations_by_rtype[rtype]) - for rtype, source_relations in relations_by_rtype.items(): - self.hm.call_hooks('before_add_relation', cnx, - rtype=rtype, eids_from_to=source_relations) - for rtype, source_relations in relations_by_rtype.items(): - source.add_relations(cnx, rtype, source_relations) - rschema = self.schema.rschema(rtype) - for subjeid, objeid in source_relations: - cnx.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric) - for rtype, source_relations in relations_by_rtype.items(): - self.hm.call_hooks('after_add_relation', cnx, - rtype=rtype, eids_from_to=source_relations) - - def glob_delete_relation(self, cnx, subject, rtype, object): - """delete a relation from the repository""" - if server.DEBUG & server.DBG_REPO: - print('DELETE relation', subject, rtype, object) - source = self.system_source - self.hm.call_hooks('before_delete_relation', cnx, - eidfrom=subject, rtype=rtype, eidto=object) - source.delete_relation(cnx, subject, rtype, object) - rschema = self.schema.rschema(rtype) - cnx.update_rel_cache_del(subject, rtype, object, rschema.symmetric) - self.hm.call_hooks('after_delete_relation', cnx, - eidfrom=subject, rtype=rtype, eidto=object) - - - - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(Repository, getLogger('cubicweb.repository')) diff -r 058bb3dc685f -r 0b59724cb3f2 server/rqlannotation.py --- a/server/rqlannotation.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,413 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Functions to add additional annotations on a rql syntax tree to ease later -code generation. -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from rql import BadRQLQuery -from rql.nodes import Relation, VariableRef, Constant, Variable, Or, Exists -from rql.utils import common_parent - -def _annotate_select(annotator, rqlst): - has_text_query = False - for subquery in rqlst.with_: - if annotator._annotate_union(subquery.query): - has_text_query = True - #if server.DEBUG: - # print '-------- sql annotate', repr(rqlst) - getrschema = annotator.schema.rschema - for var in rqlst.defined_vars.values(): - stinfo = var.stinfo - if stinfo.get('ftirels'): - has_text_query = True - if stinfo['attrvar']: - stinfo['invariant'] = False - stinfo['principal'] = _select_main_var(stinfo['rhsrelations']) - continue - if not stinfo['relations'] and stinfo['typerel'] is None: - # Any X, Any MAX(X)... - # those particular queries should be executed using the system - # entities table unless there is some type restriction - stinfo['invariant'] = True - stinfo['principal'] = None - continue - if any(rel for rel in stinfo['relations'] if rel.r_type == 'eid' and rel.operator() != '=') and \ - not any(r for r in var.stinfo['relations'] - var.stinfo['rhsrelations'] - if r.r_type != 'eid' and (getrschema(r.r_type).inlined or getrschema(r.r_type).final)): - # Any X WHERE X eid > 2 - # those particular queries should be executed using the system entities table - stinfo['invariant'] = True - stinfo['principal'] = None - continue - if stinfo['selected'] and var.valuable_references() == 1+bool(stinfo['constnode']): - # "Any X", "Any X, Y WHERE X attr Y" - stinfo['invariant'] = False - continue - joins = set() - invariant = False - for ref in var.references(): - rel = ref.relation() - if rel is None or rel.is_types_restriction(): - continue - lhs, rhs = rel.get_parts() - onlhs = ref is lhs - role = 'subject' if onlhs else 'object' - if rel.r_type == 'eid': - if not (onlhs and len(stinfo['relations']) > 1): - break - if not stinfo['constnode']: - joins.add( (rel, role) ) - continue - elif rel.r_type == 'identity': - # identity can't be used as principal, so check other relation are used - # XXX explain rhs.operator == '=' - if rhs.operator != '=' or len(stinfo['relations']) <= 1: #(stinfo['constnode'] and rhs.operator == '='): - break - joins.add( (rel, role) ) - continue - rschema = getrschema(rel.r_type) - if rel.optional: - if rel in stinfo.get('optrelations', ()): - # optional variable can't be invariant if this is the lhs - # variable of an inlined relation - if not rel in stinfo['rhsrelations'] and rschema.inlined: - break - # variable used as main variable of an optional relation can't - # be invariant, unless we can use some other relation as - # reference for the outer join - elif not stinfo['constnode']: - break - elif len(stinfo['relations']) == 2: - if 
onlhs: - ostinfo = rhs.children[0].variable.stinfo - else: - ostinfo = lhs.variable.stinfo - if not (ostinfo.get('optcomparisons') or - any(orel for orel in ostinfo['relations'] - if orel.optional and orel is not rel)): - break - if rschema.final or (onlhs and rschema.inlined): - if rschema.type != 'has_text': - # need join anyway if the variable appears in a final or - # inlined relation - break - joins.add( (rel, role) ) - continue - if not stinfo['constnode']: - if rschema.inlined and rel.neged(strict=True): - # if relation is inlined, can't be invariant if that - # variable is used anywhere else. - # see 'Any P WHERE NOT N ecrit_par P, N eid 512': - # sql for 'NOT N ecrit_par P' is 'N.ecrit_par is NULL' so P - # can use N.ecrit_par as principal - if (stinfo['selected'] or len(stinfo['relations']) > 1): - break - joins.add( (rel, role) ) - else: - # if there is at least one ambigous relation and no other to - # restrict types, can't be invariant since we need to filter out - # other types - if not annotator.is_ambiguous(var): - invariant = True - stinfo['invariant'] = invariant - if invariant and joins: - # remember rqlst/solutions analyze information - # we have to select a kindof "main" relation which will "extrajoins" - # the other - # priority should be given to relation which are not in inner queries - # (eg exists) - try: - stinfo['principal'] = principal = _select_principal(var.scope, joins) - if getrschema(principal.r_type).inlined: - # the scope of the lhs variable must be equal or outer to the - # rhs variable's scope (since it's retrieved from lhs's table) - sstinfo = principal.children[0].variable.stinfo - sstinfo['scope'] = common_parent(sstinfo['scope'], stinfo['scope']).scope - except CantSelectPrincipal: - stinfo['invariant'] = False - # see unittest_rqlannotation. test_has_text_security_cache_bug - # XXX probably more to do, but yet that work without more... - for col_alias in rqlst.aliases.values(): - if col_alias.stinfo.get('ftirels'): - has_text_query = True - return has_text_query - - - -class CantSelectPrincipal(Exception): - """raised when no 'principal' variable can be found""" - -def _select_principal(scope, relations, _sort=lambda x:x): - """given a list of rqlst relations, select one which will be used to - represent an invariant variable (e.g. 
using on extremity of the relation - instead of the variable's type table - """ - # _sort argument is there for test - diffscope_rels = {} - ored_rels = set() - diffscope_rels = set() - for rel, role in _sort(relations): - # note: only eid and has_text among all final relations may be there - if rel.r_type in ('eid', 'identity'): - continue - if rel.optional is not None and len(relations) > 1: - if role == 'subject' and rel.optional == 'right': - continue - if role == 'object' and rel.optional == 'left': - continue - if rel.ored(traverse_scope=True): - ored_rels.add(rel) - elif rel.scope is scope: - return rel - elif not rel.neged(traverse_scope=True): - diffscope_rels.add(rel) - if len(ored_rels) > 1: - ored_rels_copy = tuple(ored_rels) - for rel1 in ored_rels_copy: - for rel2 in ored_rels_copy: - if rel1 is rel2: - continue - if isinstance(common_parent(rel1, rel2), Or): - ored_rels.discard(rel1) - ored_rels.discard(rel2) - for rel in _sort(ored_rels): - if rel.scope is scope: - return rel - diffscope_rels.add(rel) - # if DISTINCT query, can use variable from a different scope as principal - # since introduced duplicates will be removed - if scope.stmt.distinct and diffscope_rels: - return next(iter(_sort(diffscope_rels))) - # XXX could use a relation from a different scope if it can't generate - # duplicates, so we should have to check cardinality - raise CantSelectPrincipal() - -def _select_main_var(relations): - """given a list of rqlst relations, select one which will be used as main - relation for the rhs variable - """ - principal = None - others = [] - # sort for test predictability - for rel in sorted(relations, key=lambda x: (x.children[0].name, x.r_type)): - # only equality relation with a variable as rhs may be principal - if rel.operator() not in ('=', 'IS') \ - or not isinstance(rel.children[1].children[0], VariableRef) or rel.neged(strict=True): - continue - if rel.optional: - others.append(rel) - continue - if rel.scope is rel.stmt: - return rel - principal = rel - if principal is None: - if others: - return others[0] - raise BadRQLQuery('unable to find principal in %s' % ', '.join( - r.as_string() for r in relations)) - return principal - - -def set_qdata(getrschema, union, noinvariant): - """recursive function to set querier data on variables in the syntax tree - """ - for select in union.children: - for subquery in select.with_: - set_qdata(getrschema, subquery.query, noinvariant) - for var in select.defined_vars.values(): - if var.stinfo['invariant']: - if var in noinvariant and not var.stinfo['principal'].r_type == 'has_text': - var._q_invariant = False - else: - var._q_invariant = True - else: - var._q_invariant = False - - -class SQLGenAnnotator(object): - def __init__(self, schema): - self.schema = schema - self.nfdomain = frozenset(eschema.type for eschema in schema.entities() - if not eschema.final) - - def annotate(self, rqlst): - """add information to the rql syntax tree to help sources to do their - job (read sql generation) - - a variable is tagged as invariant if: - * it's a non final variable - * it's not used as lhs in any final or inlined relation - * there is no type restriction on this variable (either explicit in the - syntax tree or because a solution for this variable has been removed - due to security filtering) - """ - #assert rqlst.TYPE == 'select', rqlst - rqlst.has_text_query = self._annotate_union(rqlst) - - def _annotate_union(self, union): - has_text_query = False - for select in union.children: - if _annotate_select(self, select): - 
has_text_query = True - return has_text_query - - def is_ambiguous(self, var): - # ignore has_text relation when we know it will be used as principal. - # This is expected by the rql2sql generator which will use the `entities` - # table to filter out by type if necessary, This optimisation is very - # interesting in multi-sources cases, as it may avoid a costly query - # on sources to get all entities of a given type to achieve this, while - # we have all the necessary information. - root = var.stmt.root # Union node - # rel.scope -> Select or Exists node, so add .parent to get Union from - # Select node - rels = [rel for rel in var.stinfo['relations'] if rel.scope.parent is root] - if len(rels) == 1 and rels[0].r_type == 'has_text': - return False - try: - data = var.stmt._deamb_data - except AttributeError: - data = var.stmt._deamb_data = IsAmbData(self.schema, self.nfdomain) - data.compute(var.stmt) - return data.is_ambiguous(var) - - -class IsAmbData(object): - def __init__(self, schema, nfdomain): - self.schema = schema - # shortcuts - self.rschema = schema.rschema - self.eschema = schema.eschema - # domain for non final variables - self.nfdomain = nfdomain - # {var: possible solutions set} - self.varsols = {} - # set of ambiguous variables - self.ambiguousvars = set() - # remember if a variable has been deambiguified by another to avoid - # doing the opposite - self.deambification_map = {} - # not invariant variables (access to final.inlined relation) - self.not_invariants = set() - - def is_ambiguous(self, var): - return var in self.ambiguousvars - - def restrict(self, var, restricted_domain): - self.varsols[var] &= restricted_domain - if var in self.ambiguousvars and self.varsols[var] == var.stinfo['possibletypes']: - self.ambiguousvars.remove(var) - - def compute(self, rqlst): - # set domains for each variable - for varname, var in rqlst.defined_vars.items(): - if var.stinfo['uidrel'] is not None or \ - self.eschema(rqlst.solutions[0][varname]).final: - ptypes = var.stinfo['possibletypes'] - else: - ptypes = set(self.nfdomain) - self.ambiguousvars.add(var) - self.varsols[var] = ptypes - if not self.ambiguousvars: - return - # apply relation restriction - self.maydeambrels = maydeambrels = {} - for rel in rqlst.iget_nodes(Relation): - if rel.r_type == 'eid' or rel.is_types_restriction(): - continue - lhs, rhs = rel.get_variable_parts() - if isinstance(lhs, VariableRef) or isinstance(rhs, VariableRef): - rschema = self.rschema(rel.r_type) - if rschema.inlined or rschema.final: - self.not_invariants.add(lhs.variable) - self.set_rel_constraint(lhs, rel, rschema.subjects) - self.set_rel_constraint(rhs, rel, rschema.objects) - # try to deambiguify more variables by considering other variables'type - modified = True - while modified and self.ambiguousvars: - modified = False - for var in self.ambiguousvars.copy(): - try: - for rel in (var.stinfo['relations'] & maydeambrels[var]): - if self.deambiguifying_relation(var, rel): - modified = True - break - except KeyError: - # no relation to deambiguify - continue - - def _debug_print(self): - print('varsols', dict((x, sorted(str(v) for v in values)) - for x, values in self.varsols.items())) - print('ambiguous vars', sorted(self.ambiguousvars)) - - def set_rel_constraint(self, term, rel, etypes_func): - if isinstance(term, VariableRef) and self.is_ambiguous(term.variable): - var = term.variable - if len(var.stinfo['relations']) == 1 \ - or rel.scope is var.scope or rel.r_type == 'identity': - self.restrict(var, frozenset(etypes_func())) - 
try: - self.maydeambrels[var].add(rel) - except KeyError: - self.maydeambrels[var] = set((rel,)) - - def deambiguifying_relation(self, var, rel): - lhs, rhs = rel.get_variable_parts() - onlhs = var is getattr(lhs, 'variable', None) - other = onlhs and rhs or lhs - otheretypes = None - # XXX isinstance(other.variable, Variable) to skip column alias - if isinstance(other, VariableRef) and isinstance(other.variable, Variable): - deambiguifier = other.variable - if not var is self.deambification_map.get(deambiguifier): - if var.stinfo['typerel'] is None: - otheretypes = deambiguifier.stinfo['possibletypes'] - elif not self.is_ambiguous(deambiguifier): - otheretypes = self.varsols[deambiguifier] - elif deambiguifier in self.not_invariants: - # we know variable won't be invariant, try to use - # it to deambguify the current variable - otheretypes = self.varsols[deambiguifier] - if deambiguifier.stinfo['typerel'] is None: - # if deambiguifier has no type restriction using 'is', - # don't record it - deambiguifier = None - elif isinstance(other, Constant) and other.uidtype: - otheretypes = (other.uidtype,) - deambiguifier = None - if otheretypes is not None: - # to restrict, we must check that for all type in othertypes, - # possible types on the other end of the relation are matching - # variable's possible types - rschema = self.rschema(rel.r_type) - if onlhs: - rtypefunc = rschema.subjects - else: - rtypefunc = rschema.objects - for otheretype in otheretypes: - reltypes = frozenset(rtypefunc(otheretype)) - if var.stinfo['possibletypes'] != reltypes: - return False - self.restrict(var, var.stinfo['possibletypes']) - self.deambification_map[var] = deambiguifier - return True - return False diff -r 058bb3dc685f -r 0b59724cb3f2 server/schema2sql.py --- a/server/schema2sql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,300 +0,0 @@ -# copyright 2004-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of cubicweb. -# -# yams is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# yams is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with yams. If not, see . -"""write a schema as sql""" - -__docformat__ = "restructuredtext en" - -from hashlib import md5 - -from six import string_types -from six.moves import range - -from yams.constraints import (SizeConstraint, UniqueConstraint, Attribute, - NOW, TODAY) - -# default are usually not handled at the sql level. 
If you want them, set -# SET_DEFAULT to True -SET_DEFAULT = False - -def rschema_has_table(rschema, skip_relations): - """Return True if the given schema should have a table in the database""" - return not (rschema.final or rschema.inlined or rschema.rule or rschema.type in skip_relations) - - -def schema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''): - """write to the output stream a SQL schema to store the objects - corresponding to the given schema - """ - output = [] - w = output.append - for etype in sorted(schema.entities()): - eschema = schema.eschema(etype) - if eschema.final or eschema.type in skip_entities: - continue - w(eschema2sql(dbhelper, eschema, skip_relations, prefix=prefix)) - for rtype in sorted(schema.relations()): - rschema = schema.rschema(rtype) - if rschema_has_table(rschema, skip_relations): - w(rschema2sql(rschema)) - return '\n'.join(output) - - -def dropschema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''): - """write to the output stream a SQL schema to store the objects - corresponding to the given schema - """ - output = [] - w = output.append - for etype in sorted(schema.entities()): - eschema = schema.eschema(etype) - if eschema.final or eschema.type in skip_entities: - continue - stmts = dropeschema2sql(dbhelper, eschema, skip_relations, prefix=prefix) - for stmt in stmts: - w(stmt) - for rtype in sorted(schema.relations()): - rschema = schema.rschema(rtype) - if rschema_has_table(rschema, skip_relations): - w(droprschema2sql(rschema)) - return '\n'.join(output) - - -def eschema_attrs(eschema, skip_relations): - attrs = [attrdef for attrdef in eschema.attribute_definitions() - if not attrdef[0].type in skip_relations] - attrs += [(rschema, None) - for rschema in eschema.subject_relations() - if not rschema.final and rschema.inlined] - return attrs - -def unique_index_name(eschema, columns): - return u'unique_%s' % md5((eschema.type + - ',' + - ','.join(sorted(columns))).encode('ascii')).hexdigest() - -def iter_unique_index_names(eschema): - for columns in eschema._unique_together or (): - yield columns, unique_index_name(eschema, columns) - -def dropeschema2sql(dbhelper, eschema, skip_relations=(), prefix=''): - """return sql to drop an entity type's table""" - # not necessary to drop indexes, that's implictly done when - # dropping the table, but we need to drop SQLServer views used to - # create multicol unique indices - statements = [] - tablename = prefix + eschema.type - if eschema._unique_together is not None: - for columns, index_name in iter_unique_index_names(eschema): - cols = ['%s%s' % (prefix, col) for col in columns] - sqls = dbhelper.sqls_drop_multicol_unique_index(tablename, cols, index_name) - statements += sqls - statements += ['DROP TABLE %s;' % (tablename)] - return statements - - -def eschema2sql(dbhelper, eschema, skip_relations=(), prefix=''): - """write an entity schema as SQL statements to stdout""" - output = [] - w = output.append - table = prefix + eschema.type - w('CREATE TABLE %s(' % (table)) - attrs = eschema_attrs(eschema, skip_relations) - # XXX handle objectinline physical mode - for i in range(len(attrs)): - rschema, attrschema = attrs[i] - if attrschema is not None: - sqltype = aschema2sql(dbhelper, eschema, rschema, attrschema, - indent=' ') - else: # inline relation - sqltype = 'integer REFERENCES entities (eid)' - if i == len(attrs) - 1: - w(' %s%s %s' % (prefix, rschema.type, sqltype)) - else: - w(' %s%s %s,' % (prefix, rschema.type, sqltype)) - for rschema, aschema in 
attrs: - if aschema is None: # inline relation - continue - attr = rschema.type - rdef = rschema.rdef(eschema.type, aschema.type) - for constraint in rdef.constraints: - cstrname, check = check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=prefix) - if cstrname is not None: - w(', CONSTRAINT %s CHECK(%s)' % (cstrname, check)) - w(');') - # create indexes - for i in range(len(attrs)): - rschema, attrschema = attrs[i] - if attrschema is None or eschema.rdef(rschema).indexed: - w(dbhelper.sql_create_index(table, prefix + rschema.type)) - for columns, index_name in iter_unique_index_names(eschema): - cols = ['%s%s' % (prefix, col) for col in columns] - sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, index_name) - for sql in sqls: - w(sql) - w('') - return '\n'.join(output) - -def as_sql(value, dbhelper, prefix): - if isinstance(value, Attribute): - return prefix + value.attr - elif isinstance(value, TODAY): - return dbhelper.sql_current_date() - elif isinstance(value, NOW): - return dbhelper.sql_current_timestamp() - else: - # XXX more quoting for literals? - return value - -def check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=''): - # XXX should find a better name - cstrname = 'cstr' + md5((eschema.type + attr + constraint.type() + - (constraint.serialize() or '')).encode('ascii')).hexdigest() - if constraint.type() == 'BoundaryConstraint': - value = as_sql(constraint.boundary, dbhelper, prefix) - return cstrname, '%s%s %s %s' % (prefix, attr, constraint.operator, value) - elif constraint.type() == 'IntervalBoundConstraint': - condition = [] - if constraint.minvalue is not None: - value = as_sql(constraint.minvalue, dbhelper, prefix) - condition.append('%s%s >= %s' % (prefix, attr, value)) - if constraint.maxvalue is not None: - value = as_sql(constraint.maxvalue, dbhelper, prefix) - condition.append('%s%s <= %s' % (prefix, attr, value)) - return cstrname, ' AND '.join(condition) - elif constraint.type() == 'StaticVocabularyConstraint': - sample = next(iter(constraint.vocabulary())) - if not isinstance(sample, string_types): - values = ', '.join(str(word) for word in constraint.vocabulary()) - else: - # XXX better quoting? 
- values = ', '.join("'%s'" % word.replace("'", "''") for word in constraint.vocabulary()) - return cstrname, '%s%s IN (%s)' % (prefix, attr, values) - return None, None - -def aschema2sql(dbhelper, eschema, rschema, aschema, creating=True, indent=''): - """write an attribute schema as SQL statements to stdout""" - attr = rschema.type - rdef = rschema.rdef(eschema.type, aschema.type) - sqltype = type_from_rdef(dbhelper, rdef, creating) - if SET_DEFAULT: - default = eschema.default(attr) - if default is not None: - if aschema.type == 'Boolean': - sqltype += ' DEFAULT %s' % dbhelper.boolean_value(default) - elif aschema.type == 'String': - sqltype += ' DEFAULT %r' % str(default) - elif aschema.type in ('Int', 'BigInt', 'Float'): - sqltype += ' DEFAULT %s' % default - # XXX ignore default for other type - # this is expected for NOW / TODAY - if creating: - if rdef.uid: - sqltype += ' PRIMARY KEY REFERENCES entities (eid)' - elif rdef.cardinality[0] == '1': - # don't set NOT NULL if backend isn't able to change it later - if dbhelper.alter_column_support: - sqltype += ' NOT NULL' - # else we're getting sql type to alter a column, we don't want key / indexes - # / null modifiers - return sqltype - - -def type_from_rdef(dbhelper, rdef, creating=True): - """return a sql type string corresponding to the relation definition""" - constraints = list(rdef.constraints) - unique, sqltype = False, None - if rdef.object.type == 'String': - for constraint in constraints: - if isinstance(constraint, SizeConstraint): - if constraint.max is not None: - size_constrained_string = dbhelper.TYPE_MAPPING.get( - 'SizeConstrainedString', 'varchar(%s)') - sqltype = size_constrained_string % constraint.max - elif isinstance(constraint, UniqueConstraint): - unique = True - if sqltype is None: - sqltype = sql_type(dbhelper, rdef) - if creating and unique: - sqltype += ' UNIQUE' - return sqltype - - -def sql_type(dbhelper, rdef): - sqltype = dbhelper.TYPE_MAPPING[rdef.object] - if callable(sqltype): - sqltype = sqltype(rdef) - return sqltype - - -_SQL_SCHEMA = """ -CREATE TABLE %(table)s ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT %(table)s_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX %(table)s_from_idx ON %(table)s(eid_from); -CREATE INDEX %(table)s_to_idx ON %(table)s(eid_to);""" - - -def rschema2sql(rschema): - assert not rschema.rule - return _SQL_SCHEMA % {'table': '%s_relation' % rschema.type} - - -def droprschema2sql(rschema): - """return sql to drop a relation type's table""" - # not necessary to drop indexes, that's implictly done when dropping - # the table - return 'DROP TABLE %s_relation;' % rschema.type - - -def grant_schema(schema, user, set_owner=True, skip_entities=(), prefix=''): - """write to the output stream a SQL schema to store the objects - corresponding to the given schema - """ - output = [] - w = output.append - for etype in sorted(schema.entities()): - eschema = schema.eschema(etype) - if eschema.final or etype in skip_entities: - continue - w(grant_eschema(eschema, user, set_owner, prefix=prefix)) - for rtype in sorted(schema.relations()): - rschema = schema.rschema(rtype) - if rschema_has_table(rschema, skip_relations=()): # XXX skip_relations should be specified - w(grant_rschema(rschema, user, set_owner)) - return '\n'.join(output) - - -def grant_eschema(eschema, user, set_owner=True, prefix=''): - output = [] - w = output.append - etype = eschema.type - if set_owner: - w('ALTER TABLE %s%s OWNER TO 
%s;' % (prefix, etype, user)) - w('GRANT ALL ON %s%s TO %s;' % (prefix, etype, user)) - return '\n'.join(output) - - -def grant_rschema(rschema, user, set_owner=True): - output = [] - if set_owner: - output.append('ALTER TABLE %s_relation OWNER TO %s;' % (rschema.type, user)) - output.append('GRANT ALL ON %s_relation TO %s;' % (rschema.type, user)) - return '\n'.join(output) diff -r 058bb3dc685f -r 0b59724cb3f2 server/schemaserial.py --- a/server/schemaserial.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,656 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""functions for schema / permissions (de)serialization using RQL""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import os -import json -import sys - -from six import PY2, text_type, string_types - -from logilab.common.shellutils import ProgressBar, DummyProgressBar - -from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo - -from cubicweb import Binary -from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP, - VIRTUAL_RTYPES) -from cubicweb.server import sqlutils, schema2sql as y2sql - - -def group_mapping(cnx, interactive=True): - """create a group mapping from an rql cursor - - A group mapping has standard group names as key (managers, owners at least) - and the actual CWGroup entity's eid as associated value. - In interactive mode (the default), missing groups'eid will be prompted - from the user. 
- """ - res = {} - for eid, name in cnx.execute('Any G, N WHERE G is CWGroup, G name N', - build_descr=False): - res[name] = eid - if not interactive: - return res - missing = [g for g in ('owners', 'managers', 'users', 'guests') if not g in res] - if missing: - print('some native groups are missing but the following groups have been found:') - print('\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items())) - print() - print('enter the eid of a to group to map to each missing native group') - print('or just type enter to skip permissions granted to a group') - for group in missing: - while True: - value = raw_input('eid for group %s: ' % group).strip() - if not value: - continue - try: - eid = int(value) - except ValueError: - print('eid should be an integer') - continue - for eid_ in res.values(): - if eid == eid_: - break - else: - print('eid is not a group eid') - continue - res[name] = eid - break - return res - -def cstrtype_mapping(cnx): - """cached constraint types mapping""" - map = dict(cnx.execute('Any T, X WHERE X is CWConstraintType, X name T')) - return map - -# schema / perms deserialization ############################################## - -def deserialize_schema(schema, cnx): - """return a schema according to information stored in an rql database - as CWRType and CWEType entities - """ - repo = cnx.repo - dbhelper = repo.system_source.dbhelper - - # Computed Rtype - with cnx.ensure_cnx_set: - tables = set(t.lower() for t in dbhelper.list_tables(cnx.cnxset.cu)) - has_computed_relations = 'cw_cwcomputedrtype' in tables - # computed attribute - try: - cnx.system_sql("SELECT cw_formula FROM cw_CWAttribute") - has_computed_attributes = True - except Exception: - cnx.rollback() - has_computed_attributes = False - - # XXX bw compat (3.6 migration) - sqlcu = cnx.system_sql("SELECT * FROM cw_CWRType WHERE cw_name='symetric'") - if sqlcu.fetchall(): - sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric', - dbhelper.TYPE_MAPPING['Boolean'], True) - sqlcu.execute(sql) - sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'") - cnx.commit() - ertidx = {} - copiedeids = set() - permsidx = deserialize_ertype_permissions(cnx) - schema.reading_from_database = True - # load every entity types - for eid, etype, desc in cnx.execute( - 'Any X, N, D WHERE X is CWEType, X name N, X description D', - build_descr=False): - # base types are already in the schema, skip them - if etype in schemamod.BASE_TYPES: - # just set the eid - eschema = schema.eschema(etype) - eschema.eid = eid - ertidx[eid] = etype - continue - if etype in ETYPE_NAME_MAP: - needcopy = False - netype = ETYPE_NAME_MAP[etype] - # can't use write rql queries at this point, use raw sql - sqlexec = cnx.system_sql - if sqlexec('SELECT 1 FROM %(p)sCWEType WHERE %(p)sname=%%(n)s' - % {'p': sqlutils.SQL_PREFIX}, {'n': netype}).fetchone(): - # the new type already exists, we should copy (eg make existing - # instances of the old type instances of the new type) - assert etype.lower() != netype.lower() - needcopy = True - else: - # the new type doesn't exist, we should rename - sqlexec('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' - % {'p': sqlutils.SQL_PREFIX}, {'x': eid, 'n': netype}) - if etype.lower() != netype.lower(): - alter_table_sql = dbhelper.sql_rename_table(sqlutils.SQL_PREFIX+etype, - sqlutils.SQL_PREFIX+netype) - sqlexec(alter_table_sql) - sqlexec('UPDATE entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) - cnx.commit(False) - tocleanup 
= [eid] - tocleanup += (eid for eid, cached in repo._type_source_cache.items() - if etype == cached[0]) - repo.clear_caches(tocleanup) - cnx.commit(False) - if needcopy: - ertidx[eid] = netype - copiedeids.add(eid) - # copy / CWEType entity removal expected to be done through - # rename_entity_type in a migration script - continue - etype = netype - ertidx[eid] = etype - eschema = schema.add_entity_type( - ybo.EntityType(name=etype, description=desc, eid=eid)) - set_perms(eschema, permsidx) - # load inheritance relations - for etype, stype in cnx.execute( - 'Any XN, ETN WHERE X is CWEType, X name XN, X specializes ET, ET name ETN', - build_descr=False): - etype = ETYPE_NAME_MAP.get(etype, etype) - stype = ETYPE_NAME_MAP.get(stype, stype) - schema.eschema(etype)._specialized_type = stype - schema.eschema(stype)._specialized_by.append(etype) - if has_computed_relations: - rset = cnx.execute( - 'Any X, N, R, D WHERE X is CWComputedRType, X name N, ' - 'X rule R, X description D') - for eid, rule_name, rule, description in rset.rows: - rtype = ybo.ComputedRelation(name=rule_name, rule=rule, eid=eid, - description=description) - rschema = schema.add_relation_type(rtype) - set_perms(rschema, permsidx) - # load every relation types - for eid, rtype, desc, sym, il, ftc in cnx.execute( - 'Any X,N,D,S,I,FTC WHERE X is CWRType, X name N, X description D, ' - 'X symmetric S, X inlined I, X fulltext_container FTC', build_descr=False): - ertidx[eid] = rtype - rschema = schema.add_relation_type( - ybo.RelationType(name=rtype, description=desc, - symmetric=bool(sym), inlined=bool(il), - fulltext_container=ftc, eid=eid)) - # remains to load every relation definitions (ie relations and attributes) - cstrsidx = deserialize_rdef_constraints(cnx) - pendingrdefs = [] - # closure to factorize common code of attribute/relation rdef addition - def _add_rdef(rdefeid, seid, reid, oeid, **kwargs): - rdef = ybo.RelationDefinition(ertidx[seid], ertidx[reid], ertidx[oeid], - constraints=cstrsidx.get(rdefeid, ()), - eid=rdefeid, **kwargs) - if seid in copiedeids or oeid in copiedeids: - # delay addition of this rdef. We'll insert them later if needed. We - # have to do this because: - # - # * on etype renaming, we want relation of the old entity type being - # redirected to the new type during migration - # - # * in the case of a copy, we've to take care that rdef already - # existing in the schema are not overwritten by a redirected one, - # since we want correct eid on them (redirected rdef will be - # removed in rename_entity_type) - pendingrdefs.append(rdef) - else: - # add_relation_def return a RelationDefinitionSchema if it has been - # actually added (can be None on duplicated relation definitions, - # e.g. if the relation type is marked as beeing symmetric) - rdefs = schema.add_relation_def(rdef) - if rdefs is not None: - ertidx[rdefeid] = rdefs - set_perms(rdefs, permsidx) - # Get the type parameters for additional base types. 
- try: - extra_props = dict(cnx.execute('Any X, XTP WHERE X is CWAttribute, ' - 'X extra_props XTP')) - except Exception: - cnx.critical('Previous CRITICAL notification about extra_props is not ' - 'a problem if you are migrating to cubicweb 3.17') - extra_props = {} # not yet in the schema (introduced by 3.17 migration) - - # load attributes - rql = ('Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT%(fm)s ' - 'WHERE X is CWAttribute, X relation_type RT, X cardinality CARD,' - ' X ordernum ORD, X indexed IDX, X description DESC, ' - ' X internationalizable I18N, X defaultval DFLT,%(fmsnip)s' - ' X fulltextindexed FTIDX, X from_entity SE, X to_entity OE') - if has_computed_attributes: - rql = rql % {'fm': ',FM', 'fmsnip': 'X formula FM,'} - else: - rql = rql % {'fm': '', 'fmsnip': ''} - for values in cnx.execute(rql, build_descr=False): - attrs = dict(zip( - ('rdefeid', 'seid', 'reid', 'oeid', 'cardinality', - 'order', 'description', 'indexed', 'fulltextindexed', - 'internationalizable', 'default', 'formula'), values)) - typeparams = extra_props.get(attrs['rdefeid']) - attrs.update(json.loads(typeparams.getvalue().decode('ascii')) if typeparams else {}) - default = attrs['default'] - if default is not None: - if isinstance(default, Binary): - # while migrating from 3.17 to 3.18, we still have to - # handle String defaults - attrs['default'] = default.unzpickle() - _add_rdef(**attrs) - # load relations - for values in cnx.execute( - 'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,' - 'X cardinality CARD, X ordernum ORD, X description DESC, ' - 'X from_entity SE, X to_entity OE, X composite C', build_descr=False): - rdefeid, seid, reid, oeid, card, ord, desc, comp = values - _add_rdef(rdefeid, seid, reid, oeid, - cardinality=card, description=desc, order=ord, - composite=comp) - for rdef in pendingrdefs: - try: - rdefs = schema.add_relation_def(rdef) - except BadSchemaDefinition: - continue - if rdefs is not None: - set_perms(rdefs, permsidx) - unique_togethers = {} - rset = cnx.execute( - 'Any X,E,R WHERE ' - 'X is CWUniqueTogetherConstraint, ' - 'X constraint_of E, X relations R', build_descr=False) - for values in rset: - uniquecstreid, eeid, releid = values - eschema = schema.schema_by_eid(eeid) - relations = unique_togethers.setdefault(uniquecstreid, (eschema, [])) - rel = ertidx[releid] - if isinstance(rel, schemamod.RelationDefinitionSchema): - # not yet migrated 3.9 database ('relations' target type changed - # to CWRType in 3.10) - rtype = rel.rtype.type - else: - rtype = str(rel) - relations[1].append(rtype) - for eschema, unique_together in unique_togethers.values(): - eschema._unique_together.append(tuple(sorted(unique_together))) - schema.infer_specialization_rules() - cnx.commit() - schema.finalize() - schema.reading_from_database = False - - -def deserialize_ertype_permissions(cnx): - """return sect action:groups associations for the given - entity or relation schema with its eid, according to schema's - permissions stored in the database as [read|add|delete|update]_permission - relations between CWEType/CWRType and CWGroup entities - """ - res = {} - for action in ('read', 'add', 'update', 'delete'): - rql = 'Any E,N WHERE G is CWGroup, G name N, E %s_permission G' % action - for eid, gname in cnx.execute(rql, build_descr=False): - res.setdefault(eid, {}).setdefault(action, []).append(gname) - rql = ('Any E,X,EXPR,V WHERE X is RQLExpression, X expression EXPR, ' - 'E %s_permission X, X mainvars V' % action) - for eid, expreid, expr, mainvars in 
cnx.execute(rql, build_descr=False): - # we don't know yet if it's a rql expr for an entity or a relation, - # so append a tuple to differentiate from groups and so we'll be - # able to instantiate it later - res.setdefault(eid, {}).setdefault(action, []).append( (expr, mainvars, expreid) ) - return res - -def deserialize_rdef_constraints(cnx): - """return the list of relation definition's constraints as instances""" - res = {} - for rdefeid, ceid, ct, val in cnx.execute( - 'Any E, X,TN,V WHERE E constrained_by X, X is CWConstraint, ' - 'X cstrtype T, T name TN, X value V', build_descr=False): - cstr = CONSTRAINTS[ct].deserialize(val) - cstr.eid = ceid - res.setdefault(rdefeid, []).append(cstr) - return res - -def set_perms(erschema, permsidx): - """set permissions on the given erschema according to the permission - definition dictionary as built by deserialize_ertype_permissions for a - given erschema's eid - """ - # reset erschema permissions here to avoid getting yams default anyway - erschema.permissions = dict((action, ()) for action in erschema.ACTIONS) - try: - thispermsdict = permsidx[erschema.eid] - except KeyError: - return - for action, somethings in thispermsdict.items(): - erschema.permissions[action] = tuple( - isinstance(p, tuple) and erschema.rql_expression(*p) or p - for p in somethings) - - -# schema / perms serialization ################################################ - -def serialize_schema(cnx, schema): - """synchronize schema and permissions in the database according to - current schema - """ - _title = '-> storing the schema in the database ' - print(_title, end=' ') - execute = cnx.execute - eschemas = schema.entities() - pb_size = (len(eschemas + schema.relations()) - + len(CONSTRAINTS) - + len([x for x in eschemas if x.specializes()])) - if sys.stdout.isatty(): - pb = ProgressBar(pb_size, title=_title) - else: - pb = DummyProgressBar() - groupmap = group_mapping(cnx, interactive=False) - # serialize all entity types, assuring CWEType is serialized first for proper - # is / is_instance_of insertion - eschemas.remove(schema.eschema('CWEType')) - eschemas.insert(0, schema.eschema('CWEType')) - for eschema in eschemas: - execschemarql(execute, eschema, eschema2rql(eschema, groupmap)) - pb.update() - # serialize constraint types - cstrtypemap = {} - rql = 'INSERT CWConstraintType X: X name %(ct)s' - for cstrtype in CONSTRAINTS: - cstrtypemap[cstrtype] = execute(rql, {'ct': text_type(cstrtype)}, - build_descr=False)[0][0] - pb.update() - # serialize relations - for rschema in schema.relations(): - # skip virtual relations such as eid, has_text and identity - if rschema in VIRTUAL_RTYPES: - pb.update() - continue - if rschema.rule: - execschemarql(execute, rschema, crschema2rql(rschema, groupmap)) - pb.update() - continue - execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False)) - if rschema.symmetric: - rdefs = [rdef for k, rdef in rschema.rdefs.items() - if (rdef.subject, rdef.object) == k] - else: - rdefs = rschema.rdefs.values() - for rdef in rdefs: - execschemarql(execute, rdef, - rdef2rql(rdef, cstrtypemap, groupmap)) - pb.update() - # serialize unique_together constraints - for eschema in eschemas: - if eschema._unique_together: - execschemarql(execute, eschema, uniquetogether2rqls(eschema)) - # serialize yams inheritance relationships - for rql, kwargs in specialize2rql(schema): - execute(rql, kwargs, build_descr=False) - pb.update() - print() - - -# high level serialization functions - -def execschemarql(execute, schema, rqls): - for rql, kwargs in 
rqls: - kwargs['x'] = schema.eid - rset = execute(rql, kwargs, build_descr=False) - if schema.eid is None: - schema.eid = rset[0][0] - else: - assert rset - -def erschema2rql(erschema, groupmap): - if isinstance(erschema, schemamod.EntitySchema): - return eschema2rql(erschema, groupmap=groupmap) - return rschema2rql(erschema, groupmap=groupmap) - -def specialize2rql(schema): - for eschema in schema.entities(): - if eschema.final: - continue - for rql, kwargs in eschemaspecialize2rql(eschema): - yield rql, kwargs - -# etype serialization - -def eschema2rql(eschema, groupmap=None): - """return a list of rql insert statements to enter an entity schema - in the database as an CWEType entity - """ - relations, values = eschema_relations_values(eschema) - # NOTE: 'specializes' relation can't be inserted here since there's no - # way to make sure the parent type is inserted before the child type - yield 'INSERT CWEType X: %s' % ','.join(relations) , values - # entity permissions - if groupmap is not None: - for rql, args in _erperms2rql(eschema, groupmap): - yield rql, args - -def eschema_relations_values(eschema): - values = _ervalues(eschema) - relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] - return relations, values - -def eschemaspecialize2rql(eschema): - specialized_type = eschema.specializes() - if specialized_type: - values = {'x': eschema.eid, 'et': specialized_type.eid} - yield 'SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', values - -def uniquetogether2rqls(eschema): - rql_args = [] - # robustness against duplicated CWUniqueTogetherConstraint (pre 3.18) - columnset = set() - for columns in eschema._unique_together: - if columns in columnset: - print('schemaserial: skipping duplicate unique together %r %r' % - (eschema.type, columns)) - continue - columnset.add(columns) - rql, args = _uniquetogether2rql(eschema, columns) - args['name'] = y2sql.unique_index_name(eschema, columns) - rql_args.append((rql, args)) - return rql_args - -def _uniquetogether2rql(eschema, unique_together): - relations = [] - restrictions = [] - substs = {} - for i, name in enumerate(unique_together): - rschema = eschema.schema.rschema(name) - rtype = 'T%d' % i - substs[rtype] = text_type(rschema.type) - relations.append('C relations %s' % rtype) - restrictions.append('%(rtype)s name %%(%(rtype)s)s' % {'rtype': rtype}) - relations = ', '.join(relations) - restrictions = ', '.join(restrictions) - rql = ('INSERT CWUniqueTogetherConstraint C: C name %%(name)s, C constraint_of X, %s ' - 'WHERE X eid %%(x)s, %s') - return rql % (relations, restrictions), substs - - -def _ervalues(erschema): - try: - type_ = text_type(erschema.type) - except UnicodeDecodeError as e: - raise Exception("can't decode %s [was %s]" % (erschema.type, e)) - try: - desc = text_type(erschema.description) or u'' - except UnicodeDecodeError as e: - raise Exception("can't decode %s [was %s]" % (erschema.description, e)) - return { - 'name': type_, - 'final': erschema.final, - 'description': desc, - } - -# rtype serialization - -def rschema2rql(rschema, cstrtypemap=None, addrdef=True, groupmap=None): - """generate rql insert statements to enter a relation schema - in the database as an CWRType entity - """ - if rschema.type == 'has_text': - return - relations, values = rschema_relations_values(rschema) - yield 'INSERT CWRType X: %s' % ','.join(relations), values - if addrdef: - assert cstrtypemap - # sort for testing purpose - for rdef in sorted(rschema.rdefs.values(), - key=lambda x: (x.subject, x.object)): - for 
rql, values in rdef2rql(rdef, cstrtypemap, groupmap): - yield rql, values - -def rschema_relations_values(rschema): - values = _ervalues(rschema) - values['final'] = rschema.final - values['symmetric'] = rschema.symmetric - values['inlined'] = rschema.inlined - if PY2 and isinstance(rschema.fulltext_container, str): - values['fulltext_container'] = unicode(rschema.fulltext_container) - else: - values['fulltext_container'] = rschema.fulltext_container - relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] - return relations, values - -def crschema2rql(crschema, groupmap): - relations, values = crschema_relations_values(crschema) - yield 'INSERT CWComputedRType X: %s' % ','.join(relations), values - if groupmap: - for rql, args in _erperms2rql(crschema, groupmap): - yield rql, args - -def crschema_relations_values(crschema): - values = _ervalues(crschema) - values['rule'] = text_type(crschema.rule) - # XXX why oh why? - del values['final'] - relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] - return relations, values - -# rdef serialization - -def rdef2rql(rdef, cstrtypemap, groupmap=None): - # don't serialize inferred relations - if rdef.infered: - return - relations, values = _rdef_values(rdef) - relations.append('X relation_type ER,X from_entity SE,X to_entity OE') - values.update({'se': rdef.subject.eid, 'rt': rdef.rtype.eid, 'oe': rdef.object.eid}) - if rdef.final: - etype = 'CWAttribute' - else: - etype = 'CWRelation' - yield 'INSERT %s X: %s WHERE SE eid %%(se)s,ER eid %%(rt)s,OE eid %%(oe)s' % ( - etype, ','.join(relations), ), values - for rql, values in constraints2rql(cstrtypemap, rdef.constraints): - yield rql, values - # no groupmap means "no security insertion" - if groupmap: - for rql, args in _erperms2rql(rdef, groupmap): - yield rql, args - -_IGNORED_PROPS = ['eid', 'constraints', 'uid', 'infered', 'permissions'] - -def _rdef_values(rdef): - amap = {'order': 'ordernum', 'default': 'defaultval'} - values = {} - extra = {} - for prop in rdef.rproperty_defs(rdef.object): - if prop in _IGNORED_PROPS: - continue - value = getattr(rdef, prop) - if prop not in KNOWN_RPROPERTIES: - extra[prop] = value - continue - # XXX type cast really necessary? 
- if prop in ('indexed', 'fulltextindexed', 'internationalizable'): - value = bool(value) - elif prop == 'ordernum': - value = int(value) - elif PY2 and isinstance(value, str): - value = unicode(value) - if value is not None and prop == 'default': - value = Binary.zpickle(value) - values[amap.get(prop, prop)] = value - if extra: - values['extra_props'] = Binary(json.dumps(extra).encode('ascii')) - relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] - return relations, values - -def constraints2rql(cstrtypemap, constraints, rdefeid=None): - for constraint in constraints: - values = {'ct': cstrtypemap[constraint.type()], - 'value': text_type(constraint.serialize()), - 'x': rdefeid} # when not specified, will have to be set by the caller - yield 'INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \ -CT eid %(ct)s, EDEF eid %(x)s', values - - -def _erperms2rql(erschema, groupmap): - """return rql insert statements to enter the entity or relation - schema's permissions in the database as - [read|add|delete|update]_permission relations between CWEType/CWRType - and CWGroup entities - """ - for action in erschema.ACTIONS: - try: - grantedto = erschema.action_permissions(action) - except KeyError: - # may occurs when modifying persistent schema - continue - for group_or_rqlexpr in grantedto: - if isinstance(group_or_rqlexpr, string_types): - # group - try: - yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action, - {'g': groupmap[group_or_rqlexpr]}) - except KeyError: - print("WARNING: group %s used in permissions for %s was ignored because it doesn't exist." - " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema)) - continue - else: - # rqlexpr - rqlexpr = group_or_rqlexpr - yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, ' - 'E mainvars %%(v)s, X %s_permission E WHERE X eid %%(x)s' % action, - {'e': text_type(rqlexpr.expression), - 'v': text_type(','.join(sorted(rqlexpr.mainvars))), - 't': text_type(rqlexpr.__class__.__name__)}) - -# update functions - -def updateeschema2rql(eschema, eid): - relations, values = eschema_relations_values(eschema) - values['x'] = eid - yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values - -def updaterschema2rql(rschema, eid): - if rschema.rule: - yield ('SET X rule %(r)s WHERE X eid %(x)s', - {'x': eid, 'r': text_type(rschema.rule)}) - else: - relations, values = rschema_relations_values(rschema) - values['x'] = eid - yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values - -def updaterdef2rql(rdef, eid): - relations, values = _rdef_values(rdef) - values['x'] = eid - yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values diff -r 058bb3dc685f -r 0b59724cb3f2 server/serverconfig.py --- a/server/serverconfig.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,350 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""server.serverconfig definition""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -from os.path import join, exists - -from six.moves import StringIO - -import logilab.common.configuration as lgconfig -from logilab.common.decorators import cached - -from cubicweb.toolsutils import read_config, restrict_perms_to_user -from cubicweb.cwconfig import CONFIGURATIONS, CubicWebConfiguration -from cubicweb.server import SOURCE_TYPES - - -USER_OPTIONS = ( - ('login', {'type' : 'string', - 'default': 'admin', - 'help': "cubicweb manager account's login " - '(this user will be created)', - 'level': 0, - }), - ('password', {'type' : 'password', - 'default': lgconfig.REQUIRED, - 'help': "cubicweb manager account's password", - 'level': 0, - }), - ) - -class SourceConfiguration(lgconfig.Configuration): - def __init__(self, appconfig, options): - self.appconfig = appconfig # has to be done before super call - super(SourceConfiguration, self).__init__(options=options) - - # make Method('default_instance_id') usable in db option defs (in native.py) - def default_instance_id(self): - return self.appconfig.appid - - def input_option(self, option, optdict, inputlevel): - try: - dbdriver = self['db-driver'] - except lgconfig.OptionError: - pass - else: - if dbdriver == 'sqlite': - if option in ('db-user', 'db-password'): - return - if option == 'db-name': - optdict = optdict.copy() - optdict['help'] = 'path to the sqlite database' - optdict['default'] = join(self.appconfig.appdatahome, - self.appconfig.appid + '.sqlite') - super(SourceConfiguration, self).input_option(option, optdict, inputlevel) - - - -def ask_source_config(appconfig, type, inputlevel=0): - options = SOURCE_TYPES[type].options - sconfig = SourceConfiguration(appconfig, options=options) - sconfig.input_config(inputlevel=inputlevel) - return sconfig - -def generate_source_config(sconfig, encoding=sys.stdin.encoding): - """serialize a repository source configuration as text""" - stream = StringIO() - optsbysect = list(sconfig.options_by_section()) - assert len(optsbysect) == 1, ( - 'all options for a source should be in the same group, got %s' - % [x[0] for x in optsbysect]) - lgconfig.ini_format(stream, optsbysect[0][1], encoding) - return stream.getvalue() - - -class ServerConfiguration(CubicWebConfiguration): - """standalone RQL server""" - name = 'repository' - - cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(['sobjects', 'hooks']) - cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['sobjects', 'hooks']) - - options = lgconfig.merge_options(( - # ctl configuration - ('host', - {'type' : 'string', - 'default': None, - 'help': 'host name if not correctly detectable through gethostname', - 'group': 'main', 'level': 1, - }), - ('pid-file', - {'type' : 'string', - 'default': lgconfig.Method('default_pid_file'), - 'help': 'repository\'s pid file', - 'group': 'main', 'level': 2, - }), - ('uid', - {'type' : 'string', - 'default': None, - 'help': 'if this option is set, use the specified user to start \ -the repository rather than the user running the command', - 'group': 'main', 'level': (CubicWebConfiguration.mode == 'installed') and 0 or 1, - }), - ('cleanup-session-time', - {'type' : 'time', - 'default': '24h', - 'help': 'duration of inactivity after which a session ' - 
'will be closed, to limit memory consumption (avoid sessions that ' - 'never expire and cause memory leak when http-session-time is 0, or ' - 'because of bad client that never closes their connection). ' - 'So notice that even if http-session-time is 0 and the user don\'t ' - 'close his browser, he will have to reauthenticate after this time ' - 'of inactivity. Default to 24h.', - 'group': 'main', 'level': 3, - }), - ('connections-pool-size', - {'type' : 'int', - 'default': 4, - 'help': 'size of the connections pool. Each source supporting multiple \ -connections will have this number of opened connections.', - 'group': 'main', 'level': 3, - }), - ('rql-cache-size', - {'type' : 'int', - 'default': 3000, - 'help': 'size of the parsed rql cache size.', - 'group': 'main', 'level': 3, - }), - ('undo-enabled', - {'type' : 'yn', 'default': False, - 'help': 'enable undo support', - 'group': 'main', 'level': 3, - }), - ('keep-transaction-lifetime', - {'type' : 'int', 'default': 7, - 'help': 'number of days during which transaction records should be \ -kept (hence undoable).', - 'group': 'main', 'level': 3, - }), - ('multi-sources-etypes', - {'type' : 'csv', 'default': (), - 'help': 'defines which entity types from this repository are used \ -by some other instances. You should set this properly for these instances to \ -detect updates / deletions.', - 'group': 'main', 'level': 3, - }), - - ('delay-full-text-indexation', - {'type' : 'yn', 'default': False, - 'help': 'When full text indexation of entity has a too important cost' - ' to be done when entity are added/modified by users, activate this ' - 'option and setup a job using cubicweb-ctl db-rebuild-fti on your ' - 'system (using cron for instance).', - 'group': 'main', 'level': 3, - }), - - # email configuration - ('default-recipients-mode', - {'type' : 'choice', - 'choices' : ('default-dest-addrs', 'users', 'none'), - 'default': 'default-dest-addrs', - 'help': 'when a notification should be sent with no specific rules \ -to find recipients, recipients will be found according to this mode. Available \ -modes are "default-dest-addrs" (emails specified in the configuration \ -variable with the same name), "users" (every users which has activated \ -account with an email set), "none" (no notification).', - 'group': 'email', 'level': 2, - }), - ('default-dest-addrs', - {'type' : 'csv', - 'default': (), - 'help': 'comma separated list of email addresses that will be used \ -as default recipient when an email is sent and the notification has no \ -specific recipient rules.', - 'group': 'email', 'level': 2, - }), - ('supervising-addrs', - {'type' : 'csv', - 'default': (), - 'help': 'comma separated list of email addresses that will be \ -notified of every changes.', - 'group': 'email', 'level': 2, - }), - ('zmq-address-sub', - {'type' : 'csv', - 'default' : (), - 'help': ('List of ZMQ addresses to subscribe to (requires pyzmq) ' - '(of the form `tcp://:`)'), - 'group': 'zmq', 'level': 1, - }), - ('zmq-address-pub', - {'type' : 'string', - 'default' : None, - 'help': ('ZMQ address to use for publishing (requires pyzmq) ' - '(of the form `tcp://:`)'), - 'group': 'zmq', 'level': 1, - }), - ) + CubicWebConfiguration.options) - - # should we init the connections pool (eg connect to sources). This is - # usually necessary... 
- init_cnxset_pool = True - - # read the schema from the database - read_instance_schema = True - # set this to true to get a minimal repository, for instance to get cubes - # information on commands such as i18ninstance, db-restore, etc... - quick_start = False - # check user's state at login time - consider_user_state = True - - # should some hooks be deactivated during [pre|post]create script execution - free_wheel = False - - # list of enables sources when sources restriction is necessary - # (eg repository initialization at least) - enabled_sources = None - - def bootstrap_cubes(self): - from logilab.common.textutils import splitstrip - with open(join(self.apphome, 'bootstrap_cubes')) as f: - for line in f: - line = line.strip() - if not line or line.startswith('#'): - continue - self.init_cubes(self.expand_cubes(splitstrip(line))) - break - else: - # no cubes - self.init_cubes(()) - - def write_bootstrap_cubes_file(self, cubes): - stream = open(join(self.apphome, 'bootstrap_cubes'), 'w') - stream.write('# this is a generated file only used for bootstraping\n') - stream.write('# you should not have to edit this\n') - stream.write('%s\n' % ','.join(cubes)) - stream.close() - - def sources_file(self): - return join(self.apphome, 'sources') - - # this method has to be cached since when the server is running using a - # restricted user, this user usually don't have access to the sources - # configuration file (#16102) - @cached - def read_sources_file(self): - """return a dictionary of values found in the sources file""" - return read_config(self.sources_file(), raise_if_unreadable=True) - - @property - def system_source_config(self): - return self.read_sources_file()['system'] - - @property - def default_admin_config(self): - return self.read_sources_file()['admin'] - - def source_enabled(self, source): - if self.sources_mode is not None: - if 'migration' in self.sources_mode: - assert len(self.sources_mode) == 1 - if source.connect_for_migration: - return True - print('not connecting to source', source.uri, 'during migration') - return False - if 'all' in self.sources_mode: - assert len(self.sources_mode) == 1 - return True - return source.uri in self.sources_mode - if self.quick_start: - return source.uri == 'system' - return (not source.disabled and ( - not self.enabled_sources or source.uri in self.enabled_sources)) - - def write_sources_file(self, sourcescfg): - """serialize repository'sources configuration into a INI like file""" - sourcesfile = self.sources_file() - if exists(sourcesfile): - import shutil - shutil.copy(sourcesfile, sourcesfile + '.bak') - stream = open(sourcesfile, 'w') - for section in ('admin', 'system'): - sconfig = sourcescfg[section] - if isinstance(sconfig, dict): - # get a Configuration object - assert section == 'system', '%r is not system' % section - _sconfig = SourceConfiguration( - self, options=SOURCE_TYPES['native'].options) - for attr, val in sconfig.items(): - try: - _sconfig.set_option(attr, val) - except lgconfig.OptionError: - # skip adapter, may be present on pre 3.10 instances - if attr != 'adapter': - self.error('skip unknown option %s in sources file' % attr) - sconfig = _sconfig - stream.write('[%s]\n%s\n' % (section, generate_source_config(sconfig))) - restrict_perms_to_user(sourcesfile) - - def load_schema(self, expand_cubes=False, **kwargs): - from cubicweb.schema import CubicWebSchemaLoader - if expand_cubes: - # in case some new dependencies have been introduced, we have to - # reinitialize cubes so the full filesystem schema is read - 
origcubes = self.cubes() - self._cubes = None - self.init_cubes(self.expand_cubes(origcubes)) - schema = CubicWebSchemaLoader().load(self, **kwargs) - if expand_cubes: - # restore original value - self._cubes = origcubes - return schema - - def load_bootstrap_schema(self): - from cubicweb.schema import BootstrapSchemaLoader - schema = BootstrapSchemaLoader().load(self) - schema.name = 'bootstrap' - return schema - - sources_mode = None - def set_sources_mode(self, sources): - self.sources_mode = sources - - def migration_handler(self, schema=None, interactive=True, - cnx=None, repo=None, connect=True, verbosity=None): - """return a migration handler instance""" - from cubicweb.server.migractions import ServerMigrationHelper - if verbosity is None: - verbosity = getattr(self, 'verbosity', 0) - return ServerMigrationHelper(self, schema, interactive=interactive, - cnx=cnx, repo=repo, connect=connect, - verbosity=verbosity) diff -r 058bb3dc685f -r 0b59724cb3f2 server/serverctl.py --- a/server/serverctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1100 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb-ctl commands and command handlers specific to the repository""" -from __future__ import print_function - -__docformat__ = 'restructuredtext en' - -# *ctl module should limit the number of import to be imported as quickly as -# possible (for cubicweb-ctl reactivity, necessary for instance for usable bash -# completion). So import locally in command helpers. 
-import sys -import os -from contextlib import contextmanager -import logging -import subprocess - -from six import string_types -from six.moves import input - -from logilab.common import nullobject -from logilab.common.configuration import Configuration, merge_options -from logilab.common.shellutils import ASK, generate_password - -from logilab.database import get_db_helper, get_connection - -from cubicweb import AuthenticationError, ExecutionError, ConfigurationError -from cubicweb.toolsutils import Command, CommandHandler, underline_title -from cubicweb.cwctl import CWCTL, check_options_consistency, ConfigureInstanceCommand -from cubicweb.server import SOURCE_TYPES -from cubicweb.server.serverconfig import ( - USER_OPTIONS, ServerConfiguration, SourceConfiguration, - ask_source_config, generate_source_config) - -# utility functions ########################################################### - -def source_cnx(source, dbname=None, special_privs=False, interactive=True): - """open and return a connection to the system database defined in the - given server.serverconfig - """ - from getpass import getpass - dbhost = source.get('db-host') - if dbname is None: - dbname = source['db-name'] - driver = source['db-driver'] - dbhelper = get_db_helper(driver) - if interactive: - print('-> connecting to %s database' % driver, end=' ') - if dbhost: - print('%s@%s' % (dbname, dbhost), end=' ') - else: - print(dbname, end=' ') - if dbhelper.users_support: - if not interactive or (not special_privs and source.get('db-user')): - user = source.get('db-user', os.environ.get('USER', '')) - if interactive: - print('as', user) - password = source.get('db-password') - else: - print() - if special_privs: - print('WARNING') - print ('the user will need the following special access rights ' - 'on the database:') - print(special_privs) - print() - default_user = source.get('db-user', os.environ.get('USER', '')) - user = input('Connect as user ? [%r]: ' % default_user) - user = user.strip() or default_user - if user == source.get('db-user'): - password = source.get('db-password') - else: - password = getpass('password: ') - else: - user = password = None - extra_args = source.get('db-extra-arguments') - extra = extra_args and {'extra_args': extra_args} or {} - cnx = get_connection(driver, dbhost, dbname, user, password=password, - port=source.get('db-port'), - schema=source.get('db-namespace'), - **extra) - try: - cnx.logged_user = user - except AttributeError: - # C object, __slots__ - from logilab.database import _SimpleConnectionWrapper - cnx = _SimpleConnectionWrapper(cnx) - cnx.logged_user = user - return cnx - -def system_source_cnx(source, dbms_system_base=False, - special_privs='CREATE/DROP DATABASE', interactive=True): - """shortcut to get a connextion to the instance system database - defined in the given config. 
If is True, - connect to the dbms system database instead (for task such as - create/drop the instance database) - """ - if dbms_system_base: - system_db = get_db_helper(source['db-driver']).system_database() - return source_cnx(source, system_db, special_privs=special_privs, - interactive=interactive) - return source_cnx(source, special_privs=special_privs, - interactive=interactive) - -def _db_sys_cnx(source, special_privs, interactive=True): - """return a connection on the RDMS system table (to create/drop a user or a - database) - """ - import logilab.common as lgp - lgp.USE_MX_DATETIME = False - driver = source['db-driver'] - helper = get_db_helper(driver) - # connect on the dbms system base to create our base - cnx = system_source_cnx(source, True, special_privs=special_privs, - interactive=interactive) - # disable autocommit (isolation_level(1)) because DROP and - # CREATE DATABASE can't be executed in a transaction - set_isolation_level = getattr(cnx, 'set_isolation_level', None) - if set_isolation_level is not None: - # set_isolation_level() is psycopg specific - set_isolation_level(0) - return cnx - -def repo_cnx(config): - """return a in-memory repository and a repoapi connection to it""" - from cubicweb import repoapi - from cubicweb.server.utils import manager_userpasswd - try: - login = config.default_admin_config['login'] - pwd = config.default_admin_config['password'] - except KeyError: - login, pwd = manager_userpasswd() - while True: - try: - repo = repoapi.get_repository(config=config) - cnx = repoapi.connect(repo, login, password=pwd) - return repo, cnx - except AuthenticationError: - print('-> Error: wrong user/password.') - # reset cubes else we'll have an assertion error on next retry - config._cubes = None - login, pwd = manager_userpasswd() - - -# repository specific command handlers ######################################## - -class RepositoryCreateHandler(CommandHandler): - cmdname = 'create' - cfgname = 'repository' - - def bootstrap(self, cubes, automatic=False, inputlevel=0): - """create an instance by copying files from the given cube and by asking - information necessary to build required configuration files - """ - config = self.config - if not automatic: - print(underline_title('Configuring the repository')) - config.input_config('email', inputlevel) - print('\n'+underline_title('Configuring the sources')) - sourcesfile = config.sources_file() - # hack to make Method('default_instance_id') usable in db option defs - # (in native.py) - sconfig = SourceConfiguration(config, - options=SOURCE_TYPES['native'].options) - if not automatic: - sconfig.input_config(inputlevel=inputlevel) - print() - sourcescfg = {'system': sconfig} - if automatic: - # XXX modify a copy - password = generate_password() - print('-> set administrator account to admin / %s' % password) - USER_OPTIONS[1][1]['default'] = password - sconfig = Configuration(options=USER_OPTIONS) - else: - sconfig = Configuration(options=USER_OPTIONS) - sconfig.input_config(inputlevel=inputlevel) - sourcescfg['admin'] = sconfig - config.write_sources_file(sourcescfg) - # remember selected cubes for later initialization of the database - config.write_bootstrap_cubes_file(cubes) - - def postcreate(self, automatic=False, inputlevel=0): - if automatic: - CWCTL.run(['db-create', '--automatic', self.config.appid]) - elif ASK.confirm('Run db-create to create the system database ?'): - CWCTL.run(['db-create', '--config-level', str(inputlevel), - self.config.appid]) - else: - print('-> nevermind, you can do it later 
with ' - '"cubicweb-ctl db-create %s".' % self.config.appid) - - -@contextmanager -def db_transaction(source, privilege): - """Open a transaction to the instance database""" - cnx = system_source_cnx(source, special_privs=privilege) - cursor = cnx.cursor() - try: - yield cursor - except: - cnx.rollback() - cnx.close() - raise - else: - cnx.commit() - cnx.close() - - -@contextmanager -def db_sys_transaction(source, privilege): - """Open a transaction to the system database""" - cnx = _db_sys_cnx(source, privilege) - cursor = cnx.cursor() - try: - yield cursor - except: - cnx.rollback() - cnx.close() - raise - else: - cnx.commit() - cnx.close() - - -class RepositoryDeleteHandler(CommandHandler): - cmdname = 'delete' - cfgname = 'repository' - - def _drop_namespace(self, source): - db_namespace = source.get('db-namespace') - with db_transaction(source, privilege='DROP SCHEMA') as cursor: - helper = get_db_helper(source['db-driver']) - helper.drop_schema(cursor, db_namespace) - print('-> database schema %s dropped' % db_namespace) - - def _drop_database(self, source): - dbname = source['db-name'] - if source['db-driver'] == 'sqlite': - print('deleting database file %(db-name)s' % source) - os.unlink(source['db-name']) - print('-> database %(db-name)s dropped.' % source) - else: - helper = get_db_helper(source['db-driver']) - with db_sys_transaction(source, privilege='DROP DATABASE') as cursor: - print('dropping database %(db-name)s' % source) - cursor.execute('DROP DATABASE "%(db-name)s"' % source) - print('-> database %(db-name)s dropped.' % source) - - def _drop_user(self, source): - user = source['db-user'] or None - if user is not None: - with db_sys_transaction(source, privilege='DROP USER') as cursor: - print('dropping user %s' % user) - cursor.execute('DROP USER %s' % user) - - def _cleanup_steps(self, source): - # 1/ delete namespace if used - db_namespace = source.get('db-namespace') - if db_namespace: - yield ('Delete database namespace "%s"' % db_namespace, - self._drop_namespace, True) - # 2/ delete database - yield ('Delete database "%(db-name)s"' % source, - self._drop_database, True) - # 3/ delete user - helper = get_db_helper(source['db-driver']) - if source['db-user'] and helper.users_support: - # XXX should check we are not connected as user - yield ('Delete user "%(db-user)s"' % source, - self._drop_user, False) - - def cleanup(self): - """remove instance's configuration and database""" - source = self.config.system_source_config - for msg, step, default in self._cleanup_steps(source): - if ASK.confirm(msg, default_is_yes=default): - try: - step(source) - except Exception as exc: - print('ERROR', exc) - if ASK.confirm('An error occurred. Continue anyway?', - default_is_yes=False): - continue - raise ExecutionError(str(exc)) - - -# repository specific commands ################################################ - -def createdb(helper, source, dbcnx, cursor, **kwargs): - if dbcnx.logged_user != source['db-user']: - helper.create_database(cursor, source['db-name'], source['db-user'], - source['db-encoding'], **kwargs) - else: - helper.create_database(cursor, source['db-name'], - dbencoding=source['db-encoding'], **kwargs) - - -class CreateInstanceDBCommand(Command): - """Create the system database of an instance (run after 'create'). - - You will be prompted for a login / password to use to connect to - the system database. The given user should have almost all rights - on the database (ie a super user on the DBMS allowed to create - database, users, languages...). 
- - - the identifier of the instance to initialize. - """ - name = 'db-create' - arguments = '' - min_args = max_args = 1 - options = ( - ('automatic', - {'short': 'a', 'action' : 'store_true', - 'default': False, - 'help': 'automatic mode: never ask and use default answer to every ' - 'question. this may require that your login match a database super ' - 'user (allowed to create database & all).', - }), - ('config-level', - {'short': 'l', 'type' : 'int', 'metavar': '', - 'default': 0, - 'help': 'configuration level (0..2): 0 will ask for essential ' - 'configuration parameters only while 2 will ask for all parameters', - }), - ('create-db', - {'short': 'c', 'type': 'yn', 'metavar': '', - 'default': True, - 'help': 'create the database (yes by default)' - }), - ) - - def run(self, args): - """run the command with its specific arguments""" - check_options_consistency(self.config) - automatic = self.get('automatic') - appid = args.pop() - config = ServerConfiguration.config_for(appid) - source = config.system_source_config - dbname = source['db-name'] - driver = source['db-driver'] - helper = get_db_helper(driver) - if driver == 'sqlite': - if os.path.exists(dbname) and ( - automatic or - ASK.confirm('Database %s already exists. Drop it?' % dbname)): - os.unlink(dbname) - elif self.config.create_db: - print('\n'+underline_title('Creating the system database')) - # connect on the dbms system base to create our base - dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER', - interactive=not automatic) - cursor = dbcnx.cursor() - try: - if helper.users_support: - user = source['db-user'] - if not helper.user_exists(cursor, user) and (automatic or \ - ASK.confirm('Create db user %s ?' % user, default_is_yes=False)): - helper.create_user(source['db-user'], source.get('db-password')) - print('-> user %s created.' % user) - if dbname in helper.list_databases(cursor): - if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname): - cursor.execute('DROP DATABASE "%s"' % dbname) - else: - print('you may want to run "cubicweb-ctl db-init ' - '--drop %s" manually to continue.' % config.appid) - return - createdb(helper, source, dbcnx, cursor) - dbcnx.commit() - print('-> database %s created.' % dbname) - except BaseException: - dbcnx.rollback() - raise - cnx = system_source_cnx(source, special_privs='CREATE LANGUAGE/SCHEMA', - interactive=not automatic) - cursor = cnx.cursor() - helper.init_fti_extensions(cursor) - namespace = source.get('db-namespace') - if namespace and ASK.confirm('Create schema %s in database %s ?' - % (namespace, dbname)): - helper.create_schema(cursor, namespace) - cnx.commit() - # postgres specific stuff - if driver == 'postgres': - # install plpythonu/plpgsql languages - langs = ('plpythonu', 'plpgsql') - for extlang in langs: - if automatic or ASK.confirm('Create language %s ?' % extlang): - try: - helper.create_language(cursor, extlang) - except Exception as exc: - print('-> ERROR:', exc) - print('-> could not create language %s, some stored procedures might be unusable' % extlang) - cnx.rollback() - else: - cnx.commit() - print('-> database for instance %s created and necessary extensions installed.' 
% appid) - print() - if automatic: - CWCTL.run(['db-init', '--automatic', '--config-level', '0', - config.appid]) - elif ASK.confirm('Run db-init to initialize the system database ?'): - CWCTL.run(['db-init', '--config-level', - str(self.config.config_level), config.appid]) - else: - print('-> nevermind, you can do it later with ' - '"cubicweb-ctl db-init %s".' % config.appid) - - -class InitInstanceCommand(Command): - """Initialize the system database of an instance (run after 'db-create'). - - Notice this will be done using user specified in the sources files, so this - user should have the create tables grant permissions on the database. - - - the identifier of the instance to initialize. - """ - name = 'db-init' - arguments = '' - min_args = max_args = 1 - options = ( - ('automatic', - {'short': 'a', 'action' : 'store_true', - 'default': False, - 'help': 'automatic mode: never ask and use default answer to every ' - 'question.', - }), - ('config-level', - {'short': 'l', 'type': 'int', 'default': 0, - 'help': 'level threshold for questions asked when configuring ' - 'another source' - }), - ('drop', - {'short': 'd', 'action': 'store_true', - 'default': False, - 'help': 'insert drop statements to remove previously existant ' - 'tables, indexes... (no by default)' - }), - ) - - def run(self, args): - check_options_consistency(self.config) - print('\n'+underline_title('Initializing the system database')) - from cubicweb.server import init_repository - appid = args[0] - config = ServerConfiguration.config_for(appid) - try: - system = config.system_source_config - extra_args = system.get('db-extra-arguments') - extra = extra_args and {'extra_args': extra_args} or {} - get_connection( - system['db-driver'], database=system['db-name'], - host=system.get('db-host'), port=system.get('db-port'), - user=system.get('db-user') or '', password=system.get('db-password') or '', - schema=system.get('db-namespace'), **extra) - except Exception as ex: - raise ConfigurationError( - 'You seem to have provided wrong connection information in '\ - 'the %s file. Resolve this first (error: %s).' - % (config.sources_file(), str(ex).strip())) - init_repository(config, drop=self.config.drop) - if not self.config.automatic: - while ASK.confirm('Enter another source ?', default_is_yes=False): - CWCTL.run(['source-add', '--config-level', - str(self.config.config_level), config.appid]) - - -class AddSourceCommand(Command): - """Add a data source to an instance. - - - the identifier of the instance to initialize. 
- """ - name = 'source-add' - arguments = '' - min_args = max_args = 1 - options = ( - ('config-level', - {'short': 'l', 'type': 'int', 'default': 1, - 'help': 'level threshold for questions asked when configuring another source' - }), - ) - - def run(self, args): - appid = args[0] - config = ServerConfiguration.config_for(appid) - repo, cnx = repo_cnx(config) - repo.hm.call_hooks('server_maintenance', repo=repo) - try: - with cnx: - used = set(n for n, in cnx.execute('Any SN WHERE S is CWSource, S name SN')) - cubes = repo.get_cubes() - while True: - type = input('source type (%s): ' - % ', '.join(sorted(SOURCE_TYPES))) - if type not in SOURCE_TYPES: - print('-> unknown source type, use one of the available types.') - continue - sourcemodule = SOURCE_TYPES[type].module - if not sourcemodule.startswith('cubicweb.'): - # module names look like cubes.mycube.themodule - sourcecube = SOURCE_TYPES[type].module.split('.', 2)[1] - # if the source adapter is coming from an external component, - # ensure it's specified in used cubes - if not sourcecube in cubes: - print ('-> this source type require the %s cube which is ' - 'not used by the instance.') - continue - break - while True: - parser = input('parser type (%s): ' - % ', '.join(sorted(repo.vreg['parsers']))) - if parser in repo.vreg['parsers']: - break - print('-> unknown parser identifier, use one of the available types.') - while True: - sourceuri = input('source identifier (a unique name used to ' - 'tell sources apart): ').strip() - if not sourceuri: - print('-> mandatory.') - else: - sourceuri = unicode(sourceuri, sys.stdin.encoding) - if sourceuri in used: - print('-> uri already used, choose another one.') - else: - break - url = input('source URL (leave empty for none): ').strip() - url = unicode(url) if url else None - # XXX configurable inputlevel - sconfig = ask_source_config(config, type, inputlevel=self.config.config_level) - cfgstr = unicode(generate_source_config(sconfig), sys.stdin.encoding) - cnx.create_entity('CWSource', name=sourceuri, type=unicode(type), - config=cfgstr, parser=unicode(parser), url=unicode(url)) - cnx.commit() - finally: - repo.hm.call_hooks('server_shutdown') - - -class GrantUserOnInstanceCommand(Command): - """Grant a database user on a repository system database. - - - the identifier of the instance - - the database's user requiring grant access - """ - name = 'db-grant-user' - arguments = ' ' - min_args = max_args = 2 - options = ( - ('set-owner', - {'short': 'o', 'type' : 'yn', 'metavar' : '', - 'default' : False, - 'help': 'Set the user as tables owner if yes (no by default).'} - ), - ) - def run(self, args): - """run the command with its specific arguments""" - from cubicweb.server.sqlutils import sqlexec, sqlgrants - appid, user = args - config = ServerConfiguration.config_for(appid) - source = config.system_source_config - set_owner = self.config.set_owner - cnx = system_source_cnx(source, special_privs='GRANT') - cursor = cnx.cursor() - schema = config.load_schema() - try: - sqlexec(sqlgrants(schema, source['db-driver'], user, - set_owner=set_owner), cursor) - except Exception as ex: - cnx.rollback() - import traceback - traceback.print_exc() - print('-> an error occurred:', ex) - else: - cnx.commit() - print('-> rights granted to %s on instance %s.' % (appid, user)) - - -class ResetAdminPasswordCommand(Command): - """Reset the administrator password. 
- - - the identifier of the instance - """ - name = 'reset-admin-pwd' - arguments = '' - min_args = max_args = 1 - options = ( - ('password', - {'short': 'p', 'type' : 'string', 'metavar' : '', - 'default' : None, - 'help': 'Use this password instead of prompt for one.\n' - '/!\ THIS IS AN INSECURE PRACTICE /!\ \n' - 'the password will appear in shell history'} - ), - ) - - def run(self, args): - """run the command with its specific arguments""" - from cubicweb.server.utils import crypt_password, manager_userpasswd - appid = args[0] - config = ServerConfiguration.config_for(appid) - sourcescfg = config.read_sources_file() - try: - adminlogin = sourcescfg['admin']['login'] - except KeyError: - print('-> Error: could not get cubicweb administrator login.') - sys.exit(1) - cnx = source_cnx(sourcescfg['system']) - driver = sourcescfg['system']['db-driver'] - dbhelper = get_db_helper(driver) - cursor = cnx.cursor() - # check admin exists - cursor.execute("SELECT * FROM cw_CWUser WHERE cw_login=%(l)s", - {'l': adminlogin}) - if not cursor.fetchall(): - print("-> error: admin user %r specified in sources doesn't exist " - "in the database" % adminlogin) - print(" fix your sources file before running this command") - cnx.close() - sys.exit(1) - if self.config.password is None: - # ask for a new password - msg = 'new password for %s' % adminlogin - _, pwd = manager_userpasswd(adminlogin, confirm=True, passwdmsg=msg) - else: - pwd = self.config.password - try: - cursor.execute("UPDATE cw_CWUser SET cw_upassword=%(p)s WHERE cw_login=%(l)s", - {'p': dbhelper.binary_value(crypt_password(pwd)), 'l': adminlogin}) - sconfig = Configuration(options=USER_OPTIONS) - sconfig['login'] = adminlogin - sconfig['password'] = pwd - sourcescfg['admin'] = sconfig - config.write_sources_file(sourcescfg) - except Exception as ex: - cnx.rollback() - import traceback - traceback.print_exc() - print('-> an error occurred:', ex) - else: - cnx.commit() - print('-> password reset, sources file regenerated.') - cnx.close() - - - -def _remote_dump(host, appid, output, sudo=False): - # XXX generate unique/portable file name - from datetime import date - filename = '%s-%s.tgz' % (appid, date.today().strftime('%Y-%m-%d')) - dmpcmd = 'cubicweb-ctl db-dump -o /tmp/%s %s' % (filename, appid) - if sudo: - dmpcmd = 'sudo %s' % (dmpcmd) - dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd) - print(dmpcmd) - if os.system(dmpcmd): - raise ExecutionError('Error while dumping the database') - if output is None: - output = filename - cmd = 'scp %s:/tmp/%s %s' % (host, filename, output) - print(cmd) - if os.system(cmd): - raise ExecutionError('Error while retrieving the dump at /tmp/%s' % filename) - rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename) - print(rmcmd) - if os.system(rmcmd) and not ASK.confirm( - 'An error occurred while deleting remote dump at /tmp/%s. ' - 'Continue anyway?' 
% filename): - raise ExecutionError('Error while deleting remote dump at /tmp/%s' % filename) - - -def _local_dump(appid, output, format='native'): - config = ServerConfiguration.config_for(appid) - config.quick_start = True - mih = config.migration_handler(verbosity=1) - mih.backup_database(output, askconfirm=False, format=format) - mih.shutdown() - -def _local_restore(appid, backupfile, drop, format='native'): - config = ServerConfiguration.config_for(appid) - config.verbosity = 1 # else we won't be asked for confirmation on problems - config.quick_start = True - mih = config.migration_handler(connect=False, verbosity=1) - mih.restore_database(backupfile, drop, askconfirm=False, format=format) - repo = mih.repo - # version of the database - dbversions = repo.get_versions() - mih.shutdown() - if not dbversions: - print("bad or missing version information in the database, don't upgrade file system") - return - # version of installed software - eversion = dbversions['cubicweb'] - status = instance_status(config, eversion, dbversions) - # * database version > installed software - if status == 'needsoftupgrade': - print("** The database of %s is more recent than the installed software!" % config.appid) - print("** Upgrade your software, then migrate the database by running the command") - print("** 'cubicweb-ctl upgrade %s'" % config.appid) - return - # * database version < installed software, an upgrade will be necessary - # anyway, just rewrite vc.conf and warn user he has to upgrade - elif status == 'needapplupgrade': - print("** The database of %s is older than the installed software." % config.appid) - print("** Migrate the database by running the command") - print("** 'cubicweb-ctl upgrade %s'" % config.appid) - return - # * database version = installed software, database version = instance fs version - # ok! - -def instance_status(config, cubicwebapplversion, vcconf): - cubicwebversion = config.cubicweb_version() - if cubicwebapplversion > cubicwebversion: - return 'needsoftupgrade' - if cubicwebapplversion < cubicwebversion: - return 'needapplupgrade' - for cube in config.cubes(): - try: - softversion = config.cube_version(cube) - except ConfigurationError: - print('-> Error: no cube version information for %s, please check that the cube is installed.' % cube) - continue - try: - applversion = vcconf[cube] - except KeyError: - print('-> Error: no cube version information for %s in version configuration.' % cube) - continue - if softversion == applversion: - continue - if softversion > applversion: - return 'needsoftupgrade' - elif softversion < applversion: - return 'needapplupgrade' - return None - - -class DBDumpCommand(Command): - """Backup the system database of an instance. - - - the identifier of the instance to backup - format [[user@]host:]appname - """ - name = 'db-dump' - arguments = '' - min_args = max_args = 1 - options = ( - ('output', - {'short': 'o', 'type' : 'string', 'metavar' : '', - 'default' : None, - 'help': 'Specify the backup file where the backup will be stored.'} - ), - ('sudo', - {'short': 's', 'action' : 'store_true', - 'default' : False, - 'help': 'Use sudo on the remote host.'} - ), - ('format', - {'short': 'f', 'default': 'native', 'type': 'choice', - 'choices': ('native', 'portable'), - 'help': '"native" format uses db backend utilities to dump the database. 
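For orientation, a minimal sketch of driving the backup command defined above from Python rather than the shell; it mirrors the CWCTL.run chaining already used by db-create and db-restore, and 'myapp' plus the output path are placeholders:

from cubicweb.cwctl import CWCTL

# equivalent to: cubicweb-ctl db-dump --format portable -o /tmp/myapp.dump myapp
try:
    CWCTL.run(['db-dump', '--format', 'portable', '-o', '/tmp/myapp.dump', 'myapp'])
except SystemExit as exc:
    if exc.code:  # CWCTL.run() exits; a zero status means success
        raise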
' - '"portable" format uses a database independent format'} - ), - ) - - def run(self, args): - appid = args[0] - if ':' in appid: - host, appid = appid.split(':') - _remote_dump(host, appid, self.config.output, self.config.sudo) - else: - _local_dump(appid, self.config.output, format=self.config.format) - - - - -class DBRestoreCommand(Command): - """Restore the system database of an instance. - - - the identifier of the instance to restore - """ - name = 'db-restore' - arguments = ' ' - min_args = max_args = 2 - - options = ( - ('no-drop', - {'short': 'n', 'action' : 'store_true', 'default' : False, - 'help': 'for some reason the database doesn\'t exist and so ' - 'should not be dropped.'} - ), - ('format', - {'short': 'f', 'default': 'native', 'type': 'choice', - 'choices': ('native', 'portable'), - 'help': 'the format used when dumping the database'}), - ) - - def run(self, args): - appid, backupfile = args - if self.config.format == 'portable': - # we need to ensure a DB exist before restoring from portable format - if not self.config.no_drop: - try: - CWCTL.run(['db-create', '--automatic', appid]) - except SystemExit as exc: - # continue if the command exited with status 0 (success) - if exc.code: - raise - _local_restore(appid, backupfile, - drop=not self.config.no_drop, - format=self.config.format) - if self.config.format == 'portable': - try: - CWCTL.run(['db-rebuild-fti', appid]) - except SystemExit as exc: - if exc.code: - raise - - -class DBCopyCommand(Command): - """Copy the system database of an instance (backup and restore). - - - the identifier of the instance to backup - format [[user@]host:]appname - - - the identifier of the instance to restore - """ - name = 'db-copy' - arguments = ' ' - min_args = max_args = 2 - options = ( - ('no-drop', - {'short': 'n', 'action' : 'store_true', - 'default' : False, - 'help': 'For some reason the database doesn\'t exist and so ' - 'should not be dropped.'} - ), - ('keep-dump', - {'short': 'k', 'action' : 'store_true', - 'default' : False, - 'help': 'Specify that the dump file should not be automatically removed.'} - ), - ('sudo', - {'short': 's', 'action' : 'store_true', - 'default' : False, - 'help': 'Use sudo on the remote host.'} - ), - ('format', - {'short': 'f', 'default': 'native', 'type': 'choice', - 'choices': ('native', 'portable'), - 'help': '"native" format uses db backend utilities to dump the database. ' - '"portable" format uses a database independent format'} - ), - ) - - def run(self, args): - import tempfile - srcappid, destappid = args - fd, output = tempfile.mkstemp() - os.close(fd) - if ':' in srcappid: - host, srcappid = srcappid.split(':') - _remote_dump(host, srcappid, output, self.config.sudo) - else: - _local_dump(srcappid, output, format=self.config.format) - _local_restore(destappid, output, not self.config.no_drop, - self.config.format) - if self.config.keep_dump: - print('-> you can get the dump file at', output) - else: - os.remove(output) - - -class CheckRepositoryCommand(Command): - """Check integrity of the system database of an instance. - - - the identifier of the instance to check - """ - name = 'db-check' - arguments = '' - min_args = max_args = 1 - options = ( - ('checks', - {'short': 'c', 'type' : 'csv', 'metavar' : '', - 'default' : ('entities', 'relations', - 'mandatory_relations', 'mandatory_attributes', - 'metadata', 'schema', 'text_index'), - 'help': 'Comma separated list of check to run. By default run all \ -checks, i.e. 
entities, relations, mandatory_relations, mandatory_attributes, \ -metadata, text_index and schema.'} - ), - - ('autofix', - {'short': 'a', 'type' : 'yn', 'metavar' : '', - 'default' : False, - 'help': 'Automatically correct integrity problems if this option \ -is set to "y" or "yes", else only display them'} - ), - ('reindex', - {'short': 'r', 'type' : 'yn', 'metavar' : '', - 'default' : False, - 'help': 're-indexes the database for full text search if this \ -option is set to "y" or "yes" (may be long for large database).'} - ), - ('force', - {'short': 'f', 'action' : 'store_true', - 'default' : False, - 'help': 'don\'t check instance is up to date.'} - ), - - ) - - def run(self, args): - from cubicweb.server.checkintegrity import check - appid = args[0] - config = ServerConfiguration.config_for(appid) - config.repairing = self.config.force - repo, _cnx = repo_cnx(config) - with repo.internal_cnx() as cnx: - check(repo, cnx, - self.config.checks, - self.config.reindex, - self.config.autofix) - - -class RebuildFTICommand(Command): - """Rebuild the full-text index of the system database of an instance. - - [etype(s)] - the identifier of the instance to rebuild - - If no etype is specified, cubicweb will reindex everything, otherwise - only specified etypes will be considered. - """ - name = 'db-rebuild-fti' - arguments = '' - min_args = 1 - - def run(self, args): - from cubicweb.server.checkintegrity import reindex_entities - appid = args.pop(0) - etypes = args or None - config = ServerConfiguration.config_for(appid) - repo, cnx = repo_cnx(config) - with cnx: - reindex_entities(repo.schema, cnx, etypes=etypes) - cnx.commit() - - -class SynchronizeSourceCommand(Command): - """Force a source synchronization. - - - the identifier of the instance - - the name of the source to synchronize. 
- """ - name = 'source-sync' - arguments = ' ' - min_args = max_args = 2 - options = ( - ('loglevel', - {'short': 'l', 'type' : 'choice', 'metavar': '', - 'default': 'info', 'choices': ('debug', 'info', 'warning', 'error'), - }), - ) - - def run(self, args): - from cubicweb import repoapi - from cubicweb.cwctl import init_cmdline_log_threshold - config = ServerConfiguration.config_for(args[0]) - config.global_set_option('log-file', None) - config.log_format = '%(levelname)s %(name)s: %(message)s' - init_cmdline_log_threshold(config, self['loglevel']) - repo = repoapi.get_repository(config=config) - repo.hm.call_hooks('server_maintenance', repo=repo) - try: - try: - source = repo.sources_by_uri[args[1]] - except KeyError: - raise ExecutionError('no source named %r' % args[1]) - with repo.internal_cnx() as cnx: - stats = source.pull_data(cnx, force=True, raise_on_error=True) - finally: - repo.shutdown() - for key, val in stats.items(): - if val: - print(key, ':', val) - - - -def permissionshandler(relation, perms): - from yams.schema import RelationDefinitionSchema - from yams.buildobjs import DEFAULT_ATTRPERMS - from cubicweb.schema import (PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, - PUB_SYSTEM_ATTR_PERMS, RO_REL_PERMS, RO_ATTR_PERMS) - defaultrelperms = (DEFAULT_ATTRPERMS, PUB_SYSTEM_REL_PERMS, - PUB_SYSTEM_ATTR_PERMS, RO_REL_PERMS, RO_ATTR_PERMS) - defaulteperms = (PUB_SYSTEM_ENTITY_PERMS,) - # canonicalize vs str/unicode - for p in ('read', 'add', 'update', 'delete'): - rule = perms.get(p) - if rule: - perms[p] = tuple(str(x) if isinstance(x, string_types) else x - for x in rule) - return perms, perms in defaultrelperms or perms in defaulteperms - - -class SchemaDiffCommand(Command): - """Generate a diff between schema and fsschema description. - - - the identifier of the instance - - the name of the diff tool to compare the two generated files. - """ - name = 'schema-diff' - arguments = ' ' - min_args = max_args = 2 - - def run(self, args): - from yams.diff import schema_diff - from cubicweb import repoapi - appid = args.pop(0) - diff_tool = args.pop(0) - config = ServerConfiguration.config_for(appid) - repo = repoapi.get_repository(config=config) - fsschema = config.load_schema(expand_cubes=True) - schema_diff(fsschema, repo.schema, permissionshandler, diff_tool, ignore=('eid',)) - - -for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand, - GrantUserOnInstanceCommand, ResetAdminPasswordCommand, - DBDumpCommand, DBRestoreCommand, DBCopyCommand, - AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand, - SynchronizeSourceCommand, SchemaDiffCommand, - ): - CWCTL.register(cmdclass) - -# extend configure command to set options in sources config file ############### - -db_options = ( - ('db', - {'short': 'd', 'type' : 'named', 'metavar' : '[section1.]key1:value1,[section2.]key2:value2', - 'default': None, - 'help': '''set in
      to <value> in "source" configuration file. If <section>
      is not specified, it defaults to "system". - -Beware that changing admin.login or admin.password using this command -will NOT update the database with new admin credentials. Use the -reset-admin-pwd command instead. -''', - }), - ) - -ConfigureInstanceCommand.options = merge_options( - ConfigureInstanceCommand.options + db_options) - -configure_instance = ConfigureInstanceCommand.configure_instance -def configure_instance2(self, appid): - configure_instance(self, appid) - if self.config.db is not None: - appcfg = ServerConfiguration.config_for(appid) - srccfg = appcfg.read_sources_file() - for key, value in self.config.db.items(): - if '.' in key: - section, key = key.split('.', 1) - else: - section = 'system' - try: - srccfg[section][key] = value - except KeyError: - raise ConfigurationError('unknown configuration key "%s" in section "%s" for source' % (key, section)) - admcfg = Configuration(options=USER_OPTIONS) - admcfg['login'] = srccfg['admin']['login'] - admcfg['password'] = srccfg['admin']['password'] - srccfg['admin'] = admcfg - appcfg.write_sources_file(srccfg) -ConfigureInstanceCommand.configure_instance = configure_instance2 diff -r 058bb3dc685f -r 0b59724cb3f2 server/session.py --- a/server/session.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1141 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Repository users' and internal' sessions.""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -from time import time -from uuid import uuid4 -from warnings import warn -import functools -from contextlib import contextmanager - -from six import text_type - -from logilab.common.deprecation import deprecated -from logilab.common.textutils import unormalize -from logilab.common.registry import objectify_predicate - -from cubicweb import QueryError, schema, server, ProgrammingError -from cubicweb.req import RequestSessionBase -from cubicweb.utils import make_uid -from cubicweb.rqlrewrite import RQLRewriter -from cubicweb.server.edition import EditedEntity - - -NO_UNDO_TYPES = schema.SCHEMA_TYPES.copy() -NO_UNDO_TYPES.add('CWCache') -# is / is_instance_of are usually added by sql hooks except when using -# dataimport.NoHookRQLObjectStore, and we don't want to record them -# anyway in the later case -NO_UNDO_TYPES.add('is') -NO_UNDO_TYPES.add('is_instance_of') -NO_UNDO_TYPES.add('cw_source') -# XXX rememberme,forgotpwd,apycot,vcsfile - -@objectify_predicate -def is_user_session(cls, req, **kwargs): - """return 1 when session is not internal. - - This predicate can only be used repository side only. """ - return not req.is_internal_session - -@objectify_predicate -def is_internal_session(cls, req, **kwargs): - """return 1 when session is not internal. 
- - This predicate can only be used repository side only. """ - return req.is_internal_session - -@objectify_predicate -def repairing(cls, req, **kwargs): - """return 1 when repository is running in repair mode""" - return req.vreg.config.repairing - - -@deprecated('[3.17] use .allow/deny_all_hooks_but instead') -def hooks_control(obj, mode, *categories): - assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL) - if mode == HOOKS_ALLOW_ALL: - return obj.allow_all_hooks_but(*categories) - elif mode == HOOKS_DENY_ALL: - return obj.deny_all_hooks_but(*categories) - - -class _hooks_control(object): - """context manager to control activated hooks categories. - - If mode is `HOOKS_DENY_ALL`, given hooks categories will - be enabled. - - If mode is `HOOKS_ALLOW_ALL`, given hooks categories will - be disabled. - - .. sourcecode:: python - - with _hooks_control(cnx, HOOKS_ALLOW_ALL, 'integrity'): - # ... do stuff with all but 'integrity' hooks activated - - with _hooks_control(cnx, HOOKS_DENY_ALL, 'integrity'): - # ... do stuff with none but 'integrity' hooks activated - - This is an internal API, you should rather use - :meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` or - :meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` - Connection methods. - """ - def __init__(self, cnx, mode, *categories): - assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL) - self.cnx = cnx - self.mode = mode - self.categories = categories - self.oldmode = None - self.changes = () - - def __enter__(self): - self.oldmode = self.cnx.hooks_mode - self.cnx.hooks_mode = self.mode - if self.mode is HOOKS_DENY_ALL: - self.changes = self.cnx.enable_hook_categories(*self.categories) - else: - self.changes = self.cnx.disable_hook_categories(*self.categories) - self.cnx.ctx_count += 1 - - def __exit__(self, exctype, exc, traceback): - self.cnx.ctx_count -= 1 - try: - if self.categories: - if self.mode is HOOKS_DENY_ALL: - self.cnx.disable_hook_categories(*self.categories) - else: - self.cnx.enable_hook_categories(*self.categories) - finally: - self.cnx.hooks_mode = self.oldmode - - -@deprecated('[3.17] use .security_enabled instead') -def security_enabled(obj, *args, **kwargs): - return obj.security_enabled(*args, **kwargs) - -class _security_enabled(object): - """context manager to control security w/ session.execute, - - By default security is disabled on queries executed on the repository - side. 
- """ - def __init__(self, cnx, read=None, write=None): - self.cnx = cnx - self.read = read - self.write = write - self.oldread = None - self.oldwrite = None - - def __enter__(self): - if self.read is None: - self.oldread = None - else: - self.oldread = self.cnx.read_security - self.cnx.read_security = self.read - if self.write is None: - self.oldwrite = None - else: - self.oldwrite = self.cnx.write_security - self.cnx.write_security = self.write - self.cnx.ctx_count += 1 - - def __exit__(self, exctype, exc, traceback): - self.cnx.ctx_count -= 1 - if self.oldread is not None: - self.cnx.read_security = self.oldread - if self.oldwrite is not None: - self.cnx.write_security = self.oldwrite - -HOOKS_ALLOW_ALL = object() -HOOKS_DENY_ALL = object() -DEFAULT_SECURITY = object() # evaluated to true by design - -class SessionClosedError(RuntimeError): - pass - - -def _open_only(func): - """decorator for Connection method that check it is open""" - @functools.wraps(func) - def check_open(cnx, *args, **kwargs): - if not cnx._open: - raise ProgrammingError('Closed Connection: %s' - % cnx.connectionid) - return func(cnx, *args, **kwargs) - return check_open - - -class Connection(RequestSessionBase): - """Repository Connection - - Holds all connection related data - - Database connection resources: - - :attr:`hooks_in_progress`, boolean flag telling if the executing - query is coming from a repoapi connection or is a query from - within the repository (e.g. started by hooks) - - :attr:`cnxset`, the connections set to use to execute queries on sources. - If the transaction is read only, the connection set may be freed between - actual queries. This allows multiple connections with a reasonably low - connection set pool size. Control mechanism is detailed below. - - .. automethod:: cubicweb.server.session.Connection.set_cnxset - .. automethod:: cubicweb.server.session.Connection.free_cnxset - - :attr:`mode`, string telling the connections set handling mode, may be one - of 'read' (connections set may be freed), 'write' (some write was done in - the connections set, it can't be freed before end of the transaction), - 'transaction' (we want to keep the connections set during all the - transaction, with or without writing) - - Shared data: - - :attr:`data` is a dictionary bound to the underlying session, - who will be present for the life time of the session. This may - be useful for web clients that rely on the server for managing - bits of session-scoped data. - - :attr:`transaction_data` is a dictionary cleared at the end of - the transaction. Hooks and operations may put arbitrary data in - there. - - Internal state: - - :attr:`pending_operations`, ordered list of operations to be processed on - commit/rollback - - :attr:`commit_state`, describing the transaction commit state, may be one - of None (not yet committing), 'precommit' (calling precommit event on - operations), 'postcommit' (calling postcommit event on operations), - 'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error - has been raised during the transaction and so it must be rolled back). - - Hooks controls: - - :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`. - - :attr:`enabled_hook_cats`, when :attr:`hooks_mode` is - `HOOKS_DENY_ALL`, this set contains hooks categories that are enabled. - - :attr:`disabled_hook_cats`, when :attr:`hooks_mode` is - `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled. 
- - Security level Management: - - :attr:`read_security` and :attr:`write_security`, boolean flags telling if - read/write security is currently activated. - - """ - is_request = False - hooks_in_progress = False - is_repo_in_memory = True # bw compat - - def __init__(self, session): - # using super(Connection, self) confuse some test hack - RequestSessionBase.__init__(self, session.vreg) - #: connection unique id - self._open = None - self.connectionid = '%s-%s' % (session.sessionid, uuid4().hex) - self.session = session - self.sessionid = session.sessionid - #: reentrance handling - self.ctx_count = 0 - - #: server.Repository object - self.repo = session.repo - self.vreg = self.repo.vreg - self._execute = self.repo.querier.execute - - # other session utility - self._session_timestamp = session._timestamp - - # internal (root) session - self.is_internal_session = isinstance(session.user, InternalManager) - - #: dict containing arbitrary data cleared at the end of the transaction - self.transaction_data = {} - self._session_data = session.data - #: ordered list of operations to be processed on commit/rollback - self.pending_operations = [] - #: (None, 'precommit', 'postcommit', 'uncommitable') - self.commit_state = None - - ### hook control attribute - self.hooks_mode = HOOKS_ALLOW_ALL - self.disabled_hook_cats = set() - self.enabled_hook_cats = set() - self.pruned_hooks_cache = {} - - - ### security control attributes - self._read_security = DEFAULT_SECURITY # handled by a property - self.write_security = DEFAULT_SECURITY - - # undo control - config = session.repo.config - if config.creating or config.repairing or self.is_internal_session: - self.undo_actions = False - else: - self.undo_actions = config['undo-enabled'] - - # RQLRewriter are not thread safe - self._rewriter = RQLRewriter(self) - - # other session utility - if session.user.login == '__internal_manager__': - self.user = session.user - self.set_language(self.user.prefered_language()) - else: - self._set_user(session.user) - - @_open_only - def source_defs(self): - """Return the definition of sources used by the repository.""" - return self.session.repo.source_defs() - - @_open_only - def get_schema(self): - """Return the schema currently used by the repository.""" - return self.session.repo.source_defs() - - @_open_only - def get_option_value(self, option): - """Return the value for `option` in the configuration.""" - return self.session.repo.get_option_value(option) - - # transaction api - - @_open_only - def undoable_transactions(self, ueid=None, **actionfilters): - """Return a list of undoable transaction objects by the connection's - user, ordered by descendant transaction time. - - Managers may filter according to user (eid) who has done the transaction - using the `ueid` argument. Others will only see their own transactions. - - Additional filtering capabilities is provided by using the following - named arguments: - - * `etype` to get only transactions creating/updating/deleting entities - of the given type - - * `eid` to get only transactions applied to entity of the given eid - - * `action` to get only transactions doing the given action (action in - 'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or - 'D'. - - * `public`: when additional filtering is provided, they are by default - only searched in 'public' actions, unless a `public` argument is given - and set to false. 
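A short, hedged sketch of the undo API described above, assuming `cnx` is an open Connection on an instance with undo-enabled set and that the returned transaction objects carry their uuid:

# list this user's undoable deletions of CWUser entities, most recent first
for tx in cnx.undoable_transactions(action='D', etype='CWUser'):
    # show the public actions recorded for that transaction
    for action in cnx.transaction_actions(tx.uuid):
        print(action)
    # revert it; undo_transaction() returns potential restoration errors
    errors = cnx.undo_transaction(tx.uuid)
    if errors:
        print('partial undo:', errors)
    break  # only touch the most recent matching transaction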
- """ - return self.repo.system_source.undoable_transactions(self, ueid, - **actionfilters) - - @_open_only - def transaction_info(self, txuuid): - """Return transaction object for the given uid. - - raise `NoSuchTransaction` if not found or if session's user is - not allowed (eg not in managers group and the transaction - doesn't belong to him). - """ - return self.repo.system_source.tx_info(self, txuuid) - - @_open_only - def transaction_actions(self, txuuid, public=True): - """Return an ordered list of actions effectued during that transaction. - - If public is true, return only 'public' actions, i.e. not ones - triggered under the cover by hooks, else return all actions. - - raise `NoSuchTransaction` if the transaction is not found or - if the user is not allowed (eg not in managers group). - """ - return self.repo.system_source.tx_actions(self, txuuid, public) - - @_open_only - def undo_transaction(self, txuuid): - """Undo the given transaction. Return potential restoration errors. - - raise `NoSuchTransaction` if not found or if user is not - allowed (eg not in managers group). - """ - return self.repo.system_source.undo_transaction(self, txuuid) - - # life cycle handling #################################################### - - def __enter__(self): - assert self._open is None # first opening - self._open = True - self.cnxset = self.repo._get_cnxset() - return self - - def __exit__(self, exctype=None, excvalue=None, tb=None): - assert self._open # actually already open - self.rollback() - self._open = False - self.cnxset.cnxset_freed() - self.repo._free_cnxset(self.cnxset) - self.cnxset = None - - @contextmanager - def running_hooks_ops(self): - """this context manager should be called whenever hooks or operations - are about to be run (but after hook selection) - - It will help the undo logic record pertinent metadata or some - hooks to run (or not) depending on who/what issued the query. - """ - prevmode = self.hooks_in_progress - self.hooks_in_progress = True - yield - self.hooks_in_progress = prevmode - - # shared data handling ################################################### - - @property - def data(self): - return self._session_data - - @property - def rql_rewriter(self): - return self._rewriter - - @_open_only - @deprecated('[3.19] use session or transaction data', stacklevel=3) - def get_shared_data(self, key, default=None, pop=False, txdata=False): - """return value associated to `key` in session data""" - if txdata: - data = self.transaction_data - else: - data = self._session_data - if pop: - return data.pop(key, default) - else: - return data.get(key, default) - - @_open_only - @deprecated('[3.19] use session or transaction data', stacklevel=3) - def set_shared_data(self, key, value, txdata=False): - """set value associated to `key` in session data""" - if txdata: - self.transaction_data[key] = value - else: - self._session_data[key] = value - - def clear(self): - """reset internal data""" - self.transaction_data = {} - #: ordered list of operations to be processed on commit/rollback - self.pending_operations = [] - #: (None, 'precommit', 'postcommit', 'uncommitable') - self.commit_state = None - self.pruned_hooks_cache = {} - self.local_perm_cache.clear() - self.rewriter = RQLRewriter(self) - - @deprecated('[3.19] cnxset are automatically managed now.' - ' stop using explicit set and free.') - def set_cnxset(self): - pass - - @deprecated('[3.19] cnxset are automatically managed now.' 
- ' stop using explicit set and free.') - def free_cnxset(self, ignoremode=False): - pass - - @property - @contextmanager - @_open_only - @deprecated('[3.21] a cnxset is automatically set on __enter__ call now.' - ' stop using .ensure_cnx_set') - def ensure_cnx_set(self): - yield - - @property - def anonymous_connection(self): - return self.session.anonymous_session - - # Entity cache management ################################################# - # - # The connection entity cache as held in cnx.transaction_data is removed at the - # end of the connection (commit and rollback) - # - # XXX connection level caching may be a pb with multiple repository - # instances, but 1. this is probably not the only one :$ and 2. it may be - # an acceptable risk. Anyway we could activate it or not according to a - # configuration option - - def set_entity_cache(self, entity): - """Add `entity` to the connection entity cache""" - # XXX not using _open_only because before at creation time. _set_user - # call this function to cache the Connection user. - if entity.cw_etype != 'CWUser' and not self._open: - raise ProgrammingError('Closed Connection: %s' - % self.connectionid) - ecache = self.transaction_data.setdefault('ecache', {}) - ecache.setdefault(entity.eid, entity) - - @_open_only - def entity_cache(self, eid): - """get cache entity for `eid`""" - return self.transaction_data['ecache'][eid] - - @_open_only - def cached_entities(self): - """return the whole entity cache""" - return self.transaction_data.get('ecache', {}).values() - - @_open_only - def drop_entity_cache(self, eid=None): - """drop entity from the cache - - If eid is None, the whole cache is dropped""" - if eid is None: - self.transaction_data.pop('ecache', None) - else: - del self.transaction_data['ecache'][eid] - - # relations handling ####################################################### - - @_open_only - def add_relation(self, fromeid, rtype, toeid): - """provide direct access to the repository method to add a relation. - - This is equivalent to the following rql query: - - SET X rtype Y WHERE X eid fromeid, T eid toeid - - without read security check but also all the burden of rql execution. - You may use this in hooks when you know both eids of the relation you - want to add. - """ - self.add_relations([(rtype, [(fromeid, toeid)])]) - - @_open_only - def add_relations(self, relations): - '''set many relation using a shortcut similar to the one in add_relation - - relations is a list of 2-uples, the first element of each - 2-uple is the rtype, and the second is a list of (fromeid, - toeid) tuples - ''' - edited_entities = {} - relations_dict = {} - with self.security_enabled(False, False): - for rtype, eids in relations: - if self.vreg.schema[rtype].inlined: - for fromeid, toeid in eids: - if fromeid not in edited_entities: - entity = self.entity_from_eid(fromeid) - edited = EditedEntity(entity) - edited_entities[fromeid] = edited - else: - edited = edited_entities[fromeid] - edited.edited_attribute(rtype, toeid) - else: - relations_dict[rtype] = eids - self.repo.glob_add_relations(self, relations_dict) - for edited in edited_entities.values(): - self.repo.glob_update_entity(self, edited) - - - @_open_only - def delete_relation(self, fromeid, rtype, toeid): - """provide direct access to the repository method to delete a relation. - - This is equivalent to the following rql query: - - DELETE X rtype Y WHERE X eid fromeid, T eid toeid - - without read security check but also all the burden of rql execution. 
- You may use this in hooks when you know both eids of the relation you - want to delete. - """ - with self.security_enabled(False, False): - if self.vreg.schema[rtype].inlined: - entity = self.entity_from_eid(fromeid) - entity.cw_attr_cache[rtype] = None - self.repo.glob_update_entity(self, entity, set((rtype,))) - else: - self.repo.glob_delete_relation(self, fromeid, rtype, toeid) - - # relations cache handling ################################################# - - @_open_only - def update_rel_cache_add(self, subject, rtype, object, symmetric=False): - self._update_entity_rel_cache_add(subject, rtype, 'subject', object) - if symmetric: - self._update_entity_rel_cache_add(object, rtype, 'subject', subject) - else: - self._update_entity_rel_cache_add(object, rtype, 'object', subject) - - @_open_only - def update_rel_cache_del(self, subject, rtype, object, symmetric=False): - self._update_entity_rel_cache_del(subject, rtype, 'subject', object) - if symmetric: - self._update_entity_rel_cache_del(object, rtype, 'object', object) - else: - self._update_entity_rel_cache_del(object, rtype, 'object', subject) - - @_open_only - def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid): - try: - entity = self.entity_cache(eid) - except KeyError: - return - rcache = entity.cw_relation_cached(rtype, role) - if rcache is not None: - rset, entities = rcache - rset = rset.copy() - entities = list(entities) - rset.rows.append([targeteid]) - if not isinstance(rset.description, list): # else description not set - rset.description = list(rset.description) - rset.description.append([self.entity_metas(targeteid)['type']]) - targetentity = self.entity_from_eid(targeteid) - if targetentity.cw_rset is None: - targetentity.cw_rset = rset - targetentity.cw_row = rset.rowcount - targetentity.cw_col = 0 - rset.rowcount += 1 - entities.append(targetentity) - entity._cw_related_cache['%s_%s' % (rtype, role)] = ( - rset, tuple(entities)) - - @_open_only - def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid): - try: - entity = self.entity_cache(eid) - except KeyError: - return - rcache = entity.cw_relation_cached(rtype, role) - if rcache is not None: - rset, entities = rcache - for idx, row in enumerate(rset.rows): - if row[0] == targeteid: - break - else: - # this may occurs if the cache has been filed by a hook - # after the database update - self.debug('cache inconsistency for %s %s %s %s', eid, rtype, - role, targeteid) - return - rset = rset.copy() - entities = list(entities) - del rset.rows[idx] - if isinstance(rset.description, list): # else description not set - del rset.description[idx] - del entities[idx] - rset.rowcount -= 1 - entity._cw_related_cache['%s_%s' % (rtype, role)] = ( - rset, tuple(entities)) - - # Tracking of entities added of removed in the transaction ################## - - @_open_only - def deleted_in_transaction(self, eid): - """return True if the entity of the given eid is being deleted in the - current transaction - """ - return eid in self.transaction_data.get('pendingeids', ()) - - @_open_only - def added_in_transaction(self, eid): - """return True if the entity of the given eid is being created in the - current transaction - """ - return eid in self.transaction_data.get('neweids', ()) - - # Operation management #################################################### - - @_open_only - def add_operation(self, operation, index=None): - """add an operation to be executed at the end of the transaction""" - if index is None: - self.pending_operations.append(operation) - 
else: - self.pending_operations.insert(index, operation) - - # Hooks control ########################################################### - - @_open_only - def allow_all_hooks_but(self, *categories): - return _hooks_control(self, HOOKS_ALLOW_ALL, *categories) - - @_open_only - def deny_all_hooks_but(self, *categories): - return _hooks_control(self, HOOKS_DENY_ALL, *categories) - - @_open_only - def disable_hook_categories(self, *categories): - """disable the given hook categories: - - - on HOOKS_DENY_ALL mode, ensure those categories are not enabled - - on HOOKS_ALLOW_ALL mode, ensure those categories are disabled - """ - changes = set() - self.pruned_hooks_cache.clear() - categories = set(categories) - if self.hooks_mode is HOOKS_DENY_ALL: - enabledcats = self.enabled_hook_cats - changes = enabledcats & categories - enabledcats -= changes # changes is small hence faster - else: - disabledcats = self.disabled_hook_cats - changes = categories - disabledcats - disabledcats |= changes # changes is small hence faster - return tuple(changes) - - @_open_only - def enable_hook_categories(self, *categories): - """enable the given hook categories: - - - on HOOKS_DENY_ALL mode, ensure those categories are enabled - - on HOOKS_ALLOW_ALL mode, ensure those categories are not disabled - """ - changes = set() - self.pruned_hooks_cache.clear() - categories = set(categories) - if self.hooks_mode is HOOKS_DENY_ALL: - enabledcats = self.enabled_hook_cats - changes = categories - enabledcats - enabledcats |= changes # changes is small hence faster - else: - disabledcats = self.disabled_hook_cats - changes = disabledcats & categories - disabledcats -= changes # changes is small hence faster - return tuple(changes) - - @_open_only - def is_hook_category_activated(self, category): - """return a boolean telling if the given category is currently activated - or not - """ - if self.hooks_mode is HOOKS_DENY_ALL: - return category in self.enabled_hook_cats - return category not in self.disabled_hook_cats - - @_open_only - def is_hook_activated(self, hook): - """return a boolean telling if the given hook class is currently - activated or not - """ - return self.is_hook_category_activated(hook.category) - - # Security management ##################################################### - - @_open_only - def security_enabled(self, read=None, write=None): - return _security_enabled(self, read=read, write=write) - - @property - @_open_only - def read_security(self): - return self._read_security - - @read_security.setter - @_open_only - def read_security(self, activated): - self._read_security = activated - - # undo support ############################################################ - - @_open_only - def ertype_supports_undo(self, ertype): - return self.undo_actions and ertype not in NO_UNDO_TYPES - - @_open_only - def transaction_uuid(self, set=True): - uuid = self.transaction_data.get('tx_uuid') - if set and uuid is None: - self.transaction_data['tx_uuid'] = uuid = text_type(uuid4().hex) - self.repo.system_source.start_undoable_transaction(self, uuid) - return uuid - - @_open_only - def transaction_inc_action_counter(self): - num = self.transaction_data.setdefault('tx_action_count', 0) + 1 - self.transaction_data['tx_action_count'] = num - return num - - # db-api like interface ################################################### - - @_open_only - def source_defs(self): - return self.repo.source_defs() - - @deprecated('[3.19] use .entity_metas(eid) instead') - @_open_only - def describe(self, eid, asdict=False): - """return 
a tuple (type, sourceuri, extid) for the entity with id """ - etype, extid, source = self.repo.type_and_source_from_eid(eid, self) - metas = {'type': etype, 'source': source, 'extid': extid} - if asdict: - metas['asource'] = metas['source'] # XXX pre 3.19 client compat - return metas - return etype, source, extid - - @_open_only - def entity_metas(self, eid): - """return a tuple (type, sourceuri, extid) for the entity with id """ - etype, extid, source = self.repo.type_and_source_from_eid(eid, self) - return {'type': etype, 'source': source, 'extid': extid} - - # core method ############################################################# - - @_open_only - def execute(self, rql, kwargs=None, build_descr=True): - """db-api like method directly linked to the querier execute method. - - See :meth:`cubicweb.dbapi.Cursor.execute` documentation. - """ - self._session_timestamp.touch() - rset = self._execute(self, rql, kwargs, build_descr) - rset.req = self - self._session_timestamp.touch() - return rset - - @_open_only - def rollback(self, free_cnxset=None, reset_pool=None): - """rollback the current transaction""" - if free_cnxset is not None: - warn('[3.21] free_cnxset is now unneeded', - DeprecationWarning, stacklevel=2) - if reset_pool is not None: - warn('[3.13] reset_pool is now unneeded', - DeprecationWarning, stacklevel=2) - cnxset = self.cnxset - assert cnxset is not None - try: - # by default, operations are executed with security turned off - with self.security_enabled(False, False): - while self.pending_operations: - try: - operation = self.pending_operations.pop(0) - operation.handle_event('rollback_event') - except BaseException: - self.critical('rollback error', exc_info=sys.exc_info()) - continue - cnxset.rollback() - self.debug('rollback for transaction %s done', self.connectionid) - finally: - self._session_timestamp.touch() - self.clear() - - @_open_only - def commit(self, free_cnxset=None, reset_pool=None): - """commit the current session's transaction""" - if free_cnxset is not None: - warn('[3.21] free_cnxset is now unneeded', - DeprecationWarning, stacklevel=2) - if reset_pool is not None: - warn('[3.13] reset_pool is now unneeded', - DeprecationWarning, stacklevel=2) - assert self.cnxset is not None - cstate = self.commit_state - if cstate == 'uncommitable': - raise QueryError('transaction must be rolled back') - if cstate == 'precommit': - self.warn('calling commit in precommit makes no sense; ignoring commit') - return - if cstate == 'postcommit': - self.critical('postcommit phase is not allowed to write to the db; ignoring commit') - return - assert cstate is None - # on rollback, an operation should have the following state - # information: - # - processed by the precommit/commit event or not - # - if processed, is it the failed operation - debug = server.DEBUG & server.DBG_OPS - try: - # by default, operations are executed with security turned off - with self.security_enabled(False, False): - processed = [] - self.commit_state = 'precommit' - if debug: - print(self.commit_state, '*' * 20) - try: - with self.running_hooks_ops(): - while self.pending_operations: - operation = self.pending_operations.pop(0) - operation.processed = 'precommit' - processed.append(operation) - if debug: - print(operation) - operation.handle_event('precommit_event') - self.pending_operations[:] = processed - self.debug('precommit transaction %s done', self.connectionid) - except BaseException: - # if error on [pre]commit: - # - # * set .failed = True on the operation causing the failure - # * 
call revert_event on processed operations - # * call rollback_event on *all* operations - # - # that seems more natural than not calling rollback_event - # for processed operations, and allow generic rollback - # instead of having to implements rollback, revertprecommit - # and revertcommit, that will be enough in mont case. - operation.failed = True - if debug: - print(self.commit_state, '*' * 20) - with self.running_hooks_ops(): - for operation in reversed(processed): - if debug: - print(operation) - try: - operation.handle_event('revertprecommit_event') - except BaseException: - self.critical('error while reverting precommit', - exc_info=True) - # XXX use slice notation since self.pending_operations is a - # read-only property. - self.pending_operations[:] = processed + self.pending_operations - self.rollback() - raise - self.cnxset.commit() - self.commit_state = 'postcommit' - if debug: - print(self.commit_state, '*' * 20) - with self.running_hooks_ops(): - while self.pending_operations: - operation = self.pending_operations.pop(0) - if debug: - print(operation) - operation.processed = 'postcommit' - try: - operation.handle_event('postcommit_event') - except BaseException: - self.critical('error while postcommit', - exc_info=sys.exc_info()) - self.debug('postcommit transaction %s done', self.connectionid) - return self.transaction_uuid(set=False) - finally: - self._session_timestamp.touch() - self.clear() - - # resource accessors ###################################################### - - @_open_only - def call_service(self, regid, **kwargs): - self.debug('calling service %s', regid) - service = self.vreg['services'].select(regid, self, **kwargs) - return service.call(**kwargs) - - @_open_only - def system_sql(self, sql, args=None, rollback_on_failure=True): - """return a sql cursor on the system database""" - source = self.repo.system_source - try: - return source.doexec(self, sql, args, rollback=rollback_on_failure) - except (source.OperationalError, source.InterfaceError): - if not rollback_on_failure: - raise - source.warning("trying to reconnect") - self.cnxset.reconnect() - return source.doexec(self, sql, args, rollback=rollback_on_failure) - - @_open_only - def rtype_eids_rdef(self, rtype, eidfrom, eidto): - # use type_and_source_from_eid instead of type_from_eid for optimization - # (avoid two extra methods call) - subjtype = self.repo.type_and_source_from_eid(eidfrom, self)[0] - objtype = self.repo.type_and_source_from_eid(eidto, self)[0] - return self.vreg.schema.rschema(rtype).rdefs[(subjtype, objtype)] - - -def cnx_attr(attr_name, writable=False): - """return a property to forward attribute access to connection. - - This is to be used by session""" - args = {} - @deprecated('[3.19] use a Connection object instead') - def attr_from_cnx(session): - return getattr(session._cnx, attr_name) - args['fget'] = attr_from_cnx - if writable: - @deprecated('[3.19] use a Connection object instead') - def write_attr(session, value): - return setattr(session._cnx, attr_name, value) - args['fset'] = write_attr - return property(**args) - - -class Timestamp(object): - - def __init__(self): - self.value = time() - - def touch(self): - self.value = time() - - def __float__(self): - return float(self.value) - - -class Session(object): - """Repository user session - - This ties all together: - * session id, - * user, - * other session data. 
- """ - - def __init__(self, user, repo, cnxprops=None, _id=None): - self.sessionid = _id or make_uid(unormalize(user.login)) - self.user = user # XXX repoapi: deprecated and store only a login. - self.repo = repo - self.vreg = repo.vreg - self._timestamp = Timestamp() - self.data = {} - self.closed = False - - def close(self): - self.closed = True - - def __enter__(self): - return self - - def __exit__(self, *args): - pass - - def __unicode__(self): - return '' % ( - unicode(self.user.login), self.sessionid, id(self)) - - @property - def timestamp(self): - return float(self._timestamp) - - @property - @deprecated('[3.19] session.id is deprecated, use session.sessionid') - def id(self): - return self.sessionid - - @property - def login(self): - return self.user.login - - def new_cnx(self): - """Return a new Connection object linked to the session - - The returned Connection will *not* be managed by the Session. - """ - return Connection(self) - - @deprecated('[3.19] use a Connection object instead') - def get_option_value(self, option, foreid=None): - if foreid is not None: - warn('[3.19] foreid argument is deprecated', DeprecationWarning, - stacklevel=2) - return self.repo.get_option_value(option) - - def _touch(self): - """update latest session usage timestamp and reset mode to read""" - self._timestamp.touch() - - local_perm_cache = cnx_attr('local_perm_cache') - @local_perm_cache.setter - def local_perm_cache(self, value): - #base class assign an empty dict:-( - assert value == {} - pass - - # deprecated ############################################################### - - @property - def anonymous_session(self): - # XXX for now, anonymous_user only exists in webconfig (and testconfig). - # It will only be present inside all-in-one instance. - # there is plan to move it down to global config. 
- if not hasattr(self.repo.config, 'anonymous_user'): - # not a web or test config, no anonymous user - return False - return self.user.login == self.repo.config.anonymous_user()[0] - - @deprecated('[3.13] use getattr(session.rtype_eids_rdef(rtype, eidfrom, eidto), prop)') - def schema_rproperty(self, rtype, eidfrom, eidto, rprop): - return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - - - -class InternalManager(object): - """a manager user with all access rights used internally for task such as - bootstrapping the repository or creating regular users according to - repository content - """ - - def __init__(self, lang='en'): - self.eid = -1 - self.login = u'__internal_manager__' - self.properties = {} - self.groups = set(['managers']) - self.lang = lang - - def matching_groups(self, groups): - return 1 - - def is_in_group(self, group): - return True - - def owns(self, eid): - return True - - def property_value(self, key): - if key == 'ui.language': - return self.lang - return None - - def prefered_language(self, language=None): - # mock CWUser.prefered_language, mainly for testing purpose - return self.property_value('ui.language') - - # CWUser compat for notification ########################################### - - def name(self): - return 'cubicweb' - - class _IEmailable: - @staticmethod - def get_email(): - return '' - - def cw_adapt_to(self, iface): - if iface == 'IEmailable': - return self._IEmailable - return None - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(Session, getLogger('cubicweb.session')) -set_log_methods(Connection, getLogger('cubicweb.session')) diff -r 058bb3dc685f -r 0b59724cb3f2 server/sources/__init__.py --- a/server/sources/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,474 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""cubicweb server sources support""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from time import time -from logging import getLogger -from base64 import b64decode - -from six import text_type - -from logilab.common import configuration -from logilab.common.textutils import unormalize -from logilab.common.deprecation import deprecated - -from yams.schema import role_name - -from cubicweb import ValidationError, set_log_methods, server -from cubicweb.server import SOURCE_TYPES -from cubicweb.server.edition import EditedEntity - - -def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'): - if server.DEBUG & server.DBG_RQL: - global t - print(' %s %s source: %s' % (prefix, uri, repr(union.as_string()))) - t = time() - if varmap: - print(' using varmap', varmap) - if server.DEBUG & server.DBG_MORE: - print(' args', repr(args)) - print(' cache key', cachekey) - print(' solutions', ','.join(str(s.solutions) - for s in union.children)) - # return true so it can be used as assertion (and so be killed by python -O) - return True - -def dbg_results(results): - if server.DEBUG & server.DBG_RQL: - if len(results) > 10: - print(' -->', results[:10], '...', len(results), end=' ') - else: - print(' -->', results, end=' ') - print('time: ', time() - t) - # return true so it can be used as assertion (and so be killed by python -O) - return True - - -class AbstractSource(object): - """an abstract class for sources""" - - # boolean telling if modification hooks should be called when something is - # modified in this source - should_call_hooks = True - # boolean telling if the repository should connect to this source during - # migration - connect_for_migration = True - - # mappings telling which entities and relations are available in the source - # keys are supported entity/relation types and values are boolean indicating - # wether the support is read-only (False) or read-write (True) - support_entities = {} - support_relations = {} - # a global identifier for this source, which has to be set by the source - # instance - uri = None - # a reference to the system information helper - repo = None - # a reference to the instance'schema (may differs from the source'schema) - schema = None - - # force deactivation (configuration error for instance) - disabled = False - - # boolean telling if cwuri of entities from this source is the url that - # should be used as entity's absolute url - use_cwuri_as_url = False - - # source configuration options - options = () - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - - def __init__(self, repo, source_config, eid=None): - self.repo = repo - self.set_schema(repo.schema) - self.support_relations['identity'] = False - self.eid = eid - self.public_config = source_config.copy() - self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url - self.remove_sensitive_information(self.public_config) - self.uri = source_config.pop('uri') - # unormalize to avoid non-ascii characters in logger's name, this will cause decoding error - # on logging - set_log_methods(self, getLogger('cubicweb.sources.' 
+ unormalize(text_type(self.uri)))) - source_config.pop('type') - self.update_config(None, self.check_conf_dict(eid, source_config, - fail_if_unknown=False)) - - def __repr__(self): - return '<%s %s source %s @%#x>' % (self.uri, self.__class__.__name__, - self.eid, id(self)) - - def __lt__(self, other): - """simple comparison function to get predictable source order, with the - system source at last - """ - if self.uri == other.uri: - return False - if self.uri == 'system': - return False - if other.uri == 'system': - return True - return self.uri < other.uri - - def __eq__(self, other): - return self.uri == other.uri - - def __ne__(self, other): - return not (self == other) - - def backup(self, backupfile, confirm, format='native'): - """method called to create a backup of source's data""" - pass - - def restore(self, backupfile, confirm, drop, format='native'): - """method called to restore a backup of source's data""" - pass - - @classmethod - def check_conf_dict(cls, eid, confdict, _=text_type, fail_if_unknown=True): - """check configuration of source entity. Return config dict properly - typed with defaults set. - """ - processed = {} - for optname, optdict in cls.options: - value = confdict.pop(optname, optdict.get('default')) - if value is configuration.REQUIRED: - if not fail_if_unknown: - continue - msg = _('specifying %s is mandatory' % optname) - raise ValidationError(eid, {role_name('config', 'subject'): msg}) - elif value is not None: - # type check - try: - value = configuration._validate(value, optdict, optname) - except Exception as ex: - msg = text_type(ex) # XXX internationalization - raise ValidationError(eid, {role_name('config', 'subject'): msg}) - processed[optname] = value - # cw < 3.10 bw compat - try: - processed['adapter'] = confdict['adapter'] - except KeyError: - pass - # check for unknown options - if confdict and tuple(confdict) != ('adapter',): - if fail_if_unknown: - msg = _('unknown options %s') % ', '.join(confdict) - raise ValidationError(eid, {role_name('config', 'subject'): msg}) - else: - logger = getLogger('cubicweb.sources') - logger.warning('unknown options %s', ', '.join(confdict)) - # add options to processed, they may be necessary during migration - processed.update(confdict) - return processed - - @classmethod - def check_config(cls, source_entity): - """check configuration of source entity""" - return cls.check_conf_dict(source_entity.eid, source_entity.host_config, - _=source_entity._cw._) - - def update_config(self, source_entity, typedconfig): - """update configuration from source entity. `typedconfig` is config - properly typed with defaults set - """ - if source_entity is not None: - self._entity_update(source_entity) - self.config = typedconfig - - def _entity_update(self, source_entity): - source_entity.complete() - if source_entity.url: - self.urls = [url.strip() for url in source_entity.url.splitlines() - if url.strip()] - else: - self.urls = [] - - @staticmethod - def decode_extid(extid): - if extid is None: - return extid - return b64decode(extid) - - # source initialization / finalization ##################################### - - def set_schema(self, schema): - """set the instance'schema""" - self.schema = schema - - def init_creating(self): - """method called by the repository once ready to create a new instance""" - pass - - def init(self, activated, source_entity): - """method called by the repository once ready to handle request. - `activated` is a boolean flag telling if the source is activated or not. 
- """ - if activated: - self._entity_update(source_entity) - - PUBLIC_KEYS = ('type', 'uri', 'use-cwuri-as-url') - def remove_sensitive_information(self, sourcedef): - """remove sensitive information such as login / password from source - definition - """ - for key in list(sourcedef): - if not key in self.PUBLIC_KEYS: - sourcedef.pop(key) - - # connections handling ##################################################### - - def get_connection(self): - """open and return a connection to the source""" - raise NotImplementedError(self) - - def close_source_connections(self): - for cnxset in self.repo.cnxsets: - cnxset.cu = None - cnxset.cnx.close() - - def open_source_connections(self): - for cnxset in self.repo.cnxsets: - cnxset.cnx = self.get_connection() - cnxset.cu = cnxset.cnx.cursor() - - # cache handling ########################################################### - - def reset_caches(self): - """method called during test to reset potential source caches""" - pass - - def clear_eid_cache(self, eid, etype): - """clear potential caches for the given eid""" - pass - - # external source api ###################################################### - - def support_entity(self, etype, write=False): - """return true if the given entity's type is handled by this adapter - if write is true, return true only if it's a RW support - """ - try: - wsupport = self.support_entities[etype] - except KeyError: - return False - if write: - return wsupport - return True - - def support_relation(self, rtype, write=False): - """return true if the given relation's type is handled by this adapter - if write is true, return true only if it's a RW support - - current implementation return true if the relation is defined into - `support_relations` or if it is a final relation of a supported entity - type - """ - try: - wsupport = self.support_relations[rtype] - except KeyError: - rschema = self.schema.rschema(rtype) - if not rschema.final or rschema.type == 'has_text': - return False - for etype in rschema.subjects(): - try: - wsupport = self.support_entities[etype] - break - except KeyError: - continue - else: - return False - if write: - return wsupport - return True - - def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams): - """called by the repository when an eid has been attributed for an - entity stored here but the entity has not been inserted in the system - table yet. - - This method must return the an Entity instance representation of this - entity. - """ - entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) - entity.eid = eid - entity.cw_edited = EditedEntity(entity) - return entity - - def after_entity_insertion(self, cnx, lid, entity, sourceparams): - """called by the repository after an entity stored here has been - inserted in the system table. 
- """ - pass - - def _load_mapping(self, cnx, **kwargs): - if not 'CWSourceSchemaConfig' in self.schema: - self.warning('instance is not mapping ready') - return - for schemacfg in cnx.execute( - 'Any CFG,CFGO,S WHERE ' - 'CFG options CFGO, CFG cw_schema S, ' - 'CFG cw_for_source X, X eid %(x)s', {'x': self.eid}).entities(): - self.add_schema_config(schemacfg, **kwargs) - - def add_schema_config(self, schemacfg, checkonly=False): - """added CWSourceSchemaConfig, modify mapping accordingly""" - msg = schemacfg._cw._("this source doesn't use a mapping") - raise ValidationError(schemacfg.eid, {None: msg}) - - def del_schema_config(self, schemacfg, checkonly=False): - """deleted CWSourceSchemaConfig, modify mapping accordingly""" - msg = schemacfg._cw._("this source doesn't use a mapping") - raise ValidationError(schemacfg.eid, {None: msg}) - - def update_schema_config(self, schemacfg, checkonly=False): - """updated CWSourceSchemaConfig, modify mapping accordingly""" - self.del_schema_config(schemacfg, checkonly) - self.add_schema_config(schemacfg, checkonly) - - # user authentication api ################################################## - - def authenticate(self, cnx, login, **kwargs): - """if the source support CWUser entity type, it should implement - this method which should return CWUser eid for the given login/password - if this account is defined in this source and valid login / password is - given. Else raise `AuthenticationError` - """ - raise NotImplementedError(self) - - # RQL query api ############################################################ - - def syntax_tree_search(self, cnx, union, - args=None, cachekey=None, varmap=None, debug=0): - """return result from this source for a rql query (actually from a rql - syntax tree and a solution dictionary mapping each used variable to a - possible type). If cachekey is given, the query necessary to fetch the - results (but not the results themselves) may be cached using this key. 
- """ - raise NotImplementedError(self) - - # write modification api ################################################### - # read-only sources don't have to implement methods below - - def get_extid(self, entity): - """return the external id for the given newly inserted entity""" - raise NotImplementedError(self) - - def add_entity(self, cnx, entity): - """add a new entity to the source""" - raise NotImplementedError(self) - - def update_entity(self, cnx, entity): - """update an entity in the source""" - raise NotImplementedError(self) - - def delete_entities(self, cnx, entities): - """delete several entities from the source""" - for entity in entities: - self.delete_entity(cnx, entity) - - def delete_entity(self, cnx, entity): - """delete an entity from the source""" - raise NotImplementedError(self) - - def add_relation(self, cnx, subject, rtype, object): - """add a relation to the source""" - raise NotImplementedError(self) - - def add_relations(self, cnx, rtype, subj_obj_list): - """add a relations to the source""" - # override in derived classes if you feel you can - # optimize - for subject, object in subj_obj_list: - self.add_relation(cnx, subject, rtype, object) - - def delete_relation(self, session, subject, rtype, object): - """delete a relation from the source""" - raise NotImplementedError(self) - - # system source interface ################################################# - - def eid_type_source(self, cnx, eid): - """return a tuple (type, extid, source) for the entity with id """ - raise NotImplementedError(self) - - def create_eid(self, cnx): - raise NotImplementedError(self) - - def add_info(self, cnx, entity, source, extid): - """add type and source info for an eid into the system table""" - raise NotImplementedError(self) - - def update_info(self, cnx, entity, need_fti_update): - """mark entity as being modified, fulltext reindex if needed""" - raise NotImplementedError(self) - - def index_entity(self, cnx, entity): - """create an operation to [re]index textual content of the given entity - on commit - """ - raise NotImplementedError(self) - - def fti_unindex_entities(self, cnx, entities): - """remove text content for entities from the full text index - """ - raise NotImplementedError(self) - - def fti_index_entities(self, cnx, entities): - """add text content of created/modified entities to the full text index - """ - raise NotImplementedError(self) - - # sql system source interface ############################################# - - def sqlexec(self, cnx, sql, args=None): - """execute the query and return its result""" - raise NotImplementedError(self) - - def create_index(self, cnx, table, column, unique=False): - raise NotImplementedError(self) - - def drop_index(self, cnx, table, column, unique=False): - raise NotImplementedError(self) - - - @deprecated('[3.13] use extid2eid(source, value, etype, cnx, **kwargs)') - def extid2eid(self, value, etype, cnx, **kwargs): - return self.repo.extid2eid(self, value, etype, cnx, **kwargs) - - - - -def source_adapter(source_type): - try: - return SOURCE_TYPES[source_type] - except KeyError: - raise RuntimeError('Unknown source type %r' % source_type) - -def get_source(type, source_config, repo, eid): - """return a source adapter according to the adapter field in the source's - configuration - """ - return source_adapter(type)(repo, source_config, eid) diff -r 058bb3dc685f -r 0b59724cb3f2 server/sources/datafeed.py --- a/server/sources/datafeed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,578 
+0,0 @@ -# copyright 2010-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""datafeed sources: copy data from an external data stream into the system -database -""" - -from io import BytesIO -from os.path import exists -from datetime import datetime, timedelta - -from six import text_type -from six.moves.urllib.parse import urlparse -from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor -from six.moves.urllib.error import HTTPError -from six.moves.http_cookiejar import CookieJar - -from pytz import utc -from lxml import etree - -from logilab.common.deprecation import deprecated - -from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid -from cubicweb.server.repository import preprocess_inlined_relations -from cubicweb.server.sources import AbstractSource -from cubicweb.appobject import AppObject - - -class DataFeedSource(AbstractSource): - use_cwuri_as_url = True - - options = ( - ('synchronize', - {'type' : 'yn', - 'default': True, - 'help': ('Is the repository responsible to automatically import ' - 'content from this source? ' - 'You should say yes unless you don\'t want this behaviour ' - 'or if you use a multiple repositories setup, in which ' - 'case you should say yes on one repository, no on others.'), - 'group': 'datafeed-source', 'level': 2, - }), - ('synchronization-interval', - {'type' : 'time', - 'default': '5min', - 'help': ('Interval in seconds between synchronization with the ' - 'external source (default to 5 minutes, must be >= 1 min).'), - 'group': 'datafeed-source', 'level': 2, - }), - ('max-lock-lifetime', - {'type' : 'time', - 'default': '1h', - 'help': ('Maximum time allowed for a synchronization to be run. ' - 'Exceeded that time, the synchronization will be considered ' - 'as having failed and not properly released the lock, hence ' - 'it won\'t be considered'), - 'group': 'datafeed-source', 'level': 2, - }), - ('delete-entities', - {'type' : 'yn', - 'default': False, - 'help': ('Should already imported entities not found anymore on the ' - 'external source be deleted?'), - 'group': 'datafeed-source', 'level': 2, - }), - ('logs-lifetime', - {'type': 'time', - 'default': '10d', - 'help': ('Time before logs from datafeed imports are deleted.'), - 'group': 'datafeed-source', 'level': 2, - }), - ('http-timeout', - {'type': 'time', - 'default': '1min', - 'help': ('Timeout of HTTP GET requests, when synchronizing a source.'), - 'group': 'datafeed-source', 'level': 2, - }), - ('use-cwuri-as-url', - {'type': 'yn', - 'default': None, # explicitly unset - 'help': ('Use cwuri (i.e. 
external URL) for link to the entity ' - 'instead of its local URL.'), - 'group': 'datafeed-source', 'level': 1, - }), - ) - - def check_config(self, source_entity): - """check configuration of source entity""" - typed_config = super(DataFeedSource, self).check_config(source_entity) - if typed_config['synchronization-interval'] < 60: - _ = source_entity._cw._ - msg = _('synchronization-interval must be greater than 1 minute') - raise ValidationError(source_entity.eid, {'config': msg}) - return typed_config - - def _entity_update(self, source_entity): - super(DataFeedSource, self)._entity_update(source_entity) - self.parser_id = source_entity.parser - self.latest_retrieval = source_entity.latest_retrieval - - def update_config(self, source_entity, typed_config): - """update configuration from source entity. `typed_config` is config - properly typed with defaults set - """ - super(DataFeedSource, self).update_config(source_entity, typed_config) - self.synchro_interval = timedelta(seconds=typed_config['synchronization-interval']) - self.max_lock_lifetime = timedelta(seconds=typed_config['max-lock-lifetime']) - self.http_timeout = typed_config['http-timeout'] - # if typed_config['use-cwuri-as-url'] is set, we have to update - # use_cwuri_as_url attribute and public configuration dictionary - # accordingly - if typed_config['use-cwuri-as-url'] is not None: - self.use_cwuri_as_url = typed_config['use-cwuri-as-url'] - self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url - - def init(self, activated, source_entity): - super(DataFeedSource, self).init(activated, source_entity) - self.parser_id = source_entity.parser - self.load_mapping(source_entity._cw) - - def _get_parser(self, cnx, **kwargs): - if self.parser_id is None: - self.warning('No parser defined on source %r', self) - raise ObjectNotFound() - return self.repo.vreg['parsers'].select( - self.parser_id, cnx, source=self, **kwargs) - - def load_mapping(self, cnx): - self.mapping = {} - self.mapping_idx = {} - try: - parser = self._get_parser(cnx) - except (RegistryNotFound, ObjectNotFound): - return # no parser yet, don't go further - self._load_mapping(cnx, parser=parser) - - def add_schema_config(self, schemacfg, checkonly=False, parser=None): - """added CWSourceSchemaConfig, modify mapping accordingly""" - if parser is None: - parser = self._get_parser(schemacfg._cw) - parser.add_schema_config(schemacfg, checkonly) - - def del_schema_config(self, schemacfg, checkonly=False, parser=None): - """deleted CWSourceSchemaConfig, modify mapping accordingly""" - if parser is None: - parser = self._get_parser(schemacfg._cw) - parser.del_schema_config(schemacfg, checkonly) - - def fresh(self): - if self.latest_retrieval is None: - return False - return datetime.now(tz=utc) < (self.latest_retrieval + self.synchro_interval) - - def update_latest_retrieval(self, cnx): - self.latest_retrieval = datetime.now(tz=utc) - cnx.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s', - {'x': self.eid, 'date': self.latest_retrieval}) - cnx.commit() - - def acquire_synchronization_lock(self, cnx): - # XXX race condition until WHERE of SET queries is executed using - # 'SELECT FOR UPDATE' - now = datetime.now(tz=utc) - if not cnx.execute( - 'SET X in_synchronization %(now)s WHERE X eid %(x)s, ' - 'X in_synchronization NULL OR X in_synchronization < %(maxdt)s', - {'x': self.eid, 'now': now, 'maxdt': now - self.max_lock_lifetime}): - self.error('concurrent synchronization detected, skip pull') - cnx.commit() - return False - cnx.commit() - return 
True - - def release_synchronization_lock(self, cnx): - cnx.execute('SET X in_synchronization NULL WHERE X eid %(x)s', - {'x': self.eid}) - cnx.commit() - - def pull_data(self, cnx, force=False, raise_on_error=False): - """Launch synchronization of the source if needed. - - This method is responsible to handle commit/rollback on the given - connection. - """ - if not force and self.fresh(): - return {} - if not self.acquire_synchronization_lock(cnx): - return {} - try: - return self._pull_data(cnx, force, raise_on_error) - finally: - cnx.rollback() # rollback first in case there is some dirty - # transaction remaining - self.release_synchronization_lock(cnx) - - def _pull_data(self, cnx, force=False, raise_on_error=False): - importlog = self.init_import_log(cnx) - myuris = self.source_cwuris(cnx) - try: - parser = self._get_parser(cnx, sourceuris=myuris, import_log=importlog) - except ObjectNotFound: - return {} - if self.process_urls(parser, self.urls, raise_on_error): - self.warning("some error occurred, don't attempt to delete entities") - else: - parser.handle_deletion(self.config, cnx, myuris) - self.update_latest_retrieval(cnx) - stats = parser.stats - if stats.get('created'): - importlog.record_info('added %s entities' % len(stats['created'])) - if stats.get('updated'): - importlog.record_info('updated %s entities' % len(stats['updated'])) - importlog.write_log(cnx, end_timestamp=self.latest_retrieval) - cnx.commit() - return stats - - def process_urls(self, parser, urls, raise_on_error=False): - error = False - for url in urls: - self.info('pulling data from %s', url) - try: - if parser.process(url, raise_on_error): - error = True - except IOError as exc: - if raise_on_error: - raise - parser.import_log.record_error( - 'could not pull data while processing %s: %s' - % (url, exc)) - error = True - except Exception as exc: - if raise_on_error: - raise - self.exception('error while processing %s: %s', - url, exc) - error = True - return error - - @deprecated('[3.21] use the new store API') - def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams): - """called by the repository when an eid has been attributed for an - entity stored here but the entity has not been inserted in the system - table yet. - - This method must return the an Entity instance representation of this - entity. - """ - entity = super(DataFeedSource, self).before_entity_insertion( - cnx, lid, etype, eid, sourceparams) - entity.cw_edited['cwuri'] = lid.decode('utf-8') - entity.cw_edited.set_defaults() - sourceparams['parser'].before_entity_copy(entity, sourceparams) - return entity - - @deprecated('[3.21] use the new store API') - def after_entity_insertion(self, cnx, lid, entity, sourceparams): - """called by the repository after an entity stored here has been - inserted in the system table. 
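To illustrate the pull API above (outside of this changeset): forcing a synchronization of a datafeed source from repository-side code. The source name 'myfeed' and the use of internal_cnx() are assumptions for the example.

with repo.internal_cnx() as cnx:
    source = repo.sources_by_uri['myfeed']          # hypothetical source uri
    # acquires the in_synchronization lock, pulls, then releases the lock
    stats = source.pull_data(cnx, force=True, raise_on_error=True)
    # stats maps 'created' / 'updated' / 'checked' to sets of eids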
- """ - relations = preprocess_inlined_relations(cnx, entity) - if cnx.is_hook_category_activated('integrity'): - entity.cw_edited.check(creation=True) - self.repo.system_source.add_entity(cnx, entity) - entity.cw_edited.saved = entity._cw_is_saved = True - sourceparams['parser'].after_entity_copy(entity, sourceparams) - # call hooks for inlined relations - call_hooks = self.repo.hm.call_hooks - if self.should_call_hooks: - for attr, value in relations: - call_hooks('before_add_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=value) - call_hooks('after_add_relation', cnx, - eidfrom=entity.eid, rtype=attr, eidto=value) - - def source_cwuris(self, cnx): - sql = ('SELECT extid, eid, type FROM entities, cw_source_relation ' - 'WHERE entities.eid=cw_source_relation.eid_from ' - 'AND cw_source_relation.eid_to=%s' % self.eid) - return dict((self.decode_extid(uri), (eid, type)) - for uri, eid, type in cnx.system_sql(sql).fetchall()) - - def init_import_log(self, cnx, **kwargs): - dataimport = cnx.create_entity('CWDataImport', cw_import_of=self, - start_timestamp=datetime.now(tz=utc), - **kwargs) - dataimport.init() - return dataimport - - -class DataFeedParser(AppObject): - __registry__ = 'parsers' - - def __init__(self, cnx, source, sourceuris=None, import_log=None, **kwargs): - super(DataFeedParser, self).__init__(cnx, **kwargs) - self.source = source - self.sourceuris = sourceuris - self.import_log = import_log - self.stats = {'created': set(), 'updated': set(), 'checked': set()} - - def normalize_url(self, url): - """Normalize an url by looking if there is a replacement for it in - `cubicweb.sobjects.URL_MAPPING`. - - This dictionary allow to redirect from one host to another, which may be - useful for example in case of test instance using production data, while - you don't want to load the external source nor to hack your `/etc/hosts` - file. - """ - # local import mandatory, it's available after registration - from cubicweb.sobjects import URL_MAPPING - for mappedurl in URL_MAPPING: - if url.startswith(mappedurl): - return url.replace(mappedurl, URL_MAPPING[mappedurl], 1) - return url - - def retrieve_url(self, url): - """Return stream linked by the given url: - * HTTP urls will be normalized (see :meth:`normalize_url`) - * handle file:// URL - * other will be considered as plain content, useful for testing purpose - - For http URLs, it will try to find a cwclientlib config entry - (if available) and use it as requester. 
- """ - purl = urlparse(url) - if purl.scheme == 'file': - return URLLibResponseAdapter(open(url[7:]), url) - - url = self.normalize_url(url) - - # first, try to use cwclientlib if it's available and if the - # url matches a configuration entry in ~/.config/cwclientlibrc - try: - from cwclientlib import cwproxy_for - # parse url again since it has been normalized - cnx = cwproxy_for(url) - cnx.timeout = self.source.http_timeout - self.source.info('Using cwclientlib for %s' % url) - resp = cnx.get(url) - resp.raise_for_status() - return URLLibResponseAdapter(BytesIO(resp.text), url) - except (ImportError, ValueError, EnvironmentError) as exc: - # ImportError: not available - # ValueError: no config entry found - # EnvironmentError: no cwclientlib config file found - self.source.debug(str(exc)) - - # no chance with cwclientlib, fall back to former implementation - if purl.scheme in ('http', 'https'): - self.source.info('GET %s', url) - req = Request(url) - return _OPENER.open(req, timeout=self.source.http_timeout) - - # url is probably plain content - return URLLibResponseAdapter(BytesIO(url.encode('ascii')), url) - - def add_schema_config(self, schemacfg, checkonly=False): - """added CWSourceSchemaConfig, modify mapping accordingly""" - msg = schemacfg._cw._("this parser doesn't use a mapping") - raise ValidationError(schemacfg.eid, {None: msg}) - - def del_schema_config(self, schemacfg, checkonly=False): - """deleted CWSourceSchemaConfig, modify mapping accordingly""" - msg = schemacfg._cw._("this parser doesn't use a mapping") - raise ValidationError(schemacfg.eid, {None: msg}) - - @deprecated('[3.21] use the new store API') - def extid2entity(self, uri, etype, **sourceparams): - """Return an entity for the given uri. May return None if it should be - skipped. - - If a `raise_on_error` keyword parameter is passed, a ValidationError - exception may be raised. 
- """ - raise_on_error = sourceparams.pop('raise_on_error', False) - cnx = self._cw - # if cwsource is specified and repository has a source with the same - # name, call extid2eid on that source so entity will be properly seen as - # coming from this source - source_uri = sourceparams.pop('cwsource', None) - if source_uri is not None and source_uri != 'system': - source = cnx.repo.sources_by_uri.get(source_uri, self.source) - else: - source = self.source - sourceparams['parser'] = self - if isinstance(uri, text_type): - uri = uri.encode('utf-8') - try: - eid = cnx.repo.extid2eid(source, uri, etype, cnx, - sourceparams=sourceparams) - except ValidationError as ex: - if raise_on_error: - raise - self.source.critical('error while creating %s: %s', etype, ex) - self.import_log.record_error('error while creating %s: %s' - % (etype, ex)) - return None - if eid < 0: - # entity has been moved away from its original source - # - # Don't give etype to entity_from_eid so we get UnknownEid if the - # entity has been removed - try: - entity = cnx.entity_from_eid(-eid) - except UnknownEid: - return None - self.notify_updated(entity) # avoid later update from the source's data - return entity - if self.sourceuris is not None: - self.sourceuris.pop(str(uri), None) - return cnx.entity_from_eid(eid, etype) - - def process(self, url, raise_on_error=False): - """main callback: process the url""" - raise NotImplementedError - - @deprecated('[3.21] use the new store API') - def before_entity_copy(self, entity, sourceparams): - raise NotImplementedError - - @deprecated('[3.21] use the new store API') - def after_entity_copy(self, entity, sourceparams): - self.stats['created'].add(entity.eid) - - def created_during_pull(self, entity): - return entity.eid in self.stats['created'] - - def updated_during_pull(self, entity): - return entity.eid in self.stats['updated'] - - def notify_updated(self, entity): - return self.stats['updated'].add(entity.eid) - - def notify_checked(self, entity): - return self.stats['checked'].add(entity.eid) - - def is_deleted(self, extid, etype, eid): - """return True if the entity of given external id, entity type and eid - is actually deleted. Always return True by default, put more sensible - stuff in sub-classes. 
- """ - return True - - def handle_deletion(self, config, cnx, myuris): - if config['delete-entities'] and myuris: - byetype = {} - for extid, (eid, etype) in myuris.items(): - if self.is_deleted(extid, etype, eid): - byetype.setdefault(etype, []).append(str(eid)) - for etype, eids in byetype.items(): - self.warning('delete %s %s entities', len(eids), etype) - cnx.execute('DELETE %s X WHERE X eid IN (%s)' - % (etype, ','.join(eids))) - cnx.commit() - - def update_if_necessary(self, entity, attrs): - entity.complete(tuple(attrs)) - # check modification date and compare attribute values to only update - # what's actually needed - self.notify_checked(entity) - mdate = attrs.get('modification_date') - if not mdate or mdate > entity.modification_date: - attrs = dict( (k, v) for k, v in attrs.items() - if v != getattr(entity, k)) - if attrs: - entity.cw_set(**attrs) - self.notify_updated(entity) - - -class DataFeedXMLParser(DataFeedParser): - - @deprecated() - def process(self, url, raise_on_error=False): - """IDataFeedParser main entry point""" - try: - parsed = self.parse(url) - except Exception as ex: - if raise_on_error: - raise - self.import_log.record_error(str(ex)) - return True - error = False - commit = self._cw.commit - rollback = self._cw.rollback - for args in parsed: - try: - self.process_item(*args, raise_on_error=raise_on_error) - # commit+set_cnxset instead of commit(free_cnxset=False) to let - # other a chance to get our connections set - commit() - except ValidationError as exc: - if raise_on_error: - raise - self.source.error('Skipping %s because of validation error %s' - % (args, exc)) - rollback() - error = True - return error - - def parse(self, url): - stream = self.retrieve_url(url) - return self.parse_etree(etree.parse(stream).getroot()) - - def parse_etree(self, document): - return [(document,)] - - def process_item(self, *args, **kwargs): - raise NotImplementedError - - def is_deleted(self, extid, etype, eid): - if extid.startswith('file://'): - return exists(extid[7:]) - - url = self.normalize_url(extid) - # first, try to use cwclientlib if it's available and if the - # url matches a configuration entry in ~/.config/cwclientlibrc - try: - from cwclientlib import cwproxy_for - # parse url again since it has been normalized - cnx = cwproxy_for(url) - cnx.timeout = self.source.http_timeout - self.source.info('Using cwclientlib for checking %s' % url) - return cnx.get(url).status_code == 404 - except (ImportError, ValueError, EnvironmentError) as exc: - # ImportError: not available - # ValueError: no config entry found - # EnvironmentError: no cwclientlib config file found - self.source.debug(str(exc)) - - # no chance with cwclientlib, fall back to former implementation - if urlparse(url).scheme in ('http', 'https'): - try: - _OPENER.open(url, timeout=self.source.http_timeout) - except HTTPError as ex: - if ex.code == 404: - return True - return False - - -class URLLibResponseAdapter(object): - """Thin wrapper to be used to fake a value returned by urllib2.urlopen""" - def __init__(self, stream, url, code=200): - self._stream = stream - self._url = url - self.code = code - - def read(self, *args): - return self._stream.read(*args) - - def geturl(self): - return self._url - - def getcode(self): - return self.code - - -# use a cookie enabled opener to use session cookie if any -_OPENER = build_opener() -try: - from logilab.common import urllib2ext - _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler()) -except ImportError: # python-kerberos not available - pass 
-_OPENER.add_handler(HTTPCookieProcessor(CookieJar())) diff -r 058bb3dc685f -r 0b59724cb3f2 server/sources/ldapfeed.py --- a/server/sources/ldapfeed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,358 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb ldap feed source""" - -from __future__ import division # XXX why? - -from datetime import datetime - -from six import PY2, string_types - -import ldap3 - -from logilab.common.configuration import merge_options - -from cubicweb import ValidationError, AuthenticationError, Binary -from cubicweb.server import utils -from cubicweb.server.sources import datafeed - -from cubicweb import _ - -# search scopes -BASE = ldap3.SEARCH_SCOPE_BASE_OBJECT -ONELEVEL = ldap3.SEARCH_SCOPE_SINGLE_LEVEL -SUBTREE = ldap3.SEARCH_SCOPE_WHOLE_SUBTREE -LDAP_SCOPES = {'BASE': BASE, - 'ONELEVEL': ONELEVEL, - 'SUBTREE': SUBTREE} - -# map ldap protocol to their standard port -PROTO_PORT = {'ldap': 389, - 'ldaps': 636, - 'ldapi': None, - } - - -def replace_filter(s): - s = s.replace('*', '\\2A') - s = s.replace('(', '\\28') - s = s.replace(')', '\\29') - s = s.replace('\\', '\\5c') - s = s.replace('\0', '\\00') - return s - - -class LDAPFeedSource(datafeed.DataFeedSource): - """LDAP feed source: unlike ldapuser source, this source is copy based and - will import ldap content (beside passwords for authentication) into the - system source. - """ - support_entities = {'CWUser': False} - use_cwuri_as_url = False - - options = ( - ('auth-mode', - {'type' : 'choice', - 'default': 'simple', - 'choices': ('simple', 'digest_md5', 'gssapi'), - 'help': 'authentication mode used to authenticate user to the ldap.', - 'group': 'ldap-source', 'level': 3, - }), - ('auth-realm', - {'type' : 'string', - 'default': None, - 'help': 'realm to use when using gssapi/kerberos authentication.', - 'group': 'ldap-source', 'level': 3, - }), - - ('data-cnx-dn', - {'type' : 'string', - 'default': '', - 'help': 'user dn to use to open data connection to the ldap (eg used \ -to respond to rql queries). Leave empty for anonymous bind', - 'group': 'ldap-source', 'level': 1, - }), - ('data-cnx-password', - {'type' : 'string', - 'default': '', - 'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries). 
Leave empty for anonymous bind.', - 'group': 'ldap-source', 'level': 1, - }), - - ('user-base-dn', - {'type' : 'string', - 'default': '', - 'help': 'base DN to lookup for users; disable user importation mechanism if unset', - 'group': 'ldap-source', 'level': 1, - }), - ('user-scope', - {'type' : 'choice', - 'default': 'ONELEVEL', - 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), - 'help': 'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")', - 'group': 'ldap-source', 'level': 1, - }), - ('user-classes', - {'type' : 'csv', - 'default': ('top', 'posixAccount'), - 'help': 'classes of user (with Active Directory, you want to say "user" here)', - 'group': 'ldap-source', 'level': 1, - }), - ('user-filter', - {'type': 'string', - 'default': '', - 'help': 'additional filters to be set in the ldap query to find valid users', - 'group': 'ldap-source', 'level': 2, - }), - ('user-login-attr', - {'type' : 'string', - 'default': 'uid', - 'help': 'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)', - 'group': 'ldap-source', 'level': 1, - }), - ('user-default-group', - {'type' : 'csv', - 'default': ('users',), - 'help': 'name of a group in which ldap users will be by default. \ -You can set multiple groups by separating them by a comma.', - 'group': 'ldap-source', 'level': 1, - }), - ('user-attrs-map', - {'type' : 'named', - 'default': {'uid': 'login'}, - 'help': 'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)', - 'group': 'ldap-source', 'level': 1, - }), - ('group-base-dn', - {'type' : 'string', - 'default': '', - 'help': 'base DN to lookup for groups; disable group importation mechanism if unset', - 'group': 'ldap-source', 'level': 1, - }), - ('group-scope', - {'type' : 'choice', - 'default': 'ONELEVEL', - 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), - 'help': 'group search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")', - 'group': 'ldap-source', 'level': 1, - }), - ('group-classes', - {'type' : 'csv', - 'default': ('top', 'posixGroup'), - 'help': 'classes of group', - 'group': 'ldap-source', 'level': 1, - }), - ('group-filter', - {'type': 'string', - 'default': '', - 'help': 'additional filters to be set in the ldap query to find valid groups', - 'group': 'ldap-source', 'level': 2, - }), - ('group-attrs-map', - {'type' : 'named', - 'default': {'cn': 'name', 'memberUid': 'member'}, - 'help': 'map from ldap group attributes to cubicweb attributes', - 'group': 'ldap-source', 'level': 1, - }), - ) - - options = merge_options(datafeed.DataFeedSource.options + options, - optgroup='ldap-source',) - - _conn = None - - def update_config(self, source_entity, typedconfig): - """update configuration from source entity. 
`typedconfig` is config - properly typed with defaults set - """ - super(LDAPFeedSource, self).update_config(source_entity, typedconfig) - self.authmode = typedconfig['auth-mode'] - self._authenticate = getattr(self, '_auth_%s' % self.authmode) - self.cnx_dn = typedconfig['data-cnx-dn'] - self.cnx_pwd = typedconfig['data-cnx-password'] - self.user_base_dn = str(typedconfig['user-base-dn']) - self.user_base_scope = globals()[typedconfig['user-scope']] - self.user_login_attr = typedconfig['user-login-attr'] - self.user_default_groups = typedconfig['user-default-group'] - self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} - self.user_attrs.update(typedconfig['user-attrs-map']) - self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.items()) - self.base_filters = ['(objectclass=%s)' % replace_filter(o) - for o in typedconfig['user-classes']] - if typedconfig['user-filter']: - self.base_filters.append(typedconfig['user-filter']) - self.group_base_dn = str(typedconfig['group-base-dn']) - self.group_base_scope = LDAP_SCOPES[typedconfig['group-scope']] - self.group_attrs = typedconfig['group-attrs-map'] - self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} - self.group_attrs.update(typedconfig['group-attrs-map']) - self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.items()) - self.group_base_filters = ['(objectClass=%s)' % replace_filter(o) - for o in typedconfig['group-classes']] - if typedconfig['group-filter']: - self.group_base_filters.append(typedconfig['group-filter']) - self._conn = None - - def _entity_update(self, source_entity): - super(LDAPFeedSource, self)._entity_update(source_entity) - if self.urls: - if len(self.urls) > 1: - raise ValidationError(source_entity.eid, {'url': _('can only have one url')}) - try: - protocol, hostport = self.urls[0].split('://') - except ValueError: - raise ValidationError(source_entity.eid, {'url': _('badly formatted url')}) - if protocol not in PROTO_PORT: - raise ValidationError(source_entity.eid, {'url': _('unsupported protocol')}) - - def connection_info(self): - assert len(self.urls) == 1, self.urls - protocol, hostport = self.urls[0].split('://') - if protocol != 'ldapi' and ':' in hostport: - host, port = hostport.rsplit(':', 1) - else: - host, port = hostport, PROTO_PORT[protocol] - return protocol, host, port - - def authenticate(self, cnx, login, password=None, **kwargs): - """return CWUser eid for the given login/password if this account is - defined in this source, else raise `AuthenticationError` - - two queries are needed since passwords are stored crypted, so we have - to fetch the salt first - """ - self.info('ldap authenticate %s', login) - if not password: - # On Windows + ADAM this would have succeeded (!!!) - # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'. 
- # we really really don't want that - raise AuthenticationError() - searchfilter = ['(%s=%s)' % (replace_filter(self.user_login_attr), replace_filter(login))] - searchfilter.extend(self.base_filters) - searchstr = '(&%s)' % ''.join(searchfilter) - # first search the user - try: - user = self._search(cnx, self.user_base_dn, - self.user_base_scope, searchstr)[0] - except IndexError: - # no such user - raise AuthenticationError() - # check password by establishing a (unused) connection - try: - self._connect(user, password) - except ldap3.LDAPException as ex: - # Something went wrong, most likely bad credentials - self.info('while trying to authenticate %s: %s', user, ex) - raise AuthenticationError() - except Exception: - self.error('while trying to authenticate %s', user, exc_info=True) - raise AuthenticationError() - eid = self.repo.system_source.extid2eid(cnx, user['dn'].encode('ascii')) - if eid is None or eid < 0: - # user is not known or has been moved away from this source - raise AuthenticationError() - return eid - - def _connect(self, user=None, userpwd=None): - protocol, host, port = self.connection_info() - self.info('connecting %s://%s:%s as %s', protocol, host, port, - user and user['dn'] or 'anonymous') - server = ldap3.Server(host, port=int(port)) - conn = ldap3.Connection(server, user=user and user['dn'], client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE, auto_referrals=False) - # Now bind with the credentials given. Let exceptions propagate out. - if user is None: - # XXX always use simple bind for data connection - if not self.cnx_dn: - conn.bind() - else: - self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd) - else: - # user specified, we want to check user/password, no need to return - # the connection which will be thrown out - self._authenticate(conn, user, userpwd) - return conn - - def _auth_simple(self, conn, user, userpwd): - conn.authentication = ldap3.AUTH_SIMPLE - conn.user = user['dn'] - conn.password = userpwd - conn.bind() - - def _auth_digest_md5(self, conn, user, userpwd): - conn.authentication = ldap3.AUTH_SASL - conn.sasl_mechanism = 'DIGEST-MD5' - # realm, user, password, authz-id - conn.sasl_credentials = (None, user['dn'], userpwd, None) - conn.bind() - - def _auth_gssapi(self, conn, user, userpwd): - conn.authentication = ldap3.AUTH_SASL - conn.sasl_mechanism = 'GSSAPI' - conn.bind() - - def _search(self, cnx, base, scope, - searchstr='(objectClass=*)', attrs=()): - """make an ldap query""" - self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, - searchstr, list(attrs)) - if self._conn is None: - self._conn = self._connect() - ldapcnx = self._conn - if not ldapcnx.search(base, searchstr, search_scope=scope, attributes=attrs): - return [] - result = [] - for rec in ldapcnx.response: - if rec['type'] != 'searchResEntry': - continue - items = rec['attributes'].items() - itemdict = self._process_ldap_item(rec['dn'], items) - result.append(itemdict) - self.debug('ldap built results %s', len(result)) - return result - - def _process_ldap_item(self, dn, iterator): - """Turn an ldap received item into a proper dict.""" - itemdict = {'dn': dn} - for key, value in iterator: - if self.user_attrs.get(key) == 'upassword': # XXx better password detection - value = value[0].encode('utf-8') - # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py - if not value.startswith(b'{SSHA}'): - value = utils.crypt_password(value) - itemdict[key] = Binary(value) - elif self.user_attrs.get(key) == 'modification_date': - itemdict[key] = 
datetime.strptime(value[0], '%Y%m%d%H%M%SZ') - else: - if PY2 and value and isinstance(value[0], str): - value = [unicode(val, 'utf-8', 'replace') for val in value] - if len(value) == 1: - itemdict[key] = value = value[0] - else: - itemdict[key] = value - # we expect memberUid to be a list of user ids, make sure of it - member = self.group_rev_attrs['member'] - if isinstance(itemdict.get(member), string_types): - itemdict[member] = [itemdict[member]] - return itemdict - - def _process_no_such_object(self, cnx, dn): - """Some search return NO_SUCH_OBJECT error, handle this (usually because - an object whose dn is no more existent in ldap as been encountered). - - Do nothing by default, let sub-classes handle that. - """ diff -r 058bb3dc685f -r 0b59724cb3f2 server/sources/native.py --- a/server/sources/native.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1813 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Adapters for native cubicweb sources. - -Notes: -* extid (aka external id, the primary key of an entity in the external source - from which it comes from) are stored in a varchar column encoded as a base64 - string. This is because it should actually be Bytes but we want an index on - it for fast querying. 
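The base64 storage of external ids mentioned in this module docstring pairs with AbstractSource.decode_extid() shown earlier; a round trip for illustration (the extid value is made up):

from base64 import b64encode
from cubicweb.server.sources import AbstractSource

extid = b'http://example.org/thing/42'    # hypothetical external id (bytes)
stored = b64encode(extid)                 # what ends up in the varchar column
assert AbstractSource.decode_extid(stored) == extid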
-""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from threading import Lock -from datetime import datetime -from base64 import b64encode -from contextlib import contextmanager -from os.path import basename -import re -import itertools -import zipfile -import logging -import sys - -from six import PY2, text_type, binary_type, string_types -from six.moves import range, cPickle as pickle - -from logilab.common.decorators import cached, clear_cache -from logilab.common.configuration import Method -from logilab.common.shellutils import getlogin -from logilab.database import get_db_helper, sqlgen - -from yams.schema import role_name - -from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary, - UniqueTogetherError, UndoTransactionException, ViolatedConstraint) -from cubicweb import transaction as tx, server, neg_role -from cubicweb.utils import QueryCache -from cubicweb.schema import VIRTUAL_RTYPES -from cubicweb.cwconfig import CubicWebNoAppConfiguration -from cubicweb.server import hook -from cubicweb.server import schema2sql as y2sql -from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update -from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn -from cubicweb.server.rqlannotation import set_qdata -from cubicweb.server.hook import CleanupDeletedEidsCacheOp -from cubicweb.server.edition import EditedEntity -from cubicweb.server.sources import AbstractSource, dbg_st_search, dbg_results -from cubicweb.server.sources.rql2sql import SQLGenerator -from cubicweb.statsd_logger import statsd_timeit - - -ATTR_MAP = {} -NONSYSTEM_ETYPES = set() -NONSYSTEM_RELATIONS = set() - -class LogCursor(object): - def __init__(self, cursor): - self.cu = cursor - - def execute(self, query, args=None): - """Execute a query. - it's a function just so that it shows up in profiling - """ - if server.DEBUG & server.DBG_SQL: - print('exec', query, args) - try: - self.cu.execute(str(query), args) - except Exception as ex: - print("sql: %r\n args: %s\ndbms message: %r" % ( - query, args, ex.args[0])) - raise - - def fetchall(self): - return self.cu.fetchall() - - def fetchone(self): - return self.cu.fetchone() - - -def sql_or_clauses(sql, clauses): - select, restr = sql.split(' WHERE ', 1) - restrclauses = restr.split(' AND ') - for clause in clauses: - restrclauses.remove(clause) - if restrclauses: - restr = '%s AND (%s)' % (' AND '.join(restrclauses), - ' OR '.join(clauses)) - else: - restr = '(%s)' % ' OR '.join(clauses) - return '%s WHERE %s' % (select, restr) - - -def rdef_table_column(rdef): - """return table and column used to store the given relation definition in - the database - """ - return (SQL_PREFIX + str(rdef.subject), - SQL_PREFIX + str(rdef.rtype)) - - -def rdef_physical_info(dbhelper, rdef): - """return backend type and a boolean flag if NULL values should be allowed - for a given relation definition - """ - if not rdef.object.final: - return dbhelper.TYPE_MAPPING['Int'] - coltype = y2sql.type_from_rdef(dbhelper, rdef, creating=False) - allownull = rdef.cardinality[0] != '1' - return coltype, allownull - - -class _UndoException(Exception): - """something went wrong during undoing""" - - def __unicode__(self): - """Called by the unicode builtin; should return a Unicode object - - Type of _UndoException message must be `unicode` by design in CubicWeb. 
- """ - assert isinstance(self.args[0], text_type) - return self.args[0] - - -def _undo_check_relation_target(tentity, rdef, role): - """check linked entity has not been redirected for this relation""" - card = rdef.role_cardinality(role) - if card in '?1' and tentity.related(rdef.rtype, role): - raise _UndoException(tentity._cw._( - "Can't restore %(role)s relation %(rtype)s to entity %(eid)s which " - "is already linked using this relation.") - % {'role': neg_role(role), - 'rtype': rdef.rtype, - 'eid': tentity.eid}) - -def _undo_rel_info(cnx, subj, rtype, obj): - entities = [] - for role, eid in (('subject', subj), ('object', obj)): - try: - entities.append(cnx.entity_from_eid(eid)) - except UnknownEid: - raise _UndoException(cnx._( - "Can't restore relation %(rtype)s, %(role)s entity %(eid)s" - " doesn't exist anymore.") - % {'role': cnx._(role), - 'rtype': cnx._(rtype), - 'eid': eid}) - sentity, oentity = entities - try: - rschema = cnx.vreg.schema.rschema(rtype) - rdef = rschema.rdefs[(sentity.cw_etype, oentity.cw_etype)] - except KeyError: - raise _UndoException(cnx._( - "Can't restore relation %(rtype)s between %(subj)s and " - "%(obj)s, that relation does not exists anymore in the " - "schema.") - % {'rtype': cnx._(rtype), - 'subj': subj, - 'obj': obj}) - return sentity, oentity, rdef - -def _undo_has_later_transaction(cnx, eid): - return cnx.system_sql('''\ -SELECT T.tx_uuid FROM transactions AS TREF, transactions AS T -WHERE TREF.tx_uuid='%(txuuid)s' AND T.tx_uuid!='%(txuuid)s' -AND T.tx_time>=TREF.tx_time -AND (EXISTS(SELECT 1 FROM tx_entity_actions AS TEA - WHERE TEA.tx_uuid=T.tx_uuid AND TEA.eid=%(eid)s) - OR EXISTS(SELECT 1 FROM tx_relation_actions as TRA - WHERE TRA.tx_uuid=T.tx_uuid AND ( - TRA.eid_from=%(eid)s OR TRA.eid_to=%(eid)s)) - )''' % {'txuuid': cnx.transaction_data['undoing_uuid'], - 'eid': eid}).fetchone() - - -class DefaultEidGenerator(object): - __slots__ = ('source', 'cnx', 'lock') - - def __init__(self, source): - self.source = source - self.cnx = None - self.lock = Lock() - - def close(self): - if self.cnx: - self.cnx.close() - self.cnx = None - - def create_eid(self, _cnx, count=1): - # lock needed to prevent 'Connection is busy with results for another - # command (0)' errors with SQLServer - assert count > 0 - with self.lock: - return self._create_eid(count) - - def _create_eid(self, count): - # internal function doing the eid creation without locking. - # needed for the recursive handling of disconnections (otherwise we - # deadlock on self._eid_cnx_lock - source = self.source - if self.cnx is None: - self.cnx = source.get_connection() - cnx = self.cnx - try: - cursor = cnx.cursor() - for sql in source.dbhelper.sqls_increment_numrange('entities_id_seq', count): - cursor.execute(sql) - eid = cursor.fetchone()[0] - except (source.OperationalError, source.InterfaceError): - # FIXME: better detection of deconnection pb - source.warning("trying to reconnect create eid connection") - self.cnx = None - return self._create_eid(count) - except source.DbapiError as exc: - # We get this one with pyodbc and SQL Server when connection was reset - if exc.args[0] == '08S01': - source.warning("trying to reconnect create eid connection") - self.cnx = None - return self._create_eid(count) - else: - raise - except Exception: # WTF? 
- cnx.rollback() - self.cnx = None - source.exception('create eid failed in an unforeseen way on SQL statement %s', sql) - raise - else: - cnx.commit() - return eid - - -class SQLITEEidGenerator(object): - __slots__ = ('source', 'lock') - - def __init__(self, source): - self.source = source - self.lock = Lock() - - def close(self): - pass - - def create_eid(self, cnx, count=1): - assert count > 0 - source = self.source - with self.lock: - for sql in source.dbhelper.sqls_increment_numrange('entities_id_seq', count): - cursor = source.doexec(cnx, sql) - return cursor.fetchone()[0] - - -class NativeSQLSource(SQLAdapterMixIn, AbstractSource): - """adapter for source using the native cubicweb schema (see below) - """ - sqlgen_class = SQLGenerator - options = ( - ('db-driver', - {'type' : 'string', - 'default': 'postgres', - # XXX use choice type - 'help': 'database driver (postgres, sqlite, sqlserver2005)', - 'group': 'native-source', 'level': 0, - }), - ('db-host', - {'type' : 'string', - 'default': '', - 'help': 'database host', - 'group': 'native-source', 'level': 1, - }), - ('db-port', - {'type' : 'string', - 'default': '', - 'help': 'database port', - 'group': 'native-source', 'level': 1, - }), - ('db-name', - {'type' : 'string', - 'default': Method('default_instance_id'), - 'help': 'database name', - 'group': 'native-source', 'level': 0, - }), - ('db-namespace', - {'type' : 'string', - 'default': '', - 'help': 'database namespace (schema) name', - 'group': 'native-source', 'level': 1, - }), - ('db-user', - {'type' : 'string', - 'default': CubicWebNoAppConfiguration.mode == 'user' and getlogin() or 'cubicweb', - 'help': 'database user', - 'group': 'native-source', 'level': 0, - }), - ('db-password', - {'type' : 'password', - 'default': '', - 'help': 'database password', - 'group': 'native-source', 'level': 0, - }), - ('db-encoding', - {'type' : 'string', - 'default': 'utf8', - 'help': 'database encoding', - 'group': 'native-source', 'level': 1, - }), - ('db-extra-arguments', - {'type' : 'string', - 'default': '', - 'help': 'set to "Trusted_Connection" if you are using SQLServer and ' - 'want trusted authentication for the database connection', - 'group': 'native-source', 'level': 2, - }), - ('db-statement-timeout', - {'type': 'int', - 'default': 0, - 'help': 'sql statement timeout, in milliseconds (postgres only)', - 'group': 'native-source', 'level': 2, - }), - ) - - def __init__(self, repo, source_config, *args, **kwargs): - SQLAdapterMixIn.__init__(self, source_config, repairing=repo.config.repairing) - self.authentifiers = [LoginPasswordAuthentifier(self)] - if repo.config['allow-email-login']: - self.authentifiers.insert(0, EmailPasswordAuthentifier(self)) - AbstractSource.__init__(self, repo, source_config, *args, **kwargs) - # sql generator - self._rql_sqlgen = self.sqlgen_class(self.schema, self.dbhelper, - ATTR_MAP.copy()) - # full text index helper - self.do_fti = not repo.config['delay-full-text-indexation'] - # sql queries cache - self._cache = QueryCache(repo.config['rql-cache-size']) - # (etype, attr) / storage mapping - self._storages = {} - self.binary_to_str = self.dbhelper.dbapi_module.binary_to_str - if self.dbdriver == 'sqlite': - self.eid_generator = SQLITEEidGenerator(self) - else: - self.eid_generator = DefaultEidGenerator(self) - self.create_eid = self.eid_generator.create_eid - - def check_config(self, source_entity): - """check configuration of source entity""" - if source_entity.host_config: - msg = source_entity._cw._('the system source has its configuration ' - 
'stored on the file-system') - raise ValidationError(source_entity.eid, {role_name('config', 'subject'): msg}) - - def add_authentifier(self, authentifier): - self.authentifiers.append(authentifier) - authentifier.source = self - authentifier.set_schema(self.schema) - - def reset_caches(self): - """method called during test to reset potential source caches""" - self._cache = QueryCache(self.repo.config['rql-cache-size']) - - def clear_eid_cache(self, eid, etype): - """clear potential caches for the given eid""" - self._cache.pop('Any X WHERE X eid %s, X is %s' % (eid, etype), None) - self._cache.pop('Any X WHERE X eid %s' % eid, None) - self._cache.pop('Any %s' % eid, None) - - @statsd_timeit - def sqlexec(self, cnx, sql, args=None): - """execute the query and return its result""" - return self.process_result(self.doexec(cnx, sql, args)) - - def init_creating(self, cnxset=None): - # check full text index availibility - if self.do_fti: - if cnxset is None: - _cnxset = self.repo._get_cnxset() - else: - _cnxset = cnxset - if not self.dbhelper.has_fti_table(_cnxset.cu): - if not self.repo.config.creating: - self.critical('no text index table') - self.do_fti = False - if cnxset is None: - _cnxset.cnxset_freed() - self.repo._free_cnxset(_cnxset) - - def backup(self, backupfile, confirm, format='native'): - """method called to create a backup of the source's data""" - if format == 'portable': - # ensure the schema is the one stored in the database: if repository - # started in quick_start mode, the file system's one has been loaded - # so force reload - if self.repo.config.quick_start: - self.repo.set_schema(self.repo.deserialize_schema(), - resetvreg=False) - helper = DatabaseIndependentBackupRestore(self) - self.close_source_connections() - try: - helper.backup(backupfile) - finally: - self.open_source_connections() - elif format == 'native': - self.close_source_connections() - try: - self.backup_to_file(backupfile, confirm) - finally: - self.open_source_connections() - else: - raise ValueError('Unknown format %r' % format) - - - def restore(self, backupfile, confirm, drop, format='native'): - """method called to restore a backup of source's data""" - if self.repo.config.init_cnxset_pool: - self.close_source_connections() - try: - if format == 'portable': - helper = DatabaseIndependentBackupRestore(self) - helper.restore(backupfile) - elif format == 'native': - self.restore_from_file(backupfile, confirm, drop=drop) - else: - raise ValueError('Unknown format %r' % format) - finally: - if self.repo.config.init_cnxset_pool: - self.open_source_connections() - - - def init(self, activated, source_entity): - try: - # test if 'asource' column exists - query = self.dbhelper.sql_add_limit_offset('SELECT asource FROM entities', 1) - source_entity._cw.system_sql(query) - except Exception as ex: - self.eid_type_source = self.eid_type_source_pre_131 - super(NativeSQLSource, self).init(activated, source_entity) - self.init_creating(source_entity._cw.cnxset) - - def shutdown(self): - self.eid_generator.close() - - # XXX deprecates [un]map_attribute? 
- def map_attribute(self, etype, attr, cb, sourcedb=True): - self._rql_sqlgen.attr_map[u'%s.%s' % (etype, attr)] = (cb, sourcedb) - - def unmap_attribute(self, etype, attr): - self._rql_sqlgen.attr_map.pop(u'%s.%s' % (etype, attr), None) - - def set_storage(self, etype, attr, storage): - storage_dict = self._storages.setdefault(etype, {}) - storage_dict[attr] = storage - self.map_attribute(etype, attr, - storage.callback, storage.is_source_callback) - - def unset_storage(self, etype, attr): - self._storages[etype].pop(attr) - # if etype has no storage left, remove the entry - if not self._storages[etype]: - del self._storages[etype] - self.unmap_attribute(etype, attr) - - def storage(self, etype, attr): - """return the storage for the given entity type / attribute - """ - try: - return self._storages[etype][attr] - except KeyError: - raise Exception('no custom storage set for %s.%s' % (etype, attr)) - - # ISource interface ####################################################### - - @statsd_timeit - def compile_rql(self, rql, sols): - rqlst = self.repo.vreg.rqlhelper.parse(rql) - rqlst.restricted_vars = () - rqlst.children[0].solutions = sols - self.repo.querier.sqlgen_annotate(rqlst) - set_qdata(self.schema.rschema, rqlst, ()) - return rqlst - - def set_schema(self, schema): - """set the instance'schema""" - self._cache = QueryCache(self.repo.config['rql-cache-size']) - self.cache_hit, self.cache_miss, self.no_cache = 0, 0, 0 - self.schema = schema - try: - self._rql_sqlgen.schema = schema - except AttributeError: - pass # __init__ - for authentifier in self.authentifiers: - authentifier.set_schema(self.schema) - clear_cache(self, 'need_fti_indexation') - - def support_entity(self, etype, write=False): - """return true if the given entity's type is handled by this adapter - if write is true, return true only if it's a RW support - """ - return not etype in NONSYSTEM_ETYPES - - def support_relation(self, rtype, write=False): - """return true if the given relation's type is handled by this adapter - if write is true, return true only if it's a RW support - """ - if write: - return not rtype in NONSYSTEM_RELATIONS - # due to current multi-sources implementation, the system source - # can't claim not supporting a relation - return True #not rtype == 'content_for' - - @statsd_timeit - def authenticate(self, cnx, login, **kwargs): - """return CWUser eid for the given login and other authentication - information found in kwargs, else raise `AuthenticationError` - """ - for authentifier in self.authentifiers: - try: - return authentifier.authenticate(cnx, login, **kwargs) - except AuthenticationError: - continue - raise AuthenticationError() - - def syntax_tree_search(self, cnx, union, args=None, cachekey=None, - varmap=None): - """return result from this source for a rql query (actually from - a rql syntax tree and a solution dictionary mapping each used - variable to a possible type). If cachekey is given, the query - necessary to fetch the results (but not the results themselves) - may be cached using this key. - """ - assert dbg_st_search(self.uri, union, varmap, args, cachekey) - # remember number of actually selected term (sql generation may append some) - if cachekey is None: - self.no_cache += 1 - # generate sql query if we are able to do so (not supported types...) 
- sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap) - else: - # sql may be cached - try: - sql, qargs, cbs = self._cache[cachekey] - self.cache_hit += 1 - except KeyError: - self.cache_miss += 1 - sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap) - self._cache[cachekey] = sql, qargs, cbs - args = self.merge_args(args, qargs) - assert isinstance(sql, string_types), repr(sql) - cursor = self.doexec(cnx, sql, args) - results = self.process_result(cursor, cnx, cbs) - assert dbg_results(results) - return results - - @contextmanager - def _fixup_cw(self, cnx, entity): - _cw = entity._cw - entity._cw = cnx - try: - yield - finally: - entity._cw = _cw - - @contextmanager - def _storage_handler(self, cnx, entity, event): - # 1/ memorize values as they are before the storage is called. - # For instance, the BFSStorage will replace the `data` - # binary value with a Binary containing the destination path - # on the filesystem. To make the entity.data usage absolutely - # transparent, we'll have to reset entity.data to its binary - # value once the SQL query will be executed - restore_values = [] - if isinstance(entity, list): - entities = entity - else: - entities = [entity] - etype = entities[0].__regid__ - for attr, storage in self._storages.get(etype, {}).items(): - for entity in entities: - with self._fixup_cw(cnx, entity): - if event == 'deleted': - storage.entity_deleted(entity, attr) - else: - edited = entity.cw_edited - if attr in edited: - handler = getattr(storage, 'entity_%s' % event) - to_restore = handler(entity, attr) - restore_values.append((entity, attr, to_restore)) - try: - yield # 2/ execute the source's instructions - finally: - # 3/ restore original values - for entity, attr, value in restore_values: - entity.cw_edited.edited_attribute(attr, value) - - def add_entity(self, cnx, entity): - """add a new entity to the source""" - with self._storage_handler(cnx, entity, 'added'): - attrs = self.preprocess_entity(entity) - sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs) - self.doexec(cnx, sql, attrs) - if cnx.ertype_supports_undo(entity.cw_etype): - self._record_tx_action(cnx, 'tx_entity_actions', u'C', - etype=text_type(entity.cw_etype), eid=entity.eid) - - def update_entity(self, cnx, entity): - """replace an entity in the source""" - with self._storage_handler(cnx, entity, 'updated'): - attrs = self.preprocess_entity(entity) - if cnx.ertype_supports_undo(entity.cw_etype): - changes = self._save_attrs(cnx, entity, attrs) - self._record_tx_action(cnx, 'tx_entity_actions', u'U', - etype=text_type(entity.cw_etype), eid=entity.eid, - changes=self._binary(pickle.dumps(changes))) - sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs, - ['cw_eid']) - self.doexec(cnx, sql, attrs) - - def delete_entity(self, cnx, entity): - """delete an entity from the source""" - with self._storage_handler(cnx, entity, 'deleted'): - if cnx.ertype_supports_undo(entity.cw_etype): - attrs = [SQL_PREFIX + r.type - for r in entity.e_schema.subject_relations() - if (r.final or r.inlined) and not r in VIRTUAL_RTYPES] - changes = self._save_attrs(cnx, entity, attrs) - self._record_tx_action(cnx, 'tx_entity_actions', u'D', - etype=text_type(entity.cw_etype), eid=entity.eid, - changes=self._binary(pickle.dumps(changes))) - attrs = {'cw_eid': entity.eid} - sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) - self.doexec(cnx, sql, attrs) - - def add_relation(self, cnx, subject, rtype, object, inlined=False): - """add a relation to the source""" - 
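The `_storage_handler()` comments above describe a three-step protocol around custom attribute storages: memorize the edited value, let the storage rewrite it before the SQL statement runs, then restore the original value afterwards. A toy storage honouring that protocol, loosely modelled on the file-system storage those comments mention; only the `entity_added`/`entity_updated`/`entity_deleted` names, the restore-value convention and the `callback`/`is_source_callback` attributes read by `set_storage()` come from the surrounding code, while the callback signature and everything else are assumptions for illustration:

    import os
    import os.path as osp

    from cubicweb import Binary

    class MiniFSStorage(object):
        """toy storage keeping Bytes attributes in files under `rootdir`"""
        is_source_callback = True

        def __init__(self, rootdir):
            self.rootdir = rootdir

        def _path(self, entity, attr):
            return osp.join(self.rootdir, '%s_%s.bin' % (entity.eid, attr))

        def callback(self, *args):
            # installed in the SQL generator's attribute map by set_storage();
            # the stored file path is assumed to be the last argument
            with open(args[-1], 'rb') as fobj:
                return Binary(fobj.read())

        def entity_added(self, entity, attr):
            binary = entity.cw_edited[attr]
            fpath = self._path(entity, attr)
            with open(fpath, 'wb') as fobj:
                fobj.write(binary.getvalue())
            # let the SQL statement store the path instead of the payload
            entity.cw_edited.edited_attribute(attr, Binary(fpath.encode('utf-8')))
            return binary  # restored into cw_edited by _storage_handler()

        def entity_updated(self, entity, attr):
            return self.entity_added(entity, attr)

        def entity_deleted(self, entity, attr):
            try:
                os.unlink(self._path(entity, attr))
            except OSError:
                pass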
self._add_relations(cnx, rtype, [(subject, object)], inlined) - if cnx.ertype_supports_undo(rtype): - self._record_tx_action(cnx, 'tx_relation_actions', u'A', - eid_from=subject, rtype=text_type(rtype), eid_to=object) - - def add_relations(self, cnx, rtype, subj_obj_list, inlined=False): - """add a relations to the source""" - self._add_relations(cnx, rtype, subj_obj_list, inlined) - if cnx.ertype_supports_undo(rtype): - for subject, object in subj_obj_list: - self._record_tx_action(cnx, 'tx_relation_actions', u'A', - eid_from=subject, rtype=text_type(rtype), eid_to=object) - - def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False): - """add a relation to the source""" - sql = [] - if inlined is False: - attrs = [{'eid_from': subject, 'eid_to': object} - for subject, object in subj_obj_list] - sql.append((self.sqlgen.insert('%s_relation' % rtype, attrs[0]), attrs)) - else: # used by data import - etypes = {} - for subject, object in subj_obj_list: - etype = cnx.entity_metas(subject)['type'] - if etype in etypes: - etypes[etype].append((subject, object)) - else: - etypes[etype] = [(subject, object)] - for subj_etype, subj_obj_list in etypes.items(): - attrs = [{'cw_eid': subject, SQL_PREFIX + rtype: object} - for subject, object in subj_obj_list] - sql.append((self.sqlgen.update(SQL_PREFIX + etype, attrs[0], - ['cw_eid']), - attrs)) - for statement, attrs in sql: - self.doexecmany(cnx, statement, attrs) - - def delete_relation(self, cnx, subject, rtype, object): - """delete a relation from the source""" - rschema = self.schema.rschema(rtype) - self._delete_relation(cnx, subject, rtype, object, rschema.inlined) - if cnx.ertype_supports_undo(rtype): - self._record_tx_action(cnx, 'tx_relation_actions', u'R', - eid_from=subject, rtype=text_type(rtype), eid_to=object) - - def _delete_relation(self, cnx, subject, rtype, object, inlined=False): - """delete a relation from the source""" - if inlined: - table = SQL_PREFIX + cnx.entity_metas(subject)['type'] - column = SQL_PREFIX + rtype - sql = 'UPDATE %s SET %s=NULL WHERE %seid=%%(eid)s' % (table, column, - SQL_PREFIX) - attrs = {'eid' : subject} - else: - attrs = {'eid_from': subject, 'eid_to': object} - sql = self.sqlgen.delete('%s_relation' % rtype, attrs) - self.doexec(cnx, sql, attrs) - - @statsd_timeit - def doexec(self, cnx, query, args=None, rollback=True): - """Execute a query. - it's a function just so that it shows up in profiling - """ - cursor = cnx.cnxset.cu - if server.DEBUG & server.DBG_SQL: - print('exec', query, args, cnx.cnxset.cnx) - try: - # str(query) to avoid error if it's a unicode string - cursor.execute(str(query), args) - except Exception as ex: - if self.repo.config.mode != 'test': - # during test we get those message when trying to alter sqlite - # db schema - self.info("sql: %r\n args: %s\ndbms message: %r", - query, args, ex.args[0]) - if rollback: - try: - cnx.cnxset.rollback() - if self.repo.config.mode != 'test': - self.debug('transaction has been rolled back') - except Exception as ex: - pass - if ex.__class__.__name__ == 'IntegrityError': - # need string comparison because of various backends - for arg in ex.args: - # postgres, sqlserver - mo = re.search("unique_[a-z0-9]{32}", arg) - if mo is not None: - raise UniqueTogetherError(cnx, cstrname=mo.group(0)) - # old sqlite - mo = re.search('columns? 
(.*) (?:is|are) not unique', arg) - if mo is not None: # sqlite in use - # we left chop the 'cw_' prefix of attribute names - rtypes = [c.strip()[3:] - for c in mo.group(1).split(',')] - raise UniqueTogetherError(cnx, rtypes=rtypes) - # sqlite after http://www.sqlite.org/cgi/src/info/c80e229dd9c1230a - if arg.startswith('UNIQUE constraint failed:'): - # message looks like: "UNIQUE constraint failed: foo.cw_bar, foo.cw_baz" - # so drop the prefix, split on comma, drop the tablenames, and drop "cw_" - columns = arg.split(':', 1)[1].split(',') - rtypes = [c.split('.', 1)[1].strip()[3:] for c in columns] - raise UniqueTogetherError(cnx, rtypes=rtypes) - - mo = re.search('"cstr[a-f0-9]{32}"', arg) - if mo is not None: - # postgresql - raise ViolatedConstraint(cnx, cstrname=mo.group(0)[1:-1]) - if arg.startswith('CHECK constraint failed:'): - # sqlite3 (new) - raise ViolatedConstraint(cnx, cstrname=arg.split(':', 1)[1].strip()) - mo = re.match('^constraint (cstr.*) failed$', arg) - if mo is not None: - # sqlite3 (old) - raise ViolatedConstraint(cnx, cstrname=mo.group(1)) - raise - return cursor - - @statsd_timeit - def doexecmany(self, cnx, query, args): - """Execute a query. - it's a function just so that it shows up in profiling - """ - if server.DEBUG & server.DBG_SQL: - print('execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx) - cursor = cnx.cnxset.cu - try: - # str(query) to avoid error if it's a unicode string - cursor.executemany(str(query), args) - except Exception as ex: - if self.repo.config.mode != 'test': - # during test we get those message when trying to alter sqlite - # db schema - self.critical("sql many: %r\n args: %s\ndbms message: %r", - query, args, ex.args[0]) - try: - cnx.cnxset.rollback() - if self.repo.config.mode != 'test': - self.critical('transaction has been rolled back') - except Exception: - pass - raise - - # short cut to method requiring advanced db helper usage ################## - - def update_rdef_column(self, cnx, rdef): - """update physical column for a relation definition (final or inlined) - """ - table, column = rdef_table_column(rdef) - coltype, allownull = rdef_physical_info(self.dbhelper, rdef) - if not self.dbhelper.alter_column_support: - self.error("backend can't alter %s.%s to %s%s", table, column, coltype, - not allownull and 'NOT NULL' or '') - return - self.dbhelper.change_col_type(LogCursor(cnx.cnxset.cu), - table, column, coltype, allownull) - self.info('altered %s.%s: now %s%s', table, column, coltype, - not allownull and 'NOT NULL' or '') - - def update_rdef_null_allowed(self, cnx, rdef): - """update NULL / NOT NULL of physical column for a relation definition - (final or inlined) - """ - if not self.dbhelper.alter_column_support: - # not supported (and NOT NULL not set by yams in that case, so no - # worry) - return - table, column = rdef_table_column(rdef) - coltype, allownull = rdef_physical_info(self.dbhelper, rdef) - self.dbhelper.set_null_allowed(LogCursor(cnx.cnxset.cu), - table, column, coltype, allownull) - - def update_rdef_indexed(self, cnx, rdef): - table, column = rdef_table_column(rdef) - if rdef.indexed: - self.create_index(cnx, table, column) - else: - self.drop_index(cnx, table, column) - - def update_rdef_unique(self, cnx, rdef): - table, column = rdef_table_column(rdef) - if rdef.constraint_by_type('UniqueConstraint'): - self.create_index(cnx, table, column, unique=True) - else: - self.drop_index(cnx, table, column, unique=True) - - def create_index(self, cnx, table, column, unique=False): - cursor = 
LogCursor(cnx.cnxset.cu) - self.dbhelper.create_index(cursor, table, column, unique) - - def drop_index(self, cnx, table, column, unique=False): - cursor = LogCursor(cnx.cnxset.cu) - self.dbhelper.drop_index(cursor, table, column, unique) - - # system source interface ################################################# - - def _eid_type_source(self, cnx, eid, sql): - try: - res = self.doexec(cnx, sql).fetchone() - if res is not None: - return res - except Exception: - self.exception('failed to query entities table for eid %s', eid) - raise UnknownEid(eid) - - def eid_type_source(self, cnx, eid): # pylint: disable=E0202 - """return a tuple (type, extid, source) for the entity with id """ - sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid - res = self._eid_type_source(cnx, eid, sql) - if not isinstance(res, list): - res = list(res) - res[-2] = self.decode_extid(res[-2]) - return res - - def eid_type_source_pre_131(self, cnx, eid): - """return a tuple (type, extid, source) for the entity with id """ - sql = 'SELECT type, extid FROM entities WHERE eid=%s' % eid - res = self._eid_type_source(cnx, eid, sql) - if not isinstance(res, list): - res = list(res) - res[-1] = self.decode_extid(res[-1]) - res.append("system") - return res - - def extid2eid(self, cnx, extid): - """get eid from an external id. Return None if no record found.""" - assert isinstance(extid, binary_type) - args = {'x': b64encode(extid).decode('ascii')} - cursor = self.doexec(cnx, - 'SELECT eid FROM entities WHERE extid=%(x)s', - args) - # XXX testing rowcount cause strange bug with sqlite, results are there - # but rowcount is 0 - #if cursor.rowcount > 0: - try: - result = cursor.fetchone() - if result: - return result[0] - except Exception: - pass - cursor = self.doexec(cnx, - 'SELECT eid FROM moved_entities WHERE extid=%(x)s', - args) - try: - result = cursor.fetchone() - if result: - # entity was moved to the system source, return negative - # number to tell the external source to ignore it - return -result[0] - except Exception: - pass - return None - - def _handle_is_relation_sql(self, cnx, sql, attrs): - """ Handler for specific is_relation sql that may be - overwritten in some stores""" - self.doexec(cnx, sql % attrs) - - _handle_insert_entity_sql = doexec - _handle_is_instance_of_sql = _handle_source_relation_sql = _handle_is_relation_sql - - def add_info(self, cnx, entity, source, extid): - """add type and source info for an eid into the system table""" - assert cnx.cnxset is not None - # begin by inserting eid/type/source/extid into the entities table - if extid is not None: - assert isinstance(extid, binary_type) - extid = b64encode(extid).decode('ascii') - attrs = {'type': text_type(entity.cw_etype), 'eid': entity.eid, 'extid': extid, - 'asource': text_type(source.uri)} - self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs) - # insert core relations: is, is_instance_of and cw_source - try: - self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, entity.e_schema))) - except IndexError: - # during schema serialization, skip - pass - else: - for eschema in entity.e_schema.ancestors() + [entity.e_schema]: - self._handle_is_relation_sql(cnx, - 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, eschema))) - if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 - self._handle_is_relation_sql(cnx, 'INSERT INTO 
cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, source.eid)) - # now we can update the full text index - if self.need_fti_indexation(entity.cw_etype): - self.index_entity(cnx, entity=entity) - - def update_info(self, cnx, entity, need_fti_update): - """mark entity as being modified, fulltext reindex if needed""" - if need_fti_update: - # reindex the entity only if this query is updating at least - # one indexable attribute - self.index_entity(cnx, entity=entity) - - def delete_info_multi(self, cnx, entities): - """delete system information on deletion of a list of entities with the - same etype and belinging to the same source - - * update the fti - * remove record from the `entities` table - """ - self.fti_unindex_entities(cnx, entities) - attrs = {'eid': '(%s)' % ','.join([str(_e.eid) for _e in entities])} - self.doexec(cnx, self.sqlgen.delete_many('entities', attrs), attrs) - - # undo support ############################################################# - - def undoable_transactions(self, cnx, ueid=None, **actionfilters): - """See :class:`cubicweb.repoapi.Connection.undoable_transactions`""" - # force filtering to connection's user if not a manager - if not cnx.user.is_in_group('managers'): - ueid = cnx.user.eid - restr = {} - if ueid is not None: - restr['tx_user'] = ueid - sql = self.sqlgen.select('transactions', restr, ('tx_uuid', 'tx_time', 'tx_user')) - if actionfilters: - # we will need subqueries to filter transactions according to - # actions done - tearestr = {} # filters on the tx_entity_actions table - trarestr = {} # filters on the tx_relation_actions table - genrestr = {} # generic filters, appliyable to both table - # unless public explicitly set to false, we only consider public - # actions - if actionfilters.pop('public', True): - genrestr['txa_public'] = True - # put additional filters in trarestr and/or tearestr - for key, val in actionfilters.items(): - if key == 'etype': - # filtering on etype implies filtering on entity actions - # only, and with no eid specified - assert actionfilters.get('action', 'C') in 'CUD' - assert not 'eid' in actionfilters - tearestr['etype'] = text_type(val) - elif key == 'eid': - # eid filter may apply to 'eid' of tx_entity_actions or to - # 'eid_from' OR 'eid_to' of tx_relation_actions - if actionfilters.get('action', 'C') in 'CUD': - tearestr['eid'] = val - if actionfilters.get('action', 'A') in 'AR': - trarestr['eid_from'] = val - trarestr['eid_to'] = val - elif key == 'action': - if val in 'CUD': - tearestr['txa_action'] = text_type(val) - else: - assert val in 'AR' - trarestr['txa_action'] = text_type(val) - else: - raise AssertionError('unknow filter %s' % key) - assert trarestr or tearestr, "can't only filter on 'public'" - subqsqls = [] - # append subqueries to the original query, using EXISTS() - if trarestr or (genrestr and not tearestr): - trarestr.update(genrestr) - trasql = self.sqlgen.select('tx_relation_actions', trarestr, ('1',)) - if 'eid_from' in trarestr: - # replace AND by OR between eid_from/eid_to restriction - trasql = sql_or_clauses(trasql, ['eid_from = %(eid_from)s', - 'eid_to = %(eid_to)s']) - trasql += ' AND transactions.tx_uuid=tx_relation_actions.tx_uuid' - subqsqls.append('EXISTS(%s)' % trasql) - if tearestr or (genrestr and not trarestr): - tearestr.update(genrestr) - teasql = self.sqlgen.select('tx_entity_actions', tearestr, ('1',)) - teasql += ' AND transactions.tx_uuid=tx_entity_actions.tx_uuid' - subqsqls.append('EXISTS(%s)' % teasql) - if restr: - sql += ' AND %s' % ' OR 
'.join(subqsqls) - else: - sql += ' WHERE %s' % ' OR '.join(subqsqls) - restr.update(trarestr) - restr.update(tearestr) - # we want results ordered by transaction's time descendant - sql += ' ORDER BY tx_time DESC' - cu = self.doexec(cnx, sql, restr) - # turn results into transaction objects - return [tx.Transaction(cnx, *args) for args in cu.fetchall()] - - def tx_info(self, cnx, txuuid): - """See :class:`cubicweb.repoapi.Connection.transaction_info`""" - return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, text_type(txuuid))) - - def tx_actions(self, cnx, txuuid, public): - """See :class:`cubicweb.repoapi.Connection.transaction_actions`""" - txuuid = text_type(txuuid) - self._tx_info(cnx, txuuid) - restr = {'tx_uuid': txuuid} - if public: - restr['txa_public'] = True - # XXX use generator to avoid loading everything in memory? - sql = self.sqlgen.select('tx_entity_actions', restr, - ('txa_action', 'txa_public', 'txa_order', - 'etype', 'eid', 'changes')) - with cnx.ensure_cnx_set: - cu = self.doexec(cnx, sql, restr) - actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c))) - for a,p,o,et,e,c in cu.fetchall()] - sql = self.sqlgen.select('tx_relation_actions', restr, - ('txa_action', 'txa_public', 'txa_order', - 'rtype', 'eid_from', 'eid_to')) - with cnx.ensure_cnx_set: - cu = self.doexec(cnx, sql, restr) - actions += [tx.RelationAction(*args) for args in cu.fetchall()] - return sorted(actions, key=lambda x: x.order) - - def undo_transaction(self, cnx, txuuid): - """See :class:`cubicweb.repoapi.Connection.undo_transaction` - - important note: while undoing of a transaction, only hooks in the - 'integrity', 'activeintegrity' and 'undo' categories are called. - """ - errors = [] - cnx.transaction_data['undoing_uuid'] = txuuid - with cnx.deny_all_hooks_but('integrity', 'activeintegrity', 'undo'): - with cnx.security_enabled(read=False): - for action in reversed(self.tx_actions(cnx, txuuid, False)): - undomethod = getattr(self, '_undo_%s' % action.action.lower()) - errors += undomethod(cnx, action) - # remove the transactions record - self.doexec(cnx, - "DELETE FROM transactions WHERE tx_uuid='%s'" % txuuid) - if errors: - raise UndoTransactionException(txuuid, errors) - else: - return - - def start_undoable_transaction(self, cnx, uuid): - """connection callback to insert a transaction record in the transactions - table when some undoable transaction is started - """ - ueid = cnx.user.eid - attrs = {'tx_uuid': uuid, 'tx_user': ueid, 'tx_time': datetime.utcnow()} - self.doexec(cnx, self.sqlgen.insert('transactions', attrs), attrs) - - def _save_attrs(self, cnx, entity, attrs): - """return a pickleable dictionary containing current values for given - attributes of the entity - """ - restr = {'cw_eid': entity.eid} - sql = self.sqlgen.select(SQL_PREFIX + entity.cw_etype, restr, attrs) - cu = self.doexec(cnx, sql, restr) - values = dict(zip(attrs, cu.fetchone())) - # ensure backend specific binary are converted back to string - eschema = entity.e_schema - for column in attrs: - # [3:] remove 'cw_' prefix - attr = column[3:] - if not eschema.subjrels[attr].final: - continue - if eschema.destination(attr) in ('Password', 'Bytes'): - value = values[column] - if value is not None: - values[column] = self.binary_to_str(value) - return values - - def _record_tx_action(self, cnx, table, action, **kwargs): - """record a transaction action in the given table (either - 'tx_entity_actions' or 'tx_relation_action') - """ - kwargs['tx_uuid'] = cnx.transaction_uuid() - 
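Putting the undo plumbing above together: `undoable_transactions()` accepts the `etype`, `eid`, `action` and `public` filters handled in the loop over `actionfilters`, returns `Transaction` objects ordered newest first, and `undo_transaction()` replays the recorded actions in reverse. An illustrative helper (the `uuid` attribute name on the returned objects is an assumption):

    def undo_last_user_creation(source, cnx):
        # 'C' is the action code recorded for entity creations
        txs = source.undoable_transactions(cnx, etype='CWUser', action='C')
        if not txs:
            return None
        # results come back ORDER BY tx_time DESC, so txs[0] is the newest;
        # undo_transaction raises UndoTransactionException if any step fails
        source.undo_transaction(cnx, txs[0].uuid)
        return txs[0]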
kwargs['txa_action'] = action - kwargs['txa_order'] = cnx.transaction_inc_action_counter() - kwargs['txa_public'] = not cnx.hooks_in_progress - self.doexec(cnx, self.sqlgen.insert(table, kwargs), kwargs) - - def _tx_info(self, cnx, txuuid): - """return transaction's time and user of the transaction with the given uuid. - - raise `NoSuchTransaction` if there is no such transaction of if the - connection's user isn't allowed to see it. - """ - restr = {'tx_uuid': txuuid} - sql = self.sqlgen.select('transactions', restr, - ('tx_time', 'tx_user')) - cu = self.doexec(cnx, sql, restr) - try: - time, ueid = cu.fetchone() - except TypeError: - raise tx.NoSuchTransaction(txuuid) - if not (cnx.user.is_in_group('managers') - or cnx.user.eid == ueid): - raise tx.NoSuchTransaction(txuuid) - return time, ueid - - def _reedit_entity(self, entity, changes, err): - cnx = entity._cw - eid = entity.eid - entity.cw_edited = edited = EditedEntity(entity) - # check for schema changes, entities linked through inlined relation - # still exists, rewrap binary values - eschema = entity.e_schema - getrschema = eschema.subjrels - for column, value in changes.items(): - rtype = column[len(SQL_PREFIX):] - if rtype == "eid": - continue # XXX should even `eid` be stored in action changes? - try: - rschema = getrschema[rtype] - except KeyError: - err(cnx._("can't restore relation %(rtype)s of entity %(eid)s, " - "this relation does not exist in the schema anymore.") - % {'rtype': rtype, 'eid': eid}) - if not rschema.final: - if not rschema.inlined: - assert value is None - # rschema is an inlined relation - elif value is not None: - # not a deletion: we must put something in edited - try: - entity._cw.entity_from_eid(value) # check target exists - edited[rtype] = value - except UnknownEid: - err(cnx._("can't restore entity %(eid)s of type %(eschema)s, " - "target of %(rtype)s (eid %(value)s) does not exist any longer") - % locals()) - changes[column] = None - elif eschema.destination(rtype) in ('Bytes', 'Password'): - changes[column] = self._binary(value) - edited[rtype] = Binary(value) - elif PY2 and isinstance(value, str): - edited[rtype] = text_type(value, cnx.encoding, 'replace') - else: - edited[rtype] = value - # This must only be done after init_entitiy_caches : defered in calling functions - # edited.check() - - def _undo_d(self, cnx, action): - """undo an entity deletion""" - errors = [] - err = errors.append - eid = action.eid - etype = action.etype - _ = cnx._ - # get an entity instance - try: - entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) - except Exception: - err("can't restore entity %s of type %s, type no more supported" - % (eid, etype)) - return errors - self._reedit_entity(entity, action.changes, err) - entity.eid = eid - cnx.repo.init_entity_caches(cnx, entity, self) - entity.cw_edited.check() - self.repo.hm.call_hooks('before_add_entity', cnx, entity=entity) - # restore the entity - action.changes['cw_eid'] = eid - # restore record in entities (will update fti if needed) - self.add_info(cnx, entity, self, None) - sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes) - self.doexec(cnx, sql, action.changes) - self.repo.hm.call_hooks('after_add_entity', cnx, entity=entity) - return errors - - def _undo_r(self, cnx, action): - """undo a relation removal""" - errors = [] - subj, rtype, obj = action.eid_from, action.rtype, action.eid_to - try: - sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) - except _UndoException as ex: - errors.append(text_type(ex)) - else: - for role, 
entity in (('subject', sentity), - ('object', oentity)): - try: - _undo_check_relation_target(entity, rdef, role) - except _UndoException as ex: - errors.append(text_type(ex)) - continue - if not errors: - self.repo.hm.call_hooks('before_add_relation', cnx, - eidfrom=subj, rtype=rtype, eidto=obj) - # add relation in the database - self._add_relations(cnx, rtype, [(subj, obj)], rdef.rtype.inlined) - # set related cache - cnx.update_rel_cache_add(subj, rtype, obj, rdef.rtype.symmetric) - self.repo.hm.call_hooks('after_add_relation', cnx, - eidfrom=subj, rtype=rtype, eidto=obj) - return errors - - def _undo_c(self, cnx, action): - """undo an entity creation""" - eid = action.eid - # XXX done to avoid fetching all remaining relation for the entity - # we should find an efficient way to do this (keeping current veolidf - # massive deletion performance) - if _undo_has_later_transaction(cnx, eid): - msg = cnx._('some later transaction(s) touch entity, undo them ' - 'first') - raise ValidationError(eid, {None: msg}) - etype = action.etype - # get an entity instance - try: - entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) - except Exception: - return [cnx._( - "Can't undo creation of entity %(eid)s of type %(etype)s, type " - "no more supported" % {'eid': eid, 'etype': etype})] - entity.eid = eid - # for proper eid/type cache update - CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(eid) - self.repo.hm.call_hooks('before_delete_entity', cnx, entity=entity) - # remove is / is_instance_of which are added using sql by hooks, hence - # unvisible as transaction action - self.doexec(cnx, 'DELETE FROM is_relation WHERE eid_from=%s' % eid) - self.doexec(cnx, 'DELETE FROM is_instance_of_relation WHERE eid_from=%s' % eid) - self.doexec(cnx, 'DELETE FROM cw_source_relation WHERE eid_from=%s' % eid) - # XXX check removal of inlined relation? 
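For reference while reading the `_undo_*` methods: `undo_transaction()` above resolves its handler with `getattr(self, '_undo_%s' % action.action.lower())`, so the five single-letter codes written by `_record_tx_action()` map onto handlers as follows (a readability restatement of what the code already does, not new behaviour):

    UNDO_HANDLERS = {
        'C': '_undo_c',  # entity creation   -> undo deletes the entity
        'U': '_undo_u',  # entity update     -> undo restores saved attributes
        'D': '_undo_d',  # entity deletion   -> undo re-inserts the entity
        'A': '_undo_a',  # relation addition -> undo deletes the relation
        'R': '_undo_r',  # relation removal  -> undo re-adds the relation
    }

    def resolve_undo_handler(source, action):
        # equivalent to getattr(source, '_undo_%s' % action.action.lower())
        return getattr(source, UNDO_HANDLERS[action.action])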
- # delete the entity - attrs = {'cw_eid': eid} - sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) - self.doexec(cnx, sql, attrs) - # remove record from entities (will update fti if needed) - self.delete_info_multi(cnx, [entity]) - self.repo.hm.call_hooks('after_delete_entity', cnx, entity=entity) - return () - - def _undo_u(self, cnx, action): - """undo an entity update""" - errors = [] - err = errors.append - try: - entity = cnx.entity_from_eid(action.eid) - except UnknownEid: - err(cnx._("can't restore state of entity %s, it has been " - "deleted inbetween") % action.eid) - return errors - self._reedit_entity(entity, action.changes, err) - entity.cw_edited.check() - self.repo.hm.call_hooks('before_update_entity', cnx, entity=entity) - sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, action.changes, - ['cw_eid']) - self.doexec(cnx, sql, action.changes) - self.repo.hm.call_hooks('after_update_entity', cnx, entity=entity) - return errors - - def _undo_a(self, cnx, action): - """undo a relation addition""" - errors = [] - subj, rtype, obj = action.eid_from, action.rtype, action.eid_to - try: - sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) - except _UndoException as ex: - errors.append(text_type(ex)) - else: - rschema = rdef.rtype - if rschema.inlined: - sql = 'SELECT 1 FROM cw_%s WHERE cw_eid=%s and cw_%s=%s'\ - % (sentity.cw_etype, subj, rtype, obj) - else: - sql = 'SELECT 1 FROM %s_relation WHERE eid_from=%s and eid_to=%s'\ - % (rtype, subj, obj) - cu = self.doexec(cnx, sql) - if cu.fetchone() is None: - errors.append(cnx._( - "Can't undo addition of relation %(rtype)s from %(subj)s to" - " %(obj)s, doesn't exist anymore" % locals())) - if not errors: - self.repo.hm.call_hooks('before_delete_relation', cnx, - eidfrom=subj, rtype=rtype, eidto=obj) - # delete relation from the database - self._delete_relation(cnx, subj, rtype, obj, rschema.inlined) - # set related cache - cnx.update_rel_cache_del(subj, rtype, obj, rschema.symmetric) - self.repo.hm.call_hooks('after_delete_relation', cnx, - eidfrom=subj, rtype=rtype, eidto=obj) - return errors - - # full text index handling ################################################# - - @cached - def need_fti_indexation(self, etype): - eschema = self.schema.eschema(etype) - if any(eschema.indexable_attributes()): - return True - if any(eschema.fulltext_containers()): - return True - return False - - def index_entity(self, cnx, entity): - """create an operation to [re]index textual content of the given entity - on commit - """ - if self.do_fti: - FTIndexEntityOp.get_instance(cnx).add_data(entity.eid) - - def fti_unindex_entities(self, cnx, entities): - """remove text content for entities from the full text index - """ - cursor = cnx.cnxset.cu - cursor_unindex_object = self.dbhelper.cursor_unindex_object - try: - for entity in entities: - cursor_unindex_object(entity.eid, cursor) - except Exception: # let KeyboardInterrupt / SystemExit propagate - self.exception('error while unindexing %s', entity) - - - def fti_index_entities(self, cnx, entities): - """add text content of created/modified entities to the full text index - """ - cursor_index_object = self.dbhelper.cursor_index_object - cursor = cnx.cnxset.cu - try: - # use cursor_index_object, not cursor_reindex_object since - # unindexing done in the FTIndexEntityOp - for entity in entities: - cursor_index_object(entity.eid, - entity.cw_adapt_to('IFTIndexable'), - cursor) - except Exception: # let KeyboardInterrupt / SystemExit propagate - self.exception('error while 
indexing %s', entity) - - -class FTIndexEntityOp(hook.DataOperationMixIn, hook.LateOperation): - """operation to delay entity full text indexation to commit - - since fti indexing may trigger discovery of other entities, it should be - triggered on precommit, not commit, and this should be done after other - precommit operation which may add relations to the entity - """ - - def precommit_event(self): - cnx = self.cnx - source = cnx.repo.system_source - pendingeids = cnx.transaction_data.get('pendingeids', ()) - done = cnx.transaction_data.setdefault('indexedeids', set()) - to_reindex = set() - for eid in self.get_data(): - if eid in pendingeids or eid in done: - # entity added and deleted in the same transaction or already - # processed - continue - done.add(eid) - iftindexable = cnx.entity_from_eid(eid).cw_adapt_to('IFTIndexable') - to_reindex |= set(iftindexable.fti_containers()) - source.fti_unindex_entities(cnx, to_reindex) - source.fti_index_entities(cnx, to_reindex) - -def sql_schema(driver): - helper = get_db_helper(driver) - typemap = helper.TYPE_MAPPING - schema = """ -/* Create the repository's system database */ - -%s - -CREATE TABLE entities ( - eid INTEGER PRIMARY KEY NOT NULL, - type VARCHAR(64) NOT NULL, - asource VARCHAR(128) NOT NULL, - extid VARCHAR(256) -);; -CREATE INDEX entities_type_idx ON entities(type);; -CREATE TABLE moved_entities ( - eid INTEGER PRIMARY KEY NOT NULL, - extid VARCHAR(256) UNIQUE NOT NULL -);; - -CREATE TABLE transactions ( - tx_uuid CHAR(32) PRIMARY KEY NOT NULL, - tx_user INTEGER NOT NULL, - tx_time %s NOT NULL -);; -CREATE INDEX transactions_tx_user_idx ON transactions(tx_user);; -CREATE INDEX transactions_tx_time_idx ON transactions(tx_time);; - -CREATE TABLE tx_entity_actions ( - tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE, - txa_action CHAR(1) NOT NULL, - txa_public %s NOT NULL, - txa_order INTEGER, - eid INTEGER NOT NULL, - etype VARCHAR(64) NOT NULL, - changes %s -);; -CREATE INDEX tx_entity_actions_txa_action_idx ON tx_entity_actions(txa_action);; -CREATE INDEX tx_entity_actions_txa_public_idx ON tx_entity_actions(txa_public);; -CREATE INDEX tx_entity_actions_eid_idx ON tx_entity_actions(eid);; -CREATE INDEX tx_entity_actions_etype_idx ON tx_entity_actions(etype);; -CREATE INDEX tx_entity_actions_tx_uuid_idx ON tx_entity_actions(tx_uuid);; - -CREATE TABLE tx_relation_actions ( - tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE, - txa_action CHAR(1) NOT NULL, - txa_public %s NOT NULL, - txa_order INTEGER, - eid_from INTEGER NOT NULL, - eid_to INTEGER NOT NULL, - rtype VARCHAR(256) NOT NULL -);; -CREATE INDEX tx_relation_actions_txa_action_idx ON tx_relation_actions(txa_action);; -CREATE INDEX tx_relation_actions_txa_public_idx ON tx_relation_actions(txa_public);; -CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);; -CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to);; -CREATE INDEX tx_relation_actions_tx_uuid_idx ON tx_relation_actions(tx_uuid);; -""" % (helper.sql_create_numrange('entities_id_seq').replace(';', ';;'), - typemap['Datetime'], - typemap['Boolean'], typemap['Bytes'], typemap['Boolean']) - if helper.backend_name == 'sqlite': - # sqlite support the ON DELETE CASCADE syntax but do nothing - schema += ''' -CREATE TRIGGER fkd_transactions -BEFORE DELETE ON transactions -FOR EACH ROW BEGIN - DELETE FROM tx_entity_actions WHERE tx_uuid=OLD.tx_uuid; - DELETE FROM tx_relation_actions WHERE tx_uuid=OLD.tx_uuid; -END;; -''' - schema += 
';;'.join(helper.sqls_create_multicol_unique_index('entities', ['extid'])) - schema += ';;\n' - return schema - - -def sql_drop_schema(driver): - helper = get_db_helper(driver) - return """ -%s; -%s -DROP TABLE entities; -DROP TABLE tx_entity_actions; -DROP TABLE tx_relation_actions; -DROP TABLE transactions; -""" % (';'.join(helper.sqls_drop_multicol_unique_index('entities', ['extid'])), - helper.sql_drop_numrange('entities_id_seq')) - - -def grant_schema(user, set_owner=True): - result = '' - for table in ('entities', 'entities_id_seq', - 'transactions', 'tx_entity_actions', 'tx_relation_actions'): - if set_owner: - result = 'ALTER TABLE %s OWNER TO %s;\n' % (table, user) - result += 'GRANT ALL ON %s TO %s;\n' % (table, user) - return result - - -class BaseAuthentifier(object): - - def __init__(self, source=None): - self.source = source - - def set_schema(self, schema): - """set the instance'schema""" - pass - -class LoginPasswordAuthentifier(BaseAuthentifier): - passwd_rql = 'Any P WHERE X is CWUser, X login %(login)s, X upassword P' - auth_rql = (u'Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s, ' - 'X cw_source S, S name "system"') - _sols = ({'X': 'CWUser', 'P': 'Password', 'S': 'CWSource'},) - - def set_schema(self, schema): - """set the instance'schema""" - if 'CWUser' in schema: # probably an empty schema if not true... - # rql syntax trees used to authenticate users - self._passwd_rqlst = self.source.compile_rql(self.passwd_rql, self._sols) - self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols) - - def authenticate(self, cnx, login, password=None, **kwargs): - """return CWUser eid for the given login/password if this account is - defined in this source, else raise `AuthenticationError` - - two queries are needed since passwords are stored crypted, so we have - to fetch the salt first - """ - args = {'login': login, 'pwd' : None} - if password is not None: - rset = self.source.syntax_tree_search(cnx, self._passwd_rqlst, args) - try: - pwd = rset[0][0] - except IndexError: - raise AuthenticationError('bad login') - if pwd is None: - # if pwd is None but a password is provided, something is wrong - raise AuthenticationError('bad password') - # passwords are stored using the Bytes type, so we get a StringIO - args['pwd'] = Binary(crypt_password(password, pwd.getvalue())) - # get eid from login and (crypted) password - rset = self.source.syntax_tree_search(cnx, self._auth_rqlst, args) - pwd = args['pwd'] - try: - user = rset[0][0] - # If the stored hash uses a deprecated scheme (e.g. DES or MD5 used - # before 3.14.7), update with a fresh one - if pwd is not None and pwd.getvalue(): - verify, newhash = verify_and_update(password, pwd.getvalue()) - if not verify: # should not happen, but... 
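The docstring above explains why two RQL queries are needed: stored password hashes are salted, so the stored value must be fetched first and used as salt when hashing the submitted password. Condensed into one helper (the import path is an assumption, not shown in this diff; the equality check is exactly what the `auth_rql` comparison relies on):

    from cubicweb.server.utils import crypt_password  # path assumed

    def check_password(stored, submitted):
        """return True if clear-text `submitted` matches the crypted `stored` value"""
        if not stored:
            # an empty or missing hash never matches a supplied password
            return False
        return crypt_password(submitted, stored) == stored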
- raise AuthenticationError('bad password') - if newhash: - cnx.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s" % ( - SQL_PREFIX + 'CWUser', - SQL_PREFIX + 'upassword', - SQL_PREFIX + 'login'), - {'newhash': self.source._binary(newhash.encode('ascii')), - 'login': login}) - cnx.commit() - return user - except IndexError: - raise AuthenticationError('bad password') - - -class EmailPasswordAuthentifier(BaseAuthentifier): - def authenticate(self, cnx, login, **authinfo): - # email_auth flag prevent from infinite recursion (call to - # repo.check_auth_info at the end of this method may lead us here again) - if not '@' in login or authinfo.pop('email_auth', None): - raise AuthenticationError('not an email') - rset = cnx.execute('Any L WHERE U login L, U primary_email M, ' - 'M address %(login)s', {'login': login}, - build_descr=False) - if rset.rowcount != 1: - raise AuthenticationError('unexisting email') - login = rset.rows[0][0] - authinfo['email_auth'] = True - return self.source.repo.check_auth_info(cnx, login, authinfo) - - -class DatabaseIndependentBackupRestore(object): - """Helper class to perform db backend agnostic backup and restore - - The backup and restore methods are used to dump / restore the - system database in a database independent format. The file is a - Zip archive containing the following files: - - * format.txt: the format of the archive. Currently '1.1' - * tables.txt: list of filenames in the archive tables/ directory - * sequences.txt: list of filenames in the archive sequences/ directory - * numranges.txt: list of filenames in the archive numrange/ directory - * versions.txt: the list of cube versions from CWProperty - * tables/.: pickled data - * sequences/: pickled data - - The pickled data format for tables, numranges and sequences is a tuple of 3 elements: - * the table name - * a tuple of column names - * a list of rows (as tuples with one element per column) - - Tables are saved in chunks in different files in order to prevent - a too high memory consumption. 
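    A hedged sketch of how such an archive can be read back, using only the
    layout described above (the helper name is illustrative)::

        import pickle
        import zipfile

        def iter_table_rows(backupfile, table):
            '''yield rows of `table` from a portable backup as dicts'''
            with zipfile.ZipFile(backupfile, 'r') as archive:
                assert archive.read('format.txt').strip() == b'1.1'
                chunks = sorted(name for name in archive.namelist()
                                if name.startswith('tables/%s.' % table))
                for name in chunks:
                    tablename, columns, rows = pickle.loads(archive.read(name))
                    assert tablename == table
                    for row in rows:
                        yield dict(zip(columns, row))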
- """ - blocksize = 100 - - def __init__(self, source): - """ - :param: source an instance of the system source - """ - self._source = source - self.logger = logging.getLogger('cubicweb.ctl') - self.logger.setLevel(logging.INFO) - self.logger.addHandler(logging.StreamHandler(sys.stdout)) - self.schema = self._source.schema - self.dbhelper = self._source.dbhelper - self.cnx = None - self.cursor = None - self.sql_generator = sqlgen.SQLGenerator() - - def get_connection(self): - return self._source.get_connection() - - def backup(self, backupfile): - archive = zipfile.ZipFile(backupfile, 'w', allowZip64=True) - self.cnx = self.get_connection() - try: - self.cursor = self.cnx.cursor() - self.cursor.arraysize = 100 - self.logger.info('writing metadata') - self.write_metadata(archive) - for seq in self.get_sequences(): - self.logger.info('processing sequence %s', seq) - self.write_sequence(archive, seq) - for numrange in self.get_numranges(): - self.logger.info('processing numrange %s', numrange) - self.write_numrange(archive, numrange) - for table in self.get_tables(): - self.logger.info('processing table %s', table) - self.write_table(archive, table) - finally: - archive.close() - self.cnx.close() - self.logger.info('done') - - def get_tables(self): - non_entity_tables = ['entities', - 'transactions', - 'tx_entity_actions', - 'tx_relation_actions', - ] - etype_tables = [] - relation_tables = [] - prefix = 'cw_' - for etype in self.schema.entities(): - eschema = self.schema.eschema(etype) - if eschema.final: - continue - etype_tables.append('%s%s'%(prefix, etype)) - for rtype in self.schema.relations(): - rschema = self.schema.rschema(rtype) - if rschema.final or rschema.inlined or rschema in VIRTUAL_RTYPES: - continue - relation_tables.append('%s_relation' % rtype) - return non_entity_tables + etype_tables + relation_tables - - def get_sequences(self): - return [] - - def get_numranges(self): - return ['entities_id_seq'] - - def write_metadata(self, archive): - archive.writestr('format.txt', '1.1') - archive.writestr('tables.txt', '\n'.join(self.get_tables())) - archive.writestr('sequences.txt', '\n'.join(self.get_sequences())) - archive.writestr('numranges.txt', '\n'.join(self.get_numranges())) - versions = self._get_versions() - versions_str = '\n'.join('%s %s' % (k, v) - for k, v in versions) - archive.writestr('versions.txt', versions_str) - - def write_sequence(self, archive, seq): - sql = self.dbhelper.sql_sequence_current_state(seq) - columns, rows_iterator = self._get_cols_and_rows(sql) - rows = list(rows_iterator) - serialized = self._serialize(seq, columns, rows) - archive.writestr('sequences/%s' % seq, serialized) - - def write_numrange(self, archive, numrange): - sql = self.dbhelper.sql_numrange_current_state(numrange) - columns, rows_iterator = self._get_cols_and_rows(sql) - rows = list(rows_iterator) - serialized = self._serialize(numrange, columns, rows) - archive.writestr('numrange/%s' % numrange, serialized) - - def write_table(self, archive, table): - nb_lines_sql = 'SELECT COUNT(*) FROM %s' % table - self.cursor.execute(nb_lines_sql) - rowcount = self.cursor.fetchone()[0] - sql = 'SELECT * FROM %s' % table - columns, rows_iterator = self._get_cols_and_rows(sql) - self.logger.info('number of rows: %d', rowcount) - blocksize = self.blocksize - if rowcount > 0: - for i, start in enumerate(range(0, rowcount, blocksize)): - rows = list(itertools.islice(rows_iterator, blocksize)) - serialized = self._serialize(table, columns, rows) - archive.writestr('tables/%s.%04d' % (table, i), 
serialized) - self.logger.debug('wrote rows %d to %d (out of %d) to %s.%04d', - start, start+len(rows)-1, - rowcount, - table, i) - else: - rows = [] - serialized = self._serialize(table, columns, rows) - archive.writestr('tables/%s.%04d' % (table, 0), serialized) - - def _get_cols_and_rows(self, sql): - process_result = self._source.iter_process_result - self.cursor.execute(sql) - columns = (d[0] for d in self.cursor.description) - rows = process_result(self.cursor) - return tuple(columns), rows - - def _serialize(self, name, columns, rows): - return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL) - - def restore(self, backupfile): - archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True) - self.cnx = self.get_connection() - self.cursor = self.cnx.cursor() - sequences, numranges, tables, table_chunks = self.read_metadata(archive, backupfile) - for seq in sequences: - self.logger.info('restoring sequence %s', seq) - self.read_sequence(archive, seq) - for numrange in numranges: - self.logger.info('restoring numrange %s', numrange) - self.read_numrange(archive, numrange) - for table in tables: - self.logger.info('restoring table %s', table) - self.read_table(archive, table, sorted(table_chunks[table])) - self.cnx.close() - archive.close() - self.logger.info('done') - - def read_metadata(self, archive, backupfile): - formatinfo = archive.read('format.txt') - self.logger.info('checking metadata') - if formatinfo.strip() != "1.1": - self.logger.critical('Unsupported format in archive: %s', formatinfo) - raise ValueError('Unknown format in %s: %s' % (backupfile, formatinfo)) - tables = archive.read('tables.txt').splitlines() - sequences = archive.read('sequences.txt').splitlines() - numranges = archive.read('numranges.txt').splitlines() - file_versions = self._parse_versions(archive.read('versions.txt')) - versions = set(self._get_versions()) - if file_versions != versions: - self.logger.critical('Unable to restore : versions do not match') - self.logger.critical('Expected:\n%s', '\n'.join('%s : %s' % (cube, ver) - for cube, ver in sorted(versions))) - self.logger.critical('Found:\n%s', '\n'.join('%s : %s' % (cube, ver) - for cube, ver in sorted(file_versions))) - raise ValueError('Unable to restore : versions do not match') - table_chunks = {} - for name in archive.namelist(): - if not name.startswith('tables/'): - continue - filename = basename(name) - tablename, _ext = filename.rsplit('.', 1) - table_chunks.setdefault(tablename, []).append(name) - return sequences, numranges, tables, table_chunks - - def read_sequence(self, archive, seq): - seqname, columns, rows = pickle.loads(archive.read('sequences/%s' % seq)) - assert seqname == seq - assert len(rows) == 1 - assert len(rows[0]) == 1 - value = rows[0][0] - sql = self.dbhelper.sql_restart_sequence(seq, value) - self.cursor.execute(sql) - self.cnx.commit() - - def read_numrange(self, archive, numrange): - rangename, columns, rows = pickle.loads(archive.read('numrange/%s' % numrange)) - assert rangename == numrange - assert len(rows) == 1 - assert len(rows[0]) == 1 - value = rows[0][0] - sql = self.dbhelper.sql_restart_numrange(numrange, value) - self.cursor.execute(sql) - self.cnx.commit() - - def read_table(self, archive, table, filenames): - merge_args = self._source.merge_args - self.cursor.execute('DELETE FROM %s' % table) - self.cnx.commit() - row_count = 0 - for filename in filenames: - tablename, columns, rows = pickle.loads(archive.read(filename)) - assert tablename == table - if not rows: - continue - insert = 
self.sql_generator.insert(table, - dict(zip(columns, rows[0]))) - for row in rows: - self.cursor.execute(insert, merge_args(dict(zip(columns, row)), {})) - row_count += len(rows) - self.cnx.commit() - self.logger.info('inserted %d rows', row_count) - - - def _parse_versions(self, version_str): - versions = set() - for line in version_str.splitlines(): - versions.add(tuple(line.split())) - return versions - - def _get_versions(self): - version_sql = 'SELECT cw_pkey, cw_value FROM cw_CWProperty' - versions = [] - self.cursor.execute(version_sql) - for pkey, value in self.cursor.fetchall(): - if pkey.startswith(u'system.version'): - versions.append((pkey, value)) - return versions diff -r 058bb3dc685f -r 0b59724cb3f2 server/sources/rql2sql.py --- a/server/sources/rql2sql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1703 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""RQL to SQL generator for native sources. - - -SQL queries optimization -~~~~~~~~~~~~~~~~~~~~~~~~ -1. CWUser X WHERE X in_group G, G name 'users': - - CWUser is the only subject entity type for the in_group relation, - which allow us to do :: - - SELECT eid_from FROM in_group, CWGroup - WHERE in_group.eid_to = CWGroup.eid_from - AND CWGroup.name = 'users' - - -2. Any X WHERE X nonfinal1 Y, Y nonfinal2 Z - - -> direct join between nonfinal1 and nonfinal2, whatever X,Y, Z (unless - inlined...) - - NOT IMPLEMENTED (and quite hard to implement) - -Potential optimization information is collected by the querier, sql generation -is done according to this information - -cross RDMS note : read `Comparison of different SQL implementations`_ -by Troels Arvin. Features SQL ISO Standard, PG, mysql, Oracle, MS SQL, DB2 -and Informix. - -.. 
_Comparison of different SQL implementations: http://www.troels.arvin.dk/db/rdbms -""" - -__docformat__ = "restructuredtext en" - -import threading - -from six import PY2 -from six.moves import range - -from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY - -from rql import BadRQLQuery, CoercionError -from rql.utils import common_parent -from rql.stmts import Union, Select -from rql.nodes import (VariableRef, Constant, Function, Variable, Or, - Not, Comparison, ColumnAlias, Relation, SubQuery) - -from cubicweb import QueryError -from cubicweb.rqlrewrite import cleanup_solutions -from cubicweb.server.sqlutils import SQL_PREFIX - -ColumnAlias._q_invariant = False # avoid to check for ColumnAlias / Variable - -FunctionDescr.source_execute = None - -def default_update_cb_stack(self, stack): - stack.append(self.source_execute) -FunctionDescr.update_cb_stack = default_update_cb_stack - -get_func_descr = SQL_FUNCTIONS_REGISTRY.get_function - -LENGTH = get_func_descr('LENGTH') -def length_source_execute(source, session, value): - return len(value.getvalue()) -LENGTH.source_execute = length_source_execute - -def _new_var(select, varname): - newvar = select.get_variable(varname) - if not 'relations' in newvar.stinfo: - # not yet initialized - newvar.prepare_annotation() - newvar.stinfo['scope'] = select - newvar._q_invariant = False - select.selection.append(VariableRef(newvar)) - return newvar - -def _fill_to_wrap_rel(var, newselect, towrap, schema): - for rel in var.stinfo['relations'] - var.stinfo['rhsrelations']: - rschema = schema.rschema(rel.r_type) - if rschema.inlined: - towrap.add( (var, rel) ) - for vref in rel.children[1].iget_nodes(VariableRef): - newivar = _new_var(newselect, vref.name) - _fill_to_wrap_rel(vref.variable, newselect, towrap, schema) - elif rschema.final: - towrap.add( (var, rel) ) - for vref in rel.children[1].iget_nodes(VariableRef): - newivar = _new_var(newselect, vref.name) - newivar.stinfo['attrvar'] = (var, rel.r_type) - -def rewrite_unstable_outer_join(select, solutions, unstable, schema): - """if some optional variables are unstable, they should be selected in a - subquery. This function check this and rewrite the rql syntax tree if - necessary (in place). Return a boolean telling if the tree has been modified - """ - modified = False - for varname in tuple(unstable): - var = select.defined_vars[varname] - if not var.stinfo.get('optrelations'): - continue - unstable.remove(varname) - newselect = Select() - myunion = Union() - myunion.append(newselect) - # extract aliases / selection - newvar = _new_var(newselect, var.name) - newselect.selection = [VariableRef(newvar)] - towrap_rels = set() - _fill_to_wrap_rel(var, newselect, towrap_rels, schema) - # extract relations - for var, rel in towrap_rels: - newrel = rel.copy(newselect) - newselect.add_restriction(newrel) - select.remove_node(rel) - var.stinfo['relations'].remove(rel) - newvar.stinfo['relations'].add(newrel) - if rel.optional in ('left', 'both'): - newvar.add_optional_relation(newrel) - for vref in newrel.children[1].iget_nodes(VariableRef): - var = vref.variable - var.stinfo['relations'].add(newrel) - var.stinfo['rhsrelations'].add(newrel) - if rel.optional in ('right', 'both'): - var.add_optional_relation(newrel) - if not select.where and not modified: - # oops, generated the same thing as the original select.... 
- # restore original query, else we'll indefinitly loop - for var, rel in towrap_rels: - select.add_restriction(rel) - continue - modified = True - # extract subquery solutions - mysolutions = [sol.copy() for sol in solutions] - cleanup_solutions(newselect, mysolutions) - newselect.set_possible_types(mysolutions) - # full sub-query - aliases = [VariableRef(select.get_variable(avar.name, i)) - for i, avar in enumerate(newselect.selection)] - select.add_subquery(SubQuery(aliases, myunion), check=False) - return modified - -def _new_solutions(rqlst, solutions): - """first filter out subqueries variables from solutions""" - newsolutions = [] - for origsol in solutions: - asol = {} - for vname in rqlst.defined_vars: - asol[vname] = origsol[vname] - if not asol in newsolutions: - newsolutions.append(asol) - return newsolutions - -def remove_unused_solutions(rqlst, solutions, varmap, schema): - """cleanup solutions: remove solutions where invariant variables are taking - different types - """ - newsols = _new_solutions(rqlst, solutions) - existssols = {} - unstable = set() - invariants = {} - for vname, var in rqlst.defined_vars.items(): - vtype = newsols[0][vname] - if var._q_invariant or vname in varmap: - # remove invariant variable from solutions to remove duplicates - # later, then reinserting a type for the variable even later - for sol in newsols: - invariants.setdefault(id(sol), {})[vname] = sol.pop(vname) - elif var.scope is not rqlst: - # move apart variables which are in a EXISTS scope and are variating - try: - thisexistssols, thisexistsvars = existssols[var.scope] - except KeyError: - thisexistssols = [newsols[0]] - thisexistsvars = set() - existssols[var.scope] = thisexistssols, thisexistsvars - for i in range(len(newsols)-1, 0, -1): - if vtype != newsols[i][vname]: - thisexistssols.append(newsols.pop(i)) - thisexistsvars.add(vname) - else: - # remember unstable variables - for i in range(1, len(newsols)): - if vtype != newsols[i][vname]: - unstable.add(vname) - if invariants: - # filter out duplicates - newsols_ = [] - for sol in newsols: - if not sol in newsols_: - newsols_.append(sol) - newsols = newsols_ - # reinsert solutions for invariants - for sol in newsols: - for invvar, vartype in invariants[id(sol)].items(): - sol[invvar] = vartype - for sol in existssols: - try: - for invvar, vartype in invariants[id(sol)].items(): - sol[invvar] = vartype - except KeyError: - continue - if len(newsols) > 1: - if rewrite_unstable_outer_join(rqlst, newsols, unstable, schema): - # remove variables extracted to subqueries from solutions - newsols = _new_solutions(rqlst, newsols) - return newsols, existssols, unstable - -def relation_info(relation): - lhs, rhs = relation.get_variable_parts() - try: - lhs = lhs.variable - lhsconst = lhs.stinfo['constnode'] - except AttributeError: - lhsconst = lhs - lhs = None - except KeyError: - lhsconst = None # ColumnAlias - try: - rhs = rhs.variable - rhsconst = rhs.stinfo['constnode'] - except AttributeError: - rhsconst = rhs - rhs = None - except KeyError: - rhsconst = None # ColumnAlias - return lhs, lhsconst, rhs, rhsconst - -def sort_term_selection(sorts, rqlst, groups): - # XXX beurk - if isinstance(rqlst, list): - def append(term): - rqlst.append(term) - selectionidx = set(str(term) for term in rqlst) - else: - def append(term): - rqlst.selection.append(term.copy(rqlst)) - selectionidx = set(str(term) for term in rqlst.selection) - - for sortterm in sorts: - term = sortterm.term - if not isinstance(term, Constant) and not str(term) in 
selectionidx: - selectionidx.add(str(term)) - append(term) - if groups: - for vref in term.iget_nodes(VariableRef): - if not any(vref.is_equivalent(g) for g in groups): - groups.append(vref) - -def fix_selection_and_group(rqlst, needwrap, selectsortterms, - sorts, groups, having): - if selectsortterms and sorts: - sort_term_selection(sorts, rqlst, not needwrap and groups) - groupvrefs = [vref for term in groups for vref in term.iget_nodes(VariableRef)] - if sorts and groups: - # when a query is grouped, ensure sort terms are grouped as well - for sortterm in sorts: - term = sortterm.term - if not (isinstance(term, Constant) or \ - (isinstance(term, Function) and - get_func_descr(term.name).aggregat)): - for vref in term.iget_nodes(VariableRef): - if not any(vref.is_equivalent(group) for group in groupvrefs): - groups.append(vref) - groupvrefs.append(vref) - if needwrap and (groups or having): - selectedidx = set(vref.name for term in rqlst.selection - for vref in term.get_nodes(VariableRef)) - if groups: - for vref in groupvrefs: - if vref.name not in selectedidx: - selectedidx.add(vref.name) - rqlst.selection.append(vref) - if having: - for term in having: - for vref in term.iget_nodes(VariableRef): - if vref.name not in selectedidx: - selectedidx.add(vref.name) - rqlst.selection.append(vref) - -def iter_mapped_var_sels(stmt, variable): - # variable is a Variable or ColumnAlias node mapped to a source side - # callback - if not (len(variable.stinfo['rhsrelations']) <= 1 and # < 1 on column alias - variable.stinfo['selected']): - raise QueryError("can't use %s as a restriction variable" - % variable.name) - for selectidx in variable.stinfo['selected']: - vrefs = stmt.selection[selectidx].get_nodes(VariableRef) - if len(vrefs) != 1: - raise QueryError() - yield selectidx, vrefs[0] - -def update_source_cb_stack(state, stmt, node, stack): - while True: - node = node.parent - if node is stmt: - break - if not isinstance(node, Function): - raise QueryError() - funcd = get_func_descr(node.name) - if funcd.source_execute is None: - raise QueryError('%s can not be called on mapped attribute' - % node.name) - state.source_cb_funcs.add(node) - funcd.update_cb_stack(stack) - - -# IGenerator implementation for RQL->SQL ####################################### - -class StateInfo(object): - """this class stores data accumulated during the RQL syntax tree visit - for later SQL generation. - - Attributes related to OUTER JOIN handling - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - * `outer_chains`, list of list of strings. Each list represent a tables - that have to be outer joined together. 
- - * `outer_tables`, dictionary used as index of tables used in outer join :: - - 'table alias': (outertype, [conditions], [chain]) - - where: - - * `outertype` is one of None, 'LEFT', 'RIGHT', 'FULL' - * `conditions` is a list of join conditions (string) - * `chain` is a list of table alias (the *outer chain*) in which the key - alias appears - - * `outer_pending` is a dictionary containing some conditions that will have - to be added to the outer join when the table will be turned into an - outerjoin :: - - 'table alias': [conditions] - """ - def __init__(self, select, existssols, unstablevars): - self.existssols = existssols - self.unstablevars = unstablevars - self.subtables = {} - self.needs_source_cb = None - self.subquery_source_cb = None - self.source_cb_funcs = set() - self.scopes = {select: 0} - self.scope_nodes = [] - - def reset(self, solution): - """reset some visit variables""" - self.solution = solution - self.count = 0 - self.done = set() - self.tables = self.subtables.copy() - self.actual_tables = [[]] - for _, tsql in self.tables.values(): - self.actual_tables[-1].append(tsql) - self.outer_chains = [] - self.outer_tables = {} - self.outer_pending = {} - self.duplicate_switches = [] - self.aliases = {} - self.restrictions = [] - self._restr_stack = [] - self.ignore_varmap = False - self._needs_source_cb = {} - - def merge_source_cbs(self, needs_source_cb): - if self.needs_source_cb is None: - self.needs_source_cb = needs_source_cb - elif needs_source_cb != self.needs_source_cb: - raise QueryError('query fetch some source mapped attribute, some not') - - def finalize_source_cbs(self): - if self.subquery_source_cb is not None: - self.needs_source_cb.update(self.subquery_source_cb) - - def add_restriction(self, restr): - if restr: - self.restrictions.append(restr) - - def iter_exists_sols(self, exists): - if not exists in self.existssols: - yield 1 - return - thisexistssols, thisexistsvars = self.existssols[exists] - notdone_outside_vars = set() - # when iterating other solutions inner to an EXISTS subquery, we should - # reset variables which have this exists node as scope at each iteration - for var in exists.stmt.defined_vars.values(): - if var.scope is exists: - thisexistsvars.add(var.name) - elif var.name not in self.done: - notdone_outside_vars.add(var) - origsol = self.solution - origtables = self.tables - done = self.done - for thisexistssol in thisexistssols: - for vname in self.unstablevars: - if thisexistssol[vname] != origsol[vname] and vname in thisexistsvars: - break - else: - self.tables = origtables.copy() - self.solution = thisexistssol - yield 1 - # cleanup self.done from stuff specific to exists - for var in thisexistsvars: - if var in done: - done.remove(var) - for var in list(notdone_outside_vars): - if var.name in done and var._q_sqltable in self.tables: - origtables[var._q_sqltable] = self.tables[var._q_sqltable] - notdone_outside_vars.remove(var) - for rel in exists.iget_nodes(Relation): - if rel in done: - done.remove(rel) - self.solution = origsol - self.tables = origtables - - def push_scope(self, scope_node): - self.scope_nodes.append(scope_node) - self.scopes[scope_node] = len(self.actual_tables) - self.actual_tables.append([]) - self._restr_stack.append(self.restrictions) - self.restrictions = [] - - def pop_scope(self): - del self.scopes[self.scope_nodes[-1]] - self.scope_nodes.pop() - restrictions = self.restrictions - self.restrictions = self._restr_stack.pop() - scope = len(self.actual_tables) - 1 - # check if we have some outer chain for 
this scope - matching_chains = [] - for chain in self.outer_chains: - for tablealias in chain: - if self.tables[tablealias][0] < scope: - # chain belongs to outer scope - break - else: - # chain match current scope - matching_chains.append(chain) - # call to `tables_sql` will pop actual_tables - tables = self.tables_sql(matching_chains) - # cleanup outer join related structure for tables in matching chains - for chain in matching_chains: - self.outer_chains.remove(chain) - for alias in chain: - del self.outer_tables[alias] - return restrictions, tables - - # tables handling ######################################################### - - def add_table(self, table, key=None, scope=-1): - if key is None: - key = table - if key in self.tables: - return - if scope < 0: - scope = len(self.actual_tables) + scope - self.tables[key] = (scope, table) - self.actual_tables[scope].append(table) - - def alias_and_add_table(self, tablename, scope=-1): - alias = '%s%s' % (tablename, self.count) - self.count += 1 - self.add_table('%s AS %s' % (tablename, alias), alias, scope) - return alias - - def relation_table(self, relation): - """return the table alias used by the given relation""" - if relation in self.done: - return relation._q_sqltable - rid = 'rel_%s%s' % (relation.r_type, self.count) - # relation's table is belonging to the root scope if it is the principal - # table of one of its variable and that variable belong's to parent - # scope - for varref in relation.iget_nodes(VariableRef): - var = varref.variable - # XXX may have a principal without being invariant for this generation, - # not sure this is a pb or not - if var.stinfo.get('principal') is relation and var.scope is var.stmt: - scope = 0 - break - else: - scope = -1 - self.count += 1 - self.add_table('%s_relation AS %s' % (relation.r_type, rid), rid, scope=scope) - relation._q_sqltable = rid - self.done.add(relation) - return rid - - def fti_table(self, relation, fti_table): - """return the table alias used by the given has_text relation, - `fti_table` being the table name for the plain text index - """ - if relation in self.done: - try: - return relation._q_sqltable - except AttributeError: - pass - self.done.add(relation) - scope = self.scopes[relation.scope] - alias = self.alias_and_add_table(fti_table, scope=scope) - relation._q_sqltable = alias - return alias - - # outer join handling ###################################################### - - def mark_as_used_in_outer_join(self, tablealias, addpending=True): - """Mark table of given alias as used in outer join. This must be called - after `outer_tables[tablealias]` has been initialized. 
- """ - # remove a table from actual_table because it's used in an outer join - # chain - scope, tabledef = self.tables[tablealias] - self.actual_tables[scope].remove(tabledef) - # check if there are some pending outer join condition for this table - if addpending: - try: - pending_conditions = self.outer_pending.pop(tablealias) - except KeyError: - pass - else: - self.outer_tables[tablealias][1].extend(pending_conditions) - else: - assert not tablealias in self.outer_pending - - def add_outer_join_condition(self, tablealias, condition): - try: - outer, conditions, chain = self.outer_tables[tablealias] - conditions.append(condition) - except KeyError: - self.outer_pending.setdefault(tablealias, []).append(condition) - - def replace_tables_by_outer_join(self, leftalias, rightalias, - outertype, condition): - """tell we need JOIN ON - """ - assert leftalias != rightalias, leftalias - outer_tables = self.outer_tables - louter, lconditions, lchain = outer_tables.get(leftalias, - (None, None, None)) - router, rconditions, rchain = outer_tables.get(rightalias, - (None, None, None)) - if lchain is None and rchain is None: - # create a new outer chaine - chain = [leftalias, rightalias] - outer_tables[leftalias] = (None, [], chain) - outer_tables[rightalias] = (outertype, [condition], chain) - self.outer_chains.append(chain) - self.mark_as_used_in_outer_join(leftalias, addpending=False) - self.mark_as_used_in_outer_join(rightalias) - elif lchain is None: - # [A > B > C] + [D > A] -> [D > A > B > C] - if rightalias == rchain[0]: - outer_tables[leftalias] = (None, [], rchain) - conditions = outer_tables[rightalias][1] + [condition] - outer_tables[rightalias] = (outertype, conditions, rchain) - rchain.insert(0, leftalias) - else: - # [A > B > C] + [D > B] -> [A > B > C < D] - if outertype == 'LEFT': - outertype = 'RIGHT' - outer_tables[leftalias] = (outertype, [condition], rchain) - rchain.append(leftalias) - self.mark_as_used_in_outer_join(leftalias) - elif rchain is None: - # [A > B > C] + [B > D] -> [A > B > C > D] - outer_tables[rightalias] = (outertype, [condition], lchain) - lchain.append(rightalias) - self.mark_as_used_in_outer_join(rightalias) - elif lchain is rchain: - # already in the same chain, simply check compatibility and append - # the condition if it's ok - lidx = lchain.index(leftalias) - ridx = lchain.index(rightalias) - if (outertype == 'FULL' and router != 'FULL') \ - or (lidx < ridx and router != 'LEFT') \ - or (ridx < lidx and louter != 'RIGHT'): - raise BadRQLQuery() - # merge conditions - if lidx < ridx: - rconditions.append(condition) - else: - lconditions.append(condition) - elif louter is None: - # merge chains - self.outer_chains.remove(lchain) - rchain += lchain - self.mark_as_used_in_outer_join(leftalias) - for alias, (aouter, aconditions, achain) in outer_tables.items(): - if achain is lchain: - outer_tables[alias] = (aouter, aconditions, rchain) - else: - raise BadRQLQuery() - - # sql generation helpers ################################################### - - def tables_sql(self, outer_chains=None): - """generate SQL for FROM clause""" - # sort for test predictability - tables = sorted(self.actual_tables.pop()) - # process outer joins - if outer_chains is None: - assert not self.actual_tables, self.actual_tables - assert not self.outer_pending - outer_chains = self.outer_chains - for chain in sorted(outer_chains): - tablealias = chain[0] - outertype, conditions, _ = self.outer_tables[tablealias] - assert _ is chain, (chain, _) - assert outertype is None, (chain, 
self.outer_chains) - assert not conditions, (chain, self.outer_chains) - assert len(chain) > 1 - tabledef = self.tables[tablealias][1] - outerjoin = [tabledef] - for tablealias in chain[1:]: - outertype, conditions, _ = self.outer_tables[tablealias] - assert _ is chain, (chain, self.outer_chains) - assert outertype in ('LEFT', 'RIGHT', 'FULL'), ( - tablealias, outertype, conditions) - assert isinstance(conditions, (list)), ( - tablealias, outertype, conditions) - tabledef = self.tables[tablealias][1] - outerjoin.append('%s OUTER JOIN %s ON (%s)' % ( - outertype, tabledef, ' AND '.join(conditions))) - tables.append(' '.join(outerjoin)) - return ', '.join(tables) - - -def extract_fake_having_terms(having): - """RQL's HAVING may be used to contains stuff that should go in the WHERE - clause of the SQL query, due to RQL grammar limitation. Split them... - - Return a list nodes that can be ANDed with query's WHERE clause. Having - subtrees updated in place. - """ - fakehaving = [] - for subtree in having: - ors, tocheck = set(), [] - for compnode in subtree.get_nodes(Comparison): - for fnode in compnode.get_nodes(Function): - if fnode.descr().aggregat: - p = compnode.parent - oor = None - while not isinstance(p, Select): - if isinstance(p, (Or, Not)): - oor = p - p = p.parent - if oor is not None: - ors.add(oor) - break - else: - tocheck.append(compnode) - # tocheck hold a set of comparison not implying an aggregat function - # put them in fakehaving if they don't share an Or node as ancestor - # with another comparison containing an aggregat function - for compnode in tocheck: - parents = set() - p = compnode.parent - oor = None - while not isinstance(p, Select): - if p in ors or p is None: # p is None for nodes already in fakehaving - break - if isinstance(p, (Or, Not)): - oor = p - p = p.parent - else: - node = oor or compnode - fakehaving.append(node) - node.parent.remove(node) - return fakehaving - - -class SQLGenerator(object): - """ - generation of SQL from the fully expanded RQL syntax tree - SQL is designed to be used with a CubicWeb SQL schema - - Groups and sort are not handled here since they should not be handled at - this level (see cubicweb.server.querier) - - we should not have errors here! 
- - WARNING: a CubicWebSQLGenerator instance is not thread safe, but generate is - protected by a lock - """ - - def __init__(self, schema, dbhelper, attrmap=None): - self.schema = schema - self.dbhelper = dbhelper - self.dbencoding = dbhelper.dbencoding - self.keyword_map = {'NOW' : self.dbhelper.sql_current_timestamp, - 'TODAY': self.dbhelper.sql_current_date, - } - if not self.dbhelper.union_parentheses_support: - self.union_sql = self.noparen_union_sql - self._lock = threading.Lock() - if attrmap is None: - attrmap = {} - self.attr_map = attrmap - - def generate(self, union, args=None, varmap=None): - """return SQL queries and a variable dictionary from a RQL syntax tree - - :partrqls: a list of couple (rqlst, solutions) - :args: optional dictionary with values of substitutions used in the query - :varmap: optional dictionary mapping variable name to a special table - name, in case the query as to fetch data from temporary tables - - return an sql string and a dictionary with substitutions values - """ - if args is None: - args = {} - if varmap is None: - varmap = {} - self._lock.acquire() - self._args = args - self._varmap = varmap - self._query_attrs = {} - self._state = None - # self._not_scope_offset = 0 - try: - # union query for each rqlst / solution - sql = self.union_sql(union) - # we are done - return sql, self._query_attrs, self._state.needs_source_cb - finally: - self._lock.release() - - def union_sql(self, union, needalias=False): # pylint: disable=E0202 - if len(union.children) == 1: - return self.select_sql(union.children[0], needalias) - sqls = ('(%s)' % self.select_sql(select, needalias) - for select in union.children) - return '\nUNION ALL\n'.join(sqls) - - def noparen_union_sql(self, union, needalias=False): - # needed for sqlite backend which doesn't like parentheses around union - # query. This may cause bug in some condition (sort in one of the - # subquery) but will work in most case - # - # see http://www.sqlite.org/cvstrac/tktview?tn=3074 - sqls = (self.select_sql(select, needalias) - for i, select in enumerate(union.children)) - return '\nUNION ALL\n'.join(sqls) - - def select_sql(self, select, needalias=False): - """return SQL queries and a variable dictionary from a RQL syntax tree - - :select: a selection statement of the syntax tree (`rql.stmts.Select`) - :solution: a dictionary containing variables binding. 
- A solution's dictionary has variable's names as key and variable's - types as values - :needwrap: boolean telling if the query will be wrapped in an outer - query (to deal with aggregat and/or grouping) - """ - if select.distinct: - distinct = True - elif self.dbhelper.fti_need_distinct: - distinct = getattr(select.parent, 'has_text_query', False) - else: - distinct = False - sorts = select.orderby - groups = select.groupby - having = select.having - for restr in extract_fake_having_terms(having): - scope = None - for vref in restr.get_nodes(VariableRef): - vscope = vref.variable.scope - if vscope is select: - continue # ignore select scope, so restriction is added to - # the inner most scope possible - if scope is None: - scope = vscope - elif vscope is not scope: - scope = common_parent(scope, vscope).scope - if scope is None: - scope = select - scope.add_restriction(restr) - # remember selection, it may be changed and have to be restored - origselection = select.selection[:] - # check if the query will have union subquery, if it need sort term - # selection (union or distinct query) and wrapping (union with groups) - needwrap = False - sols = select.solutions - selectsortterms = distinct - if len(sols) > 1: - # remove invariant from solutions - sols, existssols, unstable = remove_unused_solutions( - select, sols, self._varmap, self.schema) - if len(sols) > 1: - # if there is still more than one solution, a UNION will be - # generated and so sort terms have to be selected - selectsortterms = True - # and if select is using group by or aggregat, a wrapping - # query will be necessary - if groups or select.has_aggregat: - select.select_only_variables() - needwrap = True - else: - existssols, unstable = {}, () - state = StateInfo(select, existssols, unstable) - if self._state is not None: - # state from a previous unioned select - state.merge_source_cbs(self._state.needs_source_cb) - # treat subqueries - self._subqueries_sql(select, state) - # generate sql for this select node - if needwrap: - outerselection = origselection[:] - if sorts and selectsortterms: - if distinct: - sort_term_selection(sorts, outerselection, groups) - fix_selection_and_group(select, needwrap, selectsortterms, - sorts, groups, having) - if needwrap: - fneedwrap = len(outerselection) != len(origselection) - else: - fneedwrap = len(select.selection) != len(origselection) - if fneedwrap: - needalias = True - self._in_wrapping_query = False - self._state = state - try: - sql = self._solutions_sql(select, sols, distinct, - needalias or needwrap) - # generate groups / having before wrapping query selection to get - # correct column aliases - self._in_wrapping_query = needwrap - if groups: - # no constant should be inserted in GROUP BY else the backend - # will interpret it as a positional index in the selection - groups = ','.join(vref.accept(self) for vref in groups - if not isinstance(vref, Constant)) - if having: - # filter out constants as for GROUP BY - having = ' AND '.join(term.accept(self) for term in having - if not isinstance(term, Constant)) - if needwrap: - sql = '%s FROM (%s) AS T1' % ( - self._selection_sql(outerselection, distinct,needalias), - sql) - if groups: - sql += '\nGROUP BY %s' % groups - if having: - sql += '\nHAVING %s' % having - # sort - if sorts: - sqlsortterms = [] - if needwrap: - selectidx = [str(term) for term in outerselection] - else: - selectidx = [str(term) for term in select.selection] - for sortterm in sorts: - _term = self._sortterm_sql(sortterm, selectidx) - if _term is not None: 
- sqlsortterms.append(_term) - if sqlsortterms: - sql = self.dbhelper.sql_add_order_by( - sql, sqlsortterms, origselection, fneedwrap, - select.limit or select.offset) - else: - sqlsortterms = None - state.finalize_source_cbs() - finally: - select.selection = origselection - # limit / offset - sql = self.dbhelper.sql_add_limit_offset(sql, - select.limit, - select.offset, - sqlsortterms) - return sql - - def _subqueries_sql(self, select, state): - for i, subquery in enumerate(select.with_): - sql = self.union_sql(subquery.query, needalias=True) - tablealias = '_T%s' % i # XXX nested subqueries - sql = '(%s) AS %s' % (sql, tablealias) - state.subtables[tablealias] = (0, sql) - latest_state = self._state - for vref in subquery.aliases: - alias = vref.variable - alias._q_sqltable = tablealias - alias._q_sql = '%s.C%s' % (tablealias, alias.colnum) - try: - stack = latest_state.needs_source_cb[alias.colnum] - if state.subquery_source_cb is None: - state.subquery_source_cb = {} - for selectidx, vref in iter_mapped_var_sels(select, alias): - stack = stack[:] - update_source_cb_stack(state, select, vref, stack) - state.subquery_source_cb[selectidx] = stack - except KeyError: - continue - - def _solutions_sql(self, select, solutions, distinct, needalias): - sqls = [] - for solution in solutions: - self._state.reset(solution) - # visit restriction subtree - if select.where is not None: - self._state.add_restriction(select.where.accept(self)) - sql = [self._selection_sql(select.selection, distinct, needalias)] - if self._state.restrictions: - sql.append('WHERE %s' % ' AND '.join(self._state.restrictions)) - self._state.merge_source_cbs(self._state._needs_source_cb) - # add required tables - assert len(self._state.actual_tables) == 1, self._state.actual_tables - tables = self._state.tables_sql() - if tables: - sql.insert(1, 'FROM %s' % tables) - elif self._state.restrictions and self.dbhelper.needs_from_clause: - sql.insert(1, 'FROM (SELECT 1) AS _T') - sqls.append('\n'.join(sql)) - if distinct: - return '\nUNION\n'.join(sqls) - else: - return '\nUNION ALL\n'.join(sqls) - - def _selection_sql(self, selected, distinct, needaliasing=False): - clause = [] - for term in selected: - sql = term.accept(self) - if needaliasing: - colalias = 'C%s' % len(clause) - clause.append('%s AS %s' % (sql, colalias)) - if isinstance(term, VariableRef): - self._state.aliases[term.name] = colalias - else: - clause.append(sql) - if distinct: - return 'SELECT DISTINCT %s' % ', '.join(clause) - return 'SELECT %s' % ', '.join(clause) - - def _sortterm_sql(self, sortterm, selectidx): - term = sortterm.term - try: - sqlterm = selectidx.index(str(term)) + 1 - except ValueError: - # Constant node or non selected term - sqlterm = term.accept(self) - if sqlterm is None: - return None - if sortterm.asc: - return str(sqlterm) - else: - return '%s DESC' % sqlterm - - def visit_and(self, et): - """generate SQL for a AND subtree""" - res = [] - for c in et.children: - part = c.accept(self) - if part: - res.append(part) - return ' AND '.join(res) - - def visit_or(self, ou): - """generate SQL for a OR subtree""" - res = [] - for c in ou.children: - part = c.accept(self) - if part: - res.append('(%s)' % part) - if res: - if len(res) > 1: - return '(%s)' % ' OR '.join(res) - return res[0] - return '' - - def visit_not(self, node): - csql = node.children[0].accept(self) - if node in self._state.done or not csql: - # already processed or no sql generated by children - return csql - return 'NOT (%s)' % csql - - def visit_exists(self, exists): - 
"""generate SQL name for a exists subquery""" - sqls = [] - for dummy in self._state.iter_exists_sols(exists): - sql = self._visit_exists(exists) - if sql: - sqls.append(sql) - if not sqls: - return '' - return 'EXISTS(%s)' % ' UNION '.join(sqls) - - def _visit_exists(self, exists): - self._state.push_scope(exists) - restriction = exists.children[0].accept(self) - restrictions, tables = self._state.pop_scope() - if restriction: - restrictions.append(restriction) - restriction = ' AND '.join(restrictions) - if not restriction: - if tables: - return 'SELECT 1 FROM %s' % tables - return '' - if not tables: - # XXX could leave surrounding EXISTS() in this case no? - sql = 'SELECT 1 WHERE %s' % restriction - else: - sql = 'SELECT 1 FROM %s WHERE %s' % (tables, restriction) - return sql - - - def visit_relation(self, relation): - """generate SQL for a relation""" - rtype = relation.r_type - # don't care of type constraint statement (i.e. relation_type = 'is') - if relation.is_types_restriction(): - return '' - lhs, rhs = relation.get_parts() - rschema = self.schema.rschema(rtype) - if rschema.final: - if rtype == 'eid' and lhs.variable._q_invariant and \ - lhs.variable.stinfo['constnode']: - # special case where this restriction is already generated by - # some other relation - return '' - # attribute relation - if rtype == 'has_text': - sql = self._visit_has_text_relation(relation) - else: - rhs_vars = rhs.get_nodes(VariableRef) - if rhs_vars: - # if variable(s) in the RHS - sql = self._visit_var_attr_relation(relation, rhs_vars) - else: - # no variables in the RHS - sql = self._visit_attribute_relation(relation) - elif (rtype == 'is' and isinstance(rhs.children[0], Constant) - and rhs.children[0].eval(self._args) is None): - # special case "C is NULL" - if lhs.name in self._varmap: - lhssql = self._varmap[lhs.name] - else: - lhssql = lhs.accept(self) - return '%s%s' % (lhssql, rhs.accept(self)) - elif '%s.%s' % (lhs, relation.r_type) in self._varmap: - # relation has already been processed by a previous step - return '' - elif relation.optional: - # OPTIONAL relation, generate a left|right outer join - if rtype == 'identity' or rschema.inlined: - sql = self._visit_outer_join_inlined_relation(relation, rschema) - else: - sql = self._visit_outer_join_relation(relation, rschema) - elif rschema.inlined: - sql = self._visit_inlined_relation(relation) - else: - # regular (non final) relation - sql = self._visit_relation(relation, rschema) - return sql - - def _visit_inlined_relation(self, relation): - lhsvar, _, rhsvar, rhsconst = relation_info(relation) - # we are sure lhsvar is not None - lhssql = self._inlined_var_sql(lhsvar, relation.r_type) - if rhsvar is None: - moresql = None - else: - moresql = self._extra_join_sql(relation, lhssql, rhsvar) - if isinstance(relation.parent, Not): - self._state.done.add(relation.parent) - if rhsvar is not None and rhsvar._q_invariant: - sql = '%s IS NULL' % lhssql - else: - # column != 1234 may not get back rows where column is NULL... 
- sql = '(%s IS NULL OR %s!=%s)' % ( - lhssql, lhssql, (rhsvar or rhsconst).accept(self)) - elif rhsconst is not None: - sql = '%s=%s' % (lhssql, rhsconst.accept(self)) - elif isinstance(rhsvar, Variable) and rhsvar._q_invariant and \ - not rhsvar.name in self._varmap: - # if the rhs variable is only linked to this relation, this mean we - # only want the relation to exists, eg NOT NULL in case of inlined - # relation - if moresql is not None: - return moresql - return '%s IS NOT NULL' % lhssql - else: - sql = '%s=%s' % (lhssql, rhsvar.accept(self)) - if moresql is None: - return sql - return '%s AND %s' % (sql, moresql) - - def _process_relation_term(self, relation, rid, termvar, termconst, relfield): - if termconst or not termvar._q_invariant: - termsql = termconst and termconst.accept(self) or termvar.accept(self) - yield '%s.%s=%s' % (rid, relfield, termsql) - elif termvar._q_invariant: - # if the variable is mapped, generate restriction anyway - if termvar.name in self._varmap: - termsql = termvar.accept(self) - yield '%s.%s=%s' % (rid, relfield, termsql) - extrajoin = self._extra_join_sql(relation, '%s.%s' % (rid, relfield), termvar) - if extrajoin is not None: - yield extrajoin - - def _visit_relation(self, relation, rschema): - """generate SQL for a relation - - implements optimization 1. - """ - if relation.r_type == 'identity': - # special case "X identity Y" - lhs, rhs = relation.get_parts() - return '%s%s' % (lhs.accept(self), rhs.accept(self)) - lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation) - rid = self._state.relation_table(relation) - sqls = [] - sqls += self._process_relation_term(relation, rid, lhsvar, lhsconst, 'eid_from') - sqls += self._process_relation_term(relation, rid, rhsvar, rhsconst, 'eid_to') - sql = ' AND '.join(sqls) - return sql - - def _visit_outer_join_relation(self, relation, rschema): - """ - left outer join syntax (optional=='right'): - X relation Y? - - right outer join syntax (optional=='left'): - X? relation Y - - full outer join syntaxes (optional=='both'): - X? relation Y? 
- - if relation is inlined: - if it's a left outer join: - -> X LEFT OUTER JOIN Y ON (X.relation=Y.eid) - elif it's a right outer join: - -> Y LEFT OUTER JOIN X ON (X.relation=Y.eid) - elif it's a full outer join: - -> X FULL OUTER JOIN Y ON (X.relation=Y.eid) - else: - if it's a left outer join: - -> X LEFT OUTER JOIN relation ON (relation.eid_from=X.eid) - LEFT OUTER JOIN Y ON (relation.eid_to=Y.eid) - elif it's a right outer join: - -> Y LEFT OUTER JOIN relation ON (relation.eid_to=Y.eid) - LEFT OUTER JOIN X ON (relation.eid_from=X.eid) - elif it's a full outer join: - -> X FULL OUTER JOIN Y ON (X.relation=Y.eid) - """ - leftvar, leftconst, rightvar, rightconst = relation_info(relation) - assert not (leftconst and rightconst), "doesn't make sense" - if relation.optional == 'left': - leftvar, rightvar = rightvar, leftvar - leftconst, rightconst = rightconst, leftconst - joinattr, restrattr = 'eid_to', 'eid_from' - else: - joinattr, restrattr = 'eid_from', 'eid_to' - # search table for this variable, to use as left table of the outer join - leftalias = None - if leftvar: - # take care, may return None for invariant variable - leftalias = self._var_table(leftvar) - if leftalias is None: - if leftvar.stinfo['principal'] is not relation: - # use variable's principal relation - leftalias = leftvar.stinfo['principal']._q_sqltable - else: - # search for relation on which we should join - for orelation in leftvar.stinfo['relations']: - if (orelation is not relation and - not self.schema.rschema(orelation.r_type).final): - break - else: - for orelation in rightvar.stinfo['relations']: - if (orelation is not relation and - not self.schema.rschema(orelation.r_type).final - and orelation.optional): - break - else: - # unexpected - assert False, leftvar - leftalias = self._state.relation_table(orelation) - # right table of the outer join - rightalias = self._state.relation_table(relation) - # compute join condition - if not leftconst or (leftvar and not leftvar._q_invariant): - leftsql = leftvar.accept(self) - else: - leftsql = leftconst.accept(self) - condition = '%s.%s=%s' % (rightalias, joinattr, leftsql) - if rightconst: - condition += ' AND %s.%s=%s' % (rightalias, restrattr, rightconst.accept(self)) - # record outer join - outertype = 'FULL' if relation.optional == 'both' else 'LEFT' - self._state.replace_tables_by_outer_join(leftalias, rightalias, - outertype, condition) - # need another join? - if rightconst is None: - # we need another outer join for the other side of the relation (e.g. - # for "X relation Y?" 
in RQL, we treated earlier the (cw_X.eid / - # relation.eid_from) join, now we've to do (relation.eid_to / - # cw_Y.eid) - leftalias = rightalias - rightsql = rightvar.accept(self) # accept before using var_table - rightalias = self._var_table(rightvar) - if rightalias is None: - if rightvar.stinfo['principal'] is not relation: - self._state.replace_tables_by_outer_join( - leftalias, rightvar.stinfo['principal']._q_sqltable, - outertype, '%s.%s=%s' % (leftalias, restrattr, rightvar.accept(self))) - else: - self._state.replace_tables_by_outer_join( - leftalias, rightalias, outertype, - '%s.%s=%s' % (leftalias, restrattr, rightvar.accept(self))) - # this relation will hence be expressed in FROM clause, return nothing - # here - return '' - - - def _visit_outer_join_inlined_relation(self, relation, rschema): - lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation) - assert not (lhsconst and rhsconst), "doesn't make sense" - attr = 'eid' if relation.r_type == 'identity' else relation.r_type - lhsalias = self._var_table(lhsvar) - rhsalias = rhsvar and self._var_table(rhsvar) - try: - lhssql = self._varmap['%s.%s' % (lhsvar.name, attr)] - except KeyError: - if lhsalias is None: - lhssql = lhsconst.accept(self) - elif attr == 'eid': - lhssql = lhsvar.accept(self) - else: - lhssql = '%s.%s%s' % (lhsalias, SQL_PREFIX, attr) - condition = '%s=%s' % (lhssql, (rhsconst or rhsvar).accept(self)) - # this is not a typo, rhs optional variable means lhs outer join and vice-versa - if relation.optional == 'left': - lhsvar, rhsvar = rhsvar, lhsvar - lhsconst, rhsconst = rhsconst, lhsconst - lhsalias, rhsalias = rhsalias, lhsalias - outertype = 'LEFT' - elif relation.optional == 'both': - outertype = 'FULL' - else: - outertype = 'LEFT' - if rhsalias is None: - if rhsconst is not None: - # inlined relation with invariant as rhs - if relation.r_type != 'identity': - condition = '(%s OR %s IS NULL)' % (condition, lhssql) - if not lhsvar.stinfo.get('optrelations'): - return condition - self._state.add_outer_join_condition(lhsalias, condition) - return - if lhsalias is None: - if lhsconst is not None and not rhsvar.stinfo.get('optrelations'): - return condition - lhsalias = lhsvar._q_sql.split('.', 1)[0] - if lhsalias == rhsalias: - self._state.add_outer_join_condition(lhsalias, condition) - else: - self._state.replace_tables_by_outer_join( - lhsalias, rhsalias, outertype, condition) - return '' - - def _visit_var_attr_relation(self, relation, rhs_vars): - """visit an attribute relation with variable(s) in the RHS - - attribute variables are used either in the selection or for unification - (eg X attr1 A, Y attr2 A). In case of selection, nothing to do here. - """ - ored = relation.ored() - for vref in rhs_vars: - var = vref.variable - if var.name in self._varmap: - # ensure table is added - self._var_info(var) - if isinstance(var, ColumnAlias): - # force sql generation whatever the computed principal - principal = 1 - else: - principal = var.stinfo.get('principal') - # we've to return some sql if: - # 1. visited relation is ored - # 2. variable's principal is not this relation and not 1. 
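To make the unification case described above concrete: an RQL pattern such as `X attr1 A, Y attr2 A` forces both attributes to hold the same value, which ultimately becomes an SQL equality between the two columns. A tiny sqlite3 illustration of that idea (table and column names are invented, not the SQL CubicWeb would actually emit):

    import sqlite3

    cnx = sqlite3.connect(':memory:')
    cnx.executescript('''
        CREATE TABLE x (eid INTEGER, attr1 TEXT);
        CREATE TABLE y (eid INTEGER, attr2 TEXT);
        INSERT INTO x VALUES (1, 'shared'), (2, 'only-x');
        INSERT INTO y VALUES (3, 'shared'), (4, 'only-y');
    ''')
    # the shared variable A becomes an equality join between the two columns
    print(cnx.execute('SELECT x.eid, y.eid FROM x, y '
                      'WHERE x.attr1 = y.attr2').fetchall())  # [(1, 3)]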
- if ored or (principal is not None and principal is not relation - and not getattr(principal, 'ored', lambda : 0)()): - # we have to generate unification expression - if principal is relation: - # take care if ored case and principal is the relation to - # use the right relation in the unification term - _rel = [rel for rel in var.stinfo['rhsrelations'] - if not rel is principal][0] - else: - _rel = relation - lhssql = self._inlined_var_sql(_rel.children[0].variable, - _rel.r_type) - try: - self._state.ignore_varmap = True - sql = lhssql + relation.children[1].accept(self) - finally: - self._state.ignore_varmap = False - if relation.optional == 'right': - leftalias = self._var_table(principal.children[0].variable) - rightalias = self._var_table(relation.children[0].variable) - self._state.replace_tables_by_outer_join( - leftalias, rightalias, 'LEFT', sql) - return '' - return sql - return '' - - def _visit_attribute_relation(self, rel): - """generate SQL for an attribute relation""" - lhs, rhs = rel.get_parts() - rhssql = rhs.accept(self) - table = self._var_table(lhs.variable) - if table is None: - assert rel.r_type == 'eid' - lhssql = lhs.accept(self) - else: - try: - lhssql = self._varmap['%s.%s' % (lhs.name, rel.r_type)] - except KeyError: - mapkey = '%s.%s' % (self._state.solution[lhs.name], rel.r_type) - if mapkey in self.attr_map: - cb, sourcecb = self.attr_map[mapkey] - if sourcecb: - # callback is a source callback, we can't use this - # attribute in restriction - raise QueryError("can't use %s (%s) in restriction" - % (mapkey, rel.as_string())) - lhssql = cb(self, lhs.variable, rel) - elif rel.r_type == 'eid': - lhssql = lhs.variable._q_sql - else: - lhssql = '%s.%s%s' % (table, SQL_PREFIX, rel.r_type) - try: - if rel._q_needcast == 'TODAY': - sql = 'DATE(%s)%s' % (lhssql, rhssql) - # XXX which cast function should be used - #elif rel._q_needcast == 'NOW': - # sql = 'TIMESTAMP(%s)%s' % (lhssql, rhssql) - else: - sql = '%s%s' % (lhssql, rhssql) - except AttributeError: - sql = '%s%s' % (lhssql, rhssql) - if lhs.variable.stinfo.get('optrelations'): - self._state.add_outer_join_condition(table, sql) - else: - return sql - - def _visit_has_text_relation(self, rel): - """generate SQL for a has_text relation""" - lhs, rhs = rel.get_parts() - const = rhs.children[0] - alias = self._state.fti_table(rel, self.dbhelper.fti_table) - jointo = lhs.accept(self) - restriction = '' - lhsvar = lhs.variable - me_is_principal = lhsvar.stinfo.get('principal') is rel - if me_is_principal: - if lhsvar.stinfo['typerel'] is None: - # the variable is using the fti table, no join needed - jointo = None - elif not lhsvar.name in self._varmap: - # join on entities instead of etype's table to get result for - # external entities on multisources configurations - ealias = lhsvar._q_sqltable = '_' + lhsvar.name - jointo = lhsvar._q_sql = '%s.eid' % ealias - self._state.add_table('entities AS %s' % ealias, ealias) - if not lhsvar._q_invariant or len(lhsvar.stinfo['possibletypes']) == 1: - restriction = " AND %s.type='%s'" % (ealias, self._state.solution[lhs.name]) - else: - etypes = ','.join("'%s'" % etype for etype in lhsvar.stinfo['possibletypes']) - restriction = " AND %s.type IN (%s)" % (ealias, etypes) - if isinstance(rel.parent, Not): - self._state.done.add(rel.parent) - not_ = True - else: - not_ = False - query = const.eval(self._args) - return self.dbhelper.fti_restriction_sql(alias, query, - jointo, not_) + restriction - - def visit_comparison(self, cmp): - """generate SQL for a comparison""" - 
optional = getattr(cmp, 'optional', None) # rql < 0.30 - if len(cmp.children) == 2: - # simplified expression from HAVING clause - lhs, rhs = cmp.children - else: - lhs = None - rhs = cmp.children[0] - assert not optional - sql = None - operator = cmp.operator - if operator in ('LIKE', 'ILIKE'): - if operator == 'ILIKE' and not self.dbhelper.ilike_support: - operator = ' LIKE ' - else: - operator = ' %s ' % operator - elif operator == 'REGEXP': - sql = ' %s' % self.dbhelper.sql_regexp_match_expression(rhs.accept(self)) - elif (operator == '=' and isinstance(rhs, Constant) - and rhs.eval(self._args) is None): - if lhs is None: - sql = ' IS NULL' - else: - sql = '%s IS NULL' % lhs.accept(self) - elif isinstance(rhs, Function) and rhs.name == 'IN': - assert operator == '=' - operator = ' ' - if sql is None: - if lhs is None: - sql = '%s%s'% (operator, rhs.accept(self)) - else: - sql = '%s%s%s'% (lhs.accept(self), operator, rhs.accept(self)) - if optional is None: - return sql - leftvars = cmp.children[0].get_nodes(VariableRef) - assert len(leftvars) == 1 - if leftvars[0].variable.stinfo['attrvar'] is None: - assert isinstance(leftvars[0].variable, ColumnAlias) - leftalias = leftvars[0].variable._q_sqltable - else: - leftalias = self._var_table(leftvars[0].variable.stinfo['attrvar']) - rightvars = cmp.children[1].get_nodes(VariableRef) - assert len(rightvars) == 1 - if rightvars[0].variable.stinfo['attrvar'] is None: - assert isinstance(rightvars[0].variable, ColumnAlias) - rightalias = rightvars[0].variable._q_sqltable - else: - rightalias = self._var_table(rightvars[0].variable.stinfo['attrvar']) - if optional == 'right': - self._state.replace_tables_by_outer_join( - leftalias, rightalias, 'LEFT', sql) - elif optional == 'left': - self._state.replace_tables_by_outer_join( - rightalias, leftalias, 'LEFT', sql) - else: - self._state.replace_tables_by_outer_join( - leftalias, rightalias, 'FULL', sql) - return '' - - def visit_mathexpression(self, mexpr): - """generate SQL for a mathematic expression""" - lhs, rhs = mexpr.get_parts() - # check for string concatenation - operator = mexpr.operator - if operator == '%': - operator = '%%' - try: - if mexpr.operator == '+' and mexpr.get_type(self._state.solution, self._args) == 'String': - return '(%s)' % self.dbhelper.sql_concat_string(lhs.accept(self), - rhs.accept(self)) - except CoercionError: - pass - return '(%s %s %s)'% (lhs.accept(self), operator, rhs.accept(self)) - - def visit_unaryexpression(self, uexpr): - """generate SQL for a unary expression""" - return '%s%s'% (uexpr.operator, uexpr.children[0].accept(self)) - - def visit_function(self, func): - """generate SQL name for a function""" - if func.name == 'FTIRANK': - try: - rel = next(iter(func.children[0].variable.stinfo['ftirels'])) - except KeyError: - raise BadRQLQuery("can't use FTIRANK on variable not used in an" - " 'has_text' relation (eg full-text search)") - const = rel.get_parts()[1].children[0] - return self.dbhelper.fti_rank_order( - self._state.fti_table(rel, self.dbhelper.fti_table), - const.eval(self._args)) - args = [c.accept(self) for c in func.children] - if func in self._state.source_cb_funcs: - # function executed as a callback on the source - assert len(args) == 1 - return args[0] - # func_as_sql will check function is supported by the backend - return self.dbhelper.func_as_sql(func.name, args) - - def visit_constant(self, constant): - """generate SQL name for a constant""" - if constant.type is None: - return 'NULL' - value = constant.value - if constant.type == 
'etype': - return value - # don't substitute int, causes pb when used as sorting column number - if constant.type == 'Int': - return str(value) - if constant.type in ('Date', 'Datetime'): - rel = constant.relation() - if rel is not None: - rel._q_needcast = value - return self.keyword_map[value]() - if constant.type == 'Substitute': - try: - # we may found constant from simplified var in varmap - return self._mapped_term(constant, '%%(%s)s' % value)[0] - except KeyError: - _id = value - if PY2 and isinstance(_id, unicode): - _id = _id.encode() - else: - _id = str(id(constant)).replace('-', '', 1) - self._query_attrs[_id] = value - return '%%(%s)s' % _id - - def visit_variableref(self, variableref): - """get the sql name for a variable reference""" - # use accept, .variable may be a variable or a columnalias - return variableref.variable.accept(self) - - def visit_columnalias(self, colalias): - """get the sql name for a subquery column alias""" - if colalias.name in self._varmap: - sql = self._varmap[colalias.name] - table = sql.split('.', 1)[0] - colalias._q_sqltable = table - colalias._q_sql = sql - self._state.add_table(table) - return sql - return colalias._q_sql - - def visit_variable(self, variable): - """get the table name and sql string for a variable""" - #if contextrels is None and variable.name in self._state.done: - if variable.name in self._state.done: - if self._in_wrapping_query: - return 'T1.%s' % self._state.aliases[variable.name] - return variable._q_sql - self._state.done.add(variable.name) - vtablename = None - if not self._state.ignore_varmap and variable.name in self._varmap: - sql, vtablename = self._var_info(variable) - elif variable.stinfo['attrvar']: - # attribute variable (systematically used in rhs of final - # relation(s)), get table name and sql from any rhs relation - sql = self._linked_var_sql(variable) - elif variable._q_invariant: - # since variable is invariant, we know we won't found final relation - principal = variable.stinfo['principal'] - if principal is None: - vtablename = '_' + variable.name - self._state.add_table('entities AS %s' % vtablename, vtablename) - sql = '%s.eid' % vtablename - if variable.stinfo['typerel'] is not None: - # add additional restriction on entities.type column - pts = variable.stinfo['possibletypes'] - if len(pts) == 1: - etype = next(iter(variable.stinfo['possibletypes'])) - restr = "%s.type='%s'" % (vtablename, etype) - else: - etypes = ','.join("'%s'" % et for et in pts) - restr = '%s.type IN (%s)' % (vtablename, etypes) - self._state.add_restriction(restr) - elif principal.r_type == 'has_text': - sql = '%s.%s' % (self._state.fti_table(principal, - self.dbhelper.fti_table), - self.dbhelper.fti_uid_attr) - elif principal in variable.stinfo['rhsrelations']: - if self.schema.rschema(principal.r_type).inlined: - sql = self._linked_var_sql(variable) - else: - sql = '%s.eid_to' % self._state.relation_table(principal) - else: - sql = '%s.eid_from' % self._state.relation_table(principal) - else: - # standard variable: get table name according to etype and use .eid - # attribute - sql, vtablename = self._var_info(variable) - variable._q_sqltable = vtablename - variable._q_sql = sql - return sql - - # various utilities ####################################################### - - def _extra_join_sql(self, relation, sql, var): - # if rhs var is invariant, and this relation is not its principal, - # generate extra join - try: - if not var.stinfo['principal'] is relation: - op = relation.operator() - if op == '=': - # need a predicable 
result for tests - args = sorted( (sql, var.accept(self)) ) - args.insert(1, op) - else: - args = (sql, op, var.accept(self)) - return '%s%s%s' % tuple(args) - except KeyError: - # no principal defined, relation is necessarily the principal and - # so nothing to return here - pass - return None - - def _temp_table_scope(self, select, table): - scope = 9999 - for var, sql in self._varmap.items(): - # skip "attribute variable" in varmap (such 'T.login') - if not '.' in var and table == sql.split('.', 1)[0]: - try: - scope = min(scope, self._state.scopes[select.defined_vars[var].scope]) - except KeyError: - scope = 0 # XXX - if scope == 0: - break - return scope - - def _mapped_term(self, term, key): - """return sql and table alias to the `term`, mapped as `key` or raise - KeyError when the key is not found in the varmap - """ - sql = self._varmap[key] - tablealias = sql.split('.', 1)[0] - scope = self._temp_table_scope(term.stmt, tablealias) - self._state.add_table(tablealias, scope=scope) - return sql, tablealias - - def _var_info(self, var): - try: - return self._mapped_term(var, var.name) - except KeyError: - scope = self._state.scopes[var.scope] - etype = self._state.solution[var.name] - # XXX this check should be moved in rql.stcheck - if self.schema.eschema(etype).final: - raise BadRQLQuery(var.stmt.root) - tablealias = '_' + var.name - sql = '%s.%seid' % (tablealias, SQL_PREFIX) - self._state.add_table('%s%s AS %s' % (SQL_PREFIX, etype, tablealias), - tablealias, scope=scope) - return sql, tablealias - - def _inlined_var_sql(self, var, rtype): - try: - sql = self._varmap['%s.%s' % (var.name, rtype)] - scope = self._state.scopes[var.scope] - self._state.add_table(sql.split('.', 1)[0], scope=scope) - except KeyError: - # rtype may be an attribute relation when called from - # _visit_var_attr_relation. 
take care about 'eid' rtype, since in - # some case we may use the `entities` table, so in that case we've - # to properly use variable'sql - if rtype == 'eid': - sql = var.accept(self) - else: - sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype) - return sql - - def _linked_var_sql(self, variable): - if not self._state.ignore_varmap: - try: - return self._varmap[variable.name] - except KeyError: - pass - rel = (variable.stinfo.get('principal') or - next(iter(variable.stinfo['rhsrelations']))) - linkedvar = rel.children[0].variable - if rel.r_type == 'eid': - return linkedvar.accept(self) - if isinstance(linkedvar, ColumnAlias): - raise BadRQLQuery('variable %s should be selected by the subquery' - % variable.name) - try: - sql = self._varmap['%s.%s' % (linkedvar.name, rel.r_type)] - except KeyError: - mapkey = '%s.%s' % (self._state.solution[linkedvar.name], rel.r_type) - if mapkey in self.attr_map: - cb, sourcecb = self.attr_map[mapkey] - if not sourcecb: - return cb(self, linkedvar, rel) - # attribute mapped at the source level (bfss for instance) - stmt = rel.stmt - for selectidx, vref in iter_mapped_var_sels(stmt, variable): - stack = [cb] - update_source_cb_stack(self._state, stmt, vref, stack) - self._state._needs_source_cb[selectidx] = stack - linkedvar.accept(self) - sql = '%s.%s%s' % (linkedvar._q_sqltable, SQL_PREFIX, rel.r_type) - return sql - - # tables handling ######################################################### - - def _var_table(self, var): - var.accept(self)#.visit_variable(var) - return var._q_sqltable diff -r 058bb3dc685f -r 0b59724cb3f2 server/sources/storages.py --- a/server/sources/storages.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,288 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""custom storages for the system source""" - -import os -import sys -from os import unlink, path as osp -from contextlib import contextmanager -import tempfile - -from six import PY2, PY3, text_type, binary_type - -from logilab.common import nullobject - -from yams.schema import role_name - -from cubicweb import Binary, ValidationError -from cubicweb.server import hook -from cubicweb.server.edition import EditedEntity - - -def set_attribute_storage(repo, etype, attr, storage): - repo.system_source.set_storage(etype, attr, storage) - -def unset_attribute_storage(repo, etype, attr): - repo.system_source.unset_storage(etype, attr) - - -class Storage(object): - """abstract storage - - * If `source_callback` is true (by default), the callback will be run during - query result process of fetched attribute's value and should have the - following prototype:: - - callback(self, source, cnx, value) - - where `value` is the value actually stored in the backend. 
None values - will be skipped (eg callback won't be called). - - * if `source_callback` is false, the callback will be run during sql - generation when some attribute with a custom storage is accessed and - should have the following prototype:: - - callback(self, generator, relation, linkedvar) - - where `generator` is the sql generator, `relation` the current rql syntax - tree relation and linkedvar the principal syntax tree variable holding the - attribute. - """ - is_source_callback = True - - def callback(self, *args): - """see docstring for prototype, which vary according to is_source_callback - """ - raise NotImplementedError() - - def entity_added(self, entity, attr): - """an entity using this storage for attr has been added""" - raise NotImplementedError() - def entity_updated(self, entity, attr): - """an entity using this storage for attr has been updatded""" - raise NotImplementedError() - def entity_deleted(self, entity, attr): - """an entity using this storage for attr has been deleted""" - raise NotImplementedError() - def migrate_entity(self, entity, attribute): - """migrate an entity attribute to the storage""" - raise NotImplementedError() - -# TODO -# * make it configurable without code -# * better file path attribution -# * handle backup/restore - -def uniquify_path(dirpath, basename): - """return a file descriptor and unique file name for `basename` in `dirpath` - """ - path = basename.replace(osp.sep, '-') - base, ext = osp.splitext(path) - return tempfile.mkstemp(prefix=base, suffix=ext, dir=dirpath) - -@contextmanager -def fsimport(cnx): - present = 'fs_importing' in cnx.transaction_data - old_value = cnx.transaction_data.get('fs_importing') - cnx.transaction_data['fs_importing'] = True - yield - if present: - cnx.transaction_data['fs_importing'] = old_value - else: - del cnx.transaction_data['fs_importing'] - - -_marker = nullobject() - - -class BytesFileSystemStorage(Storage): - """store Bytes attribute value on the file system""" - def __init__(self, defaultdir, fsencoding=_marker, wmode=0o444): - if PY3: - if not isinstance(defaultdir, text_type): - raise TypeError('defaultdir must be a unicode object in python 3') - if fsencoding is not _marker: - raise ValueError('fsencoding is no longer supported in python 3') - else: - self.fsencoding = fsencoding or 'utf-8' - if isinstance(defaultdir, text_type): - defaultdir = defaultdir.encode(fsencoding) - self.default_directory = defaultdir - # extra umask to use when creating file - # 0444 as in "only allow read bit in permission" - self._wmode = wmode - - def _writecontent(self, fd, binary): - """write the content of a binary in readonly file - - As the bfss never alters an existing file it does not prevent it from - working as intended. This is a better safe than sorry approach. 
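The read-only trick relied on by _writecontent below works because POSIX checks permissions at open time, not at write time: the descriptor returned by mkstemp was opened writable, so it can still be written to after fchmod drops the write bits. A minimal POSIX-only sketch of that behaviour, independent of CubicWeb:

    import os
    import tempfile

    fd, path = tempfile.mkstemp()
    os.fchmod(fd, 0o444)            # file is now read-only on disk...
    with os.fdopen(fd, 'wb') as f:  # ...but the already-open descriptor can still write
        f.write(b'some content')
    print(oct(os.stat(path).st_mode & 0o777))  # 0o444
    os.unlink(path)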
- """ - os.fchmod(fd, self._wmode) - fileobj = os.fdopen(fd, 'wb') - binary.to_file(fileobj) - fileobj.close() - - - def callback(self, source, cnx, value): - """sql generator callback when some attribute with a custom storage is - accessed - """ - fpath = source.binary_to_str(value) - try: - return Binary.from_file(fpath) - except EnvironmentError as ex: - source.critical("can't open %s: %s", value, ex) - return None - - def entity_added(self, entity, attr): - """an entity using this storage for attr has been added""" - if entity._cw.transaction_data.get('fs_importing'): - binary = Binary.from_file(entity.cw_edited[attr].getvalue()) - entity._cw_dont_cache_attribute(attr, repo_side=True) - else: - binary = entity.cw_edited.pop(attr) - fd, fpath = self.new_fs_path(entity, attr) - # bytes storage used to store file's path - binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8')) - entity.cw_edited.edited_attribute(attr, binary_obj) - self._writecontent(fd, binary) - AddFileOp.get_instance(entity._cw).add_data(fpath) - return binary - - def entity_updated(self, entity, attr): - """an entity using this storage for attr has been updated""" - # get the name of the previous file containing the value - oldpath = self.current_fs_path(entity, attr) - if entity._cw.transaction_data.get('fs_importing'): - # If we are importing from the filesystem, the file already exists. - # We do not need to create it but we need to fetch the content of - # the file as the actual content of the attribute - fpath = entity.cw_edited[attr].getvalue() - entity._cw_dont_cache_attribute(attr, repo_side=True) - assert fpath is not None - binary = Binary.from_file(fpath) - else: - # We must store the content of the attributes - # into a file to stay consistent with the behaviour of entity_add. - # Moreover, the BytesFileSystemStorage expects to be able to - # retrieve the current value of the attribute at anytime by reading - # the file on disk. To be able to rollback things, use a new file - # and keep the old one that will be removed on commit if everything - # went ok. - # - # fetch the current attribute value in memory - binary = entity.cw_edited.pop(attr) - if binary is None: - fpath = None - else: - # Get filename for it - fd, fpath = self.new_fs_path(entity, attr) - # write attribute value on disk - self._writecontent(fd, binary) - # Mark the new file as added during the transaction. - # The file will be removed on rollback - AddFileOp.get_instance(entity._cw).add_data(fpath) - # reinstall poped value - if fpath is None: - entity.cw_edited.edited_attribute(attr, None) - else: - # register the new location for the file. - binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8')) - entity.cw_edited.edited_attribute(attr, binary_obj) - if oldpath is not None and oldpath != fpath: - # Mark the old file as useless so the file will be removed at - # commit. - DeleteFileOp.get_instance(entity._cw).add_data(oldpath) - return binary - - def entity_deleted(self, entity, attr): - """an entity using this storage for attr has been deleted""" - fpath = self.current_fs_path(entity, attr) - if fpath is not None: - DeleteFileOp.get_instance(entity._cw).add_data(fpath) - - def new_fs_path(self, entity, attr): - # We try to get some hint about how to name the file using attribute's - # name metadata, so we use the real file name and extension when - # available. 
Keeping the extension is useful for example in the case of - # PIL processing that use filename extension to detect content-type, as - # well as providing more understandable file names on the fs. - if PY2: - attr = attr.encode('ascii') - basename = [str(entity.eid), attr] - name = entity.cw_attr_metadata(attr, 'name') - if name is not None: - basename.append(name.encode(self.fsencoding) if PY2 else name) - fd, fspath = uniquify_path(self.default_directory, - '_'.join(basename)) - if fspath is None: - msg = entity._cw._('failed to uniquify path (%s, %s)') % ( - self.default_directory, '_'.join(basename)) - raise ValidationError(entity.eid, {role_name(attr, 'subject'): msg}) - assert isinstance(fspath, str) # bytes on py2, unicode on py3 - return fd, fspath - - def current_fs_path(self, entity, attr): - """return the current fs_path of the attribute, or None is the attr is - not stored yet. - """ - sysource = entity._cw.repo.system_source - cu = sysource.doexec(entity._cw, - 'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % ( - attr, entity.cw_etype, entity.eid)) - rawvalue = cu.fetchone()[0] - if rawvalue is None: # no previous value - return None - fspath = sysource._process_value(rawvalue, cu.description[0], - binarywrap=binary_type) - if PY3: - fspath = fspath.decode('utf-8') - assert isinstance(fspath, str) # bytes on py2, unicode on py3 - return fspath - - def migrate_entity(self, entity, attribute): - """migrate an entity attribute to the storage""" - entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) - self.entity_added(entity, attribute) - cnx = entity._cw - source = cnx.repo.system_source - attrs = source.preprocess_entity(entity) - sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs, - ['cw_eid']) - source.doexec(cnx, sql, attrs) - entity.cw_edited = None - - -class AddFileOp(hook.DataOperationMixIn, hook.Operation): - def rollback_event(self): - for filepath in self.get_data(): - assert isinstance(filepath, str) # bytes on py2, unicode on py3 - try: - unlink(filepath) - except Exception as ex: - self.error("can't remove %s: %s" % (filepath, ex)) - -class DeleteFileOp(hook.DataOperationMixIn, hook.Operation): - def postcommit_event(self): - for filepath in self.get_data(): - assert isinstance(filepath, str) # bytes on py2, unicode on py3 - try: - unlink(filepath) - except Exception as ex: - self.error("can't remove %s: %s" % (filepath, ex)) diff -r 058bb3dc685f -r 0b59724cb3f2 server/sqlutils.py --- a/server/sqlutils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,591 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""SQL utilities functions and classes.""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import re -import subprocess -from os.path import abspath -from logging import getLogger -from datetime import time, datetime, timedelta - -from six import string_types, text_type -from six.moves import filter - -from pytz import utc - -from logilab import database as db, common as lgc -from logilab.common.shellutils import ProgressBar, DummyProgressBar -from logilab.common.deprecation import deprecated -from logilab.common.logging_ext import set_log_methods -from logilab.common.date import utctime, utcdatetime, strptime -from logilab.database.sqlgen import SQLGenerator - -from cubicweb import Binary, ConfigurationError -from cubicweb.uilib import remove_html_tags -from cubicweb.schema import PURE_VIRTUAL_RTYPES -from cubicweb.server import SQL_CONNECT_HOOKS -from cubicweb.server.utils import crypt_password - -lgc.USE_MX_DATETIME = False -SQL_PREFIX = 'cw_' - - -def _run_command(cmd): - if isinstance(cmd, string_types): - print(cmd) - return subprocess.call(cmd, shell=True) - else: - print(' '.join(cmd)) - return subprocess.call(cmd) - - -def sqlexec(sqlstmts, cursor_or_execute, withpb=True, - pbtitle='', delimiter=';', cnx=None): - """execute sql statements ignoring DROP/ CREATE GROUP or USER statements - error. - - :sqlstmts_as_string: a string or a list of sql statements. - :cursor_or_execute: sql cursor or a callback used to execute statements - :cnx: if given, commit/rollback at each statement. - - :withpb: if True, display a progresse bar - :pbtitle: a string displayed as the progress bar title (if `withpb=True`) - - :delimiter: a string used to split sqlstmts (if it is a string) - - Return the failed statements (same type as sqlstmts) - """ - if hasattr(cursor_or_execute, 'execute'): - execute = cursor_or_execute.execute - else: - execute = cursor_or_execute - sqlstmts_as_string = False - if isinstance(sqlstmts, string_types): - sqlstmts_as_string = True - sqlstmts = sqlstmts.split(delimiter) - if withpb: - if sys.stdout.isatty(): - pb = ProgressBar(len(sqlstmts), title=pbtitle) - else: - pb = DummyProgressBar() - failed = [] - for sql in sqlstmts: - sql = sql.strip() - if withpb: - pb.update() - if not sql: - continue - try: - # some dbapi modules doesn't accept unicode for sql string - execute(str(sql)) - except Exception: - if cnx: - cnx.rollback() - failed.append(sql) - else: - if cnx: - cnx.commit() - if withpb: - print() - if sqlstmts_as_string: - failed = delimiter.join(failed) - return failed - - -def sqlgrants(schema, driver, user, - text_index=True, set_owner=True, - skip_relations=(), skip_entities=()): - """return sql to give all access privileges to the given user on the system - schema - """ - from cubicweb.server.schema2sql import grant_schema - from cubicweb.server.sources import native - output = [] - w = output.append - w(native.grant_schema(user, set_owner)) - w('') - if text_index: - dbhelper = db.get_db_helper(driver) - w(dbhelper.sql_grant_user_on_fti(user)) - w('') - w(grant_schema(schema, user, set_owner, skip_entities=skip_entities, prefix=SQL_PREFIX)) - return '\n'.join(output) - - -def sqlschema(schema, driver, text_index=True, - user=None, set_owner=False, - skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()): - """return the system sql schema, according to the given parameters""" - from cubicweb.server.schema2sql import schema2sql - from cubicweb.server.sources import native - if set_owner: - assert user, 'user is 
argument required when set_owner is true' - output = [] - w = output.append - w(native.sql_schema(driver)) - w('') - dbhelper = db.get_db_helper(driver) - if text_index: - w(dbhelper.sql_init_fti().replace(';', ';;')) - w('') - w(schema2sql(dbhelper, schema, prefix=SQL_PREFIX, - skip_entities=skip_entities, - skip_relations=skip_relations).replace(';', ';;')) - if dbhelper.users_support and user: - w('') - w(sqlgrants(schema, driver, user, text_index, set_owner, - skip_relations, skip_entities).replace(';', ';;')) - return '\n'.join(output) - - -def sqldropschema(schema, driver, text_index=True, - skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()): - """return the sql to drop the schema, according to the given parameters""" - from cubicweb.server.schema2sql import dropschema2sql - from cubicweb.server.sources import native - output = [] - w = output.append - if text_index: - dbhelper = db.get_db_helper(driver) - w(dbhelper.sql_drop_fti()) - w('') - w(dropschema2sql(dbhelper, schema, prefix=SQL_PREFIX, - skip_entities=skip_entities, - skip_relations=skip_relations)) - w('') - w(native.sql_drop_schema(driver)) - return '\n'.join(output) - - -_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION = re.compile('^(?!(sql|pg)_)').match -def sql_drop_all_user_tables(driver_or_helper, sqlcursor): - """Return ths sql to drop all tables found in the database system.""" - if not getattr(driver_or_helper, 'list_tables', None): - dbhelper = db.get_db_helper(driver_or_helper) - else: - dbhelper = driver_or_helper - - cmds = [dbhelper.sql_drop_sequence('entities_id_seq')] - # for mssql, we need to drop views before tables - if hasattr(dbhelper, 'list_views'): - cmds += ['DROP VIEW %s;' % name - for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))] - cmds += ['DROP TABLE %s;' % name - for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))] - return '\n'.join(cmds) - - -class ConnectionWrapper(object): - """handle connection to the system source, at some point associated to a - :class:`Session` - """ - - # since 3.19, we only have to manage the system source connection - def __init__(self, system_source): - # dictionary of (source, connection), indexed by sources'uri - self._source = system_source - self.cnx = system_source.get_connection() - self.cu = self.cnx.cursor() - - def commit(self): - """commit the current transaction for this user""" - # let exception propagates - self.cnx.commit() - - def rollback(self): - """rollback the current transaction for this user""" - # catch exceptions, rollback other sources anyway - try: - self.cnx.rollback() - except Exception: - self._source.critical('rollback error', exc_info=sys.exc_info()) - # error on rollback, the connection is much probably in a really - # bad state. Replace it by a new one. 
- self.reconnect() - - def close(self, i_know_what_i_do=False): - """close all connections in the set""" - if i_know_what_i_do is not True: # unexpected closing safety belt - raise RuntimeError('connections set shouldn\'t be closed') - try: - self.cu.close() - self.cu = None - except Exception: - pass - try: - self.cnx.close() - self.cnx = None - except Exception: - pass - - # internals ############################################################### - - def cnxset_freed(self): - """connections set is being freed from a session""" - pass # no nothing by default - - def reconnect(self): - """reopen a connection for this source or all sources if none specified - """ - try: - # properly close existing connection if any - self.cnx.close() - except Exception: - pass - self._source.info('trying to reconnect') - self.cnx = self._source.get_connection() - self.cu = self.cnx.cursor() - - @deprecated('[3.19] use .cu instead') - def __getitem__(self, uri): - assert uri == 'system' - return self.cu - - @deprecated('[3.19] use repo.system_source instead') - def source(self, uid): - assert uid == 'system' - return self._source - - @deprecated('[3.19] use .cnx instead') - def connection(self, uid): - assert uid == 'system' - return self.cnx - - -class SqliteConnectionWrapper(ConnectionWrapper): - """Sqlite specific connection wrapper: close the connection each time it's - freed (and reopen it later when needed) - """ - def __init__(self, system_source): - # don't call parent's __init__, we don't want to initiate the connection - self._source = system_source - - _cnx = None - - def cnxset_freed(self): - self.cu.close() - self.cnx.close() - self.cnx = self.cu = None - - @property - def cnx(self): - if self._cnx is None: - self._cnx = self._source.get_connection() - self._cu = self._cnx.cursor() - return self._cnx - @cnx.setter - def cnx(self, value): - self._cnx = value - - @property - def cu(self): - if self._cnx is None: - self._cnx = self._source.get_connection() - self._cu = self._cnx.cursor() - return self._cu - @cu.setter - def cu(self, value): - self._cu = value - - -class SQLAdapterMixIn(object): - """Mixin for SQL data sources, getting a connection from a configuration - dictionary and handling connection locking - """ - cnx_wrap = ConnectionWrapper - - def __init__(self, source_config, repairing=False): - try: - self.dbdriver = source_config['db-driver'].lower() - dbname = source_config['db-name'] - except KeyError: - raise ConfigurationError('missing some expected entries in sources file') - dbhost = source_config.get('db-host') - port = source_config.get('db-port') - dbport = port and int(port) or None - dbuser = source_config.get('db-user') - dbpassword = source_config.get('db-password') - dbencoding = source_config.get('db-encoding', 'UTF-8') - dbextraargs = source_config.get('db-extra-arguments') - dbnamespace = source_config.get('db-namespace') - self.dbhelper = db.get_db_helper(self.dbdriver) - self.dbhelper.record_connection_info(dbname, dbhost, dbport, dbuser, - dbpassword, dbextraargs, - dbencoding, dbnamespace) - self.sqlgen = SQLGenerator() - # copy back some commonly accessed attributes - dbapi_module = self.dbhelper.dbapi_module - self.OperationalError = dbapi_module.OperationalError - self.InterfaceError = dbapi_module.InterfaceError - self.DbapiError = dbapi_module.Error - self._binary = self.dbhelper.binary_value - self._process_value = dbapi_module.process_value - self._dbencoding = dbencoding - if self.dbdriver == 'sqlite': - self.cnx_wrap = SqliteConnectionWrapper - 
self.dbhelper.dbname = abspath(self.dbhelper.dbname) - if not repairing: - statement_timeout = int(source_config.get('db-statement-timeout', 0)) - if statement_timeout > 0: - def set_postgres_timeout(cnx): - cnx.cursor().execute('SET statement_timeout to %d' % statement_timeout) - cnx.commit() - postgres_hooks = SQL_CONNECT_HOOKS['postgres'] - postgres_hooks.append(set_postgres_timeout) - - def wrapped_connection(self): - """open and return a connection to the database, wrapped into a class - handling reconnection and all - """ - return self.cnx_wrap(self) - - def get_connection(self): - """open and return a connection to the database""" - return self.dbhelper.get_connection() - - def backup_to_file(self, backupfile, confirm): - for cmd in self.dbhelper.backup_commands(backupfile, - keepownership=False): - if _run_command(cmd): - if not confirm(' [Failed] Continue anyway?', default='n'): - raise Exception('Failed command: %s' % cmd) - - def restore_from_file(self, backupfile, confirm, drop=True): - for cmd in self.dbhelper.restore_commands(backupfile, - keepownership=False, - drop=drop): - if _run_command(cmd): - if not confirm(' [Failed] Continue anyway?', default='n'): - raise Exception('Failed command: %s' % cmd) - - def merge_args(self, args, query_args): - if args is not None: - newargs = {} - for key, val in args.items(): - # convert cubicweb binary into db binary - if isinstance(val, Binary): - val = self._binary(val.getvalue()) - # convert timestamp to utc. - # expect SET TiME ZONE to UTC at connection opening time. - # This shouldn't change anything for datetime without TZ. - elif isinstance(val, datetime) and val.tzinfo is not None: - val = utcdatetime(val) - elif isinstance(val, time) and val.tzinfo is not None: - val = utctime(val) - newargs[key] = val - # should not collide - assert not (frozenset(newargs) & frozenset(query_args)), \ - 'unexpected collision: %s' % (frozenset(newargs) & frozenset(query_args)) - newargs.update(query_args) - return newargs - return query_args - - def process_result(self, cursor, cnx=None, column_callbacks=None): - """return a list of CubicWeb compliant values from data in the given cursor - """ - return list(self.iter_process_result(cursor, cnx, column_callbacks)) - - def iter_process_result(self, cursor, cnx, column_callbacks=None): - """return a iterator on tuples of CubicWeb compliant values from data - in the given cursor - """ - # use two different implementations to avoid paying the price of - # callback lookup for each *cell* in results when there is nothing to - # lookup - if not column_callbacks: - return self.dbhelper.dbapi_module.process_cursor(cursor, self._dbencoding, - Binary) - assert cnx - return self._cb_process_result(cursor, column_callbacks, cnx) - - def _cb_process_result(self, cursor, column_callbacks, cnx): - # begin bind to locals for optimization - descr = cursor.description - encoding = self._dbencoding - process_value = self._process_value - binary = Binary - # /end - cursor.arraysize = 100 - while True: - results = cursor.fetchmany() - if not results: - break - for line in results: - result = [] - for col, value in enumerate(line): - if value is None: - result.append(value) - continue - cbstack = column_callbacks.get(col, None) - if cbstack is None: - value = process_value(value, descr[col], encoding, binary) - else: - for cb in cbstack: - value = cb(self, cnx, value) - result.append(value) - yield result - - def preprocess_entity(self, entity): - """return a dictionary to use as extra argument to cursor.execute - to 
insert/update an entity into a SQL database - """ - attrs = {} - eschema = entity.e_schema - converters = getattr(self.dbhelper, 'TYPE_CONVERTERS', {}) - for attr, value in entity.cw_edited.items(): - if value is not None and eschema.subjrels[attr].final: - atype = str(entity.e_schema.destination(attr)) - if atype in converters: - # It is easier to modify preprocess_entity rather - # than add_entity (native) as this behavior - # may also be used for update. - value = converters[atype](value) - elif atype == 'Password': # XXX could be done using a TYPE_CONVERTERS callback - # if value is a Binary instance, this mean we got it - # from a query result and so it is already encrypted - if isinstance(value, Binary): - value = value.getvalue() - else: - value = crypt_password(value) - value = self._binary(value) - elif isinstance(value, Binary): - value = self._binary(value.getvalue()) - attrs[SQL_PREFIX+str(attr)] = value - attrs[SQL_PREFIX+'eid'] = entity.eid - return attrs - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter')) - - -# connection initialization functions ########################################## - -def _install_sqlite_querier_patch(): - """This monkey-patch hotfixes a bug sqlite causing some dates to be returned as strings rather than - date objects (http://www.sqlite.org/cvstrac/tktview?tn=1327,33) - """ - from cubicweb.server.querier import QuerierHelper - - if hasattr(QuerierHelper, '_sqlite_patched'): - return # already monkey patched - - def wrap_execute(base_execute): - def new_execute(*args, **kwargs): - rset = base_execute(*args, **kwargs) - if rset.description: - found_date = False - for row, rowdesc in zip(rset, rset.description): - for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)): - if vtype in ('TZDatetime', 'Date', 'Datetime') \ - and isinstance(value, text_type): - found_date = True - value = value.rsplit('.', 1)[0] - try: - row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') - except Exception: - row[cellindex] = strptime(value, '%Y-%m-%d') - if vtype == 'TZDatetime': - row[cellindex] = row[cellindex].replace(tzinfo=utc) - if vtype == 'Time' and isinstance(value, text_type): - found_date = True - try: - row[cellindex] = strptime(value, '%H:%M:%S') - except Exception: - # DateTime used as Time? - row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') - if vtype == 'Interval' and isinstance(value, int): - found_date = True - # XXX value is in number of seconds? - row[cellindex] = timedelta(0, value, 0) - if not found_date: - break - return rset - return new_execute - - QuerierHelper.execute = wrap_execute(QuerierHelper.execute) - QuerierHelper._sqlite_patched = True - - -def _init_sqlite_connection(cnx): - """Internal function that will be called to init a sqlite connection""" - _install_sqlite_querier_patch() - - class group_concat(object): - def __init__(self): - self.values = set() - def step(self, value): - if value is not None: - self.values.add(value) - def finalize(self): - return ', '.join(text_type(v) for v in self.values) - - cnx.create_aggregate("GROUP_CONCAT", 1, group_concat) - - def _limit_size(text, maxsize, format='text/plain'): - if len(text) < maxsize: - return text - if format in ('text/html', 'text/xhtml', 'text/xml'): - text = remove_html_tags(text) - if len(text) > maxsize: - text = text[:maxsize] + '...' 
- return text - - def limit_size3(text, format, maxsize): - return _limit_size(text, maxsize, format) - cnx.create_function("LIMIT_SIZE", 3, limit_size3) - - def limit_size2(text, maxsize): - return _limit_size(text, maxsize) - cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2) - - from logilab.common.date import strptime - def weekday(ustr): - try: - dt = strptime(ustr, '%Y-%m-%d %H:%M:%S') - except: - dt = strptime(ustr, '%Y-%m-%d') - # expect sunday to be 1, saturday 7 while weekday method return 0 for - # monday - return (dt.weekday() + 1) % 7 - cnx.create_function("WEEKDAY", 1, weekday) - - cnx.cursor().execute("pragma foreign_keys = on") - - import yams.constraints - yams.constraints.patch_sqlite_decimal() - -sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', []) -sqlite_hooks.append(_init_sqlite_connection) - - -def _init_postgres_connection(cnx): - """Internal function that will be called to init a postgresql connection""" - cnx.cursor().execute('SET TIME ZONE UTC') - # commit is needed, else setting are lost if the connection is first - # rolled back - cnx.commit() - -postgres_hooks = SQL_CONNECT_HOOKS.setdefault('postgres', []) -postgres_hooks.append(_init_postgres_connection) diff -r 058bb3dc685f -r 0b59724cb3f2 server/ssplanner.py --- a/server/ssplanner.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,543 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
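sqlexec() above is the helper used to run ';;'-delimited scripts such as the
ones produced by sqlschema() and sqldropschema() (both double their ';' before
returning). Here is a minimal, self-contained sketch of driving it; the
in-memory sqlite cursor and the statements are assumptions for illustration
only:

    # illustrative sketch only -- run a small script through sqlexec()
    import sqlite3

    from cubicweb.server.sqlutils import sqlexec

    script = ('CREATE TABLE foo (id integer);;'
              'INSERT INTO foo VALUES (1);;'
              'INSERT INTO foo VALUES (2)')
    cursor = sqlite3.connect(':memory:').cursor()
    # a string is split on the delimiter, each statement is executed in turn,
    # and the failed ones are returned joined with that same delimiter
    failed = sqlexec(script, cursor, withpb=False, delimiter=';;')
    assert failed == ''

A callable may be passed instead of a cursor, and passing cnx makes sqlexec()
commit after each successful statement and roll back after each failed one.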
-"""plan execution of rql queries on a single source""" - -__docformat__ = "restructuredtext en" - -from six import text_type - -from rql.stmts import Union, Select -from rql.nodes import Constant, Relation - -from cubicweb import QueryError -from cubicweb.schema import VIRTUAL_RTYPES -from cubicweb.rqlrewrite import add_types_restriction -from cubicweb.server.edition import EditedEntity - -READ_ONLY_RTYPES = set(('eid', 'has_text', 'is', 'is_instance_of', 'identity')) - -_CONSTANT = object() -_FROM_SUBSTEP = object() - -def _extract_const_attributes(plan, rqlst, to_build): - """add constant values to entity def, mark variables to be selected - """ - to_select = {} - for relation in rqlst.main_relations: - lhs, rhs = relation.get_variable_parts() - rtype = relation.r_type - if rtype in READ_ONLY_RTYPES: - raise QueryError("can't assign to %s" % rtype) - try: - edef = to_build[str(lhs)] - except KeyError: - # lhs var is not to build, should be selected and added as an - # object relation - edef = to_build[str(rhs)] - to_select.setdefault(edef, []).append((rtype, lhs, 1)) - else: - if isinstance(rhs, Constant) and not rhs.uid: - # add constant values to entity def - value = rhs.eval(plan.args) - eschema = edef.entity.e_schema - attrtype = eschema.subjrels[rtype].objects(eschema)[0] - if attrtype == 'Password' and isinstance(value, text_type): - value = value.encode('UTF8') - edef.edited_attribute(rtype, value) - elif str(rhs) in to_build: - # create a relation between two newly created variables - plan.add_relation_def((edef, rtype, to_build[rhs.name])) - else: - to_select.setdefault(edef, []).append( (rtype, rhs, 0) ) - return to_select - -def _extract_eid_consts(plan, rqlst): - """return a dict mapping rqlst variable object to their eid if specified in - the syntax tree - """ - cnx = plan.cnx - if rqlst.where is None: - return {} - eidconsts = {} - neweids = cnx.transaction_data.get('neweids', ()) - checkread = cnx.read_security - eschema = cnx.vreg.schema.eschema - for rel in rqlst.where.get_nodes(Relation): - # only care for 'eid' relations ... - if (rel.r_type == 'eid' - # ... that are not part of a NOT clause ... - and not rel.neged(strict=True) - # ... and where eid is specified by '=' operator. - and rel.children[1].operator == '='): - lhs, rhs = rel.get_variable_parts() - if isinstance(rhs, Constant): - eid = int(rhs.eval(plan.args)) - # check read permission here since it may not be done by - # the generated select substep if not emited (eg nothing - # to be selected) - if checkread and eid not in neweids: - with cnx.security_enabled(read=False): - eschema(cnx.entity_metas(eid)['type']).check_perm( - cnx, 'read', eid=eid) - eidconsts[lhs.variable] = eid - return eidconsts - -def _build_substep_query(select, origrqlst): - """Finalize substep select query that should be executed to get proper - selection of stuff to insert/update. - - Return None when no query actually needed, else the given select node that - will be used as substep query. 
- """ - if origrqlst.where is not None and not select.selection: - # no selection, append one randomly by searching for a relation which is - # not neged neither a type restriction (is/is_instance_of) - for rel in origrqlst.where.iget_nodes(Relation): - if not (rel.neged(traverse_scope=True) or rel.is_types_restriction()): - select.append_selected(rel.children[0].copy(select)) - break - else: - return None - if select.selection: - if origrqlst.where is not None: - select.set_where(origrqlst.where.copy(select)) - if getattr(origrqlst, 'having', None): - select.set_having([sq.copy(select) for sq in origrqlst.having]) - return select - return None - -class SSPlanner(object): - """SingleSourcePlanner: build execution plan for rql queries - - optimized for single source repositories - """ - - def __init__(self, schema, rqlhelper): - self.schema = schema - self.rqlhelper = rqlhelper - - def build_plan(self, plan): - """build an execution plan from a RQL query - - do nothing here, dispatch according to the statement type - """ - build_plan = getattr(self, 'build_%s_plan' % plan.rqlst.TYPE) - for step in build_plan(plan, plan.rqlst): - plan.add_step(step) - - def build_select_plan(self, plan, rqlst): - """build execution plan for a SELECT RQL query. Suppose only one source - is available and so avoid work need for query decomposition among sources - - the rqlst should not be tagged at this point. - """ - plan.preprocess(rqlst) - return (OneFetchStep(plan, rqlst),) - - def build_insert_plan(self, plan, rqlst): - """get an execution plan from an INSERT RQL query""" - # each variable in main variables is a new entity to insert - to_build = {} - cnx = plan.cnx - etype_class = cnx.vreg['etypes'].etype_class - for etype, var in rqlst.main_variables: - # need to do this since entity class is shared w. web client code ! 
- to_build[var.name] = EditedEntity(etype_class(etype)(cnx)) - plan.add_entity_def(to_build[var.name]) - # add constant values to entity def, mark variables to be selected - to_select = _extract_const_attributes(plan, rqlst, to_build) - # add necessary steps to add relations and update attributes - step = InsertStep(plan) # insert each entity and its relations - step.children += self._compute_relation_steps(plan, rqlst, to_select) - return (step,) - - def _compute_relation_steps(self, plan, rqlst, to_select): - """handle the selection of relations for an insert query""" - eidconsts = _extract_eid_consts(plan, rqlst) - for edef, rdefs in to_select.items(): - # create a select rql st to fetch needed data - select = Select() - eschema = edef.entity.e_schema - for i, (rtype, term, reverse) in enumerate(rdefs): - if getattr(term, 'variable', None) in eidconsts: - value = eidconsts[term.variable] - else: - select.append_selected(term.copy(select)) - value = _FROM_SUBSTEP - if reverse: - rdefs[i] = (rtype, InsertRelationsStep.REVERSE_RELATION, value) - else: - rschema = eschema.subjrels[rtype] - if rschema.final or rschema.inlined: - rdefs[i] = (rtype, InsertRelationsStep.FINAL, value) - else: - rdefs[i] = (rtype, InsertRelationsStep.RELATION, value) - step = InsertRelationsStep(plan, edef, rdefs) - select = _build_substep_query(select, rqlst) - if select is not None: - step.children += self._select_plan(plan, select, rqlst.solutions) - yield step - - def build_delete_plan(self, plan, rqlst): - """get an execution plan from a DELETE RQL query""" - # build a select query to fetch entities to delete - steps = [] - for etype, var in rqlst.main_variables: - step = DeleteEntitiesStep(plan) - step.children += self._sel_variable_step(plan, rqlst, etype, var) - steps.append(step) - for relation in rqlst.main_relations: - step = DeleteRelationsStep(plan, relation.r_type) - step.children += self._sel_relation_steps(plan, rqlst, relation) - steps.append(step) - return steps - - def _sel_variable_step(self, plan, rqlst, etype, varref): - """handle the selection of variables for a delete query""" - select = Select() - varref = varref.copy(select) - select.defined_vars = {varref.name: varref.variable} - select.append_selected(varref) - if rqlst.where is not None: - select.set_where(rqlst.where.copy(select)) - if getattr(rqlst, 'having', None): - select.set_having([x.copy(select) for x in rqlst.having]) - if etype != 'Any': - select.add_type_restriction(varref.variable, etype) - return self._select_plan(plan, select, rqlst.solutions) - - def _sel_relation_steps(self, plan, rqlst, relation): - """handle the selection of relations for a delete query""" - select = Select() - lhs, rhs = relation.get_variable_parts() - select.append_selected(lhs.copy(select)) - select.append_selected(rhs.copy(select)) - select.set_where(relation.copy(select)) - if rqlst.where is not None: - select.add_restriction(rqlst.where.copy(select)) - if getattr(rqlst, 'having', None): - select.set_having([x.copy(select) for x in rqlst.having]) - return self._select_plan(plan, select, rqlst.solutions) - - def build_set_plan(self, plan, rqlst): - """get an execution plan from an SET RQL query""" - getrschema = self.schema.rschema - select = Select() # potential substep query - selectedidx = {} # local state - attributes = set() # edited attributes - updatedefs = [] # definition of update attributes/relations - selidx = residx = 0 # substep selection / resulting rset indexes - # search for eid const in the WHERE clause - eidconsts = 
_extract_eid_consts(plan, rqlst) - # build `updatedefs` describing things to update and add necessary - # variables to the substep selection - for i, relation in enumerate(rqlst.main_relations): - if relation.r_type in VIRTUAL_RTYPES: - raise QueryError('can not assign to %r relation' - % relation.r_type) - lhs, rhs = relation.get_variable_parts() - lhskey = lhs.as_string() - if not lhskey in selectedidx: - if lhs.variable in eidconsts: - eid = eidconsts[lhs.variable] - lhsinfo = (_CONSTANT, eid, residx) - else: - select.append_selected(lhs.copy(select)) - lhsinfo = (_FROM_SUBSTEP, selidx, residx) - selidx += 1 - residx += 1 - selectedidx[lhskey] = lhsinfo - else: - lhsinfo = selectedidx[lhskey][:-1] + (None,) - rhskey = rhs.as_string() - if not rhskey in selectedidx: - if isinstance(rhs, Constant): - rhsinfo = (_CONSTANT, rhs.eval(plan.args), residx) - elif getattr(rhs, 'variable', None) in eidconsts: - eid = eidconsts[rhs.variable] - rhsinfo = (_CONSTANT, eid, residx) - else: - select.append_selected(rhs.copy(select)) - rhsinfo = (_FROM_SUBSTEP, selidx, residx) - selidx += 1 - residx += 1 - selectedidx[rhskey] = rhsinfo - else: - rhsinfo = selectedidx[rhskey][:-1] + (None,) - rschema = getrschema(relation.r_type) - updatedefs.append( (lhsinfo, rhsinfo, rschema) ) - # the update step - step = UpdateStep(plan, updatedefs) - # when necessary add substep to fetch yet unknown values - select = _build_substep_query(select, rqlst) - if select is not None: - # set distinct to avoid potential duplicate key error - select.distinct = True - step.children += self._select_plan(plan, select, rqlst.solutions) - return (step,) - - # internal methods ######################################################## - - def _select_plan(self, plan, select, solutions): - union = Union() - union.append(select) - select.clean_solutions(solutions) - add_types_restriction(self.schema, select) - self.rqlhelper.annotate(union) - return self.build_select_plan(plan, union) - - -# execution steps and helper functions ######################################## - -def varmap_test_repr(varmap, tablesinorder): - if varmap is None: - return varmap - maprepr = {} - for var, sql in varmap.items(): - table, col = sql.split('.') - maprepr[var] = '%s.%s' % (tablesinorder[table], col) - return maprepr - -class Step(object): - """base abstract class for execution step""" - def __init__(self, plan): - self.plan = plan - self.children = [] - - def execute_child(self): - assert len(self.children) == 1 - return self.children[0].execute() - - def execute_children(self): - for step in self.children: - step.execute() - - def execute(self): - """execute this step and store partial (eg this step) results""" - raise NotImplementedError() - - def mytest_repr(self): - """return a representation of this step suitable for test""" - return (self.__class__.__name__,) - - def test_repr(self): - """return a representation of this step suitable for test""" - return self.mytest_repr() + ( - [step.test_repr() for step in self.children],) - - -class OneFetchStep(Step): - """step consisting in fetching data from sources and directly returning - results - """ - def __init__(self, plan, union, inputmap=None): - Step.__init__(self, plan) - self.union = union - self.inputmap = inputmap - - def execute(self): - """call .syntax_tree_search with the given syntax tree on each - source for each solution - """ - self.execute_children() - cnx = self.plan.cnx - args = self.plan.args - inputmap = self.inputmap - union = self.union - # do we have to use a inputmap from a 
previous step ? If so disable - # cachekey - if inputmap or self.plan.cache_key is None: - cachekey = None - # union may have been splited into subqueries, in which case we can't - # use plan.cache_key, rebuild a cache key - elif isinstance(self.plan.cache_key, tuple): - cachekey = list(self.plan.cache_key) - cachekey[0] = union.as_string() - cachekey = tuple(cachekey) - else: - cachekey = union.as_string() - # get results for query - source = cnx.repo.system_source - result = source.syntax_tree_search(cnx, union, args, cachekey, inputmap) - #print 'ONEFETCH RESULT %s' % (result) - return result - - def mytest_repr(self): - """return a representation of this step suitable for test""" - try: - inputmap = varmap_test_repr(self.inputmap, self.plan.tablesinorder) - except AttributeError: - inputmap = self.inputmap - return (self.__class__.__name__, - sorted((r.as_string(kwargs=self.plan.args), r.solutions) - for r in self.union.children), - inputmap) - - -# UPDATE/INSERT/DELETE steps ################################################## - -class InsertRelationsStep(Step): - """step consisting in adding attributes/relations to entity defs from a - previous FetchStep - - relations values comes from the latest result, with one columns for - each relation defined in self.rdefs - - for one entity definition, we'll construct N entity, where N is the - number of the latest result - """ - - FINAL = 0 - RELATION = 1 - REVERSE_RELATION = 2 - - def __init__(self, plan, edef, rdefs): - Step.__init__(self, plan) - # partial entity definition to expand - self.edef = edef - # definition of relations to complete - self.rdefs = rdefs - - def execute(self): - """execute this step""" - base_edef = self.edef - edefs = [] - if self.children: - result = self.execute_child() - else: - result = [[]] - for row in result: - # get a new entity definition for this row - edef = base_edef.clone() - # complete this entity def using row values - index = 0 - for rtype, rorder, value in self.rdefs: - if value is _FROM_SUBSTEP: - value = row[index] - index += 1 - if rorder == InsertRelationsStep.FINAL: - edef.edited_attribute(rtype, value) - elif rorder == InsertRelationsStep.RELATION: - self.plan.add_relation_def( (edef, rtype, value) ) - edef.querier_pending_relations[(rtype, 'subject')] = value - else: - self.plan.add_relation_def( (value, rtype, edef) ) - edef.querier_pending_relations[(rtype, 'object')] = value - edefs.append(edef) - self.plan.substitute_entity_def(base_edef, edefs) - return result - - -class InsertStep(Step): - """step consisting in inserting new entities / relations""" - - def execute(self): - """execute this step""" - for step in self.children: - assert isinstance(step, InsertRelationsStep) - step.plan = self.plan - step.execute() - # insert entities first - result = self.plan.insert_entity_defs() - # then relation - self.plan.insert_relation_defs() - # return eids of inserted entities - return result - - -class DeleteEntitiesStep(Step): - """step consisting in deleting entities""" - - def execute(self): - """execute this step""" - results = self.execute_child() - if results: - todelete = frozenset(int(eid) for eid, in results) - cnx = self.plan.cnx - cnx.repo.glob_delete_entities(cnx, todelete) - return results - -class DeleteRelationsStep(Step): - """step consisting in deleting relations""" - - def __init__(self, plan, rtype): - Step.__init__(self, plan) - self.rtype = rtype - - def execute(self): - """execute this step""" - cnx = self.plan.cnx - delete = cnx.repo.glob_delete_relation - for subj, obj in 
self.execute_child(): - delete(cnx, subj, self.rtype, obj) - - -class UpdateStep(Step): - """step consisting in updating entities / adding relations from relations - definitions and from results fetched in previous step - """ - - def __init__(self, plan, updatedefs): - Step.__init__(self, plan) - self.updatedefs = updatedefs - - def execute(self): - """execute this step""" - cnx = self.plan.cnx - repo = cnx.repo - edefs = {} - relations = {} - # insert relations - if self.children: - result = self.execute_child() - else: - result = [[]] - for i, row in enumerate(result): - newrow = [] - for (lhsinfo, rhsinfo, rschema) in self.updatedefs: - lhsval = _handle_relterm(lhsinfo, row, newrow) - rhsval = _handle_relterm(rhsinfo, row, newrow) - if rschema.final or rschema.inlined: - eid = int(lhsval) - try: - edited = edefs[eid] - except KeyError: - edef = cnx.entity_from_eid(eid) - edefs[eid] = edited = EditedEntity(edef) - edited.edited_attribute(str(rschema), rhsval) - else: - str_rschema = str(rschema) - if str_rschema in relations: - relations[str_rschema].append((lhsval, rhsval)) - else: - relations[str_rschema] = [(lhsval, rhsval)] - result[i] = newrow - # update entities - repo.glob_add_relations(cnx, relations) - for eid, edited in edefs.items(): - repo.glob_update_entity(cnx, edited) - return result - -def _handle_relterm(info, row, newrow): - if info[0] is _CONSTANT: - val = info[1] - else: # _FROM_SUBSTEP - val = row[info[1]] - if info[-1] is not None: - newrow.append(val) - return val diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-cwep002/schema.py --- a/server/test/data-cwep002/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
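The SSPlanner deleted above is easier to read next to a sketch of how the
querier drives it. In the sketch, schema, rqlhelper, plan and rqlst stand in
for objects normally built by cubicweb.server.querier.QuerierHelper; they are
assumptions, not part of this changeset:

    # illustrative sketch only -- building an execution plan
    from cubicweb.server.ssplanner import SSPlanner

    planner = SSPlanner(schema, rqlhelper)

    # a SELECT tree becomes a single OneFetchStep around the preprocessed rqlst
    steps = planner.build_select_plan(plan, rqlst)

    # the generic entry point dispatches on plan.rqlst.TYPE ('select', 'insert',
    # 'delete', 'set'), builds Insert/Delete/Update steps whose children are
    # substep select plans, and registers everything with plan.add_step()
    planner.build_plan(plan)

The write steps then consume their child results: _FROM_SUBSTEP markers in the
rdefs/updatedefs are replaced by cells of the fetched rows, while _CONSTANT
entries (eids already found in the WHERE clause) bypass the substep entirely.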
- -from yams.buildobjs import EntityType, RelationDefinition, Int, ComputedRelation - -class Person(EntityType): - salary = Int() - -class works_for(RelationDefinition): - subject = 'Person' - object = 'Company' - cardinality = '?*' - -class Company(EntityType): - total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE ' - 'P works_for X, P salary SA') - -class has_employee(ComputedRelation): - rule = 'O works_for S' - __permissions__ = {'read': ('managers',)} diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/bootstrap_cubes --- a/server/test/data-migractions/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card,comment,tag,basket,file,localperms,fakeemail diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/__init__.py --- a/server/test/data-migractions/cubes/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakecustomtype/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakecustomtype/__pkginfo__.py --- a/server/test/data-migractions/cubes/fakecustomtype/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -# pylint: disable-msg=W0622 -"""cubicweb-fakeemail packaging information""" - -modname = 'fakecustomtype' -distname = "cubicweb-%s" % modname - -numversion = (1, 0, 0) -version = '.'.join(str(num) for num in numversion) - -license = 'LGPL' -author = "Logilab" -author_email = "contact@logilab.fr" -web = 'http://www.cubicweb.org/project/%s' % distname -description = "whatever" -classifiers = [ - 'Environment :: Web Environment', - 'Framework :: CubicWeb', - 'Programming Language :: Python', - 'Programming Language :: JavaScript', -] - -# used packages -__depends__ = {'cubicweb': '>= 3.19.0', - } - - -# packaging ### - -from os import listdir as _listdir -from os.path import join, isdir -from glob import glob - -THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) - -def listdir(dirpath): - return [join(dirpath, fname) for fname in _listdir(dirpath) - if fname[0] != '.' 
and not fname.endswith('.pyc') - and not fname.endswith('~') - and not isdir(join(dirpath, fname))] - -data_files = [ - # common files - [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], - ] -# check for possible extended cube layout -for dirname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'i18n', 'migration', 'wdoc'): - if isdir(dirname): - data_files.append([join(THIS_CUBE_DIR, dirname), listdir(dirname)]) -# Note: here, you'll need to add subdirectories if you want -# them to be included in the debian package diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakecustomtype/schema.py --- a/server/test/data-migractions/cubes/fakecustomtype/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ - -from yams.buildobjs import EntityType, make_type - -Numeric = make_type('Numeric') - -class Location(EntityType): - num = Numeric(scale=10, precision=18) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakecustomtype/site_cubicweb.py --- a/server/test/data-migractions/cubes/fakecustomtype/site_cubicweb.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -from yams import register_base_type -from logilab.database import get_db_helper -from logilab.database.sqlgen import SQLExpression - -_NUMERIC_PARAMETERS = {'scale': 0, 'precision': None} -register_base_type('Numeric', _NUMERIC_PARAMETERS) - -# Add the datatype to the helper mapping -pghelper = get_db_helper('postgres') - - -def pg_numeric_sqltype(rdef): - """Return a PostgreSQL column type corresponding to rdef - """ - return 'numeric(%s, %s)' % (rdef.precision, rdef.scale) - -pghelper.TYPE_MAPPING['Numeric'] = pg_numeric_sqltype diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakeemail/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakeemail/__pkginfo__.py --- a/server/test/data-migractions/cubes/fakeemail/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -# pylint: disable-msg=W0622 -"""cubicweb-fakeemail packaging information""" - -modname = 'fakeemail' -distname = "cubicweb-%s" % modname - -numversion = (1, 10, 0) -version = '.'.join(str(num) for num in numversion) - -license = 'LGPL' -author = "Logilab" -author_email = "contact@logilab.fr" -web = 'http://www.cubicweb.org/project/%s' % distname -description = "email component for the CubicWeb framework" -classifiers = [ - 'Environment :: Web Environment', - 'Framework :: CubicWeb', - 'Programming Language :: Python', - 'Programming Language :: JavaScript', -] - -# used packages -__depends__ = {'cubicweb': '>= 3.19.0', - 'cubicweb-file': '>= 1.9.0', - 'logilab-common': '>= 0.58.3', - } -__recommends__ = {'cubicweb-comment': None} - - -# packaging ### - -from os import listdir as _listdir -from os.path import join, isdir -from glob import glob - -THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) - -def listdir(dirpath): - return [join(dirpath, fname) for fname in _listdir(dirpath) - if fname[0] != '.' 
and not fname.endswith('.pyc') - and not fname.endswith('~') - and not isdir(join(dirpath, fname))] - -data_files = [ - # common files - [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], - ] -# check for possible extended cube layout -for dirname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'i18n', 'migration', 'wdoc'): - if isdir(dirname): - data_files.append([join(THIS_CUBE_DIR, dirname), listdir(dirname)]) -# Note: here, you'll need to add subdirectories if you want -# them to be included in the debian package diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/cubes/fakeemail/schema.py --- a/server/test/data-migractions/cubes/fakeemail/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,86 +0,0 @@ -"""entity/relation schemas to store email in an cubicweb instance - -:organization: Logilab -:copyright: 2006-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -# pylint: disable-msg=E0611,F0401 -from yams.buildobjs import (SubjectRelation, RelationType, EntityType, - String, Datetime, Int, RelationDefinition) -from yams.reader import context - -from cubicweb.schema import ERQLExpression - - -class Email(EntityType): - """electronic mail""" - subject = String(fulltextindexed=True) - date = Datetime(description=_('UTC time on which the mail was sent')) - messageid = String(required=True, indexed=True) - headers = String(description=_('raw headers')) - - sender = SubjectRelation('EmailAddress', cardinality='?*') - # an email with only Bcc is acceptable, don't require any recipients - recipients = SubjectRelation('EmailAddress') - cc = SubjectRelation('EmailAddress') - - parts = SubjectRelation('EmailPart', cardinality='*1', composite='subject') - attachment = SubjectRelation('File') - - reply_to = SubjectRelation('Email', cardinality='?*') - cites = SubjectRelation('Email') - in_thread = SubjectRelation('EmailThread', cardinality='?*') - - -class EmailPart(EntityType): - """an email attachment""" - __permissions__ = { - 'read': ('managers', 'users', 'guests',), # XXX if E parts X, U has_read_permission E - 'add': ('managers', ERQLExpression('E parts X, U has_update_permission E'),), - 'delete': ('managers', ERQLExpression('E parts X, U has_update_permission E')), - 'update': ('managers', 'owners',), - } - - content = String(fulltextindexed=True) - content_format = String(required=True, maxsize=50) - ordernum = Int(required=True) - alternative = SubjectRelation('EmailPart', symmetric=True) - - -class EmailThread(EntityType): - """discussion thread""" - title = String(required=True, indexed=True, fulltextindexed=True) - see_also = SubjectRelation('EmailThread') - forked_from = SubjectRelation('EmailThread', cardinality='?*') - -class parts(RelationType): - """ """ - fulltext_container = 'subject' - -class sender(RelationType): - """ """ - inlined = True - -class in_thread(RelationType): - """ """ - inlined = True - -class reply_to(RelationType): - """ """ - inlined = True - -class generated_by(RelationType): - """mark an entity as generated from an email""" - cardinality = '?*' - subject = ('TrInfo',) - object = 'Email' - -# if comment is installed -if 'Comment' in context.defined: - class comment_generated_by(RelationDefinition): - subject = 'Comment' - name = 'generated_by' - object = 'Email' diff -r 058bb3dc685f -r 0b59724cb3f2 
server/test/data-migractions/migratedapp/__init__.py --- a/server/test/data-migractions/migratedapp/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/migratedapp/bootstrap_cubes --- a/server/test/data-migractions/migratedapp/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card,comment,tag,basket,fakeemail,file diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/migratedapp/schema.py --- a/server/test/data-migractions/migratedapp/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,213 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
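The migratedapp schema below is the target schema of the migration tests: it
describes what data-migractions/schema.py should look like once the migration
commands under test have been applied. A rough sketch of the kind of commands
involved, as they would be typed in a migration shell; the exact sequence is an
assumption (the real one lives in the migractions test suite), only the command
names are standard cubicweb migration commands:

    # illustrative sketch only -- migration commands matching a few of the
    # deltas visible in the schema below (new Para/Text types, new
    # Note.whatever attribute, Division/SubDivision removed)
    add_entity_type('Para')
    add_entity_type('Text')
    add_attribute('Note', 'whatever')
    drop_entity_type('SubDivision')
    drop_entity_type('Division')
    sync_schema_props_perms('Personne')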
-"""cw.server.migraction test""" -import datetime as dt -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, - SubjectRelation, Bytes, - RichString, String, Int, Boolean, Datetime, Date, Float) -from yams.constraints import SizeConstraint, UniqueConstraint -from cubicweb import _ -from cubicweb.schema import (WorkflowableEntityType, RQLConstraint, - RQLVocabularyConstraint, - ERQLExpression, RRQLExpression) - -class Affaire(EntityType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), - 'update': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), - 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), - } - - ref = String(fulltextindexed=True, indexed=True, - constraints=[SizeConstraint(16)]) - sujet = String(fulltextindexed=True, - constraints=[SizeConstraint(256)]) - concerne = SubjectRelation('Societe') - opt_attr = Bytes() - -class Societe(WorkflowableEntityType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', 'owners'), - 'delete': ('managers', 'owners'), - 'add': ('managers', 'users',) - } - nom = String(maxsize=64, fulltextindexed=True) - web = String(maxsize=128) - tel = Float() - fax = Int() - rncs = String(maxsize=128) - ad1 = String(maxsize=128) - ad2 = String(maxsize=128) - ad3 = String(maxsize=128) - cp = String(maxsize=12) - ville= String(maxsize=32) - -# Division and SubDivision are gone - -# New -class Para(EntityType): - para = String(maxsize=512) - newattr = String() - newinlined = SubjectRelation('Affaire', cardinality='?*', inlined=True) - newnotinlined = SubjectRelation('Affaire', cardinality='?*') - -class Note(Para): - __specializes_schema__ = True - - __permissions__ = {'read': ('managers', 'users', 'guests',), - 'update': ('managers', 'owners',), - 'delete': ('managers', ), - 'add': ('managers', - ERQLExpression('X ecrit_part PE, U in_group G, ' - 'PE require_permission P, P name "add_note", ' - 'P require_group G'),)} - - whatever = Int(default=0) # keep it before `date` for unittest_migraction.test_add_attribute_int - yesno = Boolean(default=False) - date = Datetime() - type = String(maxsize=1) - unique_id = String(maxsize=1, required=True, unique=True) - mydate = Date(default='TODAY') - oldstyledefaultdate = Date(default='2013/01/01') - newstyledefaultdate = Date(default=dt.date(2013, 1, 1)) - shortpara = String(maxsize=64, default='hop') - ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')]) - attachment = SubjectRelation('File') - - -class Frozable(EntityType): - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', ERQLExpression('X frozen False'),), - 'delete': ('managers', ERQLExpression('X frozen False'),) - } - name = String() - frozen = Boolean(default=False, - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', 'owners') - }) - - -class Personne(EntityType): - __permissions__ = { - 'read': ('managers', 'users'), # 'guests' was removed - 'add': ('managers', 'users'), - 'update': ('managers', 'owners'), - 'delete': ('managers', 'owners') - } - __unique_together__ = [('nom', 'prenom', 'datenaiss')] - nom = String(fulltextindexed=True, required=True, maxsize=64) - prenom = String(fulltextindexed=True, maxsize=64) - civility = String(maxsize=1, default='M', fulltextindexed=True) - promo = 
String(vocabulary=('bon','pasbon')) - titre = String(fulltextindexed=True, maxsize=128) - adel = String(maxsize=128) - ass = String(maxsize=128) - web = String(maxsize=128) - tel = Int() - fax = Int() - datenaiss = Datetime() - test = Boolean() - - travaille = SubjectRelation('Societe') - concerne = SubjectRelation('Affaire') - concerne2 = SubjectRelation(('Affaire', 'Note'), cardinality='1*') - connait = SubjectRelation('Personne', symmetric=True) - -class concerne(RelationType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - -# `Old` entity type is gonce -# `comments` is gone -# `fiche` is gone -# `multisource_*` rdefs are gone -# `see_also_*` rdefs are gone - -class evaluee(RelationDefinition): - subject = ('Personne', 'CWUser', 'Societe') - object = ('Note') - constraints = [RQLVocabularyConstraint('S owned_by U')] - -class ecrit_par(RelationType): - __permissions__ = {'read': ('managers', 'users', 'guests',), - 'delete': ('managers', ), - 'add': ('managers', - RRQLExpression('O require_permission P, P name "add_note", ' - 'U in_group G, P require_group G'),) - } - inlined = True - cardinality = '?*' - -# `copain` rdef is gone -# `tags` rdef is gone -# `filed_under` rdef is gone -# `require_permission` rdef is gone -# `require_state` rdef is gone -# `personne_composite` rdef is gone -# `personne_inlined` rdef is gone -# `login_user` rdef is gone -# `ambiguous_inlined` rdef is gone - -class Folder(EntityType): - """folders are used to classify entities. They may be defined as a tree. - """ - name = String(required=True, indexed=True, internationalizable=True, - maxsize=64) - description = RichString(fulltextindexed=True) - filed_under = SubjectRelation('Folder', description=_('parent folder')) - - -# New -class Text(Para): - __specializes_schema__ = True - summary = String(maxsize=512) - - -# New -class Folder2(EntityType): - """folders are used to classify entities. They may be defined as a tree. - When you include the Folder entity, all application specific entities - may then be classified using the "filed_under" relation. - """ - name = String(required=True, indexed=True, internationalizable=True, - constraints=[UniqueConstraint(), SizeConstraint(64)]) - description = RichString(fulltextindexed=True) - -# New -class filed_under2(RelationDefinition): - subject ='*' - object = 'Folder2' - - -# New -class New(EntityType): - new_name = String() - -# New -class same_as(RelationDefinition): - subject = ('Societe',) - object = 'ExternalUri' diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-migractions/schema.py --- a/server/test/data-migractions/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,290 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, ComputedRelation, - SubjectRelation, RichString, String, Int, Float, - Boolean, Datetime, TZDatetime, Bytes) -from yams.constraints import SizeConstraint -from cubicweb.schema import (WorkflowableEntityType, - RQLConstraint, RQLUniqueConstraint, - RQLVocabularyConstraint, - ERQLExpression, RRQLExpression) -from cubicweb import _ - - -class Affaire(WorkflowableEntityType): - __permissions__ = { - 'read': ('managers', - ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), - 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), - 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), - 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), - } - - ref = String(fulltextindexed=True, indexed=True, - constraints=[SizeConstraint(16)]) - sujet = String(fulltextindexed=True, - constraints=[SizeConstraint(256)]) - descr = RichString(fulltextindexed=True, - description=_('more detailed description')) - - duration = Int() - invoiced = Float() - opt_attr = Bytes() - - depends_on = SubjectRelation('Affaire') - require_permission = SubjectRelation('CWPermission') - concerne = SubjectRelation(('Societe', 'Note')) - todo_by = SubjectRelation('Personne', cardinality='?*') - documented_by = SubjectRelation('Card') - - -class Societe(EntityType): - __unique_together__ = [('nom', 'type', 'cp')] - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'add': ('managers', 'users',) - } - - nom = String(maxsize=64, fulltextindexed=True) - web = String(maxsize=128) - type = String(maxsize=128) # attribute in common with Note - tel = Int() - fax = Int() - rncs = String(maxsize=128) - ad1 = String(maxsize=128) - ad2 = String(maxsize=128) - ad3 = String(maxsize=128) - cp = String(maxsize=12) - ville= String(maxsize=32) - - -class Division(Societe): - __specializes_schema__ = True - -class SubDivision(Division): - __specializes_schema__ = True - -class travaille_subdivision(RelationDefinition): - subject = 'Personne' - object = 'SubDivision' - -from cubicweb.schemas.base import CWUser -next(CWUser.get_relations('login')).fulltextindexed = True - -class Note(WorkflowableEntityType): - date = String(maxsize=10) - type = String(vocabulary=[u'todo', u'a', u'b', u'T', u'lalala']) - para = String(maxsize=512, - __permissions__ = { - 'add': ('managers', ERQLExpression('X in_state S, S name "todo"')), - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', ERQLExpression('X in_state S, S name "todo"')), - }) - something = String(maxsize=1, - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': (ERQLExpression('NOT X para NULL'),), - 'update': ('managers', 'owners') - }) - migrated_from = SubjectRelation('Note') - attachment = SubjectRelation('File') - inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*', - constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, ' - 'Y type T, Y inline1 A2, A2 todo_by C', - 'S,Y')]) - todo_by = SubjectRelation('CWUser') - - -class Frozable(EntityType): - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', 
ERQLExpression('X frozen False'),), - 'delete': ('managers', ERQLExpression('X frozen False'),) - } - name = String() - frozen = Boolean(default=False, - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', 'owners') - }) - - -class Personne(EntityType): - __unique_together__ = [('nom', 'prenom', 'inline2')] - nom = String(fulltextindexed=True, required=True, maxsize=64) - prenom = String(fulltextindexed=True, maxsize=64) - sexe = String(maxsize=1, default='M', fulltextindexed=True) - promo = String(vocabulary=('bon','pasbon')) - titre = String(fulltextindexed=True, maxsize=128) - adel = String(maxsize=128) - ass = String(maxsize=128) - web = String(maxsize=128) - tel = Int() - fax = Int() - datenaiss = Datetime() - tzdatenaiss = TZDatetime() - test = Boolean(__permissions__={ - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'update': ('managers',), - }) - description = String() - firstname = String(fulltextindexed=True, maxsize=64) - - concerne = SubjectRelation('Affaire') - connait = SubjectRelation('Personne') - inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') - - -class Old(EntityType): - name = String(__permissions__ = { - 'read' : ('managers', 'users', 'guests'), - 'add' : ('managers', 'users', 'guests'), - 'update' : () - }) - - -class connait(RelationType): - symmetric = True - -class concerne(RelationType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - -class travaille(RelationDefinition): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - subject = 'Personne' - object = 'Societe' - constraints = [RQLVocabularyConstraint('S owned_by U'), - RQLVocabularyConstraint('S created_by U')] - -class comments(RelationDefinition): - subject = 'Comment' - object = 'Personne' - -class fiche(RelationDefinition): - inlined = True - subject = 'Personne' - object = 'Card' - cardinality = '??' - -class multisource_inlined_rel(RelationDefinition): - inlined = True - cardinality = '?*' - subject = ('Card', 'Note') - object = ('Affaire', 'Note') - - -class see_also_1(RelationDefinition): - name = 'see_also' - subject = object = 'Folder' - -class see_also_2(RelationDefinition): - name = 'see_also' - subject = ('Bookmark', 'Note') - object = ('Bookmark', 'Note') - -class evaluee(RelationDefinition): - subject = ('Personne', 'CWUser', 'Societe') - object = ('Note') - constraints = [ - RQLVocabularyConstraint('S created_by U'), - RQLVocabularyConstraint('S owned_by U'), - ] - -class ecrit_par(RelationType): - inlined = True - -class ecrit_par_1(RelationDefinition): - name = 'ecrit_par' - subject = 'Note' - object = 'Personne' - cardinality = '?*' - -class ecrit_par_2(RelationDefinition): - name = 'ecrit_par' - subject = 'Note' - object = 'CWUser' - cardinality='?*' - - -class copain(RelationDefinition): - subject = object = 'CWUser' - -class tags(RelationDefinition): - subject = 'Tag' - object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire') - -class Folder(EntityType): - """folders are used to classify entities. They may be defined as a tree. 
- """ - name = String(required=True, indexed=True, internationalizable=True, - maxsize=64) - description = RichString(fulltextindexed=True) - filed_under = SubjectRelation('Folder', description=_('parent folder')) - -class filed_under(RelationDefinition): - subject = ('Note', 'Affaire') - object = 'Folder' - -class require_permission(RelationDefinition): - subject = ('Card', 'Note', 'Personne') - object = 'CWPermission' - -class require_state(RelationDefinition): - subject = 'CWPermission' - object = 'State' - -class personne_composite(RelationDefinition): - subject='Personne' - object='Personne' - composite='subject' - -class personne_inlined(RelationDefinition): - subject='Personne' - object='Personne' - cardinality='?*' - inlined=True - - -class login_user(RelationDefinition): - subject = 'Personne' - object = 'CWUser' - cardinality = '??' - -class ambiguous_inlined(RelationDefinition): - subject = ('Affaire', 'Note') - object = 'CWUser' - inlined = True - cardinality = '?*' - - -class user_login(ComputedRelation): - rule = 'O login_user S' diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/__init__.py diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/schema/Company.py --- a/server/test/data-schema2sql/schema/Company.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,67 +0,0 @@ -# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of yams. -# -# yams is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# yams is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with yams. If not, see . -from yams.buildobjs import EntityType, RelationType, RelationDefinition, \ - SubjectRelation, String - -class Company(EntityType): - name = String() - -class Subcompany(Company): - __specializes_schema__ = True - subcompany_of = SubjectRelation('Company') - -class Division(Company): - __specializes_schema__ = True - division_of = SubjectRelation('Company') - -class Subdivision(Division): - __specializes_schema__ = True - subdivision_of = SubjectRelation('Company') - -class Employee(EntityType): - works_for = SubjectRelation('Company') - -class require_permission(RelationType): - """link a permission to the entity. This permission should be used in the - security definition of the entity's type to be useful. 
- """ - fulltext_container = 'subject' - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'delete': ('managers',), - } - - -class missing_require_permission(RelationDefinition): - name = 'require_permission' - subject = 'Company' - object = 'EPermission' - -class EPermission(EntityType): - """entity type that may be used to construct some advanced security configuration - """ - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'delete': ('managers',), - 'update': ('managers', 'owners',), - } - name = String(required=True, indexed=True, internationalizable=True, - fulltextindexed=True, maxsize=100, - description=_('name or identifier of the permission')) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/schema/Dates.py --- a/server/test/data-schema2sql/schema/Dates.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,28 +0,0 @@ -# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of yams. -# -# yams is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# yams is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with yams. If not, see . -from datetime import time, date -from yams.buildobjs import EntityType, Datetime, Date, Time -from yams.constraints import TODAY, BoundaryConstraint - -class Datetest(EntityType): - dt1 = Datetime(default=u'now') - dt2 = Datetime(default=u'today') - d1 = Date(default=u'today', constraints=[BoundaryConstraint('<=', TODAY())]) - d2 = Date(default=date(2007, 12, 11)) - t1 = Time(default=time(8, 40)) - t2 = Time(default=time(9, 45)) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/schema/State.py --- a/server/test/data-schema2sql/schema/State.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,81 +0,0 @@ -# copyright 2004-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of yams. -# -# yams is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# yams is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with yams. If not, see . -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, - SubjectRelation, Int, String, Boolean) -from yams.constraints import SizeConstraint, UniqueConstraint - -from . 
import RESTRICTED_RTYPE_PERMS - -class State(EntityType): - """used to associate simple states to an entity - type and/or to define workflows - """ - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', 'users',), - 'delete': ('managers', 'owners',), - 'update': ('managers', 'owners',), - } - - # attributes - eid = Int(required=True, uid=True) - name = String(required=True, - indexed=True, internationalizable=True, - constraints=[SizeConstraint(256)]) - description = String(fulltextindexed=True) - # relations - state_of = SubjectRelation('Eetype', cardinality='+*') - next_state = SubjectRelation('State', cardinality='**') - - -class state_of(RelationType): - """link a state to one or more entity type""" - __permissions__ = RESTRICTED_RTYPE_PERMS - -class next_state(RelationType): - """define a workflow by associating a state to possible following states - """ - __permissions__ = RESTRICTED_RTYPE_PERMS - -class initial_state(RelationType): - """indicate which state should be used by default when an entity using states - is created - """ - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers', 'users',), - 'delete': ('managers', 'users',), - } - subject = 'Eetype' - object = 'State' - cardinality = '?*' - inlined = True - -class Eetype(EntityType): - """define an entity type, used to build the application schema""" - __permissions__ = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'delete': ('managers',), - 'update': ('managers', 'owners',), - } - name = String(required=True, indexed=True, internationalizable=True, - constraints=[UniqueConstraint(), SizeConstraint(64)]) - description = String(fulltextindexed=True) - meta = Boolean() - final = Boolean() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/schema/__init__.py --- a/server/test/data-schema2sql/schema/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of yams. -# -# yams is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# yams is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with yams. If not, see . -"""test schema""" -RESTRICTED_RTYPE_PERMS = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'delete': ('managers',), - } diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/schema/schema.py --- a/server/test/data-schema2sql/schema/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,112 +0,0 @@ -# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of yams. 
-# -# yams is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# yams is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with yams. If not, see . -from yams.buildobjs import (EntityType, RelationDefinition, RelationType, - SubjectRelation, String, Int, Float, Date, Boolean) -from yams.constraints import Attribute, BoundaryConstraint - -class Affaire(EntityType): - sujet = String(maxsize=128) - ref = String(maxsize=12) - - concerne = SubjectRelation('Societe') - obj_wildcard = SubjectRelation('*') - sym_rel = SubjectRelation('Person', symmetric=True) - inline_rel = SubjectRelation('Person', inlined=True, cardinality='?*') - -class subj_wildcard(RelationDefinition): - subject = '*' - object = 'Affaire' - - -class Person(EntityType): - __unique_together__ = [('nom', 'prenom')] - nom = String(maxsize=64, fulltextindexed=True, required=True) - prenom = String(maxsize=64, fulltextindexed=True) - sexe = String(maxsize=1, default='M') - promo = String(vocabulary=('bon','pasbon')) - titre = String(maxsize=128, fulltextindexed=True) - adel = String(maxsize=128) - ass = String(maxsize=128) - web = String(maxsize=128) - tel = Int(__permissions__={'read': (), - 'add': ('managers',), - 'update': ('managers',)}) - fax = Int() - datenaiss = Date() - test = Boolean() - salary = Float() - travaille = SubjectRelation('Societe', - __permissions__={'read': (), - 'add': (), - 'delete': ('managers',), - }) - - evaluee = SubjectRelation('Note') - -class Salaried(Person): - __specializes_schema__ = True - -class Societe(EntityType): - nom = String(maxsize=64, fulltextindexed=True) - web = String(maxsize=128) - tel = Int() - fax = Int(constraints=[BoundaryConstraint('<=', Attribute('tel'))]) - rncs = String(maxsize=32) - ad1 = String(maxsize=128) - ad2 = String(maxsize=128) - ad3 = String(maxsize=128) - cp = String(maxsize=12) - ville = String(maxsize=32) - - evaluee = SubjectRelation('Note') - - -class Note(EntityType): - date = String(maxsize=10) - type = String(maxsize=1) - para = String(maxsize=512) - - -class pkginfo(EntityType): - modname = String(maxsize=30, required=True) - version = String(maxsize=10, required=True, default='0.1') - copyright = String(required=True) - license = String(vocabulary=('GPL', 'ZPL')) - short_desc = String(maxsize=80, required=True) - long_desc = String(required=True, fulltextindexed=True) - author = String(maxsize=100, required=True) - author_email = String(maxsize=100, required=True) - mailinglist = String(maxsize=100) - debian_handler = String(vocabulary=('machin', 'bidule')) - - -class evaluee(RelationType): - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'delete': ('managers',), - } - -class concerne(RelationDefinition): - subject = 'Person' - object = 'Affaire' - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'delete': ('managers',), - } diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schema2sql/schema/toignore --- a/server/test/data-schema2sql/schema/toignore Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -coucou 
diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schemaserial/schema.py --- a/server/test/data-schemaserial/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from yams.buildobjs import EntityType, SubjectRelation, String, make_type -BabarTestType = make_type('BabarTestType') - -class Affaire(EntityType): - nom = String(unique=True, maxsize=64) - -class Personne(EntityType): - __unique_together__ = [('nom', 'prenom', 'inline2')] - nom = String(fulltextindexed=True, required=True, maxsize=64) - prenom = String(fulltextindexed=True, maxsize=64) - inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') - - custom_field_of_jungle = BabarTestType(jungle_speed=42) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data-schemaserial/site_cubicweb.py --- a/server/test/data-schemaserial/site_cubicweb.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from logilab.database import FunctionDescr -from logilab.database.sqlite import register_sqlite_pyfunc -from rql.utils import register_function - -class DUMB_SORT(FunctionDescr): - pass - -register_function(DUMB_SORT) -def dumb_sort(something): - return something -register_sqlite_pyfunc(dumb_sort) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/bootstrap_cubes --- a/server/test/data/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card,comment,tag,basket,file,localperms diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/hooks.py --- a/server/test/data/hooks.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from cubicweb.server.hook import Hook - -CALLED_EVENTS = {} - -class StartupHook(Hook): - __regid__ = 'mystartup' - events = ('server_startup',) - def __call__(self): - CALLED_EVENTS['server_startup'] = True - -class ShutdownHook(Hook): - __regid__ = 'myshutdown' - events = ('server_shutdown',) - def __call__(self): - CALLED_EVENTS['server_shutdown'] = True - - -class LoginHook(Hook): - __regid__ = 'mylogin' - events = ('session_open',) - def __call__(self): - CALLED_EVENTS['session_open'] = self._cw.user.login - -class LogoutHook(Hook): - __regid__ = 'mylogout' - events = ('session_close',) - def __call__(self): - CALLED_EVENTS['session_close'] = self._cw.user.login diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/ldap_test.ldif --- a/server/test/data/ldap_test.ldif Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -dn: dc=cubicweb,dc=test -structuralObjectClass: organization -objectClass: dcObject -objectClass: organization -o: cubicweb -dc: cubicweb - -dn: ou=People,dc=cubicweb,dc=test -objectClass: organizationalUnit -ou: People -structuralObjectClass: organizationalUnit - -dn: ou=Group,dc=cubicweb,dc=test -objectClass: organizationalUnit -ou: Group - -dn: cn=logilab,ou=Group,dc=cubicweb,dc=test -gidNumber: 2000 -objectClass: posixGroup -objectClass: top -cn: logilab -memberUid: adim - -dn: cn=dir,ou=Group,dc=cubicweb,dc=test -gidNumber: 2002 -objectClass: posixGroup -objectClass: top -cn: dir -memberUid: adim -memberUid: syt - -dn: uid=syt,ou=People,dc=cubicweb,dc=test -loginShell: /bin/bash -objectClass: OpenLDAPperson -objectClass: posixAccount -objectClass: top -objectClass: shadowAccount -cn: Sylvain Thenault -sn: Thenault -shadowMax: 99999 -gidNumber: 1004 -uid: syt -homeDirectory: /home/syt -shadowFlag: 134538764 -uidNumber: 1004 -givenName: Sylvain -telephoneNumber: 106 -displayName: sthenault -gecos: Sylvain Thenault -mail: sylvain.thenault@logilab.fr -mail: syt@logilab.fr -userPassword: syt - -dn: uid=adim,ou=People,dc=cubicweb,dc=test -loginShell: /bin/bash -objectClass: OpenLDAPperson -objectClass: posixAccount -objectClass: top -objectClass: shadowAccount -cn: Adrien Di Mascio -sn: Di Mascio -shadowMax: 99999 -gidNumber: 1006 -uid: adim -homeDirectory: /home/adim -uidNumber: 1006 -givenName: Adrien -telephoneNumber: 109 -displayName: adimascio -gecos: Adrien Di Mascio -mail: adim@logilab.fr -mail: adrien.dimascio@logilab.fr -userPassword: adim - diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/migration/postcreate.py --- a/server/test/data/migration/postcreate.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb post creation script, set note's workflow - -""" - -wf = add_workflow(u'note workflow', 'Note') -todo = wf.add_state(u'todo', initial=True) -done = wf.add_state(u'done') -wf.add_transition(u'redoit', done, todo) -wf.add_transition(u'markasdone', todo, done) -commit() - -wf = add_workflow(u'affaire workflow', 'Affaire') -pitetre = wf.add_state(u'pitetre', initial=True) -encours = wf.add_state(u'en cours') -finie = wf.add_state(u'finie') -bennon = wf.add_state(u'ben non') -wf.add_transition(u'abort', pitetre, bennon) -wf.add_transition(u'start', pitetre, encours) -wf.add_transition(u'end', encours, finie) -commit() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/schema.py --- a/server/test/data/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,311 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, ComputedRelation, - SubjectRelation, RichString, String, Int, Float, - Boolean, Datetime, TZDatetime, Bytes) -from yams.constraints import SizeConstraint -from cubicweb.schema import (WorkflowableEntityType, - RQLConstraint, RQLUniqueConstraint, - RQLVocabularyConstraint, - ERQLExpression, RRQLExpression) -from cubicweb import _ - -class Affaire(WorkflowableEntityType): - __permissions__ = { - 'read': ('managers', - ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), - 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), - 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), - 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), - } - - ref = String(fulltextindexed=True, indexed=True, - constraints=[SizeConstraint(16)]) - sujet = String(fulltextindexed=True, - constraints=[SizeConstraint(256)]) - descr = RichString(fulltextindexed=True, - description=_('more detailed description')) - - duration = Int() - invoiced = Float() - opt_attr = Bytes() - - depends_on = SubjectRelation('Affaire') - require_permission = SubjectRelation('CWPermission') - concerne = SubjectRelation(('Societe', 'Note')) - todo_by = SubjectRelation('Personne', cardinality='?*') - documented_by = SubjectRelation('Card') - - -class Societe(EntityType): - __unique_together__ = [('nom', 'type', 'cp')] - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'add': ('managers', 'users',) - } - - nom = String(maxsize=64, fulltextindexed=True) - web = String(maxsize=128) - type = String(maxsize=128) # attribute in common with Note - tel = Int() - fax = Int() - rncs = String(maxsize=128) - ad1 = String(maxsize=128) - ad2 = String(maxsize=128) - ad3 = String(maxsize=128) - cp = String(maxsize=12) - ville= String(maxsize=32) - - -class Division(Societe): - __specializes_schema__ = True - -class SubDivision(Division): - __specializes_schema__ = True - -class travaille_subdivision(RelationDefinition): - subject = 'Personne' - object = 'SubDivision' - -from cubicweb.schemas.base import CWUser -next(CWUser.get_relations('login')).fulltextindexed = True - -class Note(WorkflowableEntityType): - date = String(maxsize=10) - type = String(vocabulary=[u'todo', u'a', u'b', u'T', u'lalala']) - para = String(maxsize=512, - __permissions__ = { - 'add': ('managers', ERQLExpression('X in_state S, S name "todo"')), - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', ERQLExpression('X in_state S, S name "todo"')), - }) - something = String(maxsize=1, - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': (ERQLExpression('NOT X para NULL'),), - 'update': ('managers', 'owners') - }) - migrated_from = SubjectRelation('Note') - attachment = SubjectRelation('File') - inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*', - constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, ' - 'Y type T, Y inline1 A2, A2 todo_by C', - 'S,Y')]) - todo_by = SubjectRelation('CWUser') - - -class Frozable(EntityType): - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', ERQLExpression('X frozen False'),), - 'delete': ('managers', ERQLExpression('X frozen False'),) - } - name = String() - frozen = 
Boolean(default=False, - __permissions__ = { - 'read': ('managers', 'users'), - 'add': ('managers', 'users'), - 'update': ('managers', 'owners') - }) - - -class Personne(EntityType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), # 'guests' will be removed - 'add': ('managers', 'users'), - 'update': ('managers', 'owners'), - 'delete': ('managers', 'owners') - } - __unique_together__ = [('nom', 'prenom', 'inline2')] - nom = String(fulltextindexed=True, required=True, maxsize=64) - prenom = String(fulltextindexed=True, maxsize=64) - sexe = String(maxsize=1, default='M', fulltextindexed=True) - promo = String(vocabulary=('bon','pasbon')) - titre = String(fulltextindexed=True, maxsize=128) - adel = String(maxsize=128) - ass = String(maxsize=128) - web = String(maxsize=128) - tel = Int() - fax = Int() - datenaiss = Datetime() - tzdatenaiss = TZDatetime() - test = Boolean(__permissions__={ - 'read': ('managers', 'users', 'guests'), - 'add': ('managers',), - 'update': ('managers',), - }) - description = String() - firstname = String(fulltextindexed=True, maxsize=64) - - concerne = SubjectRelation('Affaire') - connait = SubjectRelation('Personne') - inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') - - -class Old(EntityType): - name = String(__permissions__ = { - 'read' : ('managers', 'users', 'guests'), - 'add' : ('managers', 'users', 'guests'), - 'update' : () - }) - - -class Email(EntityType): - subject = String(fulltextindexed=True) - messageid = String(required=True, indexed=True, unique=True) - sender = SubjectRelation('EmailAddress', cardinality='?*') - recipients = SubjectRelation('EmailAddress') - attachment = SubjectRelation('File') - - -class EmailPart(EntityType): - pass - - -class EmailThread(EntityType): - see_also = SubjectRelation('EmailThread') - - -class connait(RelationType): - symmetric = True - -class concerne(RelationType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - -class travaille(RelationDefinition): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - subject = 'Personne' - object = 'Societe' - constraints = [RQLVocabularyConstraint('S owned_by U'), - RQLVocabularyConstraint('S created_by U')] - -class comments(RelationDefinition): - subject = 'Comment' - object = 'Personne' - -class fiche(RelationDefinition): - inlined = True - subject = 'Personne' - object = 'Card' - cardinality = '??' 
- -class multisource_inlined_rel(RelationDefinition): - inlined = True - cardinality = '?*' - subject = ('Card', 'Note') - object = ('Affaire', 'Note') - - -class see_also_1(RelationDefinition): - name = 'see_also' - subject = object = 'Folder' - -class see_also_2(RelationDefinition): - name = 'see_also' - subject = ('Bookmark', 'Note') - object = ('Bookmark', 'Note') - -class evaluee(RelationDefinition): - subject = ('Personne', 'CWUser', 'Societe') - object = ('Note') - constraints = [ - RQLVocabularyConstraint('S created_by U'), - RQLVocabularyConstraint('S owned_by U'), - ] - -class ecrit_par(RelationType): - inlined = True - -class ecrit_par_1(RelationDefinition): - name = 'ecrit_par' - subject = 'Note' - object ='Personne' - cardinality = '?*' - -class ecrit_par_2(RelationDefinition): - name = 'ecrit_par' - subject = 'Note' - object ='CWUser' - cardinality='?*' - - -class copain(RelationDefinition): - subject = object = 'CWUser' - -class tags(RelationDefinition): - subject = 'Tag' - object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire') - -class Folder(EntityType): - """folders are used to classify entities. They may be defined as a tree. - """ - name = String(required=True, indexed=True, internationalizable=True, - maxsize=64) - description = RichString(fulltextindexed=True) - filed_under = SubjectRelation('Folder', description=_('parent folder')) - -class filed_under(RelationDefinition): - subject = ('Note', 'Affaire') - object = 'Folder' - -class require_permission(RelationDefinition): - subject = ('Card', 'Note', 'Personne') - object = 'CWPermission' - -class require_state(RelationDefinition): - subject = 'CWPermission' - object = 'State' - -class personne_composite(RelationDefinition): - subject='Personne' - object='Personne' - composite='subject' - -class personne_inlined(RelationDefinition): - subject='Personne' - object='Personne' - cardinality='?*' - inlined=True - - -class login_user(RelationDefinition): - subject = 'Personne' - object = 'CWUser' - cardinality = '??' - -class ambiguous_inlined(RelationDefinition): - subject = ('Affaire', 'Note') - object = 'CWUser' - inlined = True - cardinality = '?*' - - -class user_login(ComputedRelation): - rule = 'O login_user S' diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/site_cubicweb.py --- a/server/test/data/site_cubicweb.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from logilab.database import FunctionDescr -from logilab.database.sqlite import register_sqlite_pyfunc -from rql.utils import register_function - -class DUMB_SORT(FunctionDescr): - pass - -register_function(DUMB_SORT) -def dumb_sort(something): - return something -register_sqlite_pyfunc(dumb_sort) diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/slapd.conf.in --- a/server/test/data/slapd.conf.in Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -# This is the main slapd configuration file. See slapd.conf(5) for more -# info on the configuration options. - -####################################################################### -# Global Directives: - -# Features to permit -#allow bind_v2 - -# Schema and objectClass definitions -include /etc/ldap/schema/core.schema -include /etc/ldap/schema/cosine.schema -include /etc/ldap/schema/nis.schema -include /etc/ldap/schema/inetorgperson.schema -include /etc/ldap/schema/openldap.schema -include /etc/ldap/schema/misc.schema - -# Where the pid file is put. The init.d script -# will not stop the server if you change this. -pidfile %(apphome)s/test-slapd.pid - -# List of arguments that were passed to the server -argsfile %(apphome)s/slapd.args - -# Read slapd.conf(5) for possible values -loglevel sync -# none - -# Where the dynamically loaded modules are stored -modulepath /usr/lib/ldap -moduleload back_hdb -moduleload back_bdb -moduleload back_monitor - -# The maximum number of entries that is returned for a search operation -sizelimit 500 - -# The tool-threads parameter sets the actual amount of cpu's that is used -# for indexing. -tool-threads 1 - -database bdb - -# The base of your directory in database #1 -suffix "dc=cubicweb,dc=test" - -# rootdn directive for specifying a superuser on the database. This is needed -# for syncrepl. and ldapdelete easyness -rootdn "cn=admin,dc=cubicweb,dc=test" -rootpw "cw" -# Where the database file are physically stored for database #1 -directory "%(testdir)s" - diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/sources_extern --- a/server/test/data/sources_extern Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -[system] -db-driver = sqlite -db-name = tmpdb-extern -db-encoding = UTF-8 diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/data/sources_multi --- a/server/test/data/sources_multi Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -[system] -db-driver = sqlite -adapter = native -db-name = tmpdb-multi -db-encoding = UTF-8 diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/datacomputed/migratedapp/schema.py --- a/server/test/datacomputed/migratedapp/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,66 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from yams.buildobjs import (EntityType, RelationDefinition, ComputedRelation, - Int, Float) - - -class Employee(EntityType): - pass - - -class employees(RelationDefinition): - subject = 'Company' - object = 'Employee' - - -class associates(RelationDefinition): - subject = 'Company' - object = 'Employee' - - -class works_for(ComputedRelation): - rule = 'O employees S, NOT EXISTS (O associates S)' - - -class Company(EntityType): - score = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note NN') - score100 = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note100 NN') - - -class Note(EntityType): - note = Int() - note100 = Int(formula='Any N*100 WHERE X note N') - - -class concerns(RelationDefinition): - subject = 'Note' - object = 'Employee' - - -class whatever(ComputedRelation): - rule = 'S employees E, O associates E' - - -class renamed(ComputedRelation): - rule = 'S employees E, O concerns E' - - -class perm_changes(ComputedRelation): - __permissions__ = {'read': ('managers',)} - rule = 'S employees E, O concerns E' diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/datacomputed/schema.py --- a/server/test/datacomputed/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,65 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from yams.buildobjs import EntityType, RelationDefinition, ComputedRelation, Int, Float - - -class Employee(EntityType): - pass - - -class employees(RelationDefinition): - subject = 'Company' - object = 'Employee' - - -class associates(RelationDefinition): - subject = 'Company' - object = 'Employee' - - -class Company(EntityType): - score100 = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note100 NN') - - -class Note(EntityType): - note = Int() - note20 = Int(formula='Any N*20 WHERE X note N') - note100 = Int(formula='Any N*20 WHERE X note N') - - -class concerns(RelationDefinition): - subject = 'Note' - object = 'Employee' - - -class notes(ComputedRelation): - rule = 'S employees E, O concerns E' - - -class whatever(ComputedRelation): - rule = 'S employees E, O concerns E' - - -class to_be_renamed(ComputedRelation): - rule = 'S employees E, O concerns E' - - -class perm_changes(ComputedRelation): - __permissions__ = {'read': ('managers', 'users')} - rule = 'S employees E, O concerns E' diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/requirements.txt --- a/server/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -psycopg2 -ldap3 -cubicweb-basket -cubicweb-card -cubicweb-comment -cubicweb-file -cubicweb-localperms -cubicweb-tag diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_checkintegrity.py --- a/server/test/unittest_checkintegrity.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -import sys - -from six import PY2 -if PY2: - from StringIO import StringIO -else: - from io import StringIO - -from logilab.common.testlib import TestCase, unittest_main -from cubicweb.devtools import get_test_db_handler, TestServerConfiguration - - -from cubicweb.server.checkintegrity import check, reindex_entities - -class CheckIntegrityTC(TestCase): - - def setUp(self): - handler = get_test_db_handler(TestServerConfiguration(apphome=self.datadir)) - handler.build_db_cache() - self.repo, _cnx = handler.get_repo_and_cnx() - sys.stderr = sys.stdout = StringIO() - - def tearDown(self): - sys.stderr = sys.__stderr__ - sys.stdout = sys.__stdout__ - self.repo.shutdown() - - def test_checks(self): - with self.repo.internal_cnx() as cnx: - check(self.repo, cnx, ('entities', 'relations', 'text_index', 'metadata'), - reindex=False, fix=True, withpb=False) - - def test_reindex_all(self): - with self.repo.internal_cnx() as cnx: - cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') - cnx.commit() - self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) - reindex_entities(self.repo.schema, cnx, withpb=False) - self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) - - def test_reindex_etype(self): - with self.repo.internal_cnx() as cnx: - cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') - cnx.execute('INSERT Affaire X: X ref "toto"') - cnx.commit() - reindex_entities(self.repo.schema, cnx, withpb=False, - etypes=('Personne',)) - self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) - self.assertTrue(cnx.execute('Any X WHERE X has_text "toto"')) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_datafeed.py --- a/server/test/unittest_datafeed.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,169 +0,0 @@ -# coding: utf-8 -# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from datetime import timedelta -from contextlib import contextmanager - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server.sources import datafeed - - -class DataFeedTC(CubicWebTC): - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - with self.base_parser(cnx): - cnx.create_entity('CWSource', name=u'ô myfeed', type=u'datafeed', - parser=u'testparser', url=u'ignored', - config=u'synchronization-interval=1min') - cnx.commit() - - @contextmanager - def base_parser(self, session): - class AParser(datafeed.DataFeedParser): - __regid__ = 'testparser' - def process(self, url, raise_on_error=False): - entity = self.extid2entity('http://www.cubicweb.org/', 'Card', - item={'title': u'cubicweb.org', - 'content': u'the cw web site'}, - raise_on_error=raise_on_error) - if not self.created_during_pull(entity): - self.notify_updated(entity) - def before_entity_copy(self, entity, sourceparams): - entity.cw_edited.update(sourceparams['item']) - - with self.temporary_appobjects(AParser): - if u'ô myfeed' in self.repo.sources_by_uri: - yield self.repo.sources_by_uri[u'ô myfeed']._get_parser(session) - else: - yield - - def test(self): - self.assertIn(u'ô myfeed', self.repo.sources_by_uri) - dfsource = self.repo.sources_by_uri[u'ô myfeed'] - self.assertNotIn('use_cwuri_as_url', dfsource.__dict__) - self.assertEqual({'type': u'datafeed', 'uri': u'ô myfeed', 'use-cwuri-as-url': True}, - dfsource.public_config) - self.assertEqual(dfsource.use_cwuri_as_url, True) - self.assertEqual(dfsource.latest_retrieval, None) - self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60)) - self.assertFalse(dfsource.fresh()) - # ensure source's logger name has been unormalized - self.assertEqual(dfsource.info.__self__.name, 'cubicweb.sources.o myfeed') - - with self.repo.internal_cnx() as cnx: - with self.base_parser(cnx): - stats = dfsource.pull_data(cnx, force=True) - cnx.commit() - # test import stats - self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) - self.assertEqual(len(stats['created']), 1) - entity = cnx.execute('Card X').get_entity(0, 0) - self.assertIn(entity.eid, stats['created']) - self.assertEqual(stats['updated'], set()) - # test imported entities - self.assertEqual(entity.title, 'cubicweb.org') - self.assertEqual(entity.content, 'the cw web site') - self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') - self.assertEqual(entity.cw_source[0].name, u'ô myfeed') - self.assertEqual(entity.cw_metainformation(), - {'type': 'Card', - 'source': {'uri': u'ô myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, - 'extid': b'http://www.cubicweb.org/'} - ) - self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/') - # test repo cache keys - self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', b'http://www.cubicweb.org/', u'ô myfeed')) - self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], - entity.eid) - # test repull - stats = dfsource.pull_data(cnx, force=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(stats['updated'], set((entity.eid,))) - # test repull with caches reseted - self.repo._type_source_cache.clear() - self.repo._extid_cache.clear() - stats = dfsource.pull_data(cnx, force=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(stats['updated'], set((entity.eid,))) - self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', b'http://www.cubicweb.org/', u'ô myfeed')) - self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], - 
entity.eid) - - self.assertEqual(dfsource.source_cwuris(cnx), - {b'http://www.cubicweb.org/': (entity.eid, 'Card')}) - self.assertTrue(dfsource.latest_retrieval) - self.assertTrue(dfsource.fresh()) - - # test_rename_source - with self.admin_access.repo_cnx() as cnx: - cnx.entity_from_eid(dfsource.eid).cw_set(name=u"myrenamedfeed") - cnx.commit() - entity = cnx.execute('Card X').get_entity(0, 0) - self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') - self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed') - self.assertEqual(entity.cw_metainformation(), - {'type': 'Card', - 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, - 'extid': b'http://www.cubicweb.org/'} - ) - self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', b'http://www.cubicweb.org/', 'myrenamedfeed')) - self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], - entity.eid) - - # test_delete_source - cnx.execute('DELETE CWSource S WHERE S name "myrenamedfeed"') - cnx.commit() - self.assertFalse(cnx.execute('Card X WHERE X title "cubicweb.org"')) - self.assertFalse(cnx.execute('Any X WHERE X has_text "cubicweb.org"')) - - def test_parser_retrieve_url_local(self): - with self.admin_access.repo_cnx() as cnx: - with self.base_parser(cnx) as parser: - value = parser.retrieve_url('a string') - self.assertEqual(200, value.getcode()) - self.assertEqual('a string', value.geturl()) - - def test_update_url(self): - dfsource = self.repo.sources_by_uri[u'ô myfeed'] - with self.admin_access.repo_cnx() as cnx: - cnx.entity_from_eid(dfsource.eid).cw_set(url=u"http://pouet.com\nhttp://pouet.org") - self.assertEqual(dfsource.urls, [u'ignored']) - cnx.commit() - self.assertEqual(dfsource.urls, [u"http://pouet.com", u"http://pouet.org"]) - - -class DataFeedConfigTC(CubicWebTC): - - def test_use_cwuri_as_url_override(self): - with self.admin_access.client_cnx() as cnx: - cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', - parser=u'testparser', url=u'ignored', - config=u'use-cwuri-as-url=no') - cnx.commit() - dfsource = self.repo.sources_by_uri['myfeed'] - self.assertEqual(dfsource.use_cwuri_as_url, False) - self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': False}, - dfsource.public_config) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_hook.py --- a/server/test/unittest_hook.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit/functional tests for cubicweb.server.hook""" - -from logilab.common.testlib import TestCase, unittest_main, mock_object - -from cubicweb.devtools import TestServerConfiguration, fake -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server import hook -from cubicweb.hooks import integrity, syncschema - -class OperationsTC(CubicWebTC): - - def setUp(self): - CubicWebTC.setUp(self) - self.hm = self.repo.hm - - def test_late_operation(self): - with self.admin_access.repo_cnx() as cnx: - l1 = hook.LateOperation(cnx) - l2 = hook.LateOperation(cnx) - l3 = hook.Operation(cnx) - self.assertEqual(cnx.pending_operations, [l3, l1, l2]) - - def test_single_last_operation(self): - with self.admin_access.repo_cnx() as cnx: - l0 = hook.SingleLastOperation(cnx) - l1 = hook.LateOperation(cnx) - l2 = hook.LateOperation(cnx) - l3 = hook.Operation(cnx) - self.assertEqual(cnx.pending_operations, [l3, l1, l2, l0]) - l4 = hook.SingleLastOperation(cnx) - self.assertEqual(cnx.pending_operations, [l3, l1, l2, l4]) - - def test_global_operation_order(self): - with self.admin_access.repo_cnx() as cnx: - op1 = syncschema.RDefDelOp(cnx) - op2 = integrity._CheckORelationOp(cnx) - op3 = syncschema.MemSchemaNotifyChanges(cnx) - self.assertEqual([op1, op2, op3], cnx.pending_operations) - -class HookCalled(Exception): pass - -config = TestServerConfiguration('data') -config.bootstrap_cubes() -schema = config.load_schema() - -def tearDownModule(*args): - global config, schema - del config, schema - -class AddAnyHook(hook.Hook): - __regid__ = 'addany' - category = 'cat1' - events = ('before_add_entity',) - def __call__(self): - raise HookCalled() - - -class HooksRegistryTC(TestCase): - - def setUp(self): - """ called before each test from this class """ - self.vreg = mock_object(config=config, schema=schema) - self.o = hook.HooksRegistry(self.vreg) - - def test_register_bad_hook1(self): - class _Hook(hook.Hook): - events = ('before_add_entiti',) - with self.assertRaises(Exception) as cm: - self.o.register(_Hook) - self.assertEqual(str(cm.exception), 'bad event before_add_entiti on %s._Hook' % __name__) - - def test_register_bad_hook2(self): - class _Hook(hook.Hook): - events = None - with self.assertRaises(Exception) as cm: - self.o.register(_Hook) - self.assertEqual(str(cm.exception), 'bad .events attribute None on %s._Hook' % __name__) - - def test_register_bad_hook3(self): - class _Hook(hook.Hook): - events = 'before_add_entity' - with self.assertRaises(Exception) as cm: - self.o.register(_Hook) - self.assertEqual(str(cm.exception), 'bad event b on %s._Hook' % __name__) - - def test_call_hook(self): - self.o.register(AddAnyHook) - dis = set() - cw = fake.FakeSession() - cw.is_hook_activated = lambda cls: cls.category not in dis - self.assertRaises(HookCalled, - self.o.call_hooks, 'before_add_entity', cw) - dis.add('cat1') - self.o.call_hooks('before_add_entity', cw) # disabled hooks category, not called - dis.remove('cat1') - self.assertRaises(HookCalled, - self.o.call_hooks, 'before_add_entity', cw) - self.o.unregister(AddAnyHook) - self.o.call_hooks('before_add_entity', cw) # nothing to call - - -class SystemHooksTC(CubicWebTC): - - def test_startup_shutdown(self): - import hooks # cubicweb/server/test/data/hooks.py - self.assertEqual(hooks.CALLED_EVENTS['server_startup'], True) - # don't actually call repository.shutdown ! 
- self.repo.hm.call_hooks('server_shutdown', repo=self.repo) - self.assertEqual(hooks.CALLED_EVENTS['server_shutdown'], True) - - def test_session_open_close(self): - import hooks # cubicweb/server/test/data/hooks.py - anonaccess = self.new_access('anon') - with anonaccess.repo_cnx() as cnx: - self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon') - anonaccess.close() - self.assertEqual(hooks.CALLED_EVENTS['session_close'], 'anon') - - -# class RelationHookTC(TestCase): -# """testcase for relation hooks grouping""" -# def setUp(self): -# """ called before each test from this class """ -# self.o = HooksManager(schema) -# self.called = [] - -# def test_before_add_relation(self): -# """make sure before_xxx_relation hooks are called directly""" -# self.o.register(self._before_relation_hook, -# 'before_add_relation', 'concerne') -# self.assertEqual(self.called, []) -# self.o.call_hooks('before_add_relation', 'concerne', 'USER', -# 1, 'concerne', 2) -# self.assertEqual(self.called, [(1, 'concerne', 2)]) - -# def test_after_add_relation(self): -# """make sure after_xxx_relation hooks are deferred""" -# self.o.register(self._after_relation_hook, -# 'after_add_relation', 'concerne') -# self.assertEqual(self.called, []) -# self.o.call_hooks('after_add_relation', 'concerne', 'USER', -# 1, 'concerne', 2) -# self.o.call_hooks('after_add_relation', 'concerne', 'USER', -# 3, 'concerne', 4) -# self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) - -# def test_before_delete_relation(self): -# """make sure before_xxx_relation hooks are called directly""" -# self.o.register(self._before_relation_hook, -# 'before_delete_relation', 'concerne') -# self.assertEqual(self.called, []) -# self.o.call_hooks('before_delete_relation', 'concerne', 'USER', -# 1, 'concerne', 2) -# self.assertEqual(self.called, [(1, 'concerne', 2)]) - -# def test_after_delete_relation(self): -# """make sure after_xxx_relation hooks are deferred""" -# self.o.register(self._after_relation_hook, -# 'after_delete_relation', 'concerne') -# self.o.call_hooks('after_delete_relation', 'concerne', 'USER', -# 1, 'concerne', 2) -# self.o.call_hooks('after_delete_relation', 'concerne', 'USER', -# 3, 'concerne', 4) -# self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) - - -# def _before_relation_hook(self, cnxset, subject, r_type, object): -# self.called.append((subject, r_type, object)) - -# def _after_relation_hook(self, cnxset, subject, r_type, object): -# self.called.append((subject, r_type, object)) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_ldapsource.py --- a/server/test/unittest_ldapsource.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,491 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb.server.sources.ldapfeed unit and functional tests - -Those tests expect to have slapd, python-ldap3 and ldapscripts packages installed. -""" -from __future__ import print_function - -import os -import sys -import shutil -import time -import subprocess -import tempfile -import unittest -from os.path import join - -from six import string_types -from six.moves import range - -from cubicweb import AuthenticationError -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.httptest import get_available_port - - -CONFIG_LDAPFEED = u''' -user-base-dn=ou=People,dc=cubicweb,dc=test -group-base-dn=ou=Group,dc=cubicweb,dc=test -user-attrs-map=uid=login,mail=email,userPassword=upassword -group-attrs-map=cn=name,memberUid=member -''' -CONFIG_LDAPUSER = u''' -user-base-dn=ou=People,dc=cubicweb,dc=test -user-attrs-map=uid=login,mail=email,userPassword=upassword -''' - -URL = None - - -def create_slapd_configuration(cls): - global URL - slapddir = tempfile.mkdtemp('cw-unittest-ldap') - config = cls.config - slapdconf = join(config.apphome, "slapd.conf") - confin = open(join(config.apphome, "slapd.conf.in")).read() - confstream = open(slapdconf, 'w') - confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir}) - confstream.close() - # fill ldap server with some data - ldiffile = join(config.apphome, "ldap_test.ldif") - config.info('Initing ldap database') - cmdline = ['/usr/sbin/slapadd', '-f', slapdconf, '-l', ldiffile, '-c'] - PIPE = subprocess.PIPE - slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE) - stdout, stderr = slapproc.communicate() - if slapproc.returncode: - print('slapadd returned with status: %s' - % slapproc.returncode, file=sys.stderr) - sys.stdout.write(stdout) - sys.stderr.write(stderr) - - # ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f') - port = get_available_port(range(9000, 9100)) - host = 'localhost:%s' % port - ldapuri = 'ldap://%s' % host - cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"] - config.info('Starting slapd:', ' '.join(cmdline)) - PIPE = subprocess.PIPE - cls.slapd_process = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE) - time.sleep(0.2) - if cls.slapd_process.poll() is None: - config.info('slapd started with pid %s', cls.slapd_process.pid) - else: - raise EnvironmentError('Cannot start slapd with cmdline="%s" (from directory "%s")' % - (" ".join(cmdline), os.getcwd())) - URL = u'ldap://%s' % host - return slapddir - - -def terminate_slapd(cls): - config = cls.config - if cls.slapd_process and cls.slapd_process.returncode is None: - config.info('terminating slapd') - if hasattr(cls.slapd_process, 'terminate'): - cls.slapd_process.terminate() - else: - import signal - os.kill(cls.slapd_process.pid, signal.SIGTERM) - stdout, stderr = cls.slapd_process.communicate() - if cls.slapd_process.returncode: - print('slapd returned with status: %s' - % cls.slapd_process.returncode, file=sys.stderr) - sys.stdout.write(stdout) - sys.stderr.write(stderr) - config.info('DONE') - - -class LDAPFeedTestBase(CubicWebTC): - test_db_id = 'ldap-feed' - loglevel = 'ERROR' - - @classmethod - def setUpClass(cls): - if not os.path.exists('/usr/sbin/slapd'): - raise unittest.SkipTest('slapd not found') - from cubicweb.cwctl import init_cmdline_log_threshold - init_cmdline_log_threshold(cls.config, cls.loglevel) - cls._tmpdir = create_slapd_configuration(cls) - - 
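Aside on the configuration strings above: user-attrs-map and group-attrs-map in CONFIG_LDAPFEED are comma-separated pairs of the form ldap-attribute=cubicweb-attribute. The snippet below is only an illustrative, self-contained sketch of how such a string can be decoded into a dict, assuming the plain key=value,key=value syntax shown above; it is not the actual ldapfeed parser.

    def parse_attrs_map(option_value):
        """Turn 'uid=login,mail=email' into {'uid': 'login', 'mail': 'email'}."""
        mapping = {}
        for pair in option_value.split(','):
            ldap_attr, cw_attr = pair.split('=', 1)
            mapping[ldap_attr.strip()] = cw_attr.strip()
        return mapping

    assert parse_attrs_map(u'uid=login,mail=email,userPassword=upassword') == {
        'uid': 'login', 'mail': 'email', 'userPassword': 'upassword'}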
@classmethod - def tearDownClass(cls): - terminate_slapd(cls) - try: - shutil.rmtree(cls._tmpdir) - except: - pass - - @classmethod - def pre_setup_database(cls, cnx, config): - cnx.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed', - url=URL, config=CONFIG_LDAPFEED) - - cnx.commit() - return cls.pull(cnx) - - @classmethod - def pull(self, cnx): - lfsource = cnx.repo.sources_by_uri['ldap'] - stats = lfsource.pull_data(cnx, force=True, raise_on_error=True) - cnx.commit() - return stats - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('DELETE Any E WHERE E cw_source S, S name "ldap"') - cnx.execute('SET S config %(conf)s, S url %(url)s ' - 'WHERE S is CWSource, S name "ldap"', - {"conf": CONFIG_LDAPFEED, 'url': URL}) - cnx.commit() - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - - def add_ldap_entry(self, dn, mods): - """ - add an LDAP entity - """ - modcmd = ['dn: %s' % dn, 'changetype: add'] - for key, values in mods.items(): - if isinstance(values, string_types): - values = [values] - for value in values: - modcmd.append('%s: %s' % (key, value)) - self._ldapmodify(modcmd) - - def delete_ldap_entry(self, dn): - """ - delete an LDAP entity - """ - modcmd = ['dn: %s' % dn, 'changetype: delete'] - self._ldapmodify(modcmd) - - def update_ldap_entry(self, dn, mods): - """ - modify one or more attributes of an LDAP entity - """ - modcmd = ['dn: %s' % dn, 'changetype: modify'] - for (kind, key), values in mods.items(): - modcmd.append('%s: %s' % (kind, key)) - if isinstance(values, string_types): - values = [values] - for value in values: - modcmd.append('%s: %s' % (key, value)) - modcmd.append('-') - self._ldapmodify(modcmd) - - def _ldapmodify(self, modcmd): - uri = self.repo.sources_by_uri['ldap'].urls[0] - updatecmd = ['ldapmodify', '-H', uri, '-v', '-x', '-D', - 'cn=admin,dc=cubicweb,dc=test', '-w', 'cw'] - PIPE = subprocess.PIPE - p = subprocess.Popen(updatecmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) - p.stdin.write('\n'.join(modcmd).encode('ascii')) - p.stdin.close() - if p.wait(): - raise RuntimeError("ldap update failed: %s" % ('\n'.join(p.stderr.readlines()))) - - -class CheckWrongGroup(LDAPFeedTestBase): - """ - A testcase for situations where the default group for CWUser - created from LDAP is wrongly configured. - """ - - def test_wrong_group(self): - with self.admin_access.repo_cnx() as cnx: - source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0) - config = source.repo_source.check_config(source) - # inject a bogus group here, along with at least a valid one - config['user-default-group'] = ('thisgroupdoesnotexists', 'users') - source.repo_source.update_config(source, config) - cnx.commit() - # here we emitted an error log entry - source.repo_source.pull_data(cnx, force=True, raise_on_error=True) - cnx.commit() - - -class LDAPFeedUserTC(LDAPFeedTestBase): - """ - A testcase for CWUser support in ldapfeed (basic tests and authentication). 
- """ - - def assertMetadata(self, entity): - self.assertTrue(entity.creation_date) - self.assertTrue(entity.modification_date) - - def test_authenticate(self): - source = self.repo.sources_by_uri['ldap'] - with self.admin_access.repo_cnx() as cnx: - # ensure we won't be logged against - self.assertRaises(AuthenticationError, - source.authenticate, cnx, 'toto', 'toto') - self.assertTrue(source.authenticate(cnx, 'syt', 'syt')) - sessionid = self.repo.connect('syt', password='syt') - self.assertTrue(sessionid) - self.repo.close(sessionid) - - def test_base(self): - with self.admin_access.repo_cnx() as cnx: - # check a known one - rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) - e = rset.get_entity(0, 0) - self.assertEqual(e.login, 'syt') - e.complete() - self.assertMetadata(e) - self.assertEqual(e.firstname, None) - self.assertEqual(e.surname, None) - self.assertIn('users', set(g.name for g in e.in_group)) - self.assertEqual(e.owned_by[0].login, 'syt') - self.assertEqual(e.created_by, ()) - addresses = [pe.address for pe in e.use_email] - addresses.sort() - self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], - addresses) - self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr', - 'syt@logilab.fr']) - # email content should be indexed on the user - rset = cnx.execute('CWUser X WHERE X has_text "thenault"') - self.assertEqual(rset.rows, [[e.eid]]) - - def test_copy_to_system_source(self): - "make sure we can 'convert' an LDAP user into a system one" - with self.admin_access.repo_cnx() as cnx: - source = self.repo.sources_by_uri['ldap'] - eid = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0] - cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid}) - cnx.commit() - source.reset_caches() - rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) - self.assertEqual(len(rset), 1) - e = rset.get_entity(0, 0) - self.assertEqual(e.eid, eid) - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', - 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'CWUser', - 'extid': None}) - self.assertEqual(e.cw_source[0].name, 'system') - self.assertTrue(e.creation_date) - self.assertTrue(e.modification_date) - source.pull_data(cnx) - rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) - self.assertEqual(len(rset), 1) - self.assertTrue(self.repo.system_source.authenticate(cnx, 'syt', password='syt')) - # make sure the pull from ldap have not "reverted" user as a ldap-feed user - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', - 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'CWUser', - 'extid': None}) - # and that the password stored in the system source is not empty or so - user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) - user.cw_clear_all_caches() - cu = cnx.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';") - pwd = cu.fetchall()[0][0] - self.assertIsNotNone(pwd) - self.assertTrue(str(pwd)) - - -class LDAPFeedUserDeletionTC(LDAPFeedTestBase): - """ - A testcase for situations where users are deleted from or - unavailable in the LDAP database. 
- """ - - def test_a_filter_inactivate(self): - """ filtered out people should be deactivated, unable to authenticate """ - with self.admin_access.repo_cnx() as cnx: - source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0) - config = source.repo_source.check_config(source) - # filter with adim's phone number - config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109') - source.repo_source.update_config(source, config) - cnx.commit() - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt') - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'deactivated') - self.assertEqual(cnx.execute('Any N WHERE U login "adim", ' - 'U in_state S, S name N').rows[0][0], - 'activated') - # unfilter, syt should be activated again - config['user-filter'] = u'' - source.repo_source.update_config(source, config) - cnx.commit() - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'activated') - self.assertEqual(cnx.execute('Any N WHERE U login "adim", ' - 'U in_state S, S name N').rows[0][0], - 'activated') - - def test_delete(self): - """ delete syt, pull, check deactivation, repull, - read syt, pull, check activation - """ - self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test') - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt') - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'deactivated') - with self.repo.internal_cnx() as cnx: - # check that it doesn't choke - self.pull(cnx) - # reinsert syt - self.add_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test', - {'objectClass': ['OpenLDAPperson', 'posixAccount', 'top', - 'shadowAccount'], - 'cn': 'Sylvain Thenault', - 'sn': 'Thenault', - 'gidNumber': '1004', - 'uid': 'syt', - 'homeDirectory': '/home/syt', - 'shadowFlag': '134538764', - 'uidNumber': '1004', - 'givenName': 'Sylvain', - 'telephoneNumber': '106', - 'displayName': 'sthenault', - 'gecos': 'Sylvain Thenault', - 'mail': ['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], - 'userPassword': 'syt', - }) - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'activated') - - def test_reactivate_deleted(self): - # test reactivating BY HAND the user isn't enough to - # authenticate, as the native source refuse to authenticate - # user from other sources - self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test') - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - with self.admin_access.repo_cnx() as cnx: - # reactivate user (which source is still ldap-feed) - user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) - user.cw_adapt_to('IWorkflowable').fire_transition('activate') - cnx.commit() - with self.assertRaises(AuthenticationError): - self.repo.connect('syt', password='syt') - - # ok now let's try to make it a system user - cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid}) - cnx.commit() - # and that we can now authenticate again - 
self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='toto') - sessionid = self.repo.connect('syt', password='syt') - self.assertTrue(sessionid) - self.repo.close(sessionid) - - -class LDAPFeedGroupTC(LDAPFeedTestBase): - """ - A testcase for group support in ldapfeed. - """ - - def test_groups_exist(self): - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('CWGroup X WHERE X name "dir"') - self.assertEqual(len(rset), 1) - - rset = cnx.execute('CWGroup X WHERE X cw_source S, S name "ldap"') - self.assertEqual(len(rset), 2) - - def test_group_deleted(self): - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('CWGroup X WHERE X name "dir"') - self.assertEqual(len(rset), 1) - - def test_in_group(self): - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'dir'}) - dirgroup = rset.get_entity(0, 0) - self.assertEqual(set(['syt', 'adim']), - set([u.login for u in dirgroup.reverse_in_group])) - rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'}) - logilabgroup = rset.get_entity(0, 0) - self.assertEqual(set(['adim']), - set([u.login for u in logilabgroup.reverse_in_group])) - - def test_group_member_added(self): - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], 'adim') - - try: - self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', - {('add', 'memberUid'): ['syt']}) - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 2) - members = set([u[0] for u in rset]) - self.assertEqual(set(['adim', 'syt']), members) - - finally: - # back to normal ldap setup - self.tearDownClass() - self.setUpClass() - - def test_group_member_deleted(self): - with self.repo.internal_cnx() as cnx: - self.pull(cnx) # ensure we are sync'ed - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], 'adim') - - try: - self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', - {('delete', 'memberUid'): ['adim']}) - with self.repo.internal_cnx() as cnx: - self.pull(cnx) - - with self.admin_access.repo_cnx() as cnx: - rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 0, rset.rows) - finally: - # back to normal ldap setup - self.tearDownClass() - self.setUpClass() - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,933 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for module cubicweb.server.migractions""" - -from datetime import date -import os, os.path as osp -from contextlib import contextmanager - -from logilab.common.testlib import unittest_main, Tags, tag -from logilab.common import tempattr - -from yams.constraints import UniqueConstraint - -from cubicweb import ConfigurationError, ValidationError, ExecutionError -from cubicweb.devtools import startpgcluster, stoppgcluster -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.migractions import ServerMigrationHelper - -import cubicweb.devtools - - -HERE = osp.dirname(osp.abspath(__file__)) - - -def setUpModule(): - startpgcluster(__file__) - - -migrschema = None -def tearDownModule(*args): - global migrschema - del migrschema - if hasattr(MigrationCommandsTC, 'origschema'): - del MigrationCommandsTC.origschema - if hasattr(MigrationCommandsComputedTC, 'origschema'): - del MigrationCommandsComputedTC.origschema - stoppgcluster(__file__) - - -class MigrationConfig(cubicweb.devtools.TestServerConfiguration): - default_sources = cubicweb.devtools.DEFAULT_PSQL_SOURCES - CUBES_PATH = cubicweb.devtools.TestServerConfiguration.CUBES_PATH + [ - osp.join(HERE, 'data-migractions', 'cubes')] - - -class MigrationTC(CubicWebTC): - - appid = 'data-migractions' - - configcls = MigrationConfig - - tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions')) - - def _init_repo(self): - super(MigrationTC, self)._init_repo() - # we have to read schema from the database to get eid for schema entities - self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) - # hack to read the schema from data/migrschema - config = self.config - config.appid = osp.join(self.appid, 'migratedapp') - config._apphome = osp.join(HERE, config.appid) - global migrschema - migrschema = config.load_schema() - config.appid = self.appid - config._apphome = osp.join(HERE, self.appid) - - def setUp(self): - self.configcls.cls_adjust_sys_path() - super(MigrationTC, self).setUp() - - def tearDown(self): - super(MigrationTC, self).tearDown() - self.repo.vreg['etypes'].clear_caches() - - @contextmanager - def mh(self): - with self.admin_access.repo_cnx() as cnx: - yield cnx, ServerMigrationHelper(self.repo.config, migrschema, - repo=self.repo, cnx=cnx, - interactive=False) - - def table_sql(self, mh, tablename): - result = mh.sqlexec("SELECT table_name FROM information_schema.tables WHERE LOWER(table_name)=%(table)s", - {'table': tablename.lower()}) - if result: - return result[0][0] - return None # no such table - - def table_schema(self, mh, tablename): - result = mh.sqlexec("SELECT column_name, data_type, character_maximum_length FROM information_schema.columns " - "WHERE LOWER(table_name) = %(table)s", {'table': tablename.lower()}) - assert result, 'no table %s' % tablename - return dict((x[0], (x[1], x[2])) for x in 
result) - - -class MigrationCommandsTC(MigrationTC): - - def _init_repo(self): - super(MigrationCommandsTC, self)._init_repo() - assert 'Folder' in migrschema - - def test_add_attribute_bool(self): - with self.mh() as (cnx, mh): - self.assertNotIn('yesno', self.schema) - cnx.create_entity('Note') - cnx.commit() - mh.cmd_add_attribute('Note', 'yesno') - self.assertIn('yesno', self.schema) - self.assertEqual(self.schema['yesno'].subjects(), ('Note',)) - self.assertEqual(self.schema['yesno'].objects(), ('Boolean',)) - self.assertEqual(self.schema['Note'].default('yesno'), False) - # test default value set on existing entities - note = cnx.execute('Note X').get_entity(0, 0) - self.assertEqual(note.yesno, False) - # test default value set for next entities - self.assertEqual(cnx.create_entity('Note').yesno, False) - - def test_add_attribute_int(self): - with self.mh() as (cnx, mh): - self.assertNotIn('whatever', self.schema) - cnx.create_entity('Note') - cnx.commit() - orderdict = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' - 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) - mh.cmd_add_attribute('Note', 'whatever') - self.assertIn('whatever', self.schema) - self.assertEqual(self.schema['whatever'].subjects(), ('Note',)) - self.assertEqual(self.schema['whatever'].objects(), ('Int',)) - self.assertEqual(self.schema['Note'].default('whatever'), 0) - # test default value set on existing entities - note = cnx.execute('Note X').get_entity(0, 0) - self.assertIsInstance(note.whatever, int) - self.assertEqual(note.whatever, 0) - # test default value set for next entities - self.assertEqual(cnx.create_entity('Note').whatever, 0) - # test attribute order - orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' - 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) - whateverorder = migrschema['whatever'].rdef('Note', 'Int').order - for k, v in orderdict.items(): - if v >= whateverorder: - orderdict[k] = v+1 - orderdict['whatever'] = whateverorder - self.assertDictEqual(orderdict, orderdict2) - #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()], - # ['modification_date', 'creation_date', 'owned_by', - # 'eid', 'ecrit_par', 'inline1', 'date', 'type', - # 'whatever', 'date', 'in_basket']) - # NB: commit instead of rollback make following test fail with py2.5 - # this sounds like a pysqlite/2.5 bug (the same eid is affected to - # two different entities) - - def test_add_attribute_varchar(self): - with self.mh() as (cnx, mh): - self.assertNotIn('whatever', self.schema) - cnx.create_entity('Note') - cnx.commit() - self.assertNotIn('shortpara', self.schema) - mh.cmd_add_attribute('Note', 'shortpara') - self.assertIn('shortpara', self.schema) - self.assertEqual(self.schema['shortpara'].subjects(), ('Note', )) - self.assertEqual(self.schema['shortpara'].objects(), ('String', )) - # test created column is actually a varchar(64) - fields = self.table_schema(mh, '%sNote' % SQL_PREFIX) - self.assertEqual(fields['%sshortpara' % SQL_PREFIX], ('character varying', 64)) - # test default value set on existing entities - self.assertEqual(cnx.execute('Note X').get_entity(0, 0).shortpara, 'hop') - # test default value set for next entities - self.assertEqual(cnx.create_entity('Note').shortpara, 'hop') - - def test_add_datetime_with_default_value_attribute(self): - with self.mh() as (cnx, mh): - self.assertNotIn('mydate', self.schema) - self.assertNotIn('oldstyledefaultdate', self.schema) - self.assertNotIn('newstyledefaultdate', 
self.schema) - mh.cmd_add_attribute('Note', 'mydate') - mh.cmd_add_attribute('Note', 'oldstyledefaultdate') - mh.cmd_add_attribute('Note', 'newstyledefaultdate') - self.assertIn('mydate', self.schema) - self.assertIn('oldstyledefaultdate', self.schema) - self.assertIn('newstyledefaultdate', self.schema) - self.assertEqual(self.schema['mydate'].subjects(), ('Note', )) - self.assertEqual(self.schema['mydate'].objects(), ('Date', )) - testdate = date(2005, 12, 13) - eid1 = mh.rqlexec('INSERT Note N')[0][0] - eid2 = mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] - d1 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] - d2 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] - d3 = mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0] - d4 = mh.rqlexec('Any D WHERE X eid %(x)s, X newstyledefaultdate D', {'x': eid1})[0][0] - self.assertEqual(d1, date.today()) - self.assertEqual(d2, testdate) - myfavoritedate = date(2013, 1, 1) - self.assertEqual(d3, myfavoritedate) - self.assertEqual(d4, myfavoritedate) - - def test_drop_chosen_constraints_ctxmanager(self): - with self.mh() as (cnx, mh): - with mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint): - mh.cmd_add_attribute('Note', 'unique_id') - # make sure the maxsize constraint is not dropped - self.assertRaises(ValidationError, - mh.rqlexec, - 'INSERT Note N: N unique_id "xyz"') - mh.rollback() - # make sure the unique constraint is dropped - mh.rqlexec('INSERT Note N: N unique_id "x"') - mh.rqlexec('INSERT Note N: N unique_id "x"') - mh.rqlexec('DELETE Note N') - - def test_drop_required_ctxmanager(self): - with self.mh() as (cnx, mh): - with mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None, - droprequired=True): - mh.cmd_add_attribute('Note', 'unique_id') - mh.rqlexec('INSERT Note N') - mh.rqlexec('SET N unique_id "x"') - # make sure the required=True was restored - self.assertRaises(ValidationError, mh.rqlexec, 'INSERT Note N') - mh.rollback() - - def test_rename_attribute(self): - with self.mh() as (cnx, mh): - self.assertNotIn('civility', self.schema) - eid1 = mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0] - eid2 = mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0] - mh.cmd_rename_attribute('Personne', 'sexe', 'civility') - self.assertNotIn('sexe', self.schema) - self.assertIn('civility', self.schema) - # test data has been backported - c1 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0] - self.assertEqual(c1, 'M') - c2 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0] - self.assertEqual(c2, None) - - def test_workflow_actions(self): - with self.mh() as (cnx, mh): - wf = mh.cmd_add_workflow(u'foo', ('Personne', 'Email'), - ensure_workflowable=False) - for etype in ('Personne', 'Email'): - s1 = mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' % - etype)[0][0] - self.assertEqual(s1, "foo") - s1 = mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' % - etype)[0][0] - self.assertEqual(s1, "foo") - - def test_add_entity_type(self): - with self.mh() as (cnx, mh): - self.assertNotIn('Folder2', self.schema) - self.assertNotIn('filed_under2', self.schema) - mh.cmd_add_entity_type('Folder2') - self.assertIn('Folder2', self.schema) - self.assertIn('Old', self.schema) - self.assertTrue(cnx.execute('CWEType X WHERE X name "Folder2"')) - self.assertIn('filed_under2', self.schema) - self.assertTrue(cnx.execute('CWRType X WHERE X name 
"filed_under2"')) - self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()), - ['created_by', 'creation_date', 'cw_source', 'cwuri', - 'description', 'description_format', - 'eid', - 'filed_under2', 'has_text', - 'identity', 'in_basket', 'is', 'is_instance_of', - 'modification_date', 'name', 'owned_by']) - self.assertCountEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], - ['filed_under2', 'identity']) - # Old will be missing as it has been renamed into 'New' in the migrated - # schema while New hasn't been added here. - self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), - sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old')) - self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) - eschema = self.schema.eschema('Folder2') - for cstr in eschema.rdef('name').constraints: - self.assertTrue(hasattr(cstr, 'eid')) - - def test_add_cube_with_custom_final_type(self): - with self.mh() as (cnx, mh): - try: - mh.cmd_add_cube('fakecustomtype') - self.assertIn('Numeric', self.schema) - self.assertTrue(self.schema['Numeric'].final) - rdef = self.schema['num'].rdefs[('Location', 'Numeric')] - self.assertEqual(rdef.scale, 10) - self.assertEqual(rdef.precision, 18) - fields = self.table_schema(mh, '%sLocation' % SQL_PREFIX) - self.assertEqual(fields['%snum' % SQL_PREFIX], ('numeric', None)) # XXX - finally: - mh.cmd_drop_cube('fakecustomtype') - - def test_add_drop_entity_type(self): - with self.mh() as (cnx, mh): - mh.cmd_add_entity_type('Folder2') - wf = mh.cmd_add_workflow(u'folder2 wf', 'Folder2', - ensure_workflowable=False) - todo = wf.add_state(u'todo', initial=True) - done = wf.add_state(u'done') - wf.add_transition(u'redoit', done, todo) - wf.add_transition(u'markasdone', todo, done) - cnx.commit() - eschema = self.schema.eschema('Folder2') - mh.cmd_drop_entity_type('Folder2') - self.assertNotIn('Folder2', self.schema) - self.assertFalse(cnx.execute('CWEType X WHERE X name "Folder2"')) - # test automatic workflow deletion - self.assertFalse(cnx.execute('Workflow X WHERE NOT X workflow_of ET')) - self.assertFalse(cnx.execute('State X WHERE NOT X state_of WF')) - self.assertFalse(cnx.execute('Transition X WHERE NOT X transition_of WF')) - - def test_rename_entity_type(self): - with self.mh() as (cnx, mh): - entity = mh.create_entity('Old', name=u'old') - self.repo.type_and_source_from_eid(entity.eid, entity._cw) - mh.cmd_rename_entity_type('Old', 'New') - mh.cmd_rename_attribute('New', 'name', 'new_name') - - def test_add_drop_relation_type(self): - with self.mh() as (cnx, mh): - mh.cmd_add_entity_type('Folder2', auto=False) - mh.cmd_add_relation_type('filed_under2') - self.assertIn('filed_under2', self.schema) - # Old will be missing as it has been renamed into 'New' in the migrated - # schema while New hasn't been added here. 
- self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), - sorted(str(e) for e in self.schema.entities() - if not e.final and e != 'Old')) - self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) - mh.cmd_drop_relation_type('filed_under2') - self.assertNotIn('filed_under2', self.schema) - # this should not crash - mh.cmd_drop_relation_type('filed_under2') - - def test_add_relation_definition_nortype(self): - with self.mh() as (cnx, mh): - mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire') - self.assertEqual(self.schema['concerne2'].subjects(), - ('Personne',)) - self.assertEqual(self.schema['concerne2'].objects(), - ('Affaire', )) - self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, - '1*') - mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note') - self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) - mh.create_entity('Personne', nom=u'tot') - mh.create_entity('Affaire') - mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire') - cnx.commit() - mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire') - self.assertIn('concerne2', self.schema) - mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note') - self.assertNotIn('concerne2', self.schema) - - def test_drop_relation_definition_existant_rtype(self): - with self.mh() as (cnx, mh): - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Division', 'Note', 'Societe', 'SubDivision']) - mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced types - self.maxeid = cnx.execute('Any MAX(X)')[0][0] - - def test_drop_relation_definition_with_specialization(self): - with self.mh() as (cnx, mh): - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Note']) - mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced types - self.maxeid = cnx.execute('Any MAX(X)')[0][0] - - def test_rename_relation(self): - self.skipTest('implement me') - - def 
test_change_relation_props_non_final(self): - with self.mh() as (cnx, mh): - rschema = self.schema['concerne'] - card = rschema.rdef('Affaire', 'Societe').cardinality - self.assertEqual(card, '**') - try: - mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', - cardinality='?*') - card = rschema.rdef('Affaire', 'Societe').cardinality - self.assertEqual(card, '?*') - finally: - mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', - cardinality='**') - - def test_change_relation_props_final(self): - with self.mh() as (cnx, mh): - rschema = self.schema['adel'] - card = rschema.rdef('Personne', 'String').fulltextindexed - self.assertEqual(card, False) - try: - mh.cmd_change_relation_props('Personne', 'adel', 'String', - fulltextindexed=True) - card = rschema.rdef('Personne', 'String').fulltextindexed - self.assertEqual(card, True) - finally: - mh.cmd_change_relation_props('Personne', 'adel', 'String', - fulltextindexed=False) - - def test_sync_schema_props_perms_rqlconstraints(self): - with self.mh() as (cnx, mh): - # Drop one of the RQLConstraint. - rdef = self.schema['evaluee'].rdefs[('Personne', 'Note')] - oldconstraints = rdef.constraints - self.assertIn('S created_by U', - [cstr.expression for cstr in oldconstraints]) - mh.cmd_sync_schema_props_perms('evaluee', commit=True) - newconstraints = rdef.constraints - self.assertNotIn('S created_by U', - [cstr.expression for cstr in newconstraints]) - - # Drop all RQLConstraint. - rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] - oldconstraints = rdef.constraints - self.assertEqual(len(oldconstraints), 2) - mh.cmd_sync_schema_props_perms('travaille', commit=True) - rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] - newconstraints = rdef.constraints - self.assertEqual(len(newconstraints), 0) - - @tag('longrun') - def test_sync_schema_props_perms(self): - with self.mh() as (cnx, mh): - nbrqlexpr_start = cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0] - migrschema['titre'].rdefs[('Personne', 'String')].order = 7 - migrschema['adel'].rdefs[('Personne', 'String')].order = 6 - migrschema['ass'].rdefs[('Personne', 'String')].order = 5 - migrschema['Personne'].description = 'blabla bla' - migrschema['titre'].description = 'usually a title' - migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person' - delete_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'delete', 'concerne') - add_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'add', 'concerne') - - # make sure properties (e.g. 
etype descriptions) are synced by the - # second call to sync_schema - mh.cmd_sync_schema_props_perms(syncprops=False, commit=False) - mh.cmd_sync_schema_props_perms(commit=False) - - self.assertEqual(cnx.execute('Any D WHERE X name "Personne", X description D')[0][0], - 'blabla bla') - self.assertEqual(cnx.execute('Any D WHERE X name "titre", X description D')[0][0], - 'usually a title') - self.assertEqual(cnx.execute('Any D WHERE X relation_type RT, RT name "titre",' - 'X from_entity FE, FE name "Personne",' - 'X description D')[0][0], - 'title for this person') - rinorder = [n for n, in cnx.execute( - 'Any N ORDERBY O,N WHERE X is CWAttribute, X relation_type RT, RT name N,' - 'X from_entity FE, FE name "Personne",' - 'X ordernum O')] - expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre', - u'web', u'tel', u'fax', u'datenaiss', u'test', u'tzdatenaiss', - u'description', u'firstname', - u'creation_date', u'cwuri', u'modification_date'] - self.assertEqual(expected, rinorder) - - # test permissions synchronization #################################### - # new rql expr to add note entity - eexpr = self._erqlexpr_entity(cnx, 'add', 'Note') - self.assertEqual(eexpr.expression, - 'X ecrit_part PE, U in_group G, ' - 'PE require_permission P, P name "add_note", P require_group G') - self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note']) - self.assertEqual(eexpr.reverse_read_permission, ()) - self.assertEqual(eexpr.reverse_delete_permission, ()) - self.assertEqual(eexpr.reverse_update_permission, ()) - self.assertTrue(self._rrqlexpr_rset(cnx, 'add', 'para')) - # no rqlexpr to delete para attribute - self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'para')) - # new rql expr to add ecrit_par relation - rexpr = self._rrqlexpr_entity(cnx, 'add', 'ecrit_par') - self.assertEqual(rexpr.expression, - 'O require_permission P, P name "add_note", ' - 'U in_group G, P require_group G') - self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) - self.assertEqual(rexpr.reverse_read_permission, ()) - self.assertEqual(rexpr.reverse_delete_permission, ()) - # no more rqlexpr to delete and add travaille relation - self.assertFalse(self._rrqlexpr_rset(cnx, 'add', 'travaille')) - self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'travaille')) - # no more rqlexpr to delete and update Societe entity - self.assertFalse(self._erqlexpr_rset(cnx, 'update', 'Societe')) - self.assertFalse(self._erqlexpr_rset(cnx, 'delete', 'Societe')) - # no more rqlexpr to read Affaire entity - self.assertFalse(self._erqlexpr_rset(cnx, 'read', 'Affaire')) - # rqlexpr to update Affaire entity has been updated - eexpr = self._erqlexpr_entity(cnx, 'update', 'Affaire') - self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U') - # no change for rqlexpr to add and delete Affaire entity - self.assertEqual(len(self._erqlexpr_rset(cnx, 'delete', 'Affaire')), 1) - self.assertEqual(len(self._erqlexpr_rset(cnx, 'add', 'Affaire')), 1) - # no change for rqlexpr to add and delete concerne relation - self.assertEqual(len(self._rrqlexpr_rset(cnx, 'delete', 'concerne')), - len(delete_concerne_rqlexpr)) - self.assertEqual(len(self._rrqlexpr_rset(cnx, 'add', 'concerne')), - len(add_concerne_rqlexpr)) - # * migrschema involve: - # * 7 erqlexprs deletions (2 in (Affaire + Societe + Note.para) + 1 Note.something - # * 2 rrqlexprs deletions (travaille) - # * 1 update (Affaire update) - # * 2 new (Note add, ecrit_par add) - # * 2 implicit new for attributes (Note.para, 
Person.test) - # remaining orphan rql expr which should be deleted at commit (composite relation) - # unattached expressions -> pending deletion on commit - self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",' - 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' - 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], - 7) - self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",' - 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' - 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], - 2) - # finally - self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], - nbrqlexpr_start + 1 + 2 + 2 + 2) - cnx.commit() - # unique_together test - self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1) - self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0], - ('nom', 'prenom', 'datenaiss')) - rset = cnx.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"') - self.assertEqual(len(rset), 1) - relations = [r.name for r in rset.get_entity(0, 0).relations] - self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss')) - - def _erqlexpr_rset(self, cnx, action, ertype): - rql = 'RQLExpression X WHERE ET is CWEType, ET %s_permission X, ET name %%(name)s' % action - return cnx.execute(rql, {'name': ertype}) - - def _erqlexpr_entity(self, cnx, action, ertype): - rset = self._erqlexpr_rset(cnx, action, ertype) - self.assertEqual(len(rset), 1) - return rset.get_entity(0, 0) - - def _rrqlexpr_rset(self, cnx, action, ertype): - rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action - return cnx.execute(rql, {'name': ertype}) - - def _rrqlexpr_entity(self, cnx, action, ertype): - rset = self._rrqlexpr_rset(cnx, action, ertype) - self.assertEqual(len(rset), 1) - return rset.get_entity(0, 0) - - def test_set_size_constraint(self): - with self.mh() as (cnx, mh): - # existing previous value - try: - mh.cmd_set_size_constraint('CWEType', 'name', 128) - finally: - mh.cmd_set_size_constraint('CWEType', 'name', 64) - # non existing previous value - try: - mh.cmd_set_size_constraint('CWEType', 'description', 256) - finally: - mh.cmd_set_size_constraint('CWEType', 'description', None) - - @tag('longrun') - def test_add_drop_cube_and_deps(self): - with self.mh() as (cnx, mh): - schema = self.repo.schema - self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs), - sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), - ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), - ('Note', 'Note'), ('Note', 'Bookmark')])) - try: - mh.cmd_drop_cube('fakeemail', removedeps=True) - # file was there because it's an email dependancy, should have been removed - self.assertNotIn('fakeemail', self.config.cubes()) - self.assertNotIn(self.config.cube_dir('fakeemail'), self.config.cubes_path()) - self.assertNotIn('file', self.config.cubes()) - self.assertNotIn(self.config.cube_dir('file'), self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', - 'sender', 'in_thread', 'reply_to', 'data_format'): - self.assertNotIn(ertype, schema) - self.assertEqual(sorted(schema['see_also'].rdefs), - sorted([('Folder', 'Folder'), - ('Bookmark', 'Bookmark'), - ('Bookmark', 'Note'), - ('Note', 'Note'), - ('Note', 'Bookmark')])) - self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note']) - 
self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note']) - self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.fakeemail"').rowcount, 0) - self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) - finally: - mh.cmd_add_cube('fakeemail') - self.assertIn('fakeemail', self.config.cubes()) - self.assertIn(self.config.cube_dir('fakeemail'), self.config.cubes_path()) - self.assertIn('file', self.config.cubes()) - self.assertIn(self.config.cube_dir('file'), self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', - 'sender', 'in_thread', 'reply_to', 'data_format'): - self.assertIn(ertype, schema) - self.assertEqual(sorted(schema['see_also'].rdefs), - sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), - ('Bookmark', 'Bookmark'), - ('Bookmark', 'Note'), - ('Note', 'Note'), - ('Note', 'Bookmark')])) - self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) - self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) - from cubes.fakeemail.__pkginfo__ import version as email_version - from cubes.file.__pkginfo__ import version as file_version - self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.fakeemail"')[0][0], - email_version) - self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], - file_version) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced - # types (and their associated tables!) - self.maxeid = cnx.execute('Any MAX(X)')[0][0] - # why this commit is necessary is unclear to me (though without it - # next test may fail complaining of missing tables - cnx.commit() - - - @tag('longrun') - def test_add_drop_cube_no_deps(self): - with self.mh() as (cnx, mh): - cubes = set(self.config.cubes()) - schema = self.repo.schema - try: - mh.cmd_drop_cube('fakeemail') - cubes.remove('fakeemail') - self.assertNotIn('fakeemail', self.config.cubes()) - self.assertIn('file', self.config.cubes()) - for ertype in ('Email', 'EmailThread', 'EmailPart', - 'sender', 'in_thread', 'reply_to'): - self.assertNotIn(ertype, schema) - finally: - mh.cmd_add_cube('fakeemail') - self.assertIn('fakeemail', self.config.cubes()) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced - # types (and their associated tables!) 
- self.maxeid = cnx.execute('Any MAX(X)')[0][0] # XXXXXXX KILL KENNY - # why this commit is necessary is unclear to me (though without it - # next test may fail complaining of missing tables - cnx.commit() - - def test_drop_dep_cube(self): - with self.mh() as (cnx, mh): - with self.assertRaises(ConfigurationError) as cm: - mh.cmd_drop_cube('file') - self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency") - - @tag('longrun') - def test_introduce_base_class(self): - with self.mh() as (cnx, mh): - mh.cmd_add_entity_type('Para') - self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), - ['Note']) - self.assertEqual(self.schema['Note'].specializes().type, 'Para') - mh.cmd_add_entity_type('Text') - self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), - ['Note', 'Text']) - self.assertEqual(self.schema['Text'].specializes().type, 'Para') - # test columns have been actually added - text = cnx.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0) - note = cnx.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0) - aff = cnx.execute('INSERT Affaire X').get_entity(0, 0) - self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid})) - self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid})) - self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid})) - self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid})) - # XXX remove specializes by ourselves, else tearDown fails when removing - # Para because of Note inheritance. This could be fixed by putting the - # MemSchemaCWETypeDel(session, name) operation in the - # after_delete_entity(CWEType) hook, since in that case the MemSchemaSpecializesDel - # operation would be removed before, but I'm not sure this is a desired behaviour. - # - # also we need more tests about introducing/removing base classes or - # specialization relationship... 
- cnx.execute('DELETE X specializes Y WHERE Y name "Para"') - cnx.commit() - self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), - []) - self.assertEqual(self.schema['Note'].specializes(), None) - self.assertEqual(self.schema['Text'].specializes(), None) - - def test_add_symmetric_relation_type(self): - with self.mh() as (cnx, mh): - self.assertFalse(self.table_sql(mh, 'same_as_relation')) - mh.cmd_add_relation_type('same_as') - self.assertTrue(self.table_sql(mh, 'same_as_relation')) - - def test_change_attribute_type(self): - with self.mh() as (cnx, mh): - mh.cmd_create_entity('Societe', tel=1) - mh.commit() - mh.change_attribute_type('Societe', 'tel', 'Float') - self.assertNotIn(('Societe', 'Int'), self.schema['tel'].rdefs) - self.assertIn(('Societe', 'Float'), self.schema['tel'].rdefs) - self.assertEqual(self.schema['tel'].rdefs[('Societe', 'Float')].object, 'Float') - tel = mh.rqlexec('Any T WHERE X tel T')[0][0] - self.assertEqual(tel, 1.0) - self.assertIsInstance(tel, float) - - def test_drop_required_inlined_relation(self): - with self.mh() as (cnx, mh): - bob = mh.cmd_create_entity('Personne', nom=u'bob') - note = mh.cmd_create_entity('Note', ecrit_par=bob) - mh.commit() - rdef = mh.fs_schema.rschema('ecrit_par').rdefs[('Note', 'Personne')] - with tempattr(rdef, 'cardinality', '1*'): - mh.sync_schema_props_perms('ecrit_par', syncperms=False) - mh.cmd_drop_relation_type('ecrit_par') - self.assertNotIn('%secrit_par' % SQL_PREFIX, - self.table_schema(mh, '%sPersonne' % SQL_PREFIX)) - - def test_drop_inlined_rdef_delete_data(self): - with self.mh() as (cnx, mh): - note = mh.cmd_create_entity('Note', ecrit_par=cnx.user.eid) - mh.commit() - mh.drop_relation_definition('Note', 'ecrit_par', 'CWUser') - self.assertFalse(mh.sqlexec('SELECT * FROM cw_Note WHERE cw_ecrit_par IS NOT NULL')) - -class MigrationCommandsComputedTC(MigrationTC): - """ Unit tests for computed relations and attributes - """ - appid = 'datacomputed' - - def setUp(self): - MigrationTC.setUp(self) - # ensure vregistry is reloaded, needed by generated hooks for computed - # attributes - self.repo.vreg.set_schema(self.repo.schema) - - def test_computed_relation_add_relation_definition(self): - self.assertNotIn('works_for', self.schema) - with self.mh() as (cnx, mh): - with self.assertRaises(ExecutionError) as exc: - mh.cmd_add_relation_definition('Employee', 'works_for', 'Company') - self.assertEqual(str(exc.exception), - 'Cannot add a relation definition for a computed ' - 'relation (works_for)') - - def test_computed_relation_drop_relation_definition(self): - self.assertIn('notes', self.schema) - with self.mh() as (cnx, mh): - with self.assertRaises(ExecutionError) as exc: - mh.cmd_drop_relation_definition('Company', 'notes', 'Note') - self.assertEqual(str(exc.exception), - 'Cannot drop a relation definition for a computed ' - 'relation (notes)') - - def test_computed_relation_add_relation_type(self): - self.assertNotIn('works_for', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_add_relation_type('works_for') - self.assertIn('works_for', self.schema) - self.assertEqual(self.schema['works_for'].rule, - 'O employees S, NOT EXISTS (O associates S)') - self.assertEqual(self.schema['works_for'].objects(), ('Company',)) - self.assertEqual(self.schema['works_for'].subjects(), ('Employee',)) - self.assertFalse(self.table_sql(mh, 'works_for_relation')) - e = cnx.create_entity('Employee') - a = cnx.create_entity('Employee') - cnx.create_entity('Company', employees=e, associates=a) - cnx.commit() 
- company = cnx.execute('Company X').get_entity(0, 0) - self.assertEqual([e.eid], - [x.eid for x in company.reverse_works_for]) - mh.rollback() - - def test_computed_relation_drop_relation_type(self): - self.assertIn('notes', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_drop_relation_type('notes') - self.assertNotIn('notes', self.schema) - - def test_computed_relation_sync_schema_props_perms(self): - self.assertIn('whatever', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_sync_schema_props_perms('whatever') - self.assertEqual(self.schema['whatever'].rule, - 'S employees E, O associates E') - self.assertEqual(self.schema['whatever'].objects(), ('Company',)) - self.assertEqual(self.schema['whatever'].subjects(), ('Company',)) - self.assertFalse(self.table_sql(mh, 'whatever_relation')) - - def test_computed_relation_sync_schema_props_perms_security(self): - with self.mh() as (cnx, mh): - rdef = next(iter(self.schema['perm_changes'].rdefs.values())) - self.assertEqual(rdef.permissions, - {'add': (), 'delete': (), - 'read': ('managers', 'users')}) - mh.cmd_sync_schema_props_perms('perm_changes') - self.assertEqual(self.schema['perm_changes'].permissions, - {'read': ('managers',)}) - rdef = next(iter(self.schema['perm_changes'].rdefs.values())) - self.assertEqual(rdef.permissions, - {'add': (), 'delete': (), - 'read': ('managers',)}) - - def test_computed_relation_sync_schema_props_perms_on_rdef(self): - self.assertIn('whatever', self.schema) - with self.mh() as (cnx, mh): - with self.assertRaises(ExecutionError) as exc: - mh.cmd_sync_schema_props_perms( - ('Company', 'whatever', 'Person')) - self.assertEqual(str(exc.exception), - 'Cannot synchronize a relation definition for a computed ' - 'relation (whatever)') - - def test_computed_relation_rename_relation_type(self): - with self.mh() as (cnx, mh): - mh.cmd_rename_relation_type('to_be_renamed', 'renamed') - self.assertIn('renamed', self.schema) - self.assertNotIn('to_be_renamed', self.schema) - - # computed attributes migration ############################################ - - def setup_add_score(self): - with self.admin_access.client_cnx() as cnx: - assert not cnx.execute('Company X') - c = cnx.create_entity('Company') - e1 = cnx.create_entity('Employee', reverse_employees=c) - cnx.create_entity('Note', note=2, concerns=e1) - e2 = cnx.create_entity('Employee', reverse_employees=c) - cnx.create_entity('Note', note=4, concerns=e2) - cnx.commit() - - def assert_score_initialized(self, mh): - self.assertEqual(self.schema['score'].rdefs['Company', 'Float'].formula, - 'Any AVG(NN) WHERE X employees E, N concerns E, N note NN') - fields = self.table_schema(mh, '%sCompany' % SQL_PREFIX) - self.assertEqual(fields['%sscore' % SQL_PREFIX], ('double precision', None)) - self.assertEqual([[3.0]], - mh.rqlexec('Any CS WHERE C score CS, C is Company').rows) - - def test_computed_attribute_add_relation_type(self): - self.assertNotIn('score', self.schema) - self.setup_add_score() - with self.mh() as (cnx, mh): - mh.cmd_add_relation_type('score') - self.assertIn('score', self.schema) - self.assertEqual(self.schema['score'].objects(), ('Float',)) - self.assertEqual(self.schema['score'].subjects(), ('Company',)) - self.assert_score_initialized(mh) - - def test_computed_attribute_add_attribute(self): - self.assertNotIn('score', self.schema) - self.setup_add_score() - with self.mh() as (cnx, mh): - mh.cmd_add_attribute('Company', 'score') - self.assertIn('score', self.schema) - self.assert_score_initialized(mh) - - def 
assert_computed_attribute_dropped(self): - self.assertNotIn('note20', self.schema) - with self.mh() as (cnx, mh): - fields = self.table_schema(mh, '%sNote' % SQL_PREFIX) - self.assertNotIn('%snote20' % SQL_PREFIX, fields) - - def test_computed_attribute_drop_type(self): - self.assertIn('note20', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_drop_relation_type('note20') - self.assert_computed_attribute_dropped() - - def test_computed_attribute_drop_relation_definition(self): - self.assertIn('note20', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_drop_relation_definition('Note', 'note20', 'Int') - self.assert_computed_attribute_dropped() - - def test_computed_attribute_drop_attribute(self): - self.assertIn('note20', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_drop_attribute('Note', 'note20') - self.assert_computed_attribute_dropped() - - def test_computed_attribute_sync_schema_props_perms_rtype(self): - self.assertIn('note100', self.schema) - with self.mh() as (cnx, mh): - mh.cmd_sync_schema_props_perms('note100') - rdef = self.schema['note100'].rdefs['Note', 'Int'] - self.assertEqual(rdef.formula_select.as_string(), - 'Any (N * 100) WHERE X note N, X is Note') - self.assertEqual(rdef.formula, 'Any N*100 WHERE X note N') - - def test_computed_attribute_sync_schema_props_perms_rdef(self): - self.setup_add_score() - with self.mh() as (cnx, mh): - mh.cmd_sync_schema_props_perms(('Note', 'note100', 'Int')) - self.assertEqual([[200], [400]], - cnx.execute('Any N ORDERBY N WHERE X note100 N').rows) - self.assertEqual([[300]], - cnx.execute('Any CS WHERE C score100 CS, C is Company').rows) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_postgres.py --- a/server/test/unittest_postgres.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,186 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
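The PostgresTimeoutConfiguration defined just below sets db-statement-timeout to 200, i.e. 200 milliseconds, which is why test_statement_timeout further down expects pg_sleep(0.1) to succeed and pg_sleep(0.3) to fail. The mechanism underneath is PostgreSQL's statement_timeout setting. As a rough sketch of that behaviour outside of CubicWeb (assumptions: psycopg2 is installed, the DSN is a placeholder, and the exact exception class varies with the psycopg2 version, so the generic OperationalError is caught):

    import psycopg2

    conn = psycopg2.connect("dbname=test")        # placeholder DSN
    cur = conn.cursor()
    cur.execute("SET statement_timeout = 200")    # milliseconds
    cur.execute("SELECT pg_sleep(0.1)")           # finishes within the limit
    try:
        cur.execute("SELECT pg_sleep(0.3)")       # cancelled by the server
    except psycopg2.OperationalError:             # QueryCanceled in recent psycopg2
        conn.rollback()
    conn.close()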
- -from datetime import datetime -from threading import Thread - -from six.moves import range - -from logilab.common.testlib import SkipTest - -import logilab.database as lgdb -from cubicweb import ValidationError -from cubicweb.devtools import PostgresApptestConfiguration, startpgcluster, stoppgcluster -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.predicates import is_instance -from cubicweb.entities.adapters import IFTIndexableAdapter - -from unittest_querier import FixedOffset - - -def setUpModule(): - startpgcluster(__file__) - - -def tearDownModule(): - stoppgcluster(__file__) - - -class PostgresTimeoutConfiguration(PostgresApptestConfiguration): - def __init__(self, *args, **kwargs): - self.default_sources = PostgresApptestConfiguration.default_sources.copy() - self.default_sources['system'] = PostgresApptestConfiguration.default_sources['system'].copy() - self.default_sources['system']['db-statement-timeout'] = 200 - super(PostgresTimeoutConfiguration, self).__init__(*args, **kwargs) - - -class PostgresFTITC(CubicWebTC): - configcls = PostgresTimeoutConfiguration - - @classmethod - def setUpClass(cls): - cls.orig_connect_hooks = lgdb.SQL_CONNECT_HOOKS['postgres'][:] - - @classmethod - def tearDownClass(cls): - lgdb.SQL_CONNECT_HOOKS['postgres'] = cls.orig_connect_hooks - - def test_eid_range(self): - # concurrent allocation of eid ranges - source = self.session.repo.sources_by_uri['system'] - range1 = [] - range2 = [] - def allocate_eid_ranges(session, target): - for x in range(1, 10): - eid = source.create_eid(session, count=x) - target.extend(range(eid-x, eid)) - - t1 = Thread(target=lambda: allocate_eid_ranges(self.session, range1)) - t2 = Thread(target=lambda: allocate_eid_ranges(self.session, range2)) - t1.start() - t2.start() - t1.join() - t2.join() - self.assertEqual(range1, sorted(range1)) - self.assertEqual(range2, sorted(range2)) - self.assertEqual(set(), set(range1) & set(range2)) - - def test_occurence_count(self): - with self.admin_access.repo_cnx() as cnx: - c1 = cnx.create_entity('Card', title=u'c1', - content=u'cubicweb cubicweb cubicweb') - c2 = cnx.create_entity('Card', title=u'c3', - content=u'cubicweb') - c3 = cnx.create_entity('Card', title=u'c2', - content=u'cubicweb cubicweb') - cnx.commit() - self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC ' - 'WHERE X has_text "cubicweb"').rows, - [[c1.eid,], [c3.eid,], [c2.eid,]]) - - - def test_attr_weight(self): - class CardIFTIndexableAdapter(IFTIndexableAdapter): - __select__ = is_instance('Card') - attr_weight = {'title': 'A'} - with self.temporary_appobjects(CardIFTIndexableAdapter): - with self.admin_access.repo_cnx() as cnx: - c1 = cnx.create_entity('Card', title=u'c1', - content=u'cubicweb cubicweb cubicweb') - c2 = cnx.create_entity('Card', title=u'c2', - content=u'cubicweb cubicweb') - c3 = cnx.create_entity('Card', title=u'cubicweb', - content=u'autre chose') - cnx.commit() - self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC ' - 'WHERE X has_text "cubicweb"').rows, - [[c3.eid,], [c1.eid,], [c2.eid,]]) - - def test_entity_weight(self): - class PersonneIFTIndexableAdapter(IFTIndexableAdapter): - __select__ = is_instance('Personne') - entity_weight = 2.0 - with self.temporary_appobjects(PersonneIFTIndexableAdapter): - with self.admin_access.repo_cnx() as cnx: - c1 = cnx.create_entity('Personne', nom=u'c1', prenom=u'cubicweb') - c2 = cnx.create_entity('Comment', content=u'cubicweb cubicweb', - comments=c1) - c3 = cnx.create_entity('Comment', content=u'cubicweb cubicweb 
cubicweb', - comments=c1) - cnx.commit() - self.assertEqual(cnx.execute('Any X ORDERBY FTIRANK(X) DESC ' - 'WHERE X has_text "cubicweb"').rows, - [[c1.eid,], [c3.eid,], [c2.eid,]]) - - def test_tz_datetime(self): - with self.admin_access.repo_cnx() as cnx: - bob = cnx.create_entity('Personne', nom=u'bob', - tzdatenaiss=datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))) - datenaiss = cnx.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0] - self.assertIsNotNone(datenaiss.tzinfo) - self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0)) - cnx.commit() - cnx.create_entity('Personne', nom=u'boby', - tzdatenaiss=datetime(1977, 6, 7, 2, 0)) - datenaiss = cnx.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0] - self.assertIsNotNone(datenaiss.tzinfo) - self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0)) - rset = cnx.execute("Any X WHERE X tzdatenaiss %(d)s", - {'d': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) - self.assertEqual(rset.rows, [[bob.eid]]) - - def test_constraint_validationerror(self): - with self.admin_access.repo_cnx() as cnx: - with cnx.allow_all_hooks_but('integrity'): - with self.assertRaises(ValidationError) as cm: - cnx.execute("INSERT Note N: N type 'nogood'") - self.assertEqual(cm.exception.errors, - {'type-subject': u'invalid value %(KEY-value)s, it must be one of %(KEY-choices)s'}) - self.assertEqual(cm.exception.msgargs, - {'type-subject-value': u'"nogood"', - 'type-subject-choices': u'"todo", "a", "b", "T", "lalala"'}) - - def test_statement_timeout(self): - with self.admin_access.repo_cnx() as cnx: - cnx.system_sql('select pg_sleep(0.1)') - with self.assertRaises(Exception): - cnx.system_sql('select pg_sleep(0.3)') - - -class PostgresLimitSizeTC(CubicWebTC): - configcls = PostgresApptestConfiguration - - def test(self): - with self.admin_access.repo_cnx() as cnx: - def sql(string): - return cnx.system_sql(string).fetchone()[0] - yield self.assertEqual, sql("SELECT limit_size('
      hello
      ', 'text/html', 20)"), \ - '
      hello
      ' - yield self.assertEqual, sql("SELECT limit_size('
      hello
      ', 'text/html', 2)"), \ - 'he...' - yield self.assertEqual, sql("SELECT limit_size('
      hello', 'text/html', 2)"), \ - 'he...' - yield self.assertEqual, sql("SELECT limit_size('hello', 'text/html', 2)"), \ - 'he...' - yield self.assertEqual, sql("SELECT limit_size('a>b', 'text/html', 2)"), \ - 'a>...' - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_querier.py --- a/server/test/unittest_querier.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1668 +0,0 @@ -# -*- coding: iso-8859-1 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for modules cubicweb.server.querier and cubicweb.server.ssplanner -""" - -from datetime import date, datetime, timedelta, tzinfo - -import pytz - -from six import PY2, integer_types, binary_type, text_type - -from logilab.common.testlib import TestCase, unittest_main -from rql import BadRQLQuery - -from cubicweb import QueryError, Unauthorized, Binary -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.utils import crypt_password -from cubicweb.server.querier import manual_build_descr, _make_description -from cubicweb.devtools import get_test_db_handler, TestServerConfiguration -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.repotest import tuplify, BaseQuerierTC - - -class FixedOffset(tzinfo): - def __init__(self, hours=0): - self.hours = hours - def utcoffset(self, dt): - return timedelta(hours=self.hours) - def dst(self, dt): - return timedelta(0) - - -# register priority/severity sorting registered procedure -from rql.utils import register_function, FunctionDescr - -class group_sort_value(FunctionDescr): - supported_backends = ('sqlite',) - rtype = 'Int' -try: - register_function(group_sort_value) -except AssertionError: - pass -from cubicweb.server.sqlutils import SQL_CONNECT_HOOKS -def init_sqlite_connexion(cnx): - def group_sort_value(text): - return {"managers": "3", "users": "2", "guests": "1", "owners": "0"}[text] - cnx.create_function("GROUP_SORT_VALUE", 1, group_sort_value) -SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion) - - -def setUpClass(cls, *args): - global repo, cnx - config = TestServerConfiguration(apphome=UtilsTC.datadir) - handler = get_test_db_handler(config) - handler.build_db_cache() - repo, cnx = handler.get_repo_and_cnx() - cls.repo = repo - -def tearDownClass(cls, *args): - global repo, cnx - repo.shutdown() - del repo, cnx - - -class Variable: - def __init__(self, name): - self.name = name - self.children = [] - - def get_type(self, solution, args=None): - return solution[self.name] - def as_string(self): - return self.name - -class Function: - def __init__(self, name, varname): - self.name = name - self.children = [Variable(varname)] - def get_type(self, 
solution, args=None): - return 'Int' - -class MakeDescriptionTC(TestCase): - def test_known_values(self): - solution = {'A': 'Int', 'B': 'CWUser'} - self.assertEqual(_make_description((Function('max', 'A'), Variable('B')), {}, solution), - ['Int','CWUser']) - - -class UtilsTC(BaseQuerierTC): - setUpClass = classmethod(setUpClass) - tearDownClass = classmethod(tearDownClass) - - def get_max_eid(self): - # no need for cleanup here - return None - def cleanup(self): - # no need for cleanup here - pass - - def test_preprocess_1(self): - with self.session.new_cnx() as cnx: - reid = cnx.execute('Any X WHERE X is CWRType, X name "owned_by"')[0][0] - rqlst = self._prepare(cnx, 'Any COUNT(RDEF) WHERE RDEF relation_type X, X eid %(x)s', - {'x': reid}) - self.assertEqual([{'RDEF': 'CWAttribute'}, {'RDEF': 'CWRelation'}], - rqlst.solutions) - - def test_preprocess_2(self): - with self.session.new_cnx() as cnx: - teid = cnx.execute("INSERT Tag X: X name 'tag'")[0][0] - #geid = self.execute("CWGroup G WHERE G name 'users'")[0][0] - #self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", - # {'g': geid, 't': teid}, 'g') - rqlst = self._prepare(cnx, 'Any X WHERE E eid %(x)s, E tags X', {'x': teid}) - # the query may be optimized, should keep only one solution - # (any one, etype will be discarded) - self.assertEqual(1, len(rqlst.solutions)) - - def assertRQLEqual(self, expected, got): - from rql import parse - self.assertMultiLineEqual(text_type(parse(expected)), - text_type(parse(got))) - - def test_preprocess_security(self): - s = self.user_groups_session('users') - with s.new_cnx() as cnx: - plan = self._prepare_plan(cnx, 'Any ETN,COUNT(X) GROUPBY ETN ' - 'WHERE X is ET, ET name ETN') - union = plan.rqlst - plan.preprocess(union) - self.assertEqual(len(union.children), 1) - self.assertEqual(len(union.children[0].with_), 1) - subq = union.children[0].with_[0].query - self.assertEqual(len(subq.children), 4) - self.assertEqual([t.as_string() for t in union.children[0].selection], - ['ETN','COUNT(X)']) - self.assertEqual([t.as_string() for t in union.children[0].groupby], - ['ETN']) - partrqls = sorted(((rqlst.as_string(), rqlst.solutions) for rqlst in subq.children)) - rql, solutions = partrqls[0] - self.assertRQLEqual(rql, - 'Any ETN,X WHERE X is ET, ET name ETN, (EXISTS(X owned_by %(B)s))' - ' OR ((((EXISTS(D concerne C?, C owned_by %(B)s, ' - ' X identity D, C is Division, D is Affaire))' - ' OR (EXISTS(H concerne G?, G owned_by %(B)s, G is SubDivision, ' - ' X identity H, H is Affaire)))' - ' OR (EXISTS(I concerne F?, F owned_by %(B)s, F is Societe, ' - ' X identity I, I is Affaire)))' - ' OR (EXISTS(J concerne E?, E owned_by %(B)s, E is Note, ' - ' X identity J, J is Affaire)))' - ', ET is CWEType, X is Affaire') - self.assertEqual(solutions, [{'C': 'Division', - 'D': 'Affaire', - 'E': 'Note', - 'F': 'Societe', - 'G': 'SubDivision', - 'H': 'Affaire', - 'I': 'Affaire', - 'J': 'Affaire', - 'X': 'Affaire', - 'ET': 'CWEType', 'ETN': 'String'}]) - rql, solutions = partrqls[1] - self.assertRQLEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, ' - 'X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWComputedRType, ' - ' CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, ' - ' CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, ' - ' Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, ' - ' Frozable, Note, Old, Personne, RQLExpression, Societe, State, SubDivision, ' - ' SubWorkflowExitPoint, Tag, TrInfo, 
Transition, Workflow, WorkflowTransition)') - self.assertCountEqual(solutions, - [{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}]) - rql, solutions = partrqls[2] - self.assertEqual(rql, - 'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(%(D)s use_email X), ' - 'ET is CWEType, X is EmailAddress') - self.assertEqual(solutions, [{'X': 'EmailAddress', 'ET': 'CWEType', 'ETN': 'String'}]) - rql, solutions = partrqls[3] - self.assertEqual(rql, - 'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(X owned_by %(C)s), ' - 'ET is CWEType, X is Basket') - self.assertEqual(solutions, [{'X': 'Basket', 'ET': 'CWEType', 'ETN': 'String'}]) - - def test_preprocess_security_aggregat(self): - s = self.user_groups_session('users') - with s.new_cnx() as cnx: - plan = self._prepare_plan(cnx, 'Any MAX(X)') - union = plan.rqlst - plan.preprocess(union) - self.assertEqual(len(union.children), 1) - self.assertEqual(len(union.children[0].with_), 1) - subq = union.children[0].with_[0].query - self.assertEqual(len(subq.children), 4) - self.assertEqual([t.as_string() for t in union.children[0].selection], - ['MAX(X)']) - - def test_preprocess_nonregr(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any S ORDERBY SI WHERE NOT S ecrit_par O, S para SI') - self.assertEqual(len(rqlst.solutions), 1) - - def test_build_description(self): - # should return an empty result set - rset = self.qexecute('Any 
X WHERE X eid %(x)s', {'x': self.session.user.eid}) - self.assertEqual(rset.description[0][0], 'CWUser') - rset = self.qexecute('Any 1') - self.assertEqual(rset.description[0][0], 'Int') - rset = self.qexecute('Any TRUE') - self.assertEqual(rset.description[0][0], 'Boolean') - rset = self.qexecute('Any "hop"') - self.assertEqual(rset.description[0][0], 'String') - rset = self.qexecute('Any TODAY') - self.assertEqual(rset.description[0][0], 'Date') - rset = self.qexecute('Any NOW') - self.assertEqual(rset.description[0][0], 'Datetime') - rset = self.qexecute('Any %(x)s', {'x': 1}) - self.assertEqual(rset.description[0][0], 'Int') - if PY2: - rset = self.qexecute('Any %(x)s', {'x': long(1)}) - self.assertEqual(rset.description[0][0], 'Int') - rset = self.qexecute('Any %(x)s', {'x': True}) - self.assertEqual(rset.description[0][0], 'Boolean') - rset = self.qexecute('Any %(x)s', {'x': 1.0}) - self.assertEqual(rset.description[0][0], 'Float') - rset = self.qexecute('Any %(x)s', {'x': datetime.now()}) - self.assertEqual(rset.description[0][0], 'Datetime') - rset = self.qexecute('Any %(x)s', {'x': 'str'}) - self.assertEqual(rset.description[0][0], 'String') - rset = self.qexecute('Any %(x)s', {'x': u'str'}) - self.assertEqual(rset.description[0][0], 'String') - - def test_build_descr1(self): - with self.session.new_cnx() as cnx: - rset = cnx.execute('(Any U,L WHERE U login L) UNION ' - '(Any G,N WHERE G name N, G is CWGroup)') - # rset.req = self.session - orig_length = len(rset) - rset.rows[0][0] = 9999999 - description = manual_build_descr(cnx, rset.syntax_tree(), None, rset.rows) - self.assertEqual(len(description), orig_length - 1) - self.assertEqual(len(rset.rows), orig_length - 1) - self.assertNotEqual(rset.rows[0][0], 9999999) - - def test_build_descr2(self): - rset = self.qexecute('Any X,Y WITH X,Y BEING ((Any G,NULL WHERE G is CWGroup) UNION ' - '(Any U,G WHERE U in_group G))') - for x, y in rset.description: - if y is not None: - self.assertEqual(y, 'CWGroup') - - def test_build_descr3(self): - rset = self.qexecute('(Any G,NULL WHERE G is CWGroup) UNION ' - '(Any U,G WHERE U in_group G)') - for x, y in rset.description: - if y is not None: - self.assertEqual(y, 'CWGroup') - - -class QuerierTC(BaseQuerierTC): - setUpClass = classmethod(setUpClass) - tearDownClass = classmethod(tearDownClass) - - def test_unknown_eid(self): - # should return an empty result set - self.assertFalse(self.qexecute('Any X WHERE X eid 99999999')) - - def test_typed_eid(self): - # should return an empty result set - rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': '1'}) - self.assertIsInstance(rset[0][0], integer_types) - - def test_bytes_storage(self): - feid = self.qexecute('INSERT File X: X data_name "foo.pdf", ' - 'X data_format "text/plain", X data %(data)s', - {'data': Binary(b"xxx")})[0][0] - fdata = self.qexecute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0] - self.assertIsInstance(fdata, Binary) - self.assertEqual(fdata.getvalue(), b'xxx') - - # selection queries tests ################################################# - - def test_select_1(self): - rset = self.qexecute('Any X ORDERBY X WHERE X is CWGroup') - result, descr = rset.rows, rset.description - self.assertEqual(tuplify(result), [(2,), (3,), (4,), (5,)]) - self.assertEqual(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) - - def test_select_2(self): - rset = self.qexecute('Any X ORDERBY N WHERE X is CWGroup, X name N') - self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)]) - 
self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) - rset = self.qexecute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N') - self.assertEqual(tuplify(rset.rows), [(5,), (4,), (3,), (2,)]) - - def test_select_3(self): - rset = self.qexecute('Any N GROUPBY N WHERE X is CWGroup, X name N') - result, descr = rset.rows, rset.description - result.sort() - self.assertEqual(tuplify(result), [('guests',), ('managers',), ('owners',), ('users',)]) - self.assertEqual(descr, [('String',), ('String',), ('String',), ('String',)]) - - def test_select_is(self): - rset = self.qexecute('Any X, TN ORDERBY TN LIMIT 10 WHERE X is T, T name TN') - result, descr = rset.rows, rset.description - self.assertEqual(result[0][1], descr[0][0]) - - def test_select_is_aggr(self): - rset = self.qexecute('Any TN, COUNT(X) GROUPBY TN ORDERBY 2 DESC WHERE X is T, T name TN') - result, descr = rset.rows, rset.description - self.assertEqual(descr[0][0], 'String') - self.assertEqual(descr[0][1], 'Int') - self.assertEqual(result[0][0], 'RQLExpression') # XXX may change as schema evolve - - def test_select_groupby_orderby(self): - rset = self.qexecute('Any N GROUPBY N ORDERBY N WHERE X is CWGroup, X name N') - self.assertEqual(tuplify(rset.rows), [('guests',), ('managers',), ('owners',), ('users',)]) - self.assertEqual(rset.description, [('String',), ('String',), ('String',), ('String',)]) - - def test_select_complex_groupby(self): - rset = self.qexecute('Any N GROUPBY N WHERE X name N') - rset = self.qexecute('Any N,MAX(D) GROUPBY N LIMIT 5 WHERE X name N, X creation_date D') - - def test_select_inlined_groupby(self): - seid = self.qexecute('State X WHERE X name "deactivated"')[0][0] - rset = self.qexecute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid) - - def test_select_groupby_funccall(self): - rset = self.qexecute('Any YEAR(CD), COUNT(X) GROUPBY YEAR(CD) ' - 'WHERE X is CWUser, X creation_date CD') - self.assertListEqual(rset.rows, [[date.today().year, 2]]) - - def test_select_groupby_colnumber(self): - rset = self.qexecute('Any YEAR(CD), COUNT(X) GROUPBY 1 ' - 'WHERE X is CWUser, X creation_date CD') - self.assertListEqual(rset.rows, [[date.today().year, 2]]) - - def test_select_complex_orderby(self): - rset1 = self.qexecute('Any N ORDERBY N WHERE X name N') - self.assertEqual(sorted(rset1.rows), rset1.rows) - rset = self.qexecute('Any N ORDERBY N LIMIT 5 OFFSET 1 WHERE X name N') - self.assertEqual(rset.rows[0][0], rset1.rows[1][0]) - self.assertEqual(len(rset), 5) - - def test_select_5(self): - rset = self.qexecute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup') - self.assertEqual(tuplify(rset.rows), - [(2, 'guests',), - (3, 'managers',), - (4, 'owners',), - (5, 'users',)]) - self.assertEqual(rset.description, - [('CWGroup', 'String',), - ('CWGroup', 'String',), - ('CWGroup', 'String',), - ('CWGroup', 'String',)]) - - def test_select_6(self): - self.qexecute("INSERT Personne X: X nom 'bidule'")[0] - rset = self.qexecute('Any Y where X name TMP, Y nom in (TMP, "bidule")') - #self.assertEqual(rset.description, [('Personne',), ('Personne',)]) - self.assertIn(('Personne',), rset.description) - rset = self.qexecute('DISTINCT Any Y where X name TMP, Y nom in (TMP, "bidule")') - self.assertIn(('Personne',), rset.description) - - def test_select_not_attr(self): - peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - seid = self.qexecute("INSERT Societe X: X nom 'chouette'")[0][0] - rset = self.qexecute('Personne X WHERE NOT X nom 
"bidule"') - self.assertEqual(len(rset.rows), 0, rset.rows) - rset = self.qexecute('Personne X WHERE NOT X nom "bid"') - self.assertEqual(len(rset.rows), 1, rset.rows) - self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") - rset = self.qexecute('Personne X WHERE NOT X travaille S') - self.assertEqual(len(rset.rows), 0, rset.rows) - - def test_select_is_in(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Societe X: X nom 'chouette'") - self.assertEqual(len(self.qexecute("Any X WHERE X is IN (Personne, Societe)")), - 2) - - def test_select_not_rel(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Societe X: X nom 'chouette'") - self.qexecute("INSERT Personne X: X nom 'autre'") - self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") - rset = self.qexecute('Personne X WHERE NOT X travaille S') - self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.qexecute('Personne X WHERE NOT X travaille S, S nom "chouette"') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_nonregr_inlined(self): - self.qexecute("INSERT Note X: X para 'bidule'") - self.qexecute("INSERT Personne X: X nom 'chouette'") - self.qexecute("INSERT Personne X: X nom 'autre'") - self.qexecute("SET X ecrit_par P WHERE X para 'bidule', P nom 'chouette'") - rset = self.qexecute('Any U,T ORDERBY T DESC WHERE U is CWUser, ' - 'N ecrit_par U, N type T')#, {'x': self.ueid}) - self.assertEqual(len(rset.rows), 0) - - def test_select_nonregr_edition_not(self): - groupeids = set((2, 3, 4)) - groupreadperms = set(r[0] for r in self.qexecute('Any Y WHERE X name "CWGroup", ' - 'Y eid IN(2, 3, 4), X read_permission Y')) - rset = self.qexecute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", ' - 'Y eid IN(2, 3, 4), NOT X read_permission Y') - self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) - rset = self.qexecute('DISTINCT Any Y WHERE X name "CWGroup", ' - 'Y eid IN(2, 3, 4), NOT X read_permission Y') - self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) - - def test_select_outer_join(self): - peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - peid2 = self.qexecute("INSERT Personne X: X nom 'autre'")[0][0] - seid1 = self.qexecute("INSERT Societe X: X nom 'chouette'")[0][0] - seid2 = self.qexecute("INSERT Societe X: X nom 'chouetos'")[0][0] - rset = self.qexecute('Any X,S ORDERBY X WHERE X travaille S?') - self.assertEqual(rset.rows, [[peid1, None], [peid2, None]]) - self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") - rset = self.qexecute('Any X,S ORDERBY X WHERE X travaille S?') - self.assertEqual(rset.rows, [[peid1, seid1], [peid2, None]]) - rset = self.qexecute('Any S,X ORDERBY S WHERE X? travaille S') - self.assertEqual(rset.rows, [[seid1, peid1], [seid2, None]]) - - def test_select_outer_join_optimized(self): - peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - rset = self.qexecute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}) - self.assertEqual(rset.rows, [[peid1]]) - rset = self.qexecute('Any X WHERE X eid %(x)s, X require_permission P?', - {'x':peid1}) - self.assertEqual(rset.rows, [[peid1]]) - - def test_select_left_outer_join(self): - rset = self.qexecute('DISTINCT Any G WHERE U? in_group G') - self.assertEqual(len(rset), 4) - rset = self.qexecute('DISTINCT Any G WHERE U? 
in_group G, U eid %(x)s', - {'x': self.session.user.eid}) - self.assertEqual(len(rset), 4) - - def test_select_ambigous_outer_join(self): - teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0] - self.qexecute("INSERT Tag X: X name 'tagbis'")[0][0] - geid = self.qexecute("CWGroup G WHERE G name 'users'")[0][0] - self.qexecute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", - {'g': geid, 't': teid}) - rset = self.qexecute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN") - self.assertIn(['users', 'tag'], rset.rows) - self.assertIn(['activated', None], rset.rows) - rset = self.qexecute("Any GN,TN ORDERBY GN WHERE T tags G?, T name TN, G name GN") - self.assertEqual(rset.rows, [[None, 'tagbis'], ['users', 'tag']]) - - def test_select_not_inline_rel(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Note X: X type 'a'") - self.qexecute("INSERT Note X: X type 'b'") - self.qexecute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") - rset = self.qexecute('Note X WHERE NOT X ecrit_par P') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_not_unlinked_multiple_solutions(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Note X: X type 'a'") - self.qexecute("INSERT Note X: X type 'b'") - self.qexecute("SET Y evaluee X WHERE X type 'a', Y nom 'bidule'") - rset = self.qexecute('Note X WHERE NOT Y evaluee X') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_date_extraction(self): - self.qexecute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s", - {'d': datetime(2001, 2,3, 12,13)}) - test_data = [('YEAR', 2001), ('MONTH', 2), ('DAY', 3), - ('HOUR', 12), ('MINUTE', 13), ('WEEKDAY', 6)] - for funcname, result in test_data: - rset = self.qexecute('Any %s(D) WHERE X is Personne, X datenaiss D' - % funcname) - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.rows[0][0], result) - self.assertEqual(rset.description, [('Int',)]) - - def test_regexp_based_pattern_matching(self): - peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - peid2 = self.qexecute("INSERT Personne X: X nom 'cidule'")[0][0] - rset = self.qexecute('Any X WHERE X is Personne, X nom REGEXP "^b"') - self.assertEqual(len(rset.rows), 1, rset.rows) - self.assertEqual(rset.rows[0][0], peid1) - rset = self.qexecute('Any X WHERE X is Personne, X nom REGEXP "idu"') - self.assertEqual(len(rset.rows), 2, rset.rows) - - def test_select_aggregat_count(self): - rset = self.qexecute('Any COUNT(X)') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(len(rset.rows[0]), 1) - self.assertEqual(rset.description, [('Int',)]) - - def test_select_aggregat_sum(self): - rset = self.qexecute('Any SUM(O) WHERE X ordernum O') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(len(rset.rows[0]), 1) - self.assertEqual(rset.description, [('Int',)]) - - def test_select_aggregat_min(self): - rset = self.qexecute('Any MIN(X) WHERE X is Personne') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(len(rset.rows[0]), 1) - self.assertEqual(rset.description, [('Personne',)]) - rset = self.qexecute('Any MIN(O) WHERE X ordernum O') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(len(rset.rows[0]), 1) - self.assertEqual(rset.description, [('Int',)]) - - def test_select_aggregat_max(self): - rset = self.qexecute('Any MAX(X) WHERE X is Personne') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(len(rset.rows[0]), 1) - self.assertEqual(rset.description, [('Personne',)]) - rset = self.qexecute('Any 
MAX(O) WHERE X ordernum O') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(len(rset.rows[0]), 1) - self.assertEqual(rset.description, [('Int',)]) - - def test_select_custom_aggregat_concat_string(self): - rset = self.qexecute('Any GROUP_CONCAT(N) WHERE X is CWGroup, X name N') - self.assertTrue(rset) - self.assertEqual(sorted(rset[0][0].split(', ')), ['guests', 'managers', - 'owners', 'users']) - - def test_select_custom_regproc_limit_size(self): - rset = self.qexecute('Any TEXT_LIMIT_SIZE(N, 3) WHERE X is CWGroup, X name N, X name "managers"') - self.assertTrue(rset) - self.assertEqual(rset[0][0], 'man...') - self.qexecute("INSERT Basket X: X name 'bidule', X description 'hop hop', X description_format 'text/html'") - rset = self.qexecute('Any LIMIT_SIZE(D, DF, 3) WHERE X is Basket, X description D, X description_format DF') - self.assertTrue(rset) - self.assertEqual(rset[0][0], 'hop...') - - def test_select_regproc_orderby(self): - rset = self.qexecute('DISTINCT Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, X name "managers"') - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][1], 'managers') - rset = self.qexecute('Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, NOT U in_group X, U login "admin"') - self.assertEqual(len(rset), 3) - self.assertEqual(rset[0][1], 'owners') - - def test_select_aggregat_sort(self): - rset = self.qexecute('Any G, COUNT(U) GROUPBY G ORDERBY 2 WHERE U in_group G') - self.assertEqual(len(rset.rows), 2) - self.assertEqual(len(rset.rows[0]), 2) - self.assertEqual(rset.description[0], ('CWGroup', 'Int',)) - - def test_select_aggregat_having(self): - rset = self.qexecute('Any N,COUNT(RDEF) GROUPBY N ORDERBY 2,N ' - 'WHERE RT name N, RDEF relation_type RT ' - 'HAVING COUNT(RDEF) > 10') - self.assertListEqual(rset.rows, - [[u'description_format', 13], - [u'description', 14], - [u'name', 19], - [u'created_by', 45], - [u'creation_date', 45], - [u'cw_source', 45], - [u'cwuri', 45], - [u'in_basket', 45], - [u'is', 45], - [u'is_instance_of', 45], - [u'modification_date', 45], - [u'owned_by', 45]]) - - def test_select_aggregat_having_dumb(self): - # dumb but should not raise an error - rset = self.qexecute('Any U,COUNT(X) GROUPBY U ' - 'WHERE U eid %(x)s, X owned_by U ' - 'HAVING COUNT(X) > 10', {'x': self.ueid}) - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.rows[0][0], self.ueid) - - def test_select_having_non_aggregat_1(self): - rset = self.qexecute('Any L WHERE X login L, X creation_date CD ' - 'HAVING YEAR(CD) = %s' % date.today().year) - self.assertListEqual(rset.rows, - [[u'admin'], - [u'anon']]) - - def test_select_having_non_aggregat_2(self): - rset = self.qexecute('Any L GROUPBY L WHERE X login L, X in_group G, ' - 'X creation_date CD HAVING YEAR(CD) = %s OR COUNT(G) > 1' - % date.today().year) - self.assertListEqual(rset.rows, - [[u'admin'], - [u'anon']]) - - def test_select_complex_sort(self): - """need sqlite including http://www.sqlite.org/cvstrac/tktview?tn=3773 fix""" - rset = self.qexecute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D') - result = rset.rows - result.sort() - self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,), (5,)]) - - def test_select_upper(self): - rset = self.qexecute('Any X, UPPER(L) ORDERBY L WHERE X is CWUser, X login L') - self.assertEqual(len(rset.rows), 2) - self.assertEqual(rset.rows[0][1], 'ADMIN') - self.assertEqual(rset.description[0], ('CWUser', 'String',)) - self.assertEqual(rset.rows[1][1], 'ANON') - self.assertEqual(rset.description[1], 
('CWUser', 'String',)) - eid = rset.rows[0][0] - rset = self.qexecute('Any UPPER(L) WHERE X eid %s, X login L'%eid) - self.assertEqual(rset.rows[0][0], 'ADMIN') - self.assertEqual(rset.description, [('String',)]) - - def test_select_float_abs(self): - # test positive number - eid = self.qexecute('INSERT Affaire A: A invoiced %(i)s', {'i': 1.2})[0][0] - rset = self.qexecute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid}) - self.assertEqual(rset.rows[0][0], 1.2) - # test negative number - eid = self.qexecute('INSERT Affaire A: A invoiced %(i)s', {'i': -1.2})[0][0] - rset = self.qexecute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid}) - self.assertEqual(rset.rows[0][0], 1.2) - - def test_select_int_abs(self): - # test positive number - eid = self.qexecute('INSERT Affaire A: A duration %(d)s', {'d': 12})[0][0] - rset = self.qexecute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid}) - self.assertEqual(rset.rows[0][0], 12) - # test negative number - eid = self.qexecute('INSERT Affaire A: A duration %(d)s', {'d': -12})[0][0] - rset = self.qexecute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid}) - self.assertEqual(rset.rows[0][0], 12) - -## def test_select_simplified(self): -## ueid = self.session.user.eid -## rset = self.qexecute('Any L WHERE %s login L'%ueid) -## self.assertEqual(rset.rows[0][0], 'admin') -## rset = self.qexecute('Any L WHERE %(x)s login L', {'x':ueid}) -## self.assertEqual(rset.rows[0][0], 'admin') - - def test_select_searchable_text_1(self): - rset = self.qexecute(u"INSERT Personne X: X nom 'bidle'") - rset = self.qexecute(u"INSERT Societe X: X nom 'bidle'") - rset = self.qexecute("INSERT Societe X: X nom 'chouette'") - rset = self.qexecute('Any X where X has_text %(text)s', {'text': u'bidle'}) - self.assertEqual(len(rset.rows), 2, rset.rows) - rset = self.qexecute(u'Any N where N has_text "bidle"') - self.assertEqual(len(rset.rows), 2, rset.rows) - biduleeids = [r[0] for r in rset.rows] - rset = self.qexecute(u'Any N where NOT N has_text "bidle"') - self.assertFalse([r[0] for r in rset.rows if r[0] in biduleeids]) - # duh? 
- rset = self.qexecute('Any X WHERE X has_text %(text)s', {'text': u'a'}) - - def test_select_searchable_text_2(self): - rset = self.qexecute("INSERT Personne X: X nom 'bidule'") - rset = self.qexecute("INSERT Personne X: X nom 'chouette'") - rset = self.qexecute("INSERT Societe X: X nom 'bidule'") - rset = self.qexecute('Personne N where N has_text "bidule"') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_searchable_text_3(self): - rset = self.qexecute("INSERT Personne X: X nom 'bidule', X sexe 'M'") - rset = self.qexecute("INSERT Personne X: X nom 'bidule', X sexe 'F'") - rset = self.qexecute("INSERT Societe X: X nom 'bidule'") - rset = self.qexecute('Any X where X has_text "bidule" and X sexe "M"') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_multiple_searchable_text(self): - self.qexecute(u"INSERT Personne X: X nom 'bidle'") - self.qexecute("INSERT Societe X: X nom 'chouette', S travaille X") - self.qexecute(u"INSERT Personne X: X nom 'bidle'") - rset = self.qexecute('Personne X WHERE X has_text %(text)s, X travaille S, S has_text %(text2)s', - {'text': u'bidle', - 'text2': u'chouette',} - ) - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_no_descr(self): - rset = self.qexecute('Any X WHERE X is CWGroup', build_descr=0) - rset.rows.sort() - self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)]) - self.assertEqual(rset.description, ()) - - def test_select_limit_offset(self): - rset = self.qexecute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N') - self.assertEqual(tuplify(rset.rows), [(2,), (3,)]) - self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',)]) - rset = self.qexecute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N') - self.assertEqual(tuplify(rset.rows), [(4,), (5,)]) - - def test_select_symmetric(self): - self.qexecute("INSERT Personne X: X nom 'machin'") - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Personne X: X nom 'chouette'") - self.qexecute("INSERT Personne X: X nom 'trucmuche'") - self.qexecute("SET X connait Y WHERE X nom 'chouette', Y nom 'bidule'") - self.qexecute("SET X connait Y WHERE X nom 'machin', Y nom 'chouette'") - rset = self.qexecute('Any P WHERE P connait P2') - self.assertEqual(len(rset.rows), 4, rset.rows) - rset = self.qexecute('Any P WHERE NOT P connait P2') - self.assertEqual(len(rset.rows), 1, rset.rows) # trucmuche - rset = self.qexecute('Any P WHERE P connait P2, P2 nom "bidule"') - self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.qexecute('Any P WHERE P2 connait P, P2 nom "bidule"') - self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.qexecute('Any P WHERE P connait P2, P2 nom "chouette"') - self.assertEqual(len(rset.rows), 2, rset.rows) - rset = self.qexecute('Any P WHERE P2 connait P, P2 nom "chouette"') - self.assertEqual(len(rset.rows), 2, rset.rows) - - def test_select_inline(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Note X: X type 'a'") - self.qexecute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") - rset = self.qexecute('Any N where N ecrit_par X, X nom "bidule"') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_creation_date(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - rset = self.qexecute('Any D WHERE X nom "bidule", X creation_date D') - self.assertEqual(len(rset.rows), 1) - - def test_select_or_relation(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Personne X: X nom 'chouette'") 
- self.qexecute("INSERT Societe X: X nom 'logilab'") - self.qexecute("INSERT Societe X: X nom 'caesium'") - self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'logilab'") - rset = self.qexecute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, ' - 'S1 nom "logilab", S2 nom "caesium"') - self.assertEqual(len(rset.rows), 1) - self.qexecute("SET P travaille S WHERE P nom 'chouette', S nom 'caesium'") - rset = self.qexecute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, ' - 'S1 nom "logilab", S2 nom "caesium"') - self.assertEqual(len(rset.rows), 2) - - def test_select_or_sym_relation(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Personne X: X nom 'chouette'") - self.qexecute("INSERT Personne X: X nom 'truc'") - self.qexecute("SET P connait S WHERE P nom 'bidule', S nom 'chouette'") - rset = self.qexecute('DISTINCT Any P WHERE S connait P, S nom "chouette"') - self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.qexecute('DISTINCT Any P WHERE P connait S or S connait P, S nom "chouette"') - self.assertEqual(len(rset.rows), 1, rset.rows) - self.qexecute("SET P connait S WHERE P nom 'chouette', S nom 'truc'") - rset = self.qexecute('DISTINCT Any P WHERE S connait P, S nom "chouette"') - self.assertEqual(len(rset.rows), 2, rset.rows) - rset = self.qexecute('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"') - self.assertEqual(len(rset.rows), 2, rset.rows) - - def test_select_follow_relation(self): - self.qexecute("INSERT Affaire X: X sujet 'cool'") - self.qexecute("INSERT Societe X: X nom 'chouette'") - self.qexecute("SET A concerne S WHERE A is Affaire, S is Societe") - self.qexecute("INSERT Note X: X para 'truc'") - self.qexecute("SET S evaluee N WHERE S is Societe, N is Note") - self.qexecute("INSERT Societe X: X nom 'bidule'") - self.qexecute("INSERT Note X: X para 'troc'") - self.qexecute("SET S evaluee N WHERE S nom 'bidule', N para 'troc'") - rset = self.qexecute('DISTINCT Any A,N WHERE A concerne S, S evaluee N') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_select_ordered_distinct_1(self): - self.assertRaises(BadRQLQuery, - self.qexecute, 'DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R') - - def test_select_ordered_distinct_2(self): - self.qexecute("INSERT Affaire X: X sujet 'minor'") - self.qexecute("INSERT Affaire X: X sujet 'zou'") - self.qexecute("INSERT Affaire X: X sujet 'abcd'") - rset = self.qexecute('DISTINCT Any S ORDERBY S WHERE A is Affaire, A sujet S') - self.assertEqual(rset.rows, [['abcd'], ['minor'], ['zou']]) - - def test_select_ordered_distinct_3(self): - rset = self.qexecute('DISTINCT Any N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N') - self.assertEqual(rset.rows, [['owners'], ['guests'], ['users'], ['managers']]) - - def test_select_or_value(self): - rset = self.qexecute('Any U WHERE U in_group G, G name "owners" OR G name "users"') - self.assertEqual(len(rset.rows), 0) - rset = self.qexecute('Any U WHERE U in_group G, G name "guests" OR G name "managers"') - self.assertEqual(len(rset.rows), 2) - - def test_select_explicit_eid(self): - rset = self.qexecute('Any X,E WHERE X owned_by U, X eid E, U eid %(u)s', - {'u': self.session.user.eid}) - self.assertTrue(rset) - self.assertEqual(rset.description[0][1], 'Int') - -# def test_select_rewritten_optional(self): -# eid = self.qexecute("INSERT Affaire X: X sujet 'cool'")[0][0] -# rset = self.qexecute('Any X WHERE X eid %(x)s, EXISTS(X owned_by U) OR EXISTS(X concerne S?, S owned_by U)', -# 
{'x': eid}, 'x') -# self.assertEqual(rset.rows, [[eid]]) - - def test_today_bug(self): - self.qexecute("INSERT Tag X: X name 'bidule', X creation_date NOW") - self.qexecute("INSERT Tag Y: Y name 'toto'") - rset = self.qexecute("Any D WHERE X name in ('bidule', 'toto') , X creation_date D") - self.assertIsInstance(rset.rows[0][0], datetime) - rset = self.qexecute('Tag X WHERE X creation_date TODAY') - self.assertEqual(len(rset.rows), 2) - - def test_sqlite_patch(self): - """this test monkey patch done by sqlutils._install_sqlite_querier_patch""" - self.qexecute("INSERT Personne X: X nom 'bidule', X datenaiss NOW, X tzdatenaiss NOW") - rset = self.qexecute('Any MAX(D) WHERE X is Personne, X datenaiss D') - self.assertIsInstance(rset[0][0], datetime) - rset = self.qexecute('Any MAX(D) WHERE X is Personne, X tzdatenaiss D') - self.assertIsInstance(rset[0][0], datetime) - self.assertEqual(rset[0][0].tzinfo, pytz.utc) - - def test_today(self): - self.qexecute("INSERT Tag X: X name 'bidule', X creation_date TODAY") - self.qexecute("INSERT Tag Y: Y name 'toto'") - rset = self.qexecute('Tag X WHERE X creation_date TODAY') - self.assertEqual(len(rset.rows), 2) - - def test_select_boolean(self): - rset = self.qexecute('Any N WHERE X is CWEType, X name N, X final %(val)s', - {'val': True}) - self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes', - 'Date', 'Datetime', - 'Decimal', 'Float', - 'Int', 'Interval', - 'Password', 'String', - 'TZDatetime', 'TZTime', - 'Time']) - rset = self.qexecute('Any N WHERE X is CWEType, X name N, X final TRUE') - self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes', - 'Date', 'Datetime', - 'Decimal', 'Float', - 'Int', 'Interval', - 'Password', 'String', - 'TZDatetime', 'TZTime', - 'Time']) - with self.session.new_cnx() as cnx: - cnx.create_entity('Personne', nom=u'louis', test=True) - self.assertEqual(len(cnx.execute('Any X WHERE X test %(val)s', {'val': True})), 1) - self.assertEqual(len(cnx.execute('Any X WHERE X test TRUE')), 1) - self.assertEqual(len(cnx.execute('Any X WHERE X test %(val)s', {'val': False})), 0) - self.assertEqual(len(cnx.execute('Any X WHERE X test FALSE')), 0) - - def test_select_constant(self): - rset = self.qexecute('Any X, "toto" ORDERBY X WHERE X is CWGroup') - self.assertEqual(rset.rows, - [list(x) for x in zip((2,3,4,5), ('toto','toto','toto','toto',))]) - self.assertIsInstance(rset[0][1], text_type) - self.assertEqual(rset.description, - list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), - ('String', 'String', 'String', 'String',)))) - rset = self.qexecute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'}) - self.assertEqual(rset.rows, - list(map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))) - self.assertIsInstance(rset[0][1], text_type) - self.assertEqual(rset.description, - list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), - ('String', 'String', 'String', 'String',)))) - rset = self.qexecute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", ' - 'X in_group G, G name GN') - - def test_select_union(self): - rset = self.qexecute('Any X,N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X name N, X transition_of WF, WF workflow_of E, E name %(name)s)' - ' UNION ' - '(Any X,N WHERE X name N, X state_of WF, WF workflow_of E, E name %(name)s))', - {'name': 'CWUser'}) - self.assertEqual([x[1] for x in rset.rows], - ['activate', 'activated', 'deactivate', 'deactivated']) - self.assertEqual(rset.description, - [('Transition', 'String'), ('State', 
'String'), - ('Transition', 'String'), ('State', 'String')]) - - def test_select_union_aggregat(self): - # meaningless, the goal in to have group by done on different attribute - # for each sub-query - self.qexecute('(Any N,COUNT(X) GROUPBY N WHERE X name N, X is State)' - ' UNION ' - '(Any N,COUNT(X) GROUPBY N ORDERBY 2 WHERE X login N)') - - def test_select_union_aggregat_independant_group(self): - with self.session.new_cnx() as cnx: - cnx.execute('INSERT State X: X name "hop"') - cnx.execute('INSERT State X: X name "hop"') - cnx.execute('INSERT Transition X: X name "hop"') - cnx.execute('INSERT Transition X: X name "hop"') - rset = cnx.execute('Any N,NX ORDERBY 2 WITH N,NX BEING ' - '((Any N,COUNT(X) GROUPBY N WHERE X name N, ' - ' X is State HAVING COUNT(X)>1)' - ' UNION ' - '(Any N,COUNT(X) GROUPBY N WHERE X name N, ' - ' X is Transition HAVING COUNT(X)>1))') - self.assertEqual(rset.rows, [[u'hop', 2], [u'hop', 2]]) - - def test_select_union_selection_with_diff_variables(self): - rset = self.qexecute('(Any N WHERE X name N, X is State)' - ' UNION ' - '(Any NN WHERE XX name NN, XX is Transition)') - self.assertEqual(sorted(r[0] for r in rset.rows), - ['abort', 'activate', 'activated', 'ben non', - 'deactivate', 'deactivated', 'done', 'en cours', - 'end', 'finie', 'markasdone', 'pitetre', 'redoit', - 'start', 'todo']) - - def test_select_union_description_diff_var(self): - eid1 = self.qexecute('CWGroup X WHERE X name "managers"')[0][0] - eid2 = self.qexecute('CWUser X WHERE X login "admin"')[0][0] - rset = self.qexecute('(Any X WHERE X eid %(x)s)' - ' UNION ' - '(Any Y WHERE Y eid %(y)s)', - {'x': eid1, 'y': eid2}) - self.assertEqual(rset.description[:], [('CWGroup',), ('CWUser',)]) - - def test_exists(self): - geid = self.qexecute("INSERT CWGroup X: X name 'lulufanclub'")[0][0] - self.qexecute("SET U in_group G WHERE G name 'lulufanclub'") - peid = self.qexecute("INSERT Personne X: X prenom 'lulu', X nom 'petit'")[0][0] - rset = self.qexecute("Any X WHERE X prenom 'lulu'," - "EXISTS (U in_group G, G name 'lulufanclub' OR G name 'managers');") - self.assertEqual(rset.rows, [[peid]]) - - def test_identity(self): - eid = self.qexecute('Any X WHERE X identity Y, Y eid 1')[0][0] - self.assertEqual(eid, 1) - eid = self.qexecute('Any X WHERE Y identity X, Y eid 1')[0][0] - self.assertEqual(eid, 1) - login = self.qexecute('Any L WHERE X login "admin", X identity Y, Y login L')[0][0] - self.assertEqual(login, 'admin') - - def test_select_date_mathexp(self): - rset = self.qexecute('Any X, TODAY - CD WHERE X is CWUser, X creation_date CD') - self.assertTrue(rset) - self.assertEqual(rset.description[0][1], 'Interval') - eid, = self.qexecute("INSERT Personne X: X nom 'bidule'")[0] - rset = self.qexecute('Any X, NOW - CD WHERE X is Personne, X creation_date CD') - self.assertEqual(rset.description[0][1], 'Interval') - - def test_select_subquery_aggregat_1(self): - # percent users by groups - self.qexecute('SET X in_group G WHERE G name "users"') - rset = self.qexecute('Any GN, COUNT(X)*100/T GROUPBY GN ORDERBY 2,1' - ' WHERE G name GN, X in_group G' - ' WITH T BEING (Any COUNT(U) WHERE U is CWUser)') - self.assertEqual(rset.rows, [[u'guests', 50], [u'managers', 50], [u'users', 100]]) - self.assertEqual(rset.description, [('String', 'Int'), ('String', 'Int'), ('String', 'Int')]) - - def test_select_subquery_aggregat_2(self): - expected = self.qexecute('Any X, 0, COUNT(T) GROUPBY X ' - 'WHERE X is Workflow, T transition_of X').rows - rset = self.qexecute(''' -Any P1,B,E WHERE P1 identity P2 WITH - 
P1,B BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, - T? transition_of P, T type "auto"), - P2,E BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, - T? transition_of P, T type "normal")''') - self.assertEqual(sorted(rset.rows), sorted(expected)) - - def test_select_subquery_const(self): - rset = self.qexecute('Any X WITH X BEING ((Any NULL) UNION (Any "toto"))') - self.assertEqual(rset.rows, [[None], ['toto']]) - self.assertEqual(rset.description, [(None,), ('String',)]) - - # insertion queries tests ################################################# - - def test_insert_is(self): - eid, = self.qexecute("INSERT Personne X: X nom 'bidule'")[0] - etype, = self.qexecute("Any TN WHERE X is T, X eid %s, T name TN" % eid)[0] - self.assertEqual(etype, 'Personne') - self.qexecute("INSERT Personne X: X nom 'managers'") - - def test_insert_1(self): - rset = self.qexecute("INSERT Personne X: X nom 'bidule'") - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.description, [('Personne',)]) - rset = self.qexecute('Personne X WHERE X nom "bidule"') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne',)]) - - def test_insert_1_multiple(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Personne X: X nom 'chouette'") - rset = self.qexecute("INSERT Societe Y: Y nom N, P travaille Y WHERE P nom N") - self.assertEqual(len(rset.rows), 2) - self.assertEqual(rset.description, [('Societe',), ('Societe',)]) - - def test_insert_2(self): - rset = self.qexecute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'") - self.assertEqual(rset.description, [('Personne', 'Personne')]) - rset = self.qexecute('Personne X WHERE X nom "bidule" or X nom "tutu"') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne',), ('Personne',)]) - - def test_insert_3(self): - self.qexecute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y") - rset = self.qexecute('Personne X WHERE X nom "admin"') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne',)]) - - def test_insert_4(self): - self.qexecute("INSERT Societe Y: Y nom 'toto'") - self.qexecute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'") - rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne', 'Societe',)]) - - def test_insert_4bis(self): - peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", - {'x': str(peid)})[0][0] - self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) - self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", - {'x': str(seid)}) - self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) - - def test_insert_4ter(self): - peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", - {'x': text_type(peid)})[0][0] - self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) - self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", - {'x': text_type(seid)}) - self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) - - def test_insert_5(self): - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Societe Y: Y nom 'toto', X 
travaille Y WHERE X nom 'bidule'") - rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne', 'Societe',)]) - - def test_insert_5bis(self): - peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", - {'x': peid}) - rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne', 'Societe',)]) - - def test_insert_6(self): - self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y") - rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne', 'Societe',)]) - - def test_insert_7(self): - self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " - "X travaille Y WHERE U login 'admin', U login N") - rset = self.qexecute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne', 'Societe',)]) - - def test_insert_7_2(self): - self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " - "X travaille Y WHERE U login N") - rset = self.qexecute('Any X, Y WHERE Y nom "toto", X travaille Y') - self.assertEqual(len(rset), 2) - self.assertEqual(rset.description, [('Personne', 'Societe',), - ('Personne', 'Societe',)]) - - def test_insert_8(self): - self.qexecute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y " - "WHERE U login 'admin', U login N") - rset = self.qexecute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y') - self.assertTrue(rset.rows) - self.assertEqual(rset.description, [('Personne', 'Societe',)]) - - def test_insert_9(self): - self.qexecute("INSERT Societe X: X nom 'Lo'") - self.qexecute("INSERT Societe X: X nom 'Gi'") - self.qexecute("INSERT SubDivision X: X nom 'Lab'") - rset = self.qexecute("INSERT Personne X: X nom N, X travaille Y, X travaille_subdivision Z " - "WHERE Y is Societe, Z is SubDivision, Y nom N") - self.assertEqual(len(rset), 2) - self.assertEqual(rset.description, [('Personne',), ('Personne',)]) - # self.assertSetEqual(set(x.nom for x in rset.entities()), - # ['Lo', 'Gi']) - # self.assertSetEqual(set(y.nom for x in rset.entities() for y in x.travaille), - # ['Lo', 'Gi']) - # self.assertEqual([y.nom for x in rset.entities() for y in x.travaille_subdivision], - # ['Lab', 'Lab']) - - def test_insert_query_error(self): - self.assertRaises(Exception, - self.qexecute, - "INSERT Personne X: X nom 'toto', X is Personne") - self.assertRaises(Exception, - self.qexecute, - "INSERT Personne X: X nom 'toto', X is_instance_of Personne") - self.assertRaises(QueryError, - self.qexecute, - "INSERT Personne X: X nom 'toto', X has_text 'tutu'") - - self.assertRaises(QueryError, - self.qexecute, - "INSERT CWUser X: X login 'toto', X eid %s" % cnx.user(self.session).eid) - - def test_insertion_description_with_where(self): - rset = self.qexecute('INSERT CWUser E, EmailAddress EM: E login "X", E upassword "X", ' - 'E primary_email EM, EM address "X", E in_group G ' - 'WHERE G name "managers"') - self.assertEqual(list(rset.description[0]), ['CWUser', 'EmailAddress']) - - # deletion queries tests ################################################## - - def test_delete_1(self): - self.qexecute("INSERT Personne Y: Y nom 'toto'") - rset = 
self.qexecute('Personne X WHERE X nom "toto"') - self.assertEqual(len(rset.rows), 1) - drset = self.qexecute("DELETE Personne Y WHERE Y nom 'toto'") - self.assertEqual(drset.rows, rset.rows) - rset = self.qexecute('Personne X WHERE X nom "toto"') - self.assertEqual(len(rset.rows), 0) - - def test_delete_2(self): - rset = self.qexecute("INSERT Personne X, Personne Y, Societe Z : " - "X nom 'syt', Y nom 'adim', Z nom 'Logilab', X travaille Z, Y travaille Z") - self.assertEqual(len(rset), 1) - self.assertEqual(len(rset[0]), 3) - self.assertEqual(rset.description[0], ('Personne', 'Personne', 'Societe')) - self.assertEqual(self.qexecute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt') - rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') - self.assertEqual(len(rset.rows), 2, rset.rows) - self.qexecute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilabo'") - rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') - self.assertEqual(len(rset.rows), 2, rset.rows) - self.qexecute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilab'") - rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') - self.assertEqual(len(rset.rows), 0, rset.rows) - - def test_delete_3(self): - s = self.user_groups_session('users') - with s.new_cnx() as cnx: - peid, = self.o.execute(cnx, "INSERT Personne P: P nom 'toto'")[0] - seid, = self.o.execute(cnx, "INSERT Societe S: S nom 'logilab'")[0] - self.o.execute(cnx, "SET P travaille S") - cnx.commit() - rset = self.qexecute('Personne P WHERE P travaille S') - self.assertEqual(len(rset.rows), 1) - self.qexecute("DELETE X travaille Y WHERE X eid %s, Y eid %s" % (peid, seid)) - rset = self.qexecute('Personne P WHERE P travaille S') - self.assertEqual(len(rset.rows), 0) - - def test_delete_symmetric(self): - teid1 = self.qexecute("INSERT Folder T: T name 'toto'")[0][0] - teid2 = self.qexecute("INSERT Folder T: T name 'tutu'")[0][0] - self.qexecute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) - rset = self.qexecute('Any X,Y WHERE X see_also Y') - self.assertEqual(len(rset) , 2, rset.rows) - self.qexecute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) - rset = self.qexecute('Any X,Y WHERE X see_also Y') - self.assertEqual(len(rset) , 0) - self.qexecute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) - rset = self.qexecute('Any X,Y WHERE X see_also Y') - self.assertEqual(len(rset) , 2) - self.qexecute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid2, teid1)) - rset = self.qexecute('Any X,Y WHERE X see_also Y') - self.assertEqual(len(rset) , 0) - - def test_nonregr_delete_cache(self): - """test that relations are properly cleaned when an entity is deleted - (using cachekey on sql generation returned always the same query for an eid, - whatever the relation) - """ - aeid, = self.qexecute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')[0] - # XXX would be nice if the rql below was enough... 
- #'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y' - eeid, = self.qexecute('INSERT Email X: X messageid "<1234>", X subject "test", ' - 'X sender Y, X recipients Y WHERE Y is EmailAddress')[0] - self.qexecute("DELETE Email X") - with self.session.new_cnx() as cnx: - sqlc = cnx.cnxset.cu - sqlc.execute('SELECT * FROM recipients_relation') - self.assertEqual(len(sqlc.fetchall()), 0) - sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid) - self.assertEqual(len(sqlc.fetchall()), 0) - - def test_nonregr_delete_cache2(self): - eid = self.qexecute("INSERT Folder T: T name 'toto'")[0][0] - # fill the cache - self.qexecute("Any X WHERE X eid %(x)s", {'x': eid}) - self.qexecute("Any X WHERE X eid %s" % eid) - self.qexecute("Folder X WHERE X eid %(x)s", {'x': eid}) - self.qexecute("Folder X WHERE X eid %s" % eid) - self.qexecute("DELETE Folder T WHERE T eid %s" % eid) - rset = self.qexecute("Any X WHERE X eid %(x)s", {'x': eid}) - self.assertEqual(rset.rows, []) - rset = self.qexecute("Any X WHERE X eid %s" % eid) - self.assertEqual(rset.rows, []) - rset = self.qexecute("Folder X WHERE X eid %(x)s", {'x': eid}) - self.assertEqual(rset.rows, []) - rset = self.qexecute("Folder X WHERE X eid %s" %eid) - self.assertEqual(rset.rows, []) - - # update queries tests #################################################### - - def test_update_1(self): - peid = self.qexecute("INSERT Personne Y: Y nom 'toto'")[0][0] - rset = self.qexecute('Personne X WHERE X nom "toto"') - self.assertEqual(len(rset.rows), 1) - rset = self.qexecute("SET X nom 'tutu', X prenom 'original' WHERE X is Personne, X nom 'toto'") - self.assertEqual(tuplify(rset.rows), [(peid, 'tutu', 'original')]) - rset = self.qexecute('Any Y, Z WHERE X is Personne, X nom Y, X prenom Z') - self.assertEqual(tuplify(rset.rows), [('tutu', 'original')]) - - def test_update_2(self): - peid, seid = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")[0] - rset = self.qexecute("SET X travaille Y WHERE X nom 'bidule', Y nom 'toto'") - self.assertEqual(tuplify(rset.rows), [(peid, seid)]) - rset = self.qexecute('Any X, Y WHERE X travaille Y') - self.assertEqual(len(rset.rows), 1) - - def test_update_2bis(self): - rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") - eid1, eid2 = rset[0][0], rset[0][1] - self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s", - {'x': str(eid1), 'y': str(eid2)}) - rset = self.qexecute('Any X, Y WHERE X travaille Y') - self.assertEqual(len(rset.rows), 1) - # test add of an existant relation but with NOT X rel Y protection - self.assertFalse(self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s," - "NOT X travaille Y", - {'x': str(eid1), 'y': str(eid2)})) - - def test_update_2ter(self): - rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") - eid1, eid2 = rset[0][0], rset[0][1] - self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s", - {'x': text_type(eid1), 'y': text_type(eid2)}) - rset = self.qexecute('Any X, Y WHERE X travaille Y') - self.assertEqual(len(rset.rows), 1) - - def test_update_multiple1(self): - peid1 = self.qexecute("INSERT Personne Y: Y nom 'tutu'")[0][0] - peid2 = self.qexecute("INSERT Personne Y: Y nom 'toto'")[0][0] - self.qexecute("SET X nom 'tutu', Y nom 'toto' WHERE X nom 'toto', Y nom 'tutu'") - self.assertEqual(self.qexecute('Any X WHERE X nom "toto"').rows, [[peid1]]) - self.assertEqual(self.qexecute('Any X WHERE X nom "tutu"').rows, [[peid2]]) 
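The update tests above all exercise the same write pattern: an RQL INSERT or SET query executed through a connection with a substitution dictionary, whose result set reports the eids it touched. A minimal sketch of that pattern, assuming a repository connection `cnx` such as the ones obtained in these tests (e.g. from admin_access.repo_cnx()) and the Personne/Societe/travaille test schema; the helper name is hypothetical:

def link_person_to_company(cnx, person_name, company_name):
    # INSERT returns a result set whose first row holds the new entity's eid
    peid = cnx.execute("INSERT Personne X: X nom %(n)s", {'n': person_name})[0][0]
    seid = cnx.execute("INSERT Societe Y: Y nom %(n)s", {'n': company_name})[0][0]
    # the NOT guard mirrors test_update_2bis above: the relation is only added
    # when it does not already exist, otherwise the result set comes back empty
    rset = cnx.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s, "
                       "NOT X travaille Y",
                       {'x': peid, 'y': seid})
    cnx.commit()
    return rset.rows  # one (person eid, company eid) row when the link was added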
- - def test_update_multiple2(self): - with self.session.new_cnx() as cnx: - ueid = cnx.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] - peid1 = cnx.execute("INSERT Personne Y: Y nom 'turlu'")[0][0] - peid2 = cnx.execute("INSERT Personne Y: Y nom 'tutu'")[0][0] - cnx.execute('SET P1 owned_by U, P2 owned_by U ' - 'WHERE P1 eid %s, P2 eid %s, U eid %s' % (peid1, peid2, ueid)) - self.assertTrue(cnx.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' - % (peid1, ueid))) - self.assertTrue(cnx.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' - % (peid2, ueid))) - - def test_update_math_expr(self): - orders = [r[0] for r in self.qexecute('Any O ORDERBY O WHERE ST name "Personne", ' - 'X from_entity ST, X ordernum O')] - for i,v in enumerate(orders): - if v != orders[0]: - splitidx = i - break - self.qexecute('SET X ordernum Y+1 WHERE X from_entity SE, SE name "Personne", ' - 'X ordernum Y, X ordernum >= %(order)s', - {'order': orders[splitidx]}) - orders2 = [r[0] for r in self.qexecute('Any O ORDERBY O WHERE ST name "Personne", ' - 'X from_entity ST, X ordernum O')] - orders = orders[:splitidx] + [o+1 for o in orders[splitidx:]] - self.assertEqual(orders2, orders) - - def test_update_string_concat(self): - beid = self.qexecute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] - self.qexecute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', - {'suffix': u'-moved'}) - newname = self.qexecute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] - self.assertEqual(newname, 'toto-moved') - - def test_update_not_exists(self): - rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") - eid1, eid2 = rset[0][0], rset[0][1] - rset = self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s, " - "NOT EXISTS(Z ecrit_par X)", - {'x': text_type(eid1), 'y': text_type(eid2)}) - self.assertEqual(tuplify(rset.rows), [(eid1, eid2)]) - - def test_update_query_error(self): - self.qexecute("INSERT Personne Y: Y nom 'toto'") - self.assertRaises(Exception, self.qexecute, "SET X nom 'toto', X is Personne") - self.assertRaises(QueryError, self.qexecute, "SET X nom 'toto', X has_text 'tutu' " - "WHERE X is Personne") - self.assertRaises(QueryError, - self.qexecute, - "SET X login 'tutu', X eid %s" % cnx.user(self.session).eid) - - - # HAVING on write queries test ############################################# - - def test_update_having(self): - peid1 = self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0] - peid2 = self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2")[0][0] - rset = self.qexecute("SET X tel 3 WHERE X tel TEL HAVING TEL&1=1") - self.assertEqual(tuplify(rset.rows), [(peid1, 3)]) - - def test_insert_having(self): - self.skipTest('unsupported yet') - self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0] - self.assertFalse(self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2 " - "WHERE X tel XT HAVING XT&2=2")) - self.assertTrue(self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2 " - "WHERE X tel XT HAVING XT&1=1")) - - def test_delete_having(self): - self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0] - self.assertFalse(self.qexecute("DELETE Personne Y WHERE X tel XT HAVING XT&2=2")) - self.assertTrue(self.qexecute("DELETE Personne Y WHERE X tel XT HAVING XT&1=1")) - - # upassword encryption tests ################################################# - - def test_insert_upassword(self): - rset = self.qexecute("INSERT CWUser X: X login 'bob', X upassword 'toto', " - "X in_group G WHERE G 
name 'users'") - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.description, [('CWUser',)]) - self.assertRaises(Unauthorized, - self.qexecute, "Any P WHERE X is CWUser, X login 'bob', X upassword P") - with self.session.new_cnx() as cnx: - cursor = cnx.cnxset.cu - cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" - % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) - passwd = binary_type(cursor.fetchone()[0]) - self.assertEqual(passwd, crypt_password('toto', passwd)) - rset = self.qexecute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", - {'pwd': Binary(passwd)}) - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.description, [('CWUser',)]) - - def test_update_upassword(self): - with self.session.new_cnx() as cnx: - rset = cnx.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s", - {'pwd': 'toto'}) - self.assertEqual(rset.description[0][0], 'CWUser') - rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'", - {'pwd': b'tutu'}) - cursor = cnx.cnxset.cu - cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" - % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) - passwd = binary_type(cursor.fetchone()[0]) - self.assertEqual(passwd, crypt_password('tutu', passwd)) - rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", - {'pwd': Binary(passwd)}) - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.description, [('CWUser',)]) - - # ZT datetime tests ######################################################## - - def test_tz_datetime(self): - self.qexecute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s", - {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) - datenaiss = self.qexecute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0] - self.assertIsNotNone(datenaiss.tzinfo) - self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0)) - - def test_tz_datetime_cache_nonregr(self): - datenaiss = datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1)) - self.qexecute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s", - {'date': datenaiss}) - self.assertTrue(self.qexecute("Any X WHERE X tzdatenaiss %(d)s", {'d': datenaiss})) - self.assertFalse(self.qexecute("Any X WHERE X tzdatenaiss %(d)s", {'d': datenaiss - timedelta(1)})) - - # non regression tests ##################################################### - - def test_nonregr_1(self): - teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0] - self.qexecute("SET X tags Y WHERE X name 'tag', Y is State, Y name 'activated'") - rset = self.qexecute('Any X WHERE T tags X') - self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.qexecute('Any T WHERE T tags X, X is State') - self.assertEqual(rset.rows, [[teid]]) - rset = self.qexecute('Any T WHERE T tags X') - self.assertEqual(rset.rows, [[teid]]) - - def test_nonregr_2(self): - teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0] - geid = self.qexecute("CWGroup G WHERE G name 'users'")[0][0] - self.qexecute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", - {'g': geid, 't': teid}) - rset = self.qexecute('Any X WHERE E eid %(x)s, E tags X', - {'x': teid}) - self.assertEqual(rset.rows, [[geid]]) - - def test_nonregr_3(self): - """bad sql generated on the second query (destination_state is not - detected as an inlined relation) - """ - rset = self.qexecute('Any S,ES,T WHERE S state_of WF, WF workflow_of ET, ET name "CWUser",' - 'ES allowed_transition T, T destination_state S') - self.assertEqual(len(rset.rows), 2) - - def test_nonregr_4(self): - # fix 
variables'type, else we get (nb of entity types with a 'name' attribute)**3 - # union queries and that make for instance a 266Ko sql query which is refused - # by the server (or client lib) - rset = self.qexecute('Any ER,SE,OE WHERE SE name "Comment", ER name "comments", OE name "Comment",' - 'ER is CWRType, SE is CWEType, OE is CWEType') - self.assertEqual(len(rset), 1) - - def test_nonregr_5(self): - # jpl #15505: equivalent queries returning different result sets - teid1 = self.qexecute("INSERT Folder X: X name 'hop'")[0][0] - teid2 = self.qexecute("INSERT Folder X: X name 'hip'")[0][0] - neid = self.qexecute("INSERT Note X: X todo_by U, X filed_under T " - "WHERE U login 'admin', T name 'hop'")[0][0] - weid = self.qexecute("INSERT Affaire X: X concerne N, X filed_under T " - "WHERE N is Note, T name 'hip'")[0][0] - rset1 = self.qexecute('Any N,U WHERE N filed_under T, T eid %s,' - 'N todo_by U, W concerne N,' - 'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2)) - rset2 = self.qexecute('Any N,U WHERE N filed_under T, T eid %s,' - 'N todo_by U, W concerne N,' - 'W filed_under A, A eid %s' % (teid1, teid2)) - rset3 = self.qexecute('Any N,U WHERE N todo_by U, T eid %s,' - 'N filed_under T, W concerne N,' - 'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2)) - rset4 = self.qexecute('Any N,U WHERE N todo_by U, T eid %s,' - 'N filed_under T, W concerne N,' - 'W filed_under A, A eid %s' % (teid1, teid2)) - self.assertEqual(rset1.rows, rset2.rows) - self.assertEqual(rset1.rows, rset3.rows) - self.assertEqual(rset1.rows, rset4.rows) - - def test_nonregr_6(self): - self.qexecute('Any N,COUNT(S) GROUPBY N ORDERBY COUNT(N) WHERE S name N, S is State') - - def test_sqlite_encoding(self): - """XXX this test was trying to show a bug on use of lower which only - occurs with non ascii string and misconfigured locale - """ - self.qexecute("INSERT Tag X: X name %(name)s," - "X modification_date %(modification_date)s," - "X creation_date %(creation_date)s", - {'name': u'name0', - 'modification_date': '2003/03/12 11:00', - 'creation_date': '2000/07/03 11:00'}) - rset = self.qexecute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,' - 'X owned_by U, U eid %(x)s', - {'x':self.session.user.eid}) - self.assertEqual(rset.rows, [[u'\xe9name0']]) - - - def test_nonregr_description(self): - """check that a correct description is built in case where infered - solutions may be "fusionned" into one by the querier while all solutions - are needed to build the result's description - """ - self.qexecute("INSERT Personne X: X nom 'bidule'") - self.qexecute("INSERT Societe Y: Y nom 'toto'") - beid = self.qexecute("INSERT Basket B: B name 'mybasket'")[0][0] - self.qexecute("SET X in_basket B WHERE X is Personne") - self.qexecute("SET X in_basket B WHERE X is Societe") - rset = self.qexecute('Any X WHERE X in_basket B, B eid %s' % beid) - self.assertEqual(len(rset), 2) - self.assertEqual(rset.description, [('Personne',), ('Societe',)]) - - - def test_nonregr_cache_1(self): - peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - beid = self.qexecute("INSERT Basket X: X name 'tag'")[0][0] - self.qexecute("SET X in_basket Y WHERE X is Personne, Y eid %(y)s", - {'y': beid}) - rset = self.qexecute("Any X WHERE X in_basket B, B eid %(x)s", - {'x': beid}) - self.assertEqual(rset.rows, [[peid]]) - rset = self.qexecute("Any X WHERE X in_basket B, B eid %(x)s", - {'x': beid}) - self.assertEqual(rset.rows, [[peid]]) - - def test_nonregr_has_text_cache(self): - eid1 = self.qexecute("INSERT Personne 
X: X nom 'bidule'")[0][0] - eid2 = self.qexecute("INSERT Personne X: X nom 'tag'")[0][0] - rset = self.qexecute("Any X WHERE X has_text %(text)s", {'text': u'bidule'}) - self.assertEqual(rset.rows, [[eid1]]) - rset = self.qexecute("Any X WHERE X has_text %(text)s", {'text': u'tag'}) - self.assertEqual(rset.rows, [[eid2]]) - - def test_nonregr_sortterm_management(self): - """Error: Variable has no attribute 'sql' in rql2sql.py (visit_variable) - - cause: old variable ref inserted into a fresh rqlst copy - (in RQLSpliter._complex_select_plan) - - need sqlite including http://www.sqlite.org/cvstrac/tktview?tn=3773 fix - """ - self.qexecute('Any X ORDERBY D DESC WHERE X creation_date D') - - def test_nonregr_extra_joins(self): - ueid = self.session.user.eid - teid1 = self.qexecute("INSERT Folder X: X name 'folder1'")[0][0] - teid2 = self.qexecute("INSERT Folder X: X name 'folder2'")[0][0] - neid1 = self.qexecute("INSERT Note X: X para 'note1'")[0][0] - neid2 = self.qexecute("INSERT Note X: X para 'note2'")[0][0] - self.qexecute("SET X filed_under Y WHERE X eid %s, Y eid %s" - % (neid1, teid1)) - self.qexecute("SET X filed_under Y WHERE X eid %s, Y eid %s" - % (neid2, teid2)) - self.qexecute("SET X todo_by Y WHERE X is Note, Y eid %s" % ueid) - rset = self.qexecute('Any N WHERE N todo_by U, N is Note, U eid %s, N filed_under T, T eid %s' - % (ueid, teid1)) - self.assertEqual(len(rset), 1) - - def test_nonregr_XXX(self): - teid = self.qexecute('Transition S WHERE S name "deactivate"')[0][0] - rset = self.qexecute('Any O WHERE O is State, ' - 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) - self.assertEqual(len(rset), 2) - rset = self.qexecute('Any O WHERE O is State, NOT S destination_state O, ' - 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) - self.assertEqual(len(rset), 1) - - - def test_nonregr_set_datetime(self): - # huum, psycopg specific - self.qexecute('SET X creation_date %(date)s WHERE X eid 1', {'date': date.today()}) - - def test_nonregr_u_owned_by_u(self): - ueid = self.qexecute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " - "WHERE G name 'users'")[0][0] - rset = self.qexecute("CWUser U") - self.assertEqual(len(rset), 3) # bob + admin + anon - rset = self.qexecute("Any U WHERE NOT U owned_by U") - # even admin created at repo initialization time should belong to itself - self.assertEqual(len(rset), 0) - - def test_nonreg_update_index(self): - # this is the kind of queries generated by "cubicweb-ctl db-check -ry" - self.qexecute("SET X description D WHERE X is State, X description D") - - def test_nonregr_is(self): - uteid = self.qexecute('Any ET WHERE ET name "CWUser"')[0][0] - self.qexecute('Any X, ET WHERE X is ET, ET eid %s' % uteid) - - def test_nonregr_orderby(self): - seid = self.qexecute('Any X WHERE X name "activated"')[0][0] - self.qexecute('Any X,S, MAX(T) GROUPBY X,S ORDERBY S ' - 'WHERE X is CWUser, T tags X, S eid IN(%s), X in_state S' % seid) - - def test_nonregr_solution_cache(self): - self.skipTest('XXX should be fixed or documented') # (doesn't occur if cache key is provided.) 
- rset = self.qexecute('Any X WHERE X is CWUser, X eid %(x)s', {'x':self.ueid}) - self.assertEqual(len(rset), 1) - rset = self.qexecute('Any X WHERE X is CWUser, X eid %(x)s', {'x':12345}) - self.assertEqual(len(rset), 0) - - def test_nonregr_final_norestr(self): - self.assertRaises(BadRQLQuery, self.qexecute, 'Date X') - - def test_nonregr_eid_cmp(self): - peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - peid2 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] - rset = self.qexecute('Any X,Y WHERE X is Personne, Y is Personne, ' - 'X nom XD, Y nom XD, X eid Z, Y eid > Z') - self.assertEqual(rset.rows, [[peid1, peid2]]) - rset = self.qexecute('Any X,Y WHERE X nom XD, Y nom XD, X eid Z, Y eid > Z') - self.assertEqual(rset.rows, [[peid1, peid2]]) - - def test_nonregr_has_text_ambiguity_1(self): - peid = self.qexecute("INSERT CWUser X: X login 'bidule', X upassword 'bidule', " - "X in_group G WHERE G name 'users'")[0][0] - aeid = self.qexecute("INSERT Affaire X: X ref 'bidule'")[0][0] - rset = self.qexecute('Any X WHERE X is CWUser, X has_text "bidule"') - self.assertEqual(rset.rows, [[peid]]) - rset = self.qexecute('Any X WHERE X is CWUser, X has_text "bidule", ' - 'X in_state S, S name SN') - self.assertEqual(rset.rows, [[peid]]) - - - def test_nonregr_sql_cache(self): - # different SQL generated when 'name' is None or not (IS NULL). - self.assertFalse(self.qexecute('Any X WHERE X is CWEType, X name %(name)s', - {'name': None})) - self.assertTrue(self.qexecute('Any X WHERE X is CWEType, X name %(name)s', - {'name': 'CWEType'})) - - -class NonRegressionTC(CubicWebTC): - - def test_has_text_security_cache_bug(self): - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, 'user', ('users',)) - aff1 = cnx.create_entity('Societe', nom=u'aff1') - aff2 = cnx.create_entity('Societe', nom=u'aff2') - cnx.commit() - with self.new_access('user').repo_cnx() as cnx: - res = cnx.execute('Any X WHERE X has_text %(text)s', {'text': u'aff1'}) - self.assertEqual(res.rows, [[aff1.eid]]) - res = cnx.execute('Any X WHERE X has_text %(text)s', {'text': u'aff2'}) - self.assertEqual(res.rows, [[aff2.eid]]) - - def test_set_relations_eid(self): - with self.admin_access.repo_cnx() as cnx: - # create 3 email addresses - a1 = cnx.create_entity('EmailAddress', address=u'a1') - a2 = cnx.create_entity('EmailAddress', address=u'a2') - a3 = cnx.create_entity('EmailAddress', address=u'a3') - # SET relations using '>=' operator on eids - cnx.execute('SET U use_email A WHERE U login "admin", A eid >= %s' % a2.eid) - self.assertEqual( - [[a2.eid], [a3.eid]], - cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) - # DELETE - cnx.execute('DELETE U use_email A WHERE U login "admin", A eid > %s' % a2.eid) - self.assertEqual( - [[a2.eid]], - cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) - cnx.execute('DELETE U use_email A WHERE U login "admin"') - # SET relations using '<' operator on eids - cnx.execute('SET U use_email A WHERE U login "admin", A eid < %s' % a2.eid) - self.assertEqual( - [[a1.eid]], - cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_repository.py --- a/server/test/unittest_repository.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,751 +0,0 @@ -# -*- coding: iso-8859-1 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for module cubicweb.server.repository""" - -import threading -import time -import logging - -from six.moves import range - -from yams.constraints import UniqueConstraint -from yams import register_base_type, unregister_base_type - -from logilab.database import get_db_helper - -from cubicweb import (BadConnectionId, ValidationError, - UnknownEid, AuthenticationError, Unauthorized, QueryError) -from cubicweb.predicates import is_instance -from cubicweb.schema import RQLConstraint -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.repotest import tuplify -from cubicweb.server import hook -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.hook import Hook -from cubicweb.server.sources import native -from cubicweb.server.session import SessionClosedError - - -class RepositoryTC(CubicWebTC): - """ singleton providing access to a persistent storage for entities - and relation - """ - - def test_unique_together_constraint(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') - with self.assertRaises(ValidationError) as wraperr: - cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') - self.assertEqual( - {'cp': u'%(KEY-rtype)s is part of violated unicity constraint', - 'nom': u'%(KEY-rtype)s is part of violated unicity constraint', - 'type': u'%(KEY-rtype)s is part of violated unicity constraint', - '': u'some relations violate a unicity constraint'}, - wraperr.exception.args[1]) - - def test_unique_together_schema(self): - person = self.repo.schema.eschema('Personne') - self.assertEqual(len(person._unique_together), 1) - self.assertItemsEqual(person._unique_together[0], - ('nom', 'prenom', 'inline2')) - - def test_all_entities_have_owner(self): - with self.admin_access.repo_cnx() as cnx: - self.assertFalse(cnx.execute('Any X WHERE NOT X owned_by U')) - - def test_all_entities_have_is(self): - with self.admin_access.repo_cnx() as cnx: - self.assertFalse(cnx.execute('Any X WHERE NOT X is ET')) - - def test_all_entities_have_cw_source(self): - with self.admin_access.repo_cnx() as cnx: - self.assertFalse(cnx.execute('Any X WHERE NOT X cw_source S')) - - def test_connect(self): - cnxid = self.repo.connect(self.admlogin, password=self.admpassword) - self.assertTrue(cnxid) - self.repo.close(cnxid) - self.assertRaises(AuthenticationError, - self.repo.connect, self.admlogin, password='nimportnawak') - self.assertRaises(AuthenticationError, - self.repo.connect, self.admlogin, password='') - self.assertRaises(AuthenticationError, - self.repo.connect, self.admlogin, password=None) - self.assertRaises(AuthenticationError, - self.repo.connect, None, password=None) - self.assertRaises(AuthenticationError, - self.repo.connect, self.admlogin) 
- self.assertRaises(AuthenticationError, - self.repo.connect, None) - - def test_login_upassword_accent(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute('INSERT CWUser X: X login %(login)s, X upassword %(passwd)s, ' - 'X in_group G WHERE G name "users"', - {'login': u"barnab", 'passwd': u"hhh".encode('UTF8')}) - cnx.commit() - repo = self.repo - cnxid = repo.connect(u"barnab", password=u"hhh".encode('UTF8')) - self.assertTrue(cnxid) - repo.close(cnxid) - - def test_rollback_on_execute_validation_error(self): - class ValidationErrorAfterHook(Hook): - __regid__ = 'valerror-after-hook' - __select__ = Hook.__select__ & is_instance('CWGroup') - events = ('after_update_entity',) - def __call__(self): - raise ValidationError(self.entity.eid, {}) - - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(ValidationErrorAfterHook): - self.assertRaises(ValidationError, - cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') - self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) - with self.assertRaises(QueryError) as cm: - cnx.commit() - self.assertEqual(str(cm.exception), 'transaction must be rolled back') - cnx.rollback() - self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) - - def test_rollback_on_execute_unauthorized(self): - class UnauthorizedAfterHook(Hook): - __regid__ = 'unauthorized-after-hook' - __select__ = Hook.__select__ & is_instance('CWGroup') - events = ('after_update_entity',) - def __call__(self): - raise Unauthorized() - - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(UnauthorizedAfterHook): - self.assertRaises(Unauthorized, - cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') - self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) - with self.assertRaises(QueryError) as cm: - cnx.commit() - self.assertEqual(str(cm.exception), 'transaction must be rolled back') - cnx.rollback() - self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) - - - def test_close(self): - repo = self.repo - cnxid = repo.connect(self.admlogin, password=self.admpassword) - self.assertTrue(cnxid) - repo.close(cnxid) - - - def test_initial_schema(self): - schema = self.repo.schema - # check order of attributes is respected - notin = set(('eid', 'is', 'is_instance_of', 'identity', - 'creation_date', 'modification_date', 'cwuri', - 'owned_by', 'created_by', 'cw_source', - 'update_permission', 'read_permission', - 'add_permission', 'in_basket')) - self.assertListEqual(['relation_type', - 'from_entity', 'to_entity', - 'constrained_by', - 'cardinality', 'ordernum', 'formula', - 'indexed', 'fulltextindexed', 'internationalizable', - 'defaultval', 'extra_props', - 'description', 'description_format'], - [r.type - for r in schema.eschema('CWAttribute').ordered_relations() - if r.type not in notin]) - - self.assertEqual(schema.eschema('CWEType').main_attribute(), 'name') - self.assertEqual(schema.eschema('State').main_attribute(), 'name') - - constraints = schema.rschema('name').rdef('CWEType', 'String').constraints - self.assertEqual(len(constraints), 2) - for cstr in constraints[:]: - if isinstance(cstr, UniqueConstraint): - constraints.remove(cstr) - break - else: - self.fail('unique constraint not found') - sizeconstraint = constraints[0] - self.assertEqual(sizeconstraint.min, None) - self.assertEqual(sizeconstraint.max, 64) - - constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints - 
self.assertEqual(len(constraints), 1) - cstr = constraints[0] - self.assertIsInstance(cstr, RQLConstraint) - self.assertEqual(cstr.expression, 'O final TRUE') - - ownedby = schema.rschema('owned_by') - self.assertEqual(ownedby.objects('CWEType'), ('CWUser',)) - - def test_internal_api(self): - repo = self.repo - cnxid = repo.connect(self.admlogin, password=self.admpassword) - session = repo._get_session(cnxid) - with session.new_cnx() as cnx: - self.assertEqual(repo.type_and_source_from_eid(2, cnx), - ('CWGroup', None, 'system')) - self.assertEqual(repo.type_from_eid(2, cnx), 'CWGroup') - repo.close(cnxid) - - def test_public_api(self): - self.assertEqual(self.repo.get_schema(), self.repo.schema) - self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native', - 'uri': 'system', - 'use-cwuri-as-url': False} - }) - # .properties() return a result set - self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U') - - def test_schema_is_relation(self): - with self.admin_access.repo_cnx() as cnx: - no_is_rset = cnx.execute('Any X WHERE NOT X is ET') - self.assertFalse(no_is_rset, no_is_rset.description) - - def test_delete_if_singlecard1(self): - with self.admin_access.repo_cnx() as cnx: - note = cnx.create_entity('Affaire') - p1 = cnx.create_entity('Personne', nom=u'toto') - cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', - {'x': note.eid, 'p': p1.eid}) - rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s', - {'x': note.eid}) - self.assertEqual(len(rset), 1) - p2 = cnx.create_entity('Personne', nom=u'tutu') - cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', - {'x': note.eid, 'p': p2.eid}) - rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s', - {'x': note.eid}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset.rows[0][0], p2.eid) - - def test_delete_if_object_inlined_singlecard(self): - with self.admin_access.repo_cnx() as cnx: - c = cnx.create_entity('Card', title=u'Carte') - cnx.create_entity('Personne', nom=u'Vincent', fiche=c) - cnx.create_entity('Personne', nom=u'Florent', fiche=c) - cnx.commit() - self.assertEqual(len(c.reverse_fiche), 1) - - def test_delete_computed_relation_nonregr(self): - with self.admin_access.repo_cnx() as cnx: - c = cnx.create_entity('Personne', nom=u'Adam', login_user=cnx.user.eid) - cnx.commit() - c.cw_delete() - cnx.commit() - - def test_cw_set_in_before_update(self): - # local hook - class DummyBeforeHook(Hook): - __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & is_instance('EmailAddress') - events = ('before_update_entity',) - def __call__(self): - # safety belt: avoid potential infinite recursion if the test - # fails (i.e. 
RuntimeError not raised) - pendings = self._cw.transaction_data.setdefault('pending', set()) - if self.entity.eid not in pendings: - pendings.add(self.entity.eid) - self.entity.cw_set(alias=u'foo') - - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(DummyBeforeHook): - addr = cnx.create_entity('EmailAddress', address=u'a@b.fr') - addr.cw_set(address=u'a@b.com') - rset = cnx.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA', - {'x': addr.eid}) - self.assertEqual(rset.rows, [[u'a@b.com', u'foo']]) - - def test_cw_set_in_before_add(self): - # local hook - class DummyBeforeHook(Hook): - __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & is_instance('EmailAddress') - events = ('before_add_entity',) - def __call__(self): - # cw_set is forbidden within before_add_entity() - self.entity.cw_set(alias=u'foo') - - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(DummyBeforeHook): - # XXX will fail with python -O - self.assertRaises(AssertionError, cnx.create_entity, - 'EmailAddress', address=u'a@b.fr') - - def test_multiple_edit_cw_set(self): - """make sure cw_edited doesn't get cluttered - by previous entities on multiple set - """ - # local hook - class DummyBeforeHook(Hook): - _test = self # keep reference to test instance - __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & is_instance('Affaire') - events = ('before_update_entity',) - def __call__(self): - # invoiced attribute shouldn't be considered "edited" before the hook - self._test.assertFalse('invoiced' in self.entity.cw_edited, - 'cw_edited cluttered by previous update') - self.entity.cw_edited['invoiced'] = 10 - - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(DummyBeforeHook): - cnx.create_entity('Affaire', ref=u'AFF01') - cnx.create_entity('Affaire', ref=u'AFF02') - cnx.execute('SET A duration 10 WHERE A is Affaire') - - - def test_user_friendly_error(self): - from cubicweb.entities.adapters import IUserFriendlyUniqueTogether - class MyIUserFriendlyUniqueTogether(IUserFriendlyUniqueTogether): - __select__ = IUserFriendlyUniqueTogether.__select__ & is_instance('Societe') - def raise_user_exception(self): - raise ValidationError(self.entity.eid, {'hip': 'hop'}) - - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(MyIUserFriendlyUniqueTogether): - s = cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013') - cnx.commit() - with self.assertRaises(ValidationError) as cm: - cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013') - self.assertEqual(cm.exception.errors, {'hip': 'hop'}) - cnx.rollback() - cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'31400') - with self.assertRaises(ValidationError) as cm: - s.cw_set(cp=u'31400') - self.assertEqual(cm.exception.entity, s.eid) - self.assertEqual(cm.exception.errors, {'hip': 'hop'}) - cnx.rollback() - - def test_attribute_cache(self): - with self.admin_access.repo_cnx() as cnx: - bk = cnx.create_entity('Bookmark', title=u'index', path=u'/') - cnx.commit() - self.assertEqual(bk.title, 'index') - bk.cw_set(title=u'root') - self.assertEqual(bk.title, 'root') - cnx.commit() - self.assertEqual(bk.title, 'root') - -class SchemaDeserialTC(CubicWebTC): - - appid = 'data-schemaserial' - - @classmethod - def setUpClass(cls): - register_base_type('BabarTestType', ('jungle_speed',)) - helper = get_db_helper('sqlite') - helper.TYPE_MAPPING['BabarTestType'] = 'TEXT' - helper.TYPE_CONVERTERS['BabarTestType'] = lambda 
x: '"%s"' % x - super(SchemaDeserialTC, cls).setUpClass() - - - @classmethod - def tearDownClass(cls): - unregister_base_type('BabarTestType') - helper = get_db_helper('sqlite') - helper.TYPE_MAPPING.pop('BabarTestType', None) - helper.TYPE_CONVERTERS.pop('BabarTestType', None) - super(SchemaDeserialTC, cls).tearDownClass() - - def test_deserialization_base(self): - """Check the following deserialization - - * all CWEtype has name - * Final type - * CWUniqueTogetherConstraint - * _unique_together__ content""" - origshema = self.repo.schema - try: - self.repo.config.repairing = True # avoid versions checking - self.repo.set_schema(self.repo.deserialize_schema()) - table = SQL_PREFIX + 'CWEType' - namecol = SQL_PREFIX + 'name' - finalcol = SQL_PREFIX + 'final' - with self.admin_access.repo_cnx() as cnx: - cu = cnx.system_sql('SELECT %s FROM %s WHERE %s is NULL' - % (namecol, table, finalcol)) - self.assertEqual(cu.fetchall(), []) - cu = cnx.system_sql('SELECT %s FROM %s ' - 'WHERE %s=%%(final)s ORDER BY %s' - % (namecol, table, finalcol, namecol), - {'final': True}) - self.assertEqual(cu.fetchall(), - [(u'BabarTestType',), - (u'BigInt',), (u'Boolean',), (u'Bytes',), - (u'Date',), (u'Datetime',), - (u'Decimal',),(u'Float',), - (u'Int',), - (u'Interval',), (u'Password',), - (u'String',), - (u'TZDatetime',), (u'TZTime',), (u'Time',)]) - sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to " - "FROM cw_CWUniqueTogetherConstraint as cstr, " - " relations_relation as rel, " - " cw_CWEType as etype " - "WHERE cstr.cw_eid = rel.eid_from " - " AND cstr.cw_constraint_of = etype.cw_eid " - " AND etype.cw_name = 'Personne' " - ";") - cu = cnx.system_sql(sql) - rows = cu.fetchall() - self.assertEqual(len(rows), 3) - person = self.repo.schema.eschema('Personne') - self.assertEqual(len(person._unique_together), 1) - self.assertItemsEqual(person._unique_together[0], - ('nom', 'prenom', 'inline2')) - - finally: - self.repo.set_schema(origshema) - - def test_custom_attribute_param(self): - origshema = self.repo.schema - try: - self.repo.config.repairing = True # avoid versions checking - self.repo.set_schema(self.repo.deserialize_schema()) - pes = self.repo.schema['Personne'] - attr = pes.rdef('custom_field_of_jungle') - self.assertIn('jungle_speed', vars(attr)) - self.assertEqual(42, attr.jungle_speed) - finally: - self.repo.set_schema(origshema) - - - -class DataHelpersTC(CubicWebTC): - - def test_type_from_eid(self): - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(self.repo.type_from_eid(2, cnx), 'CWGroup') - - def test_type_from_eid_raise(self): - with self.admin_access.repo_cnx() as cnx: - self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, cnx) - - def test_add_delete_info(self): - with self.admin_access.repo_cnx() as cnx: - entity = self.repo.vreg['etypes'].etype_class('Personne')(cnx) - entity.eid = -1 - entity.complete = lambda x: None - self.repo.add_info(cnx, entity, self.repo.system_source) - cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1') - data = cu.fetchall() - self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)]) - self.repo._delete_cascade_multi(cnx, [entity]) - self.repo.system_source.delete_info_multi(cnx, [entity]) - cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1') - data = cu.fetchall() - self.assertEqual(data, []) - - -class FTITC(CubicWebTC): - - def test_fulltext_container_entity(self): - with self.admin_access.repo_cnx() as cnx: - assert self.schema.rschema('use_email').fulltext_container == 'subject' - toto = 
cnx.create_entity('EmailAddress', address=u'toto@logilab.fr') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) - self.assertEqual(rset.rows, []) - cnx.user.cw_set(use_email=toto) - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) - self.assertEqual(rset.rows, [[cnx.user.eid]]) - cnx.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s', - {'y': toto.eid}) - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) - self.assertEqual(rset.rows, []) - tutu = cnx.create_entity('EmailAddress', address=u'tutu@logilab.fr') - cnx.user.cw_set(use_email=tutu) - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) - self.assertEqual(rset.rows, [[cnx.user.eid]]) - tutu.cw_set(address=u'hip@logilab.fr') - cnx.commit() - rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) - self.assertEqual(rset.rows, []) - rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'}) - self.assertEqual(rset.rows, [[cnx.user.eid]]) - - def test_no_uncessary_ftiindex_op(self): - with self.admin_access.repo_cnx() as cnx: - cnx.create_entity('Workflow', - name=u'dummy workflow', - description=u'huuuuu') - self.assertFalse(any(x for x in cnx.pending_operations - if isinstance(x, native.FTIndexEntityOp))) - - -class DBInitTC(CubicWebTC): - - def test_versions_inserted(self): - with self.admin_access.repo_cnx() as cnx: - inserted = [r[0] - for r in cnx.execute('Any K ORDERBY K ' - 'WHERE P pkey K, P pkey ~= "system.version.%"')] - self.assertEqual(inserted, - [u'system.version.basket', - u'system.version.card', - u'system.version.comment', - u'system.version.cubicweb', - u'system.version.file', - u'system.version.localperms', - u'system.version.tag']) - -CALLED = [] - -class InlineRelHooksTC(CubicWebTC): - """test relation hooks are called for inlined relations - """ - def setUp(self): - CubicWebTC.setUp(self) - CALLED[:] = () - - def test_inline_relation(self): - """make sure _relation hooks are called for inlined relation""" - - class EcritParHook(hook.Hook): - __regid__ = 'inlinedrelhook' - __select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par') - events = ('before_add_relation', 'after_add_relation', - 'before_delete_relation', 'after_delete_relation') - def __call__(self): - CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto)) - - with self.temporary_appobjects(EcritParHook): - with self.admin_access.repo_cnx() as cnx: - eidp = cnx.execute('INSERT Personne X: X nom "toto"')[0][0] - eidn = cnx.execute('INSERT Note X: X type "T"')[0][0] - cnx.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), - ('after_add_relation', eidn, 'ecrit_par', eidp)]) - CALLED[:] = () - cnx.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp), - ('after_delete_relation', eidn, 'ecrit_par', eidp)]) - CALLED[:] = () - eidn = cnx.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0] - self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), - ('after_add_relation', eidn, 'ecrit_par', eidp)]) - - def test_unique_contraint(self): - with self.admin_access.repo_cnx() as cnx: - toto = cnx.create_entity('Personne', nom=u'toto') - a01 = cnx.create_entity('Affaire', ref=u'A01', todo_by=toto) - cnx.commit() - cnx.create_entity('Note', type=u'todo', inline1=a01) - cnx.commit() - 
cnx.create_entity('Note', type=u'todo', inline1=a01) - with self.assertRaises(ValidationError) as cm: - cnx.commit() - self.assertEqual(cm.exception.errors, - {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, ' - 'A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'}) - - def test_add_relations_at_creation_with_del_existing_rel(self): - with self.admin_access.repo_cnx() as cnx: - person = cnx.create_entity('Personne', - nom=u'Toto', - prenom=u'Lanturlu', - sexe=u'M') - users_rql = 'Any U WHERE U is CWGroup, U name "users"' - users = cnx.execute(users_rql).get_entity(0, 0) - cnx.create_entity('CWUser', - login=u'Toto', - upassword=u'firstname', - firstname=u'firstname', - surname=u'surname', - reverse_login_user=person, - in_group=users) - cnx.commit() - - -class PerformanceTest(CubicWebTC): - def setUp(self): - super(PerformanceTest, self).setUp() - logger = logging.getLogger('cubicweb.session') - #logger.handlers = [logging.StreamHandler(sys.stdout)] - logger.setLevel(logging.INFO) - self.info = logger.info - - def tearDown(self): - super(PerformanceTest, self).tearDown() - logger = logging.getLogger('cubicweb.session') - logger.setLevel(logging.CRITICAL) - - def test_composite_deletion(self): - with self.admin_access.repo_cnx() as cnx: - personnes = [] - t0 = time.time() - for i in range(2000): - p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') - for j in range(0, 2000, 100): - abraham.cw_set(personne_composite=personnes[j:j+100]) - t1 = time.time() - self.info('creation: %.2gs', (t1 - t0)) - cnx.commit() - t2 = time.time() - self.info('commit creation: %.2gs', (t2 - t1)) - cnx.execute('DELETE Personne P WHERE P eid %(eid)s', {'eid': abraham.eid}) - t3 = time.time() - self.info('deletion: %.2gs', (t3 - t2)) - cnx.commit() - t4 = time.time() - self.info("commit deletion: %2gs", (t4 - t3)) - - def test_add_relation_non_inlined(self): - with self.admin_access.repo_cnx() as cnx: - personnes = [] - for i in range(2000): - p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - cnx.commit() - t0 = time.time() - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M', - personne_composite=personnes[:100]) - t1 = time.time() - self.info('creation: %.2gs', (t1 - t0)) - for j in range(100, 2000, 100): - abraham.cw_set(personne_composite=personnes[j:j+100]) - t2 = time.time() - self.info('more relations: %.2gs', (t2-t1)) - cnx.commit() - t3 = time.time() - self.info('commit creation: %.2gs', (t3 - t2)) - - def test_add_relation_inlined(self): - with self.admin_access.repo_cnx() as cnx: - personnes = [] - for i in range(2000): - p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - cnx.commit() - t0 = time.time() - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M', - personne_inlined=personnes[:100]) - t1 = time.time() - self.info('creation: %.2gs', (t1 - t0)) - for j in range(100, 2000, 100): - abraham.cw_set(personne_inlined=personnes[j:j+100]) - t2 = time.time() - self.info('more relations: %.2gs', (t2-t1)) - cnx.commit() - t3 = time.time() - self.info('commit creation: %.2gs', (t3 - t2)) - - - def test_session_add_relation(self): - """ to be compared with test_session_add_relations""" - with self.admin_access.repo_cnx() as cnx: - personnes = [] - for i in range(2000): - p = 
cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') - cnx.commit() - t0 = time.time() - add_relation = cnx.add_relation - for p in personnes: - add_relation(abraham.eid, 'personne_composite', p.eid) - cnx.commit() - t1 = time.time() - self.info('add relation: %.2gs', t1-t0) - - def test_session_add_relations (self): - """ to be compared with test_session_add_relation""" - with self.admin_access.repo_cnx() as cnx: - personnes = [] - for i in range(2000): - p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') - cnx.commit() - t0 = time.time() - add_relations = cnx.add_relations - relations = [('personne_composite', [(abraham.eid, p.eid) for p in personnes])] - add_relations(relations) - cnx.commit() - t1 = time.time() - self.info('add relations: %.2gs', t1-t0) - - def test_session_add_relation_inlined(self): - """ to be compared with test_session_add_relations""" - with self.admin_access.repo_cnx() as cnx: - personnes = [] - for i in range(2000): - p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') - cnx.commit() - t0 = time.time() - add_relation = cnx.add_relation - for p in personnes: - add_relation(abraham.eid, 'personne_inlined', p.eid) - cnx.commit() - t1 = time.time() - self.info('add relation (inlined): %.2gs', t1-t0) - - def test_session_add_relations_inlined (self): - """ to be compared with test_session_add_relation""" - with self.admin_access.repo_cnx() as cnx: - personnes = [] - for i in range(2000): - p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') - personnes.append(p) - abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') - cnx.commit() - t0 = time.time() - add_relations = cnx.add_relations - relations = [('personne_inlined', [(abraham.eid, p.eid) for p in personnes])] - add_relations(relations) - cnx.commit() - t1 = time.time() - self.info('add relations (inlined): %.2gs', t1-t0) - - def test_optional_relation_reset_1(self): - with self.admin_access.repo_cnx() as cnx: - p1 = cnx.create_entity('Personne', nom=u'Vincent') - p2 = cnx.create_entity('Personne', nom=u'Florent') - w = cnx.create_entity('Affaire', ref=u'wc') - w.cw_set(todo_by=[p1,p2]) - w.cw_clear_all_caches() - cnx.commit() - self.assertEqual(len(w.todo_by), 1) - self.assertEqual(w.todo_by[0].eid, p2.eid) - - def test_optional_relation_reset_2(self): - with self.admin_access.repo_cnx() as cnx: - p1 = cnx.create_entity('Personne', nom=u'Vincent') - p2 = cnx.create_entity('Personne', nom=u'Florent') - w = cnx.create_entity('Affaire', ref=u'wc') - w.cw_set(todo_by=p1) - cnx.commit() - w.cw_set(todo_by=p2) - w.cw_clear_all_caches() - cnx.commit() - self.assertEqual(len(w.todo_by), 1) - self.assertEqual(w.todo_by[0].eid, p2.eid) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2231 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for module cubicweb.server.sources.rql2sql""" -from __future__ import print_function - -import sys -import os -from datetime import date -from logilab.common.testlib import TestCase, unittest_main, mock_object - -from rql import BadRQLQuery -from rql.utils import register_function, FunctionDescr - -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.repotest import RQLGeneratorTC -from cubicweb.server.sources.rql2sql import remove_unused_solutions - - -# add a dumb registered procedure -class stockproc(FunctionDescr): - supported_backends = ('postgres', 'sqlite', 'mysql') -try: - register_function(stockproc) -except AssertionError as ex: - pass # already registered - - -from logilab import database as db -def monkey_patch_import_driver_module(driver, drivers, quiet=True): - if not driver in drivers: - raise db.UnknownDriver(driver) - for modname in drivers[driver]: - try: - if not quiet: - sys.stderr.write('Trying %s\n' % modname) - module = db.load_module_from_name(modname, use_sys=False) - break - except ImportError: - if not quiet: - sys.stderr.write('%s is not available\n' % modname) - continue - else: - return mock_object(STRING=1, BOOLEAN=2, BINARY=3, DATETIME=4, NUMBER=5), drivers[driver][0] - return module, modname - - -def setUpModule(): - global config, schema - config = TestServerConfiguration('data', apphome=CWRQLTC.datadir) - config.bootstrap_cubes() - schema = config.load_schema() - schema['in_state'].inlined = True - schema['state_of'].inlined = False - schema['comments'].inlined = False - db._backup_import_driver_module = db._import_driver_module - db._import_driver_module = monkey_patch_import_driver_module - -def tearDownModule(): - global config, schema - del config, schema - db._import_driver_module = db._backup_import_driver_module - del db._backup_import_driver_module - -PARSER = [ - (r"Personne P WHERE P nom 'Zig\'oto';", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE _P.cw_nom=Zig\'oto'''), - - (r'Personne P WHERE P nom ~= "Zig\"oto%";', - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE _P.cw_nom ILIKE Zig"oto%'''), - ] - -BASIC = [ - ("Any AS WHERE AS is Affaire", - '''SELECT _AS.cw_eid -FROM cw_Affaire AS _AS'''), - - ("Any X WHERE X is Affaire", - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X'''), - - ("Any X WHERE X eid 0", - '''SELECT 0'''), - - ("Personne P", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P'''), - - ("Personne P WHERE P test TRUE", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE _P.cw_test=True'''), - - ("Personne P WHERE P test false", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE _P.cw_test=False'''), - - ("Personne P WHERE P eid -1", - '''SELECT -1'''), - - ("Personne P WHERE S is Societe, P travaille S, S nom 'Logilab';", - '''SELECT rel_travaille0.eid_from -FROM 
cw_Societe AS _S, travaille_relation AS rel_travaille0 -WHERE rel_travaille0.eid_to=_S.cw_eid AND _S.cw_nom=Logilab'''), - - ("Personne P WHERE P concerne A, A concerne S, S nom 'Logilab', S is Societe;", - '''SELECT rel_concerne0.eid_from -FROM concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1, cw_Societe AS _S -WHERE rel_concerne0.eid_to=rel_concerne1.eid_from AND rel_concerne1.eid_to=_S.cw_eid AND _S.cw_nom=Logilab'''), - - ("Note N WHERE X evaluee N, X nom 'Logilab';", - '''SELECT rel_evaluee0.eid_to -FROM cw_Division AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab -UNION ALL -SELECT rel_evaluee0.eid_to -FROM cw_Personne AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab -UNION ALL -SELECT rel_evaluee0.eid_to -FROM cw_Societe AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab -UNION ALL -SELECT rel_evaluee0.eid_to -FROM cw_SubDivision AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom=Logilab'''), - - ("Note N WHERE X evaluee N, X nom in ('Logilab', 'Caesium');", - '''SELECT rel_evaluee0.eid_to -FROM cw_Division AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium) -UNION ALL -SELECT rel_evaluee0.eid_to -FROM cw_Personne AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium) -UNION ALL -SELECT rel_evaluee0.eid_to -FROM cw_Societe AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium) -UNION ALL -SELECT rel_evaluee0.eid_to -FROM cw_SubDivision AS _X, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_X.cw_eid AND _X.cw_nom IN(Logilab, Caesium)'''), - - ("Any N WHERE G is CWGroup, G name N, E eid 12, E read_permission G", - '''SELECT _G.cw_name -FROM cw_CWGroup AS _G, read_permission_relation AS rel_read_permission0 -WHERE rel_read_permission0.eid_from=12 AND rel_read_permission0.eid_to=_G.cw_eid'''), - - ('Any Y WHERE U login "admin", U login Y', # stupid but valid... 
- """SELECT _U.cw_login -FROM cw_CWUser AS _U -WHERE _U.cw_login=admin"""), - - ('Any T WHERE T tags X, X is State', - '''SELECT rel_tags0.eid_from -FROM cw_State AS _X, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_to=_X.cw_eid'''), - - ('Any X,Y WHERE X eid 0, Y eid 1, X concerne Y', - '''SELECT 0, 1 -FROM concerne_relation AS rel_concerne0 -WHERE rel_concerne0.eid_from=0 AND rel_concerne0.eid_to=1'''), - - ("Any X WHERE X prenom 'lulu'," - "EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');", - '''SELECT _X.cw_eid -FROM cw_Personne AS _X -WHERE _X.cw_prenom=lulu AND EXISTS(SELECT 1 FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group1, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers)))'''), - - ("Any X WHERE X prenom 'lulu'," - "NOT EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');", - '''SELECT _X.cw_eid -FROM cw_Personne AS _X -WHERE _X.cw_prenom=lulu AND NOT (EXISTS(SELECT 1 FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group1, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers))))'''), - - ('Any X WHERE X title V, NOT X wikiid V, NOT X title "parent", X is Card', - '''SELECT _X.cw_eid -FROM cw_Card AS _X -WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)'''), - - ("Any -AS WHERE AS is Affaire", - '''SELECT -_AS.cw_eid -FROM cw_Affaire AS _AS'''), - -] - -BASIC_WITH_LIMIT = [ - ("Personne P LIMIT 20 OFFSET 10", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -LIMIT 20 -OFFSET 10'''), - ("Any P ORDERBY N LIMIT 1 WHERE P is Personne, P travaille S, S eid %(eid)s, P nom N, P nom %(text)s", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P, travaille_relation AS rel_travaille0 -WHERE rel_travaille0.eid_from=_P.cw_eid AND rel_travaille0.eid_to=12345 AND _P.cw_nom=hip hop momo -ORDER BY _P.cw_nom -LIMIT 1'''), - ] - - -ADVANCED = [ - ("Societe S WHERE S2 is Societe, S2 nom SN, S nom 'Logilab' OR S nom SN", - '''SELECT _S.cw_eid -FROM cw_Societe AS _S, cw_Societe AS _S2 -WHERE ((_S.cw_nom=Logilab) OR (_S2.cw_nom=_S.cw_nom))'''), - - ("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'", - '''SELECT _S.cw_eid -FROM cw_Societe AS _S -WHERE ((_S.cw_nom=Logilab) OR (_S.cw_nom=Caesium))'''), - - ('Any X WHERE X nom "toto", X eid IN (9700, 9710, 1045, 674)', - '''SELECT _X.cw_eid -FROM cw_Division AS _X -WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674) -UNION ALL -SELECT _X.cw_eid -FROM cw_Personne AS _X -WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674) -UNION ALL -SELECT _X.cw_eid -FROM cw_Societe AS _X -WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674) -UNION ALL -SELECT _X.cw_eid -FROM cw_SubDivision AS _X -WHERE _X.cw_nom=toto AND _X.cw_eid IN(9700, 9710, 1045, 674)'''), - - ('Any Y, COUNT(N) GROUPBY Y WHERE Y evaluee N;', - '''SELECT rel_evaluee0.eid_from, COUNT(rel_evaluee0.eid_to) -FROM evaluee_relation AS rel_evaluee0 -GROUP BY rel_evaluee0.eid_from'''), - - ("Any X WHERE X concerne B or C concerne X", - '''SELECT _X.cw_eid -FROM concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1, cw_Affaire AS _X -WHERE ((rel_concerne0.eid_from=_X.cw_eid) OR (rel_concerne1.eid_to=_X.cw_eid))'''), - - ("Any X WHERE X travaille S or X concerne A", - '''SELECT 
_X.cw_eid -FROM concerne_relation AS rel_concerne1, cw_Personne AS _X, travaille_relation AS rel_travaille0 -WHERE ((rel_travaille0.eid_from=_X.cw_eid) OR (rel_concerne1.eid_from=_X.cw_eid))'''), - - ("Any N WHERE A evaluee N or N ecrit_par P", - '''SELECT _N.cw_eid -FROM cw_Note AS _N, evaluee_relation AS rel_evaluee0 -WHERE ((rel_evaluee0.eid_to=_N.cw_eid) OR (_N.cw_ecrit_par IS NOT NULL))'''), - - ("Any N WHERE A evaluee N or EXISTS(N todo_by U)", - '''SELECT _N.cw_eid -FROM cw_Note AS _N, evaluee_relation AS rel_evaluee0 -WHERE ((rel_evaluee0.eid_to=_N.cw_eid) OR (EXISTS(SELECT 1 FROM todo_by_relation AS rel_todo_by1 WHERE rel_todo_by1.eid_from=_N.cw_eid)))'''), - - ("Any N WHERE A evaluee N or N todo_by U", - '''SELECT _N.cw_eid -FROM cw_Note AS _N, evaluee_relation AS rel_evaluee0, todo_by_relation AS rel_todo_by1 -WHERE ((rel_evaluee0.eid_to=_N.cw_eid) OR (rel_todo_by1.eid_from=_N.cw_eid))'''), - - ("Any X WHERE X concerne B or C concerne X, B eid 12, C eid 13", - '''SELECT _X.cw_eid -FROM concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1, cw_Affaire AS _X -WHERE ((rel_concerne0.eid_from=_X.cw_eid AND rel_concerne0.eid_to=12) OR (rel_concerne1.eid_from=13 AND rel_concerne1.eid_to=_X.cw_eid))'''), - - ('Any X WHERE X created_by U, X concerne B OR C concerne X, B eid 12, C eid 13', - '''SELECT rel_created_by0.eid_from -FROM concerne_relation AS rel_concerne1, concerne_relation AS rel_concerne2, created_by_relation AS rel_created_by0 -WHERE ((rel_concerne1.eid_from=rel_created_by0.eid_from AND rel_concerne1.eid_to=12) OR (rel_concerne2.eid_from=13 AND rel_concerne2.eid_to=rel_created_by0.eid_from))'''), - - ('Any P WHERE P travaille_subdivision S1 OR P travaille_subdivision S2, S1 nom "logilab", S2 nom "caesium"', - '''SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_SubDivision AS _S1, cw_SubDivision AS _S2, travaille_subdivision_relation AS rel_travaille_subdivision0, travaille_subdivision_relation AS rel_travaille_subdivision1 -WHERE ((rel_travaille_subdivision0.eid_from=_P.cw_eid AND rel_travaille_subdivision0.eid_to=_S1.cw_eid) OR (rel_travaille_subdivision1.eid_from=_P.cw_eid AND rel_travaille_subdivision1.eid_to=_S2.cw_eid)) AND _S1.cw_nom=logilab AND _S2.cw_nom=caesium'''), - - ('Any X WHERE T tags X', - '''SELECT rel_tags0.eid_to -FROM tags_relation AS rel_tags0'''), - - ('Any X WHERE X in_basket B, B eid 12', - '''SELECT rel_in_basket0.eid_from -FROM in_basket_relation AS rel_in_basket0 -WHERE rel_in_basket0.eid_to=12'''), - - ('Any SEN,RN,OEN WHERE X from_entity SE, SE eid 44, X relation_type R, R eid 139, X to_entity OE, OE eid 42, R name RN, SE name SEN, OE name OEN', - '''SELECT _SE.cw_name, _R.cw_name, _OE.cw_name -FROM cw_CWAttribute AS _X, cw_CWEType AS _OE, cw_CWEType AS _SE, cw_CWRType AS _R -WHERE _X.cw_from_entity=44 AND _SE.cw_eid=44 AND _X.cw_relation_type=139 AND _R.cw_eid=139 AND _X.cw_to_entity=42 AND _OE.cw_eid=42 -UNION ALL -SELECT _SE.cw_name, _R.cw_name, _OE.cw_name -FROM cw_CWEType AS _OE, cw_CWEType AS _SE, cw_CWRType AS _R, cw_CWRelation AS _X -WHERE _X.cw_from_entity=44 AND _SE.cw_eid=44 AND _X.cw_relation_type=139 AND _R.cw_eid=139 AND _X.cw_to_entity=42 AND _OE.cw_eid=42'''), - - # Any O WHERE NOT S corrected_in O, S eid %(x)s, S concerns P, O version_of P, O in_state ST, NOT ST name "published", O modification_date MTIME ORDERBY MTIME DESC LIMIT 9 - ('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P', - '''SELECT _O.cw_eid -FROM cw_Note AS _S, cw_Personne AS _O -WHERE (_S.cw_ecrit_par IS NULL OR 
_S.cw_ecrit_par!=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''), - - ('Any N WHERE N todo_by U, N is Note, U eid 2, N filed_under T, T eid 3', - # N would actually be invariant if U eid 2 had given a specific type to U - '''SELECT _N.cw_eid -FROM cw_Note AS _N, filed_under_relation AS rel_filed_under1, todo_by_relation AS rel_todo_by0 -WHERE rel_todo_by0.eid_from=_N.cw_eid AND rel_todo_by0.eid_to=2 AND rel_filed_under1.eid_from=_N.cw_eid AND rel_filed_under1.eid_to=3'''), - - ('Any N WHERE N todo_by U, U eid 2, P evaluee N, P eid 3', - '''SELECT rel_evaluee1.eid_to -FROM evaluee_relation AS rel_evaluee1, todo_by_relation AS rel_todo_by0 -WHERE rel_evaluee1.eid_to=rel_todo_by0.eid_from AND rel_todo_by0.eid_to=2 AND rel_evaluee1.eid_from=3'''), - - - (' Any X,U WHERE C owned_by U, NOT X owned_by U, C eid 1, X eid 2', - '''SELECT 2, rel_owned_by0.eid_to -FROM owned_by_relation AS rel_owned_by0 -WHERE rel_owned_by0.eid_from=1 AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=2 AND rel_owned_by0.eid_to=rel_owned_by1.eid_to))'''), - - ('Any GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))', - '''SELECT _G.cw_name -FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group0 -WHERE rel_in_group0.eid_to=_G.cw_eid AND ((_G.cw_name=managers) OR (EXISTS(SELECT 1 FROM copain_relation AS rel_copain1, cw_CWUser AS _T WHERE rel_copain1.eid_from=rel_in_group0.eid_from AND rel_copain1.eid_to=_T.cw_eid AND _T.cw_login IN(comme, cochon))))'''), - - ('Any C WHERE C is Card, EXISTS(X documented_by C)', - """SELECT _C.cw_eid -FROM cw_Card AS _C -WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_to=_C.cw_eid)"""), - - ('Any C WHERE C is Card, EXISTS(X documented_by C, X eid 12)', - """SELECT _C.cw_eid -FROM cw_Card AS _C -WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_from=12 AND rel_documented_by0.eid_to=_C.cw_eid)"""), - - ('Any T WHERE C is Card, C title T, EXISTS(X documented_by C, X eid 12)', - """SELECT _C.cw_title -FROM cw_Card AS _C -WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_from=12 AND rel_documented_by0.eid_to=_C.cw_eid)"""), - - ('Any GN,L WHERE X in_group G, X login L, G name GN, EXISTS(X copain T, T login L, T login IN("comme", "cochon"))', - '''SELECT _G.cw_name, _X.cw_login -FROM cw_CWGroup AS _G, cw_CWUser AS _X, in_group_relation AS rel_in_group0 -WHERE rel_in_group0.eid_from=_X.cw_eid AND rel_in_group0.eid_to=_G.cw_eid AND EXISTS(SELECT 1 FROM copain_relation AS rel_copain1, cw_CWUser AS _T WHERE rel_copain1.eid_from=_X.cw_eid AND rel_copain1.eid_to=_T.cw_eid AND _T.cw_login=_X.cw_login AND _T.cw_login IN(comme, cochon))'''), - - ('Any X,S, MAX(T) GROUPBY X,S ORDERBY S WHERE X is CWUser, T tags X, S eid IN(32), X in_state S', - '''SELECT _X.cw_eid, 32, MAX(rel_tags0.eid_from) -FROM cw_CWUser AS _X, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_to=_X.cw_eid AND _X.cw_in_state=32 -GROUP BY _X.cw_eid'''), - - - ('Any X WHERE Y evaluee X, Y is CWUser', - '''SELECT rel_evaluee0.eid_to -FROM cw_CWUser AS _Y, evaluee_relation AS rel_evaluee0 -WHERE rel_evaluee0.eid_from=_Y.cw_eid'''), - - ('Any L WHERE X login "admin", X identity Y, Y login L', - '''SELECT _Y.cw_login -FROM cw_CWUser AS _X, cw_CWUser AS _Y -WHERE _X.cw_login=admin AND _X.cw_eid=_Y.cw_eid'''), - - ('Any L WHERE X login "admin", 
NOT X identity Y, Y login L', - '''SELECT _Y.cw_login -FROM cw_CWUser AS _X, cw_CWUser AS _Y -WHERE _X.cw_login=admin AND NOT (_X.cw_eid=_Y.cw_eid)'''), - - ('Any L WHERE X login "admin", X identity Y?, Y login L', - '''SELECT _Y.cw_login -FROM cw_CWUser AS _X LEFT OUTER JOIN cw_CWUser AS _Y ON (_X.cw_eid=_Y.cw_eid) -WHERE _X.cw_login=admin'''), - - ('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket,Folder,Tag)', - '''SELECT _X.cw_name -FROM cw_Basket AS _X -UNION ALL -SELECT _X.cw_name -FROM cw_Folder AS _X -UNION ALL -SELECT _X.cw_name -FROM cw_Tag AS _X -ORDER BY 1'''), - - # DISTINCT, can use relation under exists scope as principal - ('DISTINCT Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), EXISTS(X read_permission Y)', - '''SELECT DISTINCT _X.cw_eid, rel_read_permission0.eid_to -FROM cw_CWEType AS _X, read_permission_relation AS rel_read_permission0 -WHERE _X.cw_name=CWGroup AND rel_read_permission0.eid_to IN(1, 2, 3) AND EXISTS(SELECT 1 WHERE rel_read_permission0.eid_from=_X.cw_eid)'''), - - # no distinct, Y can't be invariant - ('Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), EXISTS(X read_permission Y)', - '''SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_CWGroup AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION ALL -SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''), - - # DISTINCT but negated EXISTS, can't be invariant - ('DISTINCT Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)', - '''SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_CWGroup AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)) -UNION -SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''), - - # should generate the same query as above - ('DISTINCT Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), NOT X read_permission Y', - '''SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_CWGroup AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)) -UNION -SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''), - - # negated relation, can't be invariant - ('Any X,Y WHERE X name "CWGroup", X is CWEType, Y eid IN(1, 2, 3), NOT X read_permission Y', - '''SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_CWGroup AS _Y -WHERE 
_X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)) -UNION ALL -SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''), - - ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N, X is IN (Basket, Folder, Tag);', - '''SELECT (MAX(T1.C0) + MIN(T1.C0)), T1.C1 FROM (SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_Basket AS _X -UNION ALL -SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_Folder AS _X -UNION ALL -SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_Tag AS _X) AS T1 -GROUP BY T1.C1'''), - - ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))), _X.cw_data_name -FROM cw_File AS _X -GROUP BY _X.cw_data_name,_X.cw_data_format -ORDER BY 1,2,_X.cw_data_format'''), - - # ambiguity in EXISTS() -> should union the sub-query - ('Any T WHERE T is Tag, NOT T name in ("t1", "t2"), EXISTS(T tags X, X is IN (CWUser, CWGroup))', - '''SELECT _T.cw_eid -FROM cw_Tag AS _T -WHERE NOT (_T.cw_name IN(t1, t2)) AND EXISTS(SELECT 1 FROM cw_CWGroup AS _X, tags_relation AS rel_tags0 WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X, tags_relation AS rel_tags1 WHERE rel_tags1.eid_from=_T.cw_eid AND rel_tags1.eid_to=_X.cw_eid)'''), - - # must not use a relation in EXISTS scope to inline a variable - ('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)', - '''SELECT _U.cw_eid -FROM cw_CWUser AS _U -WHERE _U.cw_eid IN(1, 2) AND EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_to=_U.cw_eid)'''), - - ('Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)', - '''SELECT _U.cw_eid -FROM cw_CWUser AS _U -WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE _U.cw_eid IN(1, 2) AND rel_owned_by0.eid_to=_U.cw_eid)'''), - - ('Any COUNT(U) WHERE EXISTS (P owned_by U, P is IN (Note, Affaire))', - '''SELECT COUNT(_U.cw_eid) -FROM cw_CWUser AS _U -WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=_U.cw_eid)'''), - - ('Any MAX(X)', - '''SELECT MAX(_X.eid) -FROM entities AS _X'''), - - ('Any MAX(X) WHERE X is Note', - '''SELECT MAX(_X.cw_eid) -FROM cw_Note AS _X'''), - - ('Any X WHERE X eid > 12', - '''SELECT _X.eid -FROM entities AS _X -WHERE _X.eid>12'''), - - ('Any X WHERE X eid > 12, X is Note', - """SELECT _X.eid -FROM entities AS _X -WHERE _X.type='Note' AND _X.eid>12"""), - - ('Any X, T WHERE X eid > 12, X title T, X is IN (Bookmark, Card)', - """SELECT _X.cw_eid, _X.cw_title -FROM cw_Bookmark AS _X -WHERE _X.cw_eid>12 -UNION ALL -SELECT _X.cw_eid, _X.cw_title -FROM cw_Card AS _X -WHERE _X.cw_eid>12"""), - - ('Any X', - '''SELECT _X.eid -FROM entities AS _X'''), - - ('Any X GROUPBY X WHERE X eid 12', - '''SELECT 12'''), - - ('Any X GROUPBY X ORDERBY Y WHERE X eid 12, X login Y', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE _X.cw_eid=12 -GROUP BY _X.cw_eid,_X.cw_login -ORDER BY 
_X.cw_login'''), - - ('Any U,COUNT(X) GROUPBY U WHERE U eid 12, X owned_by U HAVING COUNT(X) > 10', - '''SELECT rel_owned_by0.eid_to, COUNT(rel_owned_by0.eid_from) -FROM owned_by_relation AS rel_owned_by0 -WHERE rel_owned_by0.eid_to=12 -GROUP BY rel_owned_by0.eid_to -HAVING COUNT(rel_owned_by0.eid_from)>10'''), - - - ("Any X WHERE X eid 0, X test TRUE", - '''SELECT _X.cw_eid -FROM cw_Personne AS _X -WHERE _X.cw_eid=0 AND _X.cw_test=True'''), - - ('Any 1 WHERE X in_group G, X is CWUser', - '''SELECT 1 -FROM in_group_relation AS rel_in_group0'''), - - ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"', - '''SELECT _X.cw_eid -FROM cw_CWEType AS _X -WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''), - ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"', - '''SELECT _X.cw_eid -FROM cw_CWEType AS _X -WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''), - - ('DISTINCT Any X, SUM(C) GROUPBY X ORDERBY SUM(C) DESC WHERE H todo_by X, H duration C', - '''SELECT DISTINCT rel_todo_by0.eid_to, SUM(_H.cw_duration) -FROM cw_Affaire AS _H, todo_by_relation AS rel_todo_by0 -WHERE rel_todo_by0.eid_from=_H.cw_eid -GROUP BY rel_todo_by0.eid_to -ORDER BY 2 DESC'''), - - ('Any R2 WHERE R2 concerne R, R eid RE, R2 eid > RE', - '''SELECT _R2.eid -FROM concerne_relation AS rel_concerne0, entities AS _R2 -WHERE _R2.eid=rel_concerne0.eid_from AND _R2.eid>rel_concerne0.eid_to'''), - - ('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y', - '''SELECT _X.cw_eid -FROM cw_Note AS _X -WHERE _X.cw_eid IN(999998, 999999) AND NOT (EXISTS(SELECT 1 FROM cw_source_relation AS rel_cw_source0 WHERE rel_cw_source0.eid_from=_X.cw_eid))'''), - - # Test for https://www.cubicweb.org/ticket/5503548 - ('''Any X - WHERE X is CWSourceSchemaConfig, - EXISTS(X created_by U, U login L), - X cw_schema X_CW_SCHEMA, - X owned_by X_OWNED_BY? 
- ''', '''SELECT _X.cw_eid -FROM cw_CWSourceSchemaConfig AS _X LEFT OUTER JOIN owned_by_relation AS rel_owned_by1 ON (rel_owned_by1.eid_from=_X.cw_eid) -WHERE EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0, cw_CWUser AS _U WHERE rel_created_by0.eid_from=_X.cw_eid AND rel_created_by0.eid_to=_U.cw_eid) AND _X.cw_cw_schema IS NOT NULL -''') - ] - -ADVANCED_WITH_GROUP_CONCAT = [ - ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN, X is CWGroup", - '''SELECT _X.cw_eid, GROUP_CONCAT(_T.cw_name) -FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid -GROUP BY _X.cw_eid,_X.cw_name -ORDER BY _X.cw_name'''), - - ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN", - '''SELECT T1.C0, GROUP_CONCAT(T1.C1) FROM (SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2 -FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid -UNION ALL -SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2 -FROM cw_State AS _X, cw_Tag AS _T, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid -UNION ALL -SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2 -FROM cw_Tag AS _T, cw_Tag AS _X, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid) AS T1 -GROUP BY T1.C0,T1.C2 -ORDER BY T1.C2'''), - -] - -ADVANCED_WITH_LIMIT_OR_ORDERBY = [ - ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))', - '''SELECT COUNT(rel_concerne0.eid_from), _C.cw_nom -FROM concerne_relation AS rel_concerne0, cw_Societe AS _C -WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM cw_Card AS _N, documented_by_relation AS rel_documented_by2 WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published))) -GROUP BY _C.cw_nom -ORDER BY 1 DESC -LIMIT 10'''), - ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI', - '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1 -FROM cw_Note AS _S -WHERE _S.cw_ecrit_par IS NULL -ORDER BY 2) AS T1'''), - - ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 -FROM cw_File AS _X -GROUP BY _X.cw_data_name,_X.cw_data_format -ORDER BY 2,3) AS T1 -'''), - - ('DISTINCT Any X ORDERBY stockproc(X) WHERE U login X', - '''SELECT T1.C0 FROM (SELECT DISTINCT _U.cw_login AS C0, STOCKPROC(_U.cw_login) AS C1 -FROM cw_CWUser AS _U -ORDER BY 2) AS T1'''), - - ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y', - '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _X.cw_login AS C1 -FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_CWUser AS _X -WHERE rel_bookmarked_by0.eid_to=_X.cw_eid -ORDER BY 2) AS T1'''), - - ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN', - '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 -FROM cw_Affaire AS _X, cw_State AS _S -WHERE 
_X.cw_in_state=_S.cw_eid -UNION -SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 -FROM cw_CWUser AS _X, cw_State AS _S -WHERE _X.cw_in_state=_S.cw_eid -UNION -SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 -FROM cw_Note AS _X, cw_State AS _S -WHERE _X.cw_in_state=_S.cw_eid -ORDER BY 2) AS T1'''), - - ('Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT S use_email O, S eid 1, O is EmailAddress, O address AA, O alias AB, O modification_date AC, ' - 'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2', - '''SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date -FROM cw_EmailAddress AS _O -WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests)) -ORDER BY 4 DESC'''), - - - ] - -MULTIPLE_SEL = [ - ("DISTINCT Any X,Y where P is Personne, P nom X , P prenom Y;", - '''SELECT DISTINCT _P.cw_nom, _P.cw_prenom -FROM cw_Personne AS _P'''), - ("Any X,Y where P is Personne, P nom X , P prenom Y, not P nom NULL;", - '''SELECT _P.cw_nom, _P.cw_prenom -FROM cw_Personne AS _P -WHERE NOT (_P.cw_nom IS NULL)'''), - ("Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE", - '''SELECT _X.cw_eid, _Y.cw_eid -FROM cw_Personne AS _X, cw_Personne AS _Y -WHERE _Y.cw_nom=_X.cw_nom AND NOT (_Y.cw_eid=_X.cw_eid)'''), - - ('Any X,Y WHERE X is Personne, Y is Personne, X nom XD, Y nom XD, X eid Z, Y eid > Z', - '''SELECT _X.cw_eid, _Y.cw_eid -FROM cw_Personne AS _X, cw_Personne AS _Y -WHERE _Y.cw_nom=_X.cw_nom AND _Y.cw_eid>_X.cw_eid'''), - ] - - -NEGATIONS = [ - - ("Personne X WHERE NOT X evaluee Y;", - '''SELECT _X.cw_eid -FROM cw_Personne AS _X -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_X.cw_eid))'''), - - ("Note N WHERE NOT X evaluee N, X eid 0", - '''SELECT _N.cw_eid -FROM cw_Note AS _N -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=0 AND rel_evaluee0.eid_to=_N.cw_eid))'''), - - ('Any X WHERE NOT X travaille S, X is Personne', - '''SELECT _X.cw_eid -FROM cw_Personne AS _X -WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid))'''), - - ("Personne P where NOT P concerne A", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid))'''), - - ("Affaire A where not P concerne A", - '''SELECT _A.cw_eid -FROM cw_Affaire AS _A -WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_to=_A.cw_eid))'''), - ("Personne P where not P concerne A, A sujet ~= 'TEST%'", - '''SELECT _P.cw_eid -FROM cw_Affaire AS _A, cw_Personne AS _P -WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid AND rel_concerne0.eid_to=_A.cw_eid)) AND _A.cw_sujet ILIKE TEST%'''), - - ('Any S WHERE NOT T eid 28258, T tags S', - '''SELECT rel_tags0.eid_to -FROM tags_relation AS rel_tags0 -WHERE NOT (rel_tags0.eid_from=28258)'''), - - ('Any S WHERE T is Tag, T name TN, NOT T eid 28258, T tags S, S name SN', - '''SELECT _S.cw_eid -FROM 
cw_CWGroup AS _S, cw_Tag AS _T, tags_relation AS rel_tags0 -WHERE NOT (_T.cw_eid=28258) AND rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_S.cw_eid -UNION ALL -SELECT _S.cw_eid -FROM cw_State AS _S, cw_Tag AS _T, tags_relation AS rel_tags0 -WHERE NOT (_T.cw_eid=28258) AND rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_S.cw_eid -UNION ALL -SELECT _S.cw_eid -FROM cw_Tag AS _S, cw_Tag AS _T, tags_relation AS rel_tags0 -WHERE NOT (_T.cw_eid=28258) AND rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_S.cw_eid'''), - - ('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid 6', - '''SELECT 5, rel_created_by0.eid_to -FROM created_by_relation AS rel_created_by0 -WHERE rel_created_by0.eid_from=5 AND NOT (rel_created_by0.eid_to=6)'''), - - ('Note X WHERE NOT Y evaluee X', - '''SELECT _X.cw_eid -FROM cw_Note AS _X -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_to=_X.cw_eid))'''), - - ('Any Y WHERE NOT Y evaluee X', - '''SELECT _Y.cw_eid -FROM cw_CWUser AS _Y -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) -UNION ALL -SELECT _Y.cw_eid -FROM cw_Division AS _Y -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) -UNION ALL -SELECT _Y.cw_eid -FROM cw_Personne AS _Y -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) -UNION ALL -SELECT _Y.cw_eid -FROM cw_Societe AS _Y -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)) -UNION ALL -SELECT _Y.cw_eid -FROM cw_SubDivision AS _Y -WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))'''), - - ('Any X WHERE NOT Y evaluee X, Y is CWUser', - '''SELECT _X.cw_eid -FROM cw_Note AS _X -WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _Y, evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid AND rel_evaluee0.eid_to=_X.cw_eid))'''), - - ('Any X,RT WHERE X relation_type RT, NOT X is CWAttribute', - '''SELECT _X.cw_eid, _X.cw_relation_type -FROM cw_CWRelation AS _X -WHERE _X.cw_relation_type IS NOT NULL'''), - - ('Any K,V WHERE P is CWProperty, P pkey K, P value V, NOT P for_user U', - '''SELECT _P.cw_pkey, _P.cw_value -FROM cw_CWProperty AS _P -WHERE _P.cw_for_user IS NULL'''), - - ('Any S WHERE NOT X in_state S, X is IN(Affaire, CWUser)', - '''SELECT _S.cw_eid -FROM cw_State AS _S -WHERE NOT (EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid))'''), - - ('Any S WHERE NOT(X in_state S, S name "somename"), X is CWUser', - '''SELECT _S.cw_eid -FROM cw_State AS _S -WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid AND _S.cw_name=somename))'''), - ] - -HAS_TEXT_LG_INDEXER = [ - ('Any X WHERE X has_text "toto tata"', - """SELECT DISTINCT appears0.uid -FROM appears AS appears0 -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), - ('Personne X WHERE X has_text "toto tata"', - """SELECT DISTINCT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.eid AND _X.type='Personne'"""), - ('Personne X WHERE X has_text %(text)s', - """SELECT DISTINCT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo')) AND appears0.uid=_X.eid AND 
_X.type='Personne' -"""), - ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', - """SELECT DISTINCT _X.cw_eid -FROM appears AS appears0, cw_Basket AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -UNION -SELECT DISTINCT _X.cw_eid -FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu""") - ] - - - -# XXXFIXME fail -# ('Any X,RT WHERE X relation_type RT?, NOT X is CWAttribute', -# '''SELECT _X.cw_eid, _X.cw_relation_type -# FROM cw_CWRelation AS _X'''), - - -OUTER_JOIN = [ - - ('Any U,G WHERE U login L, G name L?, G is CWGroup', - '''SELECT _U.cw_eid, _G.cw_eid -FROM cw_CWUser AS _U LEFT OUTER JOIN cw_CWGroup AS _G ON (_G.cw_name=_U.cw_login)'''), - - ('Any X,S WHERE X travaille S?', - '''SELECT _X.cw_eid, rel_travaille0.eid_to -FROM cw_Personne AS _X LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=_X.cw_eid)''' - ), - ('Any S,X WHERE X? travaille S, S is Societe', - '''SELECT _S.cw_eid, rel_travaille0.eid_from -FROM cw_Societe AS _S LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_to=_S.cw_eid)''' - ), - - ('Any N,A WHERE N inline1 A?', - '''SELECT _N.cw_eid, _N.cw_inline1 -FROM cw_Note AS _N'''), - - ('Any SN WHERE X from_state S?, S name SN', - '''SELECT _S.cw_name -FROM cw_TrInfo AS _X LEFT OUTER JOIN cw_State AS _S ON (_X.cw_from_state=_S.cw_eid)''' - ), - - ('Any A,N WHERE N? inline1 A', - '''SELECT _A.cw_eid, _N.cw_eid -FROM cw_Affaire AS _A LEFT OUTER JOIN cw_Note AS _N ON (_N.cw_inline1=_A.cw_eid)''' - ), - - ('Any A,B,C,D,E,F,G WHERE A eid 12,A creation_date B,A modification_date C,A comment D,A from_state E?,A to_state F?,A wf_info_for G?', - '''SELECT _A.cw_eid, _A.cw_creation_date, _A.cw_modification_date, _A.cw_comment, _A.cw_from_state, _A.cw_to_state, _A.cw_wf_info_for -FROM cw_TrInfo AS _A -WHERE _A.cw_eid=12'''), - - ('Any FS,TS,C,D,U ORDERBY D DESC WHERE WF wf_info_for X,WF from_state FS?, WF to_state TS, WF comment C,WF creation_date D, WF owned_by U, X eid 1', - '''SELECT _WF.cw_from_state, _WF.cw_to_state, _WF.cw_comment, _WF.cw_creation_date, rel_owned_by0.eid_to -FROM cw_TrInfo AS _WF, owned_by_relation AS rel_owned_by0 -WHERE _WF.cw_wf_info_for=1 AND _WF.cw_to_state IS NOT NULL AND rel_owned_by0.eid_from=_WF.cw_eid -ORDER BY 4 DESC'''), - - ('Any X WHERE X is Affaire, S is Societe, EXISTS(X owned_by U OR (X concerne S?, S owned_by U))', - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X -WHERE EXISTS(SELECT 1 FROM cw_CWUser AS _U, owned_by_relation AS rel_owned_by0, owned_by_relation AS rel_owned_by2, cw_Affaire AS _A LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_A.cw_eid) LEFT OUTER JOIN cw_Societe AS _S ON (rel_concerne1.eid_to=_S.cw_eid) WHERE ((rel_owned_by0.eid_from=_A.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid) OR (rel_owned_by2.eid_from=_S.cw_eid AND rel_owned_by2.eid_to=_U.cw_eid)) AND _X.cw_eid=_A.cw_eid)'''), - - ('Any C,M WHERE C travaille G?, G evaluee M?, G is Societe', - '''SELECT _C.cw_eid, rel_evaluee1.eid_to -FROM cw_Personne AS _C LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=_C.cw_eid) LEFT OUTER JOIN cw_Societe AS _G ON (rel_travaille0.eid_to=_G.cw_eid) LEFT OUTER JOIN evaluee_relation AS rel_evaluee1 ON (rel_evaluee1.eid_from=_G.cw_eid)''' - ), - - ('Any A,C WHERE A documented_by C?, (C is 
NULL) OR (EXISTS(C require_permission F, ' - 'F name "read", F require_group E, U in_group E)), U eid 1', - '''SELECT _A.cw_eid, rel_documented_by0.eid_to -FROM cw_Affaire AS _A LEFT OUTER JOIN documented_by_relation AS rel_documented_by0 ON (rel_documented_by0.eid_from=_A.cw_eid) -WHERE ((rel_documented_by0.eid_to IS NULL) OR (EXISTS(SELECT 1 FROM cw_CWPermission AS _F, in_group_relation AS rel_in_group3, require_group_relation AS rel_require_group2, require_permission_relation AS rel_require_permission1 WHERE rel_documented_by0.eid_to=rel_require_permission1.eid_from AND rel_require_permission1.eid_to=_F.cw_eid AND _F.cw_name=read AND rel_require_group2.eid_from=_F.cw_eid AND rel_in_group3.eid_to=rel_require_group2.eid_to AND rel_in_group3.eid_from=1)))'''), - - ("Any X WHERE X eid 12, P? connait X", - '''SELECT _X.cw_eid -FROM cw_Personne AS _X LEFT OUTER JOIN connait_relation AS rel_connait0 ON (rel_connait0.eid_to=_X.cw_eid) -WHERE _X.cw_eid=12''' - ), - ("Any P WHERE X eid 12, P? concerne X, X todo_by S", - '''SELECT rel_concerne1.eid_from -FROM todo_by_relation AS rel_todo_by0 LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_to=12) -WHERE rel_todo_by0.eid_from=12''' - ), - - ('Any GN, TN ORDERBY GN WHERE T tags G?, T name TN, G name GN', - ''' -SELECT _T0.C1, _T.cw_name -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN (SELECT _G.cw_eid AS C0, _G.cw_name AS C1 -FROM cw_CWGroup AS _G -UNION ALL -SELECT _G.cw_eid AS C0, _G.cw_name AS C1 -FROM cw_State AS _G -UNION ALL -SELECT _G.cw_eid AS C0, _G.cw_name AS C1 -FROM cw_Tag AS _G) AS _T0 ON (rel_tags0.eid_to=_T0.C0) -ORDER BY 1'''), - - - # optional variable with additional restriction - ('Any T,G WHERE T tags G?, G name "hop", G is CWGroup', - '''SELECT _T.cw_eid, _G.cw_eid -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWGroup AS _G ON (rel_tags0.eid_to=_G.cw_eid AND _G.cw_name=hop)'''), - - # optional variable with additional invariant restriction - ('Any T,G WHERE T tags G?, G eid 12', - '''SELECT _T.cw_eid, rel_tags0.eid_to -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=12)'''), - - # optional variable with additional restriction appearing before the relation - ('Any T,G WHERE G name "hop", T tags G?, G is CWGroup', - '''SELECT _T.cw_eid, _G.cw_eid -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWGroup AS _G ON (rel_tags0.eid_to=_G.cw_eid AND _G.cw_name=hop)'''), - - # optional variable with additional restriction on inlined relation - # XXX the expected result should be as the query below. So what, raise BadRQLQuery ? 
- ('Any T,G,S WHERE T tags G?, G in_state S, S name "hop", G is CWUser', - '''SELECT _T.cw_eid, _G.cw_eid, _S.cw_eid -FROM cw_State AS _S, cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWUser AS _G ON (rel_tags0.eid_to=_G.cw_eid) -WHERE _G.cw_in_state=_S.cw_eid AND _S.cw_name=hop -'''), - - # optional variable with additional invariant restriction on an inlined relation - ('Any T,G,S WHERE T tags G, G in_state S?, S eid 1, G is CWUser', - '''SELECT rel_tags0.eid_from, _G.cw_eid, _G.cw_in_state -FROM cw_CWUser AS _G, tags_relation AS rel_tags0 -WHERE rel_tags0.eid_to=_G.cw_eid AND (_G.cw_in_state=1 OR _G.cw_in_state IS NULL)'''), - - # two optional variables with additional invariant restriction on an inlined relation - ('Any T,G,S WHERE T tags G?, G in_state S?, S eid 1, G is CWUser', - '''SELECT _T.cw_eid, _G.cw_eid, _G.cw_in_state -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWUser AS _G ON (rel_tags0.eid_to=_G.cw_eid AND (_G.cw_in_state=1 OR _G.cw_in_state IS NULL))'''), - - # two optional variables with additional restriction on an inlined relation - ('Any T,G,S WHERE T tags G?, G in_state S?, S name "hop", G is CWUser', - '''SELECT _T.cw_eid, _G.cw_eid, _S.cw_eid -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN cw_CWUser AS _G ON (rel_tags0.eid_to=_G.cw_eid) LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop)'''), - - # two optional variables with additional restriction on an ambiguous inlined relation - ('Any T,G,S WHERE T tags G?, G in_state S?, S name "hop"', - ''' -SELECT _T.cw_eid, _T0.C0, _T0.C1 -FROM cw_Tag AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=_T.cw_eid) LEFT OUTER JOIN (SELECT _G.cw_eid AS C0, _S.cw_eid AS C1 -FROM cw_Affaire AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop) -UNION ALL -SELECT _G.cw_eid AS C0, _S.cw_eid AS C1 -FROM cw_CWUser AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop) -UNION ALL -SELECT _G.cw_eid AS C0, _S.cw_eid AS C1 -FROM cw_Note AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop)) AS _T0 ON (rel_tags0.eid_to=_T0.C0)'''), - - ('Any O,AD WHERE NOT S inline1 O, S eid 123, O todo_by AD?', - '''SELECT _O.cw_eid, rel_todo_by0.eid_to -FROM cw_Note AS _S, cw_Affaire AS _O LEFT OUTER JOIN todo_by_relation AS rel_todo_by0 ON (rel_todo_by0.eid_from=_O.cw_eid) -WHERE (_S.cw_inline1 IS NULL OR _S.cw_inline1!=_O.cw_eid) AND _S.cw_eid=123'''), - - ('Any X,AE WHERE X multisource_inlined_rel S?, S ambiguous_inlined A, A modification_date AE', - '''SELECT _X.cw_eid, _T0.C2 -FROM cw_Card AS _X LEFT OUTER JOIN (SELECT _S.cw_eid AS C0, _A.cw_eid AS C1, _A.cw_modification_date AS C2 -FROM cw_Affaire AS _S, cw_CWUser AS _A -WHERE _S.cw_ambiguous_inlined=_A.cw_eid -UNION ALL -SELECT _S.cw_eid AS C0, _A.cw_eid AS C1, _A.cw_modification_date AS C2 -FROM cw_CWUser AS _A, cw_Note AS _S -WHERE 
_S.cw_ambiguous_inlined=_A.cw_eid) AS _T0 ON (_X.cw_multisource_inlined_rel=_T0.C0)''' - ), - - ('Any X,T,OT WHERE X tags T, OT? tags X, X is Tag, X eid 123', - '''SELECT rel_tags0.eid_from, rel_tags0.eid_to, rel_tags1.eid_from -FROM tags_relation AS rel_tags0 LEFT OUTER JOIN tags_relation AS rel_tags1 ON (rel_tags1.eid_to=123) -WHERE rel_tags0.eid_from=123'''), - - ('Any CASE, CALIBCFG, CFG ' - 'WHERE CASE eid 1, CFG ecrit_par CASE, CALIBCFG? ecrit_par CASE', - '''SELECT _CFG.cw_ecrit_par, _CALIBCFG.cw_eid, _CFG.cw_eid -FROM cw_Note AS _CFG LEFT OUTER JOIN cw_Note AS _CALIBCFG ON (_CALIBCFG.cw_ecrit_par=1) -WHERE _CFG.cw_ecrit_par=1'''), - - ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)=UPPER(GL)?', - '''SELECT _U.cw_eid, _G.cw_eid -FROM cw_CWUser AS _U LEFT OUTER JOIN cw_CWGroup AS _G ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''), - - ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)?=UPPER(GL)', - '''SELECT _U.cw_eid, _G.cw_eid -FROM cw_CWGroup AS _G LEFT OUTER JOIN cw_CWUser AS _U ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''), - - ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)?=UPPER(GL)?', - '''SELECT _U.cw_eid, _G.cw_eid -FROM cw_CWUser AS _U FULL OUTER JOIN cw_CWGroup AS _G ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''), - - ('Any H, COUNT(X), SUM(XCE)/1000 ' - 'WHERE X type "0", X date XSCT, X para XCE, X? ecrit_par F, F eid 999999, F is Personne, ' - 'DH is Affaire, DH ref H ' - 'HAVING XSCT?=H', - '''SELECT _DH.cw_ref, COUNT(_X.cw_eid), (SUM(_X.cw_para) / 1000) -FROM cw_Affaire AS _DH LEFT OUTER JOIN cw_Note AS _X ON (_X.cw_date=_DH.cw_ref AND _X.cw_type=0 AND _X.cw_ecrit_par=999999)'''), - - ('Any C WHERE X ecrit_par C?, X? inline1 F, F eid 1, X type XT, Z is Personne, Z nom ZN HAVING ZN=XT?', - '''SELECT _X.cw_ecrit_par -FROM cw_Personne AS _Z LEFT OUTER JOIN cw_Note AS _X ON (_Z.cw_nom=_X.cw_type AND _X.cw_inline1=1)'''), - ] - -VIRTUAL_VARS = [ - - ('Any X WHERE X is CWUser, X creation_date > D1, Y creation_date D1, Y login "SWEB09"', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X, cw_CWUser AS _Y -WHERE _X.cw_creation_date>_Y.cw_creation_date AND _Y.cw_login=SWEB09'''), - - ('Any X WHERE X is CWUser, Y creation_date D1, Y login "SWEB09", X creation_date > D1', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X, cw_CWUser AS _Y -WHERE _Y.cw_login=SWEB09 AND _X.cw_creation_date>_Y.cw_creation_date'''), - - ('Personne P WHERE P travaille S, S tel T, S fax T, S is Societe', - '''SELECT rel_travaille0.eid_from -FROM cw_Societe AS _S, travaille_relation AS rel_travaille0 -WHERE rel_travaille0.eid_to=_S.cw_eid AND _S.cw_tel=_S.cw_fax'''), - - ("Personne P where X eid 0, X creation_date D, P tzdatenaiss < D, X is Affaire", - '''SELECT _P.cw_eid -FROM cw_Affaire AS _X, cw_Personne AS _P -WHERE _X.cw_eid=0 AND _P.cw_tzdatenaiss<_X.cw_creation_date'''), - - ("Any N,T WHERE N is Note, N type T;", - '''SELECT _N.cw_eid, _N.cw_type -FROM cw_Note AS _N'''), - - ("Personne P where X is Personne, X tel T, X fax F, P fax T+F", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_Personne AS _X -WHERE _P.cw_fax=(_X.cw_tel + _X.cw_fax)'''), - - ("Personne P where X tel T, X fax F, P fax IN (T,F)", - '''SELECT _P.cw_eid -FROM cw_Division AS _X, cw_Personne AS _P -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax) -UNION ALL -SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_Personne AS _X -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax) -UNION ALL -SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_Societe AS _X -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax) -UNION ALL -SELECT 
_P.cw_eid -FROM cw_Personne AS _P, cw_SubDivision AS _X -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax)'''), - - ("Personne P where X tel T, X fax F, P fax IN (T,F,0832542332)", - '''SELECT _P.cw_eid -FROM cw_Division AS _X, cw_Personne AS _P -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332) -UNION ALL -SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_Personne AS _X -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332) -UNION ALL -SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_Societe AS _X -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332) -UNION ALL -SELECT _P.cw_eid -FROM cw_Personne AS _P, cw_SubDivision AS _X -WHERE _P.cw_fax IN(_X.cw_tel, _X.cw_fax, 832542332)'''), - ] - -FUNCS = [ - ("Any COUNT(P) WHERE P is Personne", - '''SELECT COUNT(_P.cw_eid) -FROM cw_Personne AS _P'''), - ] - -INLINE = [ - - ('Any P WHERE N eid 1, N ecrit_par P, NOT P owned_by P2', - '''SELECT _N.cw_ecrit_par -FROM cw_Note AS _N -WHERE _N.cw_eid=1 AND _N.cw_ecrit_par IS NOT NULL AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE _N.cw_ecrit_par=rel_owned_by0.eid_from))'''), - - ('Any P, L WHERE N ecrit_par P, P nom L, N eid 0', - '''SELECT _P.cw_eid, _P.cw_nom -FROM cw_Note AS _N, cw_Personne AS _P -WHERE _N.cw_ecrit_par=_P.cw_eid AND _N.cw_eid=0'''), - - ('Any N WHERE NOT N ecrit_par P, P nom "toto"', - '''SELECT _N.cw_eid -FROM cw_Note AS _N, cw_Personne AS _P -WHERE (_N.cw_ecrit_par IS NULL OR _N.cw_ecrit_par!=_P.cw_eid) AND _P.cw_nom=toto'''), - - ('Any P WHERE NOT N ecrit_par P, P nom "toto"', - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE NOT (EXISTS(SELECT 1 FROM cw_Note AS _N WHERE _N.cw_ecrit_par=_P.cw_eid)) AND _P.cw_nom=toto'''), - - ('Any P WHERE N ecrit_par P, N eid 0', - '''SELECT _N.cw_ecrit_par -FROM cw_Note AS _N -WHERE _N.cw_ecrit_par IS NOT NULL AND _N.cw_eid=0'''), - - ('Any P WHERE N ecrit_par P, P is Personne, N eid 0', - '''SELECT _P.cw_eid -FROM cw_Note AS _N, cw_Personne AS _P -WHERE _N.cw_ecrit_par=_P.cw_eid AND _N.cw_eid=0'''), - - ('Any P WHERE NOT N ecrit_par P, P is Personne, N eid 512', - '''SELECT _P.cw_eid -FROM cw_Note AS _N, cw_Personne AS _P -WHERE (_N.cw_ecrit_par IS NULL OR _N.cw_ecrit_par!=_P.cw_eid) AND _N.cw_eid=512'''), - - ('Any S,ES,T WHERE S state_of ET, ET name "CWUser", ES allowed_transition T, T destination_state S', - # XXX "_T.cw_destination_state IS NOT NULL" could be avoided here but it's not worth it - '''SELECT _T.cw_destination_state, rel_allowed_transition1.eid_from, _T.cw_eid -FROM allowed_transition_relation AS rel_allowed_transition1, cw_Transition AS _T, cw_Workflow AS _ET, state_of_relation AS rel_state_of0 -WHERE _T.cw_destination_state=rel_state_of0.eid_from AND rel_state_of0.eid_to=_ET.cw_eid AND _ET.cw_name=CWUser AND rel_allowed_transition1.eid_to=_T.cw_eid AND _T.cw_destination_state IS NOT NULL'''), - - ('Any O WHERE S eid 0, S in_state O', - '''SELECT _S.cw_in_state -FROM cw_Affaire AS _S -WHERE _S.cw_eid=0 AND _S.cw_in_state IS NOT NULL -UNION ALL -SELECT _S.cw_in_state -FROM cw_CWUser AS _S -WHERE _S.cw_eid=0 AND _S.cw_in_state IS NOT NULL -UNION ALL -SELECT _S.cw_in_state -FROM cw_Note AS _S -WHERE _S.cw_eid=0 AND _S.cw_in_state IS NOT NULL'''), - - ('Any X WHERE NOT Y for_user X, X eid 123', - '''SELECT 123 -WHERE NOT (EXISTS(SELECT 1 FROM cw_CWProperty AS _Y WHERE _Y.cw_for_user=123))'''), - - ('DISTINCT Any X WHERE X from_entity OET, NOT X from_entity NET, OET name "Image", NET eid 1', - '''SELECT DISTINCT _X.cw_eid -FROM cw_CWAttribute AS _X, cw_CWEType AS _OET -WHERE _X.cw_from_entity=_OET.cw_eid AND 
(_X.cw_from_entity IS NULL OR _X.cw_from_entity!=1) AND _OET.cw_name=Image -UNION -SELECT DISTINCT _X.cw_eid -FROM cw_CWEType AS _OET, cw_CWRelation AS _X -WHERE _X.cw_from_entity=_OET.cw_eid AND (_X.cw_from_entity IS NULL OR _X.cw_from_entity!=1) AND _OET.cw_name=Image'''), - - ] - -INTERSECT = [ - ('Any SN WHERE NOT X in_state S, S name SN', - '''SELECT _S.cw_name -FROM cw_State AS _S -WHERE NOT (EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_Note AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid))'''), - - ('Any PN WHERE NOT X travaille S, X nom PN, S is IN(Division, Societe)', - '''SELECT _X.cw_nom -FROM cw_Personne AS _X -WHERE NOT (EXISTS(SELECT 1 FROM cw_Division AS _S, travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid UNION SELECT 1 FROM cw_Societe AS _S, travaille_relation AS rel_travaille1 WHERE rel_travaille1.eid_from=_X.cw_eid AND rel_travaille1.eid_to=_S.cw_eid))'''), - - ('Any PN WHERE NOT X travaille S, S nom PN, S is IN(Division, Societe)', - '''SELECT _S.cw_nom -FROM cw_Division AS _S -WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid)) -UNION ALL -SELECT _S.cw_nom -FROM cw_Societe AS _S -WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid))'''), - - ('Personne X WHERE NOT X travaille S, S nom "chouette"', - '''SELECT _X.cw_eid -FROM cw_Division AS _S, cw_Personne AS _X -WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette -UNION ALL -SELECT _X.cw_eid -FROM cw_Personne AS _X, cw_Societe AS _S -WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette -UNION ALL -SELECT _X.cw_eid -FROM cw_Personne AS _X, cw_SubDivision AS _S -WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette'''), - - ('Any X WHERE X is ET, ET eid 2', - '''SELECT rel_is0.eid_from -FROM is_relation AS rel_is0 -WHERE rel_is0.eid_to=2'''), - - ] -class CWRQLTC(RQLGeneratorTC): - backend = 'sqlite' - - def setUp(self): - self.__class__.schema = schema - super(CWRQLTC, self).setUp() - - def test_nonregr_sol(self): - delete = self.rqlhelper.parse( - 'DELETE X read_permission READ_PERMISSIONSUBJECT,X add_permission ADD_PERMISSIONSUBJECT,' - 'X in_basket IN_BASKETSUBJECT,X delete_permission DELETE_PERMISSIONSUBJECT,' - 'X update_permission UPDATE_PERMISSIONSUBJECT,' - 'X created_by CREATED_BYSUBJECT,X is ISSUBJECT,X is_instance_of IS_INSTANCE_OFSUBJECT,' - 'X owned_by OWNED_BYSUBJECT,X specializes SPECIALIZESSUBJECT,ISOBJECT is X,' - 'SPECIALIZESOBJECT specializes X,IS_INSTANCE_OFOBJECT is_instance_of X,' - 'TO_ENTITYOBJECT to_entity X,FROM_ENTITYOBJECT from_entity X ' - 'WHERE X is CWEType') - self.rqlhelper.compute_solutions(delete) - def var_sols(var): - s = set() - for sol in delete.solutions: - s.add(sol.get(var)) - return s - self.assertEqual(var_sols('FROM_ENTITYOBJECT'), set(('CWAttribute', 'CWRelation'))) - self.assertEqual(var_sols('FROM_ENTITYOBJECT'), delete.defined_vars['FROM_ENTITYOBJECT'].stinfo['possibletypes']) - self.assertEqual(var_sols('ISOBJECT'), - set(x.type for x in 
self.schema.entities() if not x.final)) - self.assertEqual(var_sols('ISOBJECT'), delete.defined_vars['ISOBJECT'].stinfo['possibletypes']) - - -def strip(text): - return '\n'.join(l.strip() for l in text.strip().splitlines()) - -class PostgresSQLGeneratorTC(RQLGeneratorTC): - backend = 'postgres' - - def setUp(self): - self.__class__.schema = schema - super(PostgresSQLGeneratorTC, self).setUp() - - def _norm_sql(self, sql): - return sql.strip() - - def _check(self, rql, sql, varmap=None, args=None): - if args is None: - args = {'text': 'hip hop momo', 'eid': 12345} - try: - union = self._prepare(rql) - r, nargs, cbs = self.o.generate(union, args, - varmap=varmap) - args.update(nargs) - self.assertMultiLineEqual(strip(r % args), self._norm_sql(sql)) - except Exception as ex: - if 'r' in locals(): - try: - print((r%args).strip()) - except KeyError: - print('strange, missing substitution') - print(r, nargs) - print('!=') - print(sql.strip()) - print('RQL:', rql) - raise - - def _parse(self, rqls): - for rql, sql in rqls: - yield self._check, rql, sql - - def _checkall(self, rql, sql): - if isinstance(rql, tuple): - rql, args = rql - else: - args = None - try: - rqlst = self._prepare(rql) - r, args, cbs = self.o.generate(rqlst, args) - self.assertEqual((r.strip(), args), sql) - except Exception as ex: - print(rql) - if 'r' in locals(): - print(r.strip()) - print('!=') - print(sql[0].strip()) - raise - return - - def test1(self): - self._checkall(('Any count(RDEF) WHERE RDEF relation_type X, X eid %(x)s', {'x': None}), - ("""SELECT COUNT(T1.C0) FROM (SELECT _RDEF.cw_eid AS C0 -FROM cw_CWAttribute AS _RDEF -WHERE _RDEF.cw_relation_type=%(x)s -UNION ALL -SELECT _RDEF.cw_eid AS C0 -FROM cw_CWRelation AS _RDEF -WHERE _RDEF.cw_relation_type=%(x)s) AS T1""", {}), - ) - - def test2(self): - self._checkall(('Any X WHERE C comments X, C eid %(x)s', {'x': None}), - ('''SELECT rel_comments0.eid_to -FROM comments_relation AS rel_comments0 -WHERE rel_comments0.eid_from=%(x)s''', {}) - ) - - def test_cache_1(self): - self._check('Any X WHERE X in_basket B, B eid 12', - '''SELECT rel_in_basket0.eid_from -FROM in_basket_relation AS rel_in_basket0 -WHERE rel_in_basket0.eid_to=12''') - - self._check('Any X WHERE X in_basket B, B eid 12', - '''SELECT rel_in_basket0.eid_from -FROM in_basket_relation AS rel_in_basket0 -WHERE rel_in_basket0.eid_to=12''') - - def test_varmap1(self): - self._check('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', - '''SELECT T00.x, T00.l -FROM T00, cw_CWGroup AS _G, in_group_relation AS rel_in_group0 -WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=_G.cw_eid AND _G.cw_name=users''', - varmap={'X': 'T00.x', 'X.login': 'T00.l'}) - - def test_varmap2(self): - self._check('Any X,L,GN WHERE X is CWUser, X in_group G, X login L, G name GN', - '''SELECT T00.x, T00.l, _G.cw_name -FROM T00, cw_CWGroup AS _G, in_group_relation AS rel_in_group0 -WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=_G.cw_eid''', - varmap={'X': 'T00.x', 'X.login': 'T00.l'}) - - def test_varmap3(self): - self._check('Any %(x)s,D WHERE F data D, F is File', - 'SELECT 728, _TDF0.C0\nFROM _TDF0', - args={'x': 728}, - varmap={'F.data': '_TDF0.C0', 'D': '_TDF0.C0'}) - - def test_is_null_transform(self): - union = self._prepare('Any X WHERE X login %(login)s') - r, args, cbs = self.o.generate(union, {'login': None}) - self.assertMultiLineEqual((r % args).strip(), - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE _X.cw_login IS NULL''') - - def test_today(self): - for t in 
self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X -WHERE DATE(_X.cw_creation_date)=CAST(clock_timestamp() AS DATE)'''), - ("Personne P where not P datenaiss TODAY", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE NOT (DATE(_P.cw_datenaiss)=CAST(clock_timestamp() AS DATE))'''), - ]): - yield t - - def test_date_extraction(self): - self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", - '''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER) -FROM cw_Personne AS _P''') - - def test_weekday_extraction(self): - self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", - '''SELECT (CAST(EXTRACT(DOW from _P.cw_creation_date) AS INTEGER) + 1) -FROM cw_Personne AS _P''') - - def test_substring(self): - self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne", - '''SELECT SUBSTR(_P.cw_nom, 1, 1) -FROM cw_Personne AS _P''') - - def test_cast(self): - self._check("Any CAST(String, P) WHERE P is Personne", - '''SELECT CAST(_P.cw_eid AS text) -FROM cw_Personne AS _P''') - - def test_regexp(self): - self._check("Any X WHERE X login REGEXP '[0-9].*'", - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE _X.cw_login ~ [0-9].* -''') - - def test_parser_parse(self): - for t in self._parse(PARSER): - yield t - - def test_basic_parse(self): - for t in self._parse(BASIC + BASIC_WITH_LIMIT): - yield t - - def test_advanced_parse(self): - for t in self._parse(ADVANCED + ADVANCED_WITH_LIMIT_OR_ORDERBY + ADVANCED_WITH_GROUP_CONCAT): - yield t - - def test_outer_join_parse(self): - for t in self._parse(OUTER_JOIN): - yield t - - def test_virtual_vars_parse(self): - for t in self._parse(VIRTUAL_VARS): - yield t - - def test_multiple_sel_parse(self): - for t in self._parse(MULTIPLE_SEL): - yield t - - def test_functions(self): - for t in self._parse(FUNCS): - yield t - - def test_negation(self): - for t in self._parse(NEGATIONS): - yield t - - def test_intersection(self): - for t in self._parse(INTERSECT): - yield t - - def test_union(self): - for t in self._parse(( - ('(Any N ORDERBY 1 WHERE X name N, X is State)' - ' UNION ' - '(Any NN ORDERBY 1 WHERE XX name NN, XX is Transition)', - '''(SELECT _X.cw_name -FROM cw_State AS _X -ORDER BY 1) -UNION ALL -(SELECT _XX.cw_name -FROM cw_Transition AS _XX -ORDER BY 1)'''), - )): - yield t - - def test_subquery(self): - for t in self._parse(( - - ('Any X,N ' - 'WHERE NOT EXISTS(X owned_by U) ' - 'WITH X,N BEING ' - '((Any X,N WHERE X name N, X is State)' - ' UNION ' - '(Any XX,NN WHERE XX name NN, XX is Transition))', - '''SELECT _T0.C0, _T0.C1 -FROM ((SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_State AS _X) -UNION ALL -(SELECT _XX.cw_eid AS C0, _XX.cw_name AS C1 -FROM cw_Transition AS _XX)) AS _T0 -WHERE NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_T0.C0))'''), - - ('Any N ORDERBY 1 WITH N BEING ' - '((Any N WHERE X name N, X is State)' - ' UNION ' - '(Any NN WHERE XX name NN, XX is Transition))', - '''SELECT _T0.C0 -FROM ((SELECT _X.cw_name AS C0 -FROM cw_State AS _X) -UNION ALL -(SELECT _XX.cw_name AS C0 -FROM cw_Transition AS _XX)) AS _T0 -ORDER BY 1'''), - - ('Any N,NX ORDERBY NX WITH N,NX BEING ' - '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)' - ' UNION ' - '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))', - '''SELECT _T0.C0, _T0.C1 -FROM ((SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 -FROM cw_State AS _X -GROUP BY _X.cw_name -HAVING 
COUNT(_X.cw_eid)>1) -UNION ALL -(SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 -FROM cw_Transition AS _X -GROUP BY _X.cw_name -HAVING COUNT(_X.cw_eid)>1)) AS _T0 -ORDER BY 2'''), - - ('Any N,COUNT(X) GROUPBY N HAVING COUNT(X)>1 ' - 'WITH X, N BEING ((Any X, N WHERE X name N, X is State) UNION ' - ' (Any X, N WHERE X name N, X is Transition))', - '''SELECT _T0.C1, COUNT(_T0.C0) -FROM ((SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_State AS _X) -UNION ALL -(SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_Transition AS _X)) AS _T0 -GROUP BY _T0.C1 -HAVING COUNT(_T0.C0)>1'''), - - ('Any ETN,COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN ' - 'WITH X BEING ((Any X WHERE X is Societe) UNION (Any X WHERE X is Affaire, (EXISTS(X owned_by 1)) OR ((EXISTS(D concerne B?, B owned_by 1, X identity D, B is Note)) OR (EXISTS(F concerne E?, E owned_by 1, E is Societe, X identity F)))))', - '''SELECT _ET.cw_name, COUNT(_T0.C0) -FROM ((SELECT _X.cw_eid AS C0 -FROM cw_Societe AS _X) -UNION ALL -(SELECT _X.cw_eid AS C0 -FROM cw_Affaire AS _X -WHERE ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_owned_by0.eid_to=1)) OR (((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by2, cw_Affaire AS _D LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_D.cw_eid) LEFT OUTER JOIN cw_Note AS _B ON (rel_concerne1.eid_to=_B.cw_eid) WHERE rel_owned_by2.eid_from=_B.cw_eid AND rel_owned_by2.eid_to=1 AND _X.cw_eid=_D.cw_eid)) OR (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by4, cw_Affaire AS _F LEFT OUTER JOIN concerne_relation AS rel_concerne3 ON (rel_concerne3.eid_from=_F.cw_eid) LEFT OUTER JOIN cw_Societe AS _E ON (rel_concerne3.eid_to=_E.cw_eid) WHERE rel_owned_by4.eid_from=_E.cw_eid AND rel_owned_by4.eid_to=1 AND _X.cw_eid=_F.cw_eid))))))) AS _T0, cw_CWEType AS _ET, is_relation AS rel_is0 -WHERE rel_is0.eid_from=_T0.C0 AND rel_is0.eid_to=_ET.cw_eid -GROUP BY _ET.cw_name'''), - - ('Any A WHERE A ordernum O, A is CWAttribute WITH O BEING (Any MAX(O) WHERE A ordernum O, A is CWAttribute)', - '''SELECT _A.cw_eid -FROM (SELECT MAX(_A.cw_ordernum) AS C0 -FROM cw_CWAttribute AS _A) AS _T0, cw_CWAttribute AS _A -WHERE _A.cw_ordernum=_T0.C0'''), - - ('Any O1 HAVING O1=O2? WITH O1 BEING (Any MAX(O) WHERE A ordernum O, A is CWAttribute), O2 BEING (Any MAX(O) WHERE A ordernum O, A is CWRelation)', - '''SELECT _T0.C0 -FROM (SELECT MAX(_A.cw_ordernum) AS C0 -FROM cw_CWAttribute AS _A) AS _T0 LEFT OUTER JOIN (SELECT MAX(_A.cw_ordernum) AS C0 -FROM cw_CWRelation AS _A) AS _T1 ON (_T0.C0=_T1.C0)'''), - - ('''Any TT1,STD,STDD WHERE TT2 identity TT1? - WITH TT1,STDD BEING (Any T,SUM(TD) GROUPBY T WHERE T is Affaire, T duration TD, TAG? 
tags T, TAG name "t"), - TT2,STD BEING (Any T,SUM(TD) GROUPBY T WHERE T is Affaire, T duration TD)''', - '''SELECT _T0.C0, _T1.C1, _T0.C1 -FROM (SELECT _T.cw_eid AS C0, SUM(_T.cw_duration) AS C1 -FROM cw_Affaire AS _T -GROUP BY _T.cw_eid) AS _T1 LEFT OUTER JOIN (SELECT _T.cw_eid AS C0, SUM(_T.cw_duration) AS C1 -FROM cw_Affaire AS _T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_to=_T.cw_eid) LEFT OUTER JOIN cw_Tag AS _TAG ON (rel_tags0.eid_from=_TAG.cw_eid AND _TAG.cw_name=t) -GROUP BY _T.cw_eid) AS _T0 ON (_T1.C0=_T0.C0)'''), - - )): - yield t - - - def test_subquery_error(self): - rql = ('Any N WHERE X name N WITH X BEING ' - '((Any X WHERE X is State)' - ' UNION ' - ' (Any X WHERE X is Transition))') - rqlst = self._prepare(rql) - self.assertRaises(BadRQLQuery, self.o.generate, rqlst) - - def test_inline(self): - for t in self._parse(INLINE): - yield t - - def test_has_text(self): - for t in self._parse(( - ('Any X WHERE X has_text "toto tata"', - """SELECT appears0.uid -FROM appears AS appears0 -WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), - - ('Personne X WHERE X has_text "toto tata"', - """SELECT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne'"""), - - ('Personne X WHERE X has_text %(text)s', - """SELECT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne'"""), - - ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', - """SELECT _X.cw_eid -FROM appears AS appears0, cw_Basket AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -UNION ALL -SELECT _X.cw_eid -FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu"""), - - ('Personne X where X has_text %(text)s, X travaille S, S has_text %(text)s', - """SELECT _X.eid -FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 -WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo')"""), - - ('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "toto tata"', - """SELECT appears0.uid -FROM appears AS appears0 -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') -ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight DESC"""), - - ('Personne X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', - """SELECT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne' -ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight"""), - - ('Personne X ORDERBY FTIRANK(X) WHERE X has_text %(text)s', - """SELECT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' -ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight"""), - - ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', - """SELECT T1.C0 FROM (SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 
'toto&tata'))*appears0.weight AS C1 -FROM appears AS appears0, cw_Basket AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -UNION ALL -SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 -FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -ORDER BY 2) AS T1"""), - - ('Personne X ORDERBY FTIRANK(X),FTIRANK(S) WHERE X has_text %(text)s, X travaille S, S has_text %(text)s', - """SELECT _X.eid -FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 -WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo') -ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight,ts_rank(appears2.words, to_tsquery('default', 'hip&hop&momo'))*appears2.weight"""), - - - ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', - """SELECT appears0.uid, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight -FROM appears AS appears0 -WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), - - - ('Any X WHERE NOT A tags X, X has_text "pouet"', - '''SELECT appears1.uid -FROM appears AS appears1 -WHERE NOT (EXISTS(SELECT 1 FROM tags_relation AS rel_tags0 WHERE appears1.uid=rel_tags0.eid_to)) AND appears1.words @@ to_tsquery('default', 'pouet') -'''), - - )): - yield t - - - def test_from_clause_needed(self): - queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')", - '''SELECT 1 -WHERE EXISTS(SELECT 1 FROM cw_CWGroup AS _T WHERE _T.cw_name=managers)'''), - ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6', - '''SELECT 5, 6 -WHERE NOT (EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6))'''), - ] - for t in self._parse(queries): - yield t - - def test_ambigous_exists_no_from_clause(self): - self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))', - '''SELECT COUNT(1) -WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''') - - def test_attr_map_sqlcb(self): - def generate_ref(gen, linkedvar, rel): - linkedvar.accept(gen) - return 'VERSION_DATA(%s)' % linkedvar._q_sql - self.o.attr_map['Affaire.ref'] = (generate_ref, False) - try: - self._check('Any R WHERE X ref R', - '''SELECT VERSION_DATA(_X.cw_eid) -FROM cw_Affaire AS _X''') - self._check('Any X WHERE X ref 1', - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X -WHERE VERSION_DATA(_X.cw_eid)=1''') - finally: - self.o.attr_map.clear() - - def test_attr_map_sourcecb(self): - cb = lambda x,y: None - self.o.attr_map['Affaire.ref'] = (cb, True) - try: - union = self._prepare('Any R WHERE X ref R') - r, nargs, cbs = self.o.generate(union, args={}) - self.assertMultiLineEqual(r.strip(), 'SELECT _X.cw_ref\nFROM cw_Affaire AS _X') - self.assertEqual(cbs, {0: [cb]}) - finally: - self.o.attr_map.clear() - - - def test_concat_string(self): - self._check('Any "A"+R WHERE X ref R', - '''SELECT (A || _X.cw_ref) -FROM cw_Affaire AS _X''') - - def 
test_or_having_fake_terms_base(self): - self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE ((CAST(EXTRACT(YEAR from _X.cw_creation_date) AS INTEGER)=2010) OR (_X.cw_creation_date IS NULL))''') - - def test_or_having_fake_terms_exists(self): - # crash with rql <= 0.29.0 - self._check('Any X WHERE X is CWUser, EXISTS(B bookmarked_by X, B creation_date D) HAVING D=2010 OR D=NULL, D=1 OR D=NULL', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE EXISTS(SELECT 1 FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_Bookmark AS _B WHERE rel_bookmarked_by0.eid_from=_B.cw_eid AND rel_bookmarked_by0.eid_to=_X.cw_eid AND ((_B.cw_creation_date=1) OR (_B.cw_creation_date IS NULL)) AND ((_B.cw_creation_date=2010) OR (_B.cw_creation_date IS NULL)))''') - - def test_or_having_fake_terms_nocrash(self): - # crash with rql <= 0.29.0 - self._check('Any X WHERE X is CWUser, X creation_date D HAVING D=2010 OR D=NULL, D=1 OR D=NULL', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE ((_X.cw_creation_date=1) OR (_X.cw_creation_date IS NULL)) AND ((_X.cw_creation_date=2010) OR (_X.cw_creation_date IS NULL))''') - - def test_not_no_where(self): - # XXX will check if some in_group relation exists, that's it. - # We can't actually know if we want to check if there are some - # X without in_group relation, or some G without it. - self._check('Any 1 WHERE NOT X in_group G, X is CWUser', - '''SELECT 1 -WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''') - - def test_nonregr_outer_join_multiple(self): - self._check('Any COUNT(P1148),G GROUPBY G ' - 'WHERE G owned_by D, D eid 1122, K1148 bookmarked_by P1148, ' - 'K1148 eid 1148, P1148? in_group G', - '''SELECT COUNT(rel_bookmarked_by1.eid_to), _G.cw_eid -FROM owned_by_relation AS rel_owned_by0, cw_CWGroup AS _G LEFT OUTER JOIN in_group_relation AS rel_in_group2 ON (rel_in_group2.eid_to=_G.cw_eid) LEFT OUTER JOIN bookmarked_by_relation AS rel_bookmarked_by1 ON (rel_in_group2.eid_from=rel_bookmarked_by1.eid_to) -WHERE rel_owned_by0.eid_from=_G.cw_eid AND rel_owned_by0.eid_to=1122 AND rel_bookmarked_by1.eid_from=1148 -GROUP BY _G.cw_eid''' - ) - - def test_nonregr_outer_join_multiple2(self): - self._check('Any COUNT(P1148),G GROUPBY G ' - 'WHERE G owned_by D, D eid 1122, K1148 bookmarked_by P1148?, ' - 'K1148 eid 1148, P1148? 
in_group G', - '''SELECT COUNT(rel_bookmarked_by1.eid_to), _G.cw_eid -FROM owned_by_relation AS rel_owned_by0, cw_CWGroup AS _G LEFT OUTER JOIN in_group_relation AS rel_in_group2 ON (rel_in_group2.eid_to=_G.cw_eid) LEFT OUTER JOIN bookmarked_by_relation AS rel_bookmarked_by1 ON (rel_bookmarked_by1.eid_from=1148 AND rel_in_group2.eid_from=rel_bookmarked_by1.eid_to) -WHERE rel_owned_by0.eid_from=_G.cw_eid AND rel_owned_by0.eid_to=1122 -GROUP BY _G.cw_eid''') - - def test_groupby_orderby_insertion_dont_modify_intention(self): - self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' - 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' - 'WHERE X creation_date XSCT, X modification_date XECT, ' - 'X ordernum XCE, X is CWAttribute', - '''SELECT ((CAST(EXTRACT(YEAR from _X.cw_modification_date) AS INTEGER) * 100) + CAST(EXTRACT(MONTH from _X.cw_modification_date) AS INTEGER)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) -FROM cw_CWAttribute AS _X -GROUP BY CAST(EXTRACT(YEAR from _X.cw_modification_date) AS INTEGER),CAST(EXTRACT(MONTH from _X.cw_modification_date) AS INTEGER) -ORDER BY 1'''), - - def test_modulo(self): - self._check('Any 5 % 2', '''SELECT (5 % 2)''') - - -class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC): - backend = 'sqlserver2005' - def _norm_sql(self, sql): - return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ') - - def test_has_text(self): - for t in self._parse(HAS_TEXT_LG_INDEXER): - yield t - - def test_regexp(self): - self.skipTest('regexp-based pattern matching not implemented in sqlserver') - - def test_or_having_fake_terms_base(self): - self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE ((DATEPART(YEAR, _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''') - - def test_date_extraction(self): - self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", - '''SELECT DATEPART(MONTH, _P.cw_creation_date) -FROM cw_Personne AS _P''') - - def test_weekday_extraction(self): - self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", - '''SELECT DATEPART(WEEKDAY, _P.cw_creation_date) -FROM cw_Personne AS _P''') - - def test_basic_parse(self): - for t in self._parse(BASIC):# + BASIC_WITH_LIMIT): - yield t - - def test_advanced_parse(self): - for t in self._parse(ADVANCED):# + ADVANCED_WITH_LIMIT_OR_ORDERBY): - yield t - - def test_limit_offset(self): - WITH_LIMIT = [ - ("Personne P LIMIT 20 OFFSET 10", - '''WITH orderedrows AS ( -SELECT -_L01 -, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber -FROM ( -SELECT _P.cw_eid AS _L01 FROM cw_Personne AS _P -) AS _SQ1 ) -SELECT -_L01 -FROM orderedrows WHERE -__RowNumber <= 30 AND __RowNumber > 10 - '''), - - ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))', - '''WITH orderedrows AS ( -SELECT -_L01, _L02 -, ROW_NUMBER() OVER (ORDER BY _L01 DESC) AS __RowNumber -FROM ( -SELECT COUNT(rel_concerne0.eid_from) AS _L01, _C.cw_nom AS _L02 FROM concerne_relation AS rel_concerne0, cw_Societe AS _C -WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM cw_Card AS _N, documented_by_relation AS rel_documented_by2 WHERE 
rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published))) -GROUP BY _C.cw_nom -) AS _SQ1 ) -SELECT -_L01, _L02 -FROM orderedrows WHERE -__RowNumber <= 10 - '''), - - ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 -FROM cw_File AS _X -GROUP BY _X.cw_data_name,_X.cw_data_format) AS T1 -ORDER BY T1.C1,T1.C2 -'''), - - - ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y', - '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _X.cw_login AS C1 -FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_CWUser AS _X -WHERE rel_bookmarked_by0.eid_to=_X.cw_eid) AS T1 -ORDER BY T1.C1 - '''), - - ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN', - '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 -FROM cw_Affaire AS _X, cw_State AS _S -WHERE _X.cw_in_state=_S.cw_eid -UNION -SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 -FROM cw_CWUser AS _X, cw_State AS _S -WHERE _X.cw_in_state=_S.cw_eid -UNION -SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1 -FROM cw_Note AS _X, cw_State AS _S -WHERE _X.cw_in_state=_S.cw_eid) AS T1 -ORDER BY T1.C1'''), - - ('Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT S use_email O, S eid 1, O is EmailAddress, O address AA, O alias AB, O modification_date AC, ' - 'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2', - ''' -SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date -FROM cw_EmailAddress AS _O -WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests)) -ORDER BY 4 DESC'''), - - ("Any P ORDERBY N LIMIT 1 WHERE P is Personne, P travaille S, S eid %(eid)s, P nom N, P nom %(text)s", - '''WITH orderedrows AS ( -SELECT -_L01 -, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber -FROM ( -SELECT _P.cw_eid AS _L01 FROM cw_Personne AS _P, travaille_relation AS rel_travaille0 -WHERE rel_travaille0.eid_from=_P.cw_eid AND rel_travaille0.eid_to=12345 AND _P.cw_nom=hip hop momo -) AS _SQ1 ) -SELECT -_L01 -FROM orderedrows WHERE -__RowNumber <= 1'''), - - ("Any P ORDERBY N LIMIT 1 WHERE P is Personne, P nom N", - '''WITH orderedrows AS ( -SELECT -_L01 -, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber -FROM ( -SELECT _P.cw_eid AS _L01 FROM cw_Personne AS _P -) AS _SQ1 ) -SELECT -_L01 -FROM orderedrows WHERE -__RowNumber <= 1 -'''), - - ("Any PN, N, P ORDERBY N LIMIT 1 WHERE P is Personne, P nom N, P prenom PN", - '''WITH orderedrows AS ( -SELECT -_L01, _L02, _L03 -, ROW_NUMBER() OVER (ORDER BY _L02) AS __RowNumber -FROM ( -SELECT _P.cw_prenom AS _L01, _P.cw_nom AS _L02, _P.cw_eid AS _L03 FROM cw_Personne AS _P -) AS _SQ1 ) -SELECT -_L01, _L02, _L03 -FROM orderedrows WHERE -__RowNumber <= 1 -'''), - ] - for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY): - yield t - - def test_cast(self): - self._check("Any CAST(String, P) WHERE P is Personne", - '''SELECT CAST(_P.cw_eid AS 
nvarchar(max)) -FROM cw_Personne AS _P''') - - def test_groupby_orderby_insertion_dont_modify_intention(self): - self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' - 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' - 'WHERE X creation_date XSCT, X modification_date XECT, ' - 'X ordernum XCE, X is CWAttribute', - '''SELECT ((DATEPART(YEAR, _X.cw_modification_date) * 100) + DATEPART(MONTH, _X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) -FROM cw_CWAttribute AS _X -GROUP BY DATEPART(YEAR, _X.cw_modification_date),DATEPART(MONTH, _X.cw_modification_date) -ORDER BY 1''') - - def test_today(self): - for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X -WHERE DATE(_X.cw_creation_date)=%s''' % self.dbhelper.sql_current_date()), - - ("Personne P where not P datenaiss TODAY", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE NOT (DATE(_P.cw_datenaiss)=%s)''' % self.dbhelper.sql_current_date()), - ]): - yield t - - -class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC): - backend = 'sqlite' - - def _norm_sql(self, sql): - return sql.strip().replace(' ILIKE ', ' LIKE ') - - def test_date_extraction(self): - self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", - '''SELECT MONTH(_P.cw_creation_date) -FROM cw_Personne AS _P''') - - def test_weekday_extraction(self): - # custom impl. in cw.server.sqlutils - self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", - '''SELECT WEEKDAY(_P.cw_creation_date) -FROM cw_Personne AS _P''') - - def test_regexp(self): - self._check("Any X WHERE X login REGEXP '[0-9].*'", - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE _X.cw_login REGEXP [0-9].* -''') - - - def test_union(self): - for t in self._parse(( - ('(Any N ORDERBY 1 WHERE X name N, X is State)' - ' UNION ' - '(Any NN ORDERBY 1 WHERE XX name NN, XX is Transition)', - '''SELECT _X.cw_name -FROM cw_State AS _X -ORDER BY 1 -UNION ALL -SELECT _XX.cw_name -FROM cw_Transition AS _XX -ORDER BY 1'''), - )): - yield t - - - def test_subquery(self): - # NOTE: no paren around UNION with sqlitebackend - for t in self._parse(( - - ('Any N ORDERBY 1 WITH N BEING ' - '((Any N WHERE X name N, X is State)' - ' UNION ' - '(Any NN WHERE XX name NN, XX is Transition))', - '''SELECT _T0.C0 -FROM (SELECT _X.cw_name AS C0 -FROM cw_State AS _X -UNION ALL -SELECT _XX.cw_name AS C0 -FROM cw_Transition AS _XX) AS _T0 -ORDER BY 1'''), - - ('Any N,NX ORDERBY NX WITH N,NX BEING ' - '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)' - ' UNION ' - '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))', - '''SELECT _T0.C0, _T0.C1 -FROM (SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 -FROM cw_State AS _X -GROUP BY _X.cw_name -HAVING COUNT(_X.cw_eid)>1 -UNION ALL -SELECT _X.cw_name AS C0, COUNT(_X.cw_eid) AS C1 -FROM cw_Transition AS _X -GROUP BY _X.cw_name -HAVING COUNT(_X.cw_eid)>1) AS _T0 -ORDER BY 2'''), - - ('Any N,COUNT(X) GROUPBY N HAVING COUNT(X)>1 ' - 'WITH X, N BEING ((Any X, N WHERE X name N, X is State) UNION ' - ' (Any X, N WHERE X name N, X is Transition))', - '''SELECT _T0.C1, COUNT(_T0.C0) -FROM (SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_State AS _X -UNION ALL -SELECT _X.cw_eid AS C0, _X.cw_name AS C1 -FROM cw_Transition AS _X) AS _T0 -GROUP BY _T0.C1 -HAVING COUNT(_T0.C0)>1'''), - )): - yield t - - def test_has_text(self): - for t in self._parse(( - ('Any X WHERE X has_text "toto 
tata"', - """SELECT DISTINCT appears0.uid -FROM appears AS appears0 -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), - - ('Any X WHERE X has_text %(text)s', - """SELECT DISTINCT appears0.uid -FROM appears AS appears0 -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo'))"""), - - ('Personne X WHERE X has_text "toto tata"', - """SELECT DISTINCT _X.eid -FROM appears AS appears0, entities AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.eid AND _X.type='Personne'"""), - - ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', - """SELECT DISTINCT _X.cw_eid -FROM appears AS appears0, cw_Basket AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -UNION -SELECT DISTINCT _X.cw_eid -FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -"""), - - ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', - """SELECT DISTINCT appears0.uid -FROM appears AS appears0 -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), - - ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', - """SELECT DISTINCT _X.cw_eid -FROM appears AS appears0, cw_Basket AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -UNION -SELECT DISTINCT _X.cw_eid -FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -"""), - - ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', - """SELECT DISTINCT appears0.uid, 1.0 -FROM appears AS appears0 -WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), - )): - yield t - - - def test_or_having_fake_terms_base(self): - self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE ((YEAR(_X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''') - - def test_groupby_orderby_insertion_dont_modify_intention(self): - self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' - 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' - 'WHERE X creation_date XSCT, X modification_date XECT, ' - 'X ordernum XCE, X is CWAttribute', - '''SELECT ((YEAR(_X.cw_modification_date) * 100) + MONTH(_X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) -FROM cw_CWAttribute AS _X -GROUP BY YEAR(_X.cw_modification_date),MONTH(_X.cw_modification_date) -ORDER BY 1'''), - - def test_today(self): - for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X -WHERE DATE(_X.cw_creation_date)=CURRENT_DATE'''), - - ("Personne P where not P datenaiss TODAY", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE NOT (DATE(_P.cw_datenaiss)=CURRENT_DATE)'''), - ]): - yield t - - -class MySQLGenerator(PostgresSQLGeneratorTC): - backend = 'mysql' - - def _norm_sql(self, sql): - sql = sql.strip().replace(' ILIKE ', ' LIKE ') - newsql = [] - latest = None - for line in sql.splitlines(False): - firstword = line.split(None, 1)[0] - if 
firstword == 'WHERE' and latest == 'SELECT': - newsql.append('FROM (SELECT 1) AS _T') - newsql.append(line) - latest = firstword - return '\n'.join(newsql) - - def test_date_extraction(self): - self._check("Any MONTH(D) WHERE P is Personne, P creation_date D", - '''SELECT EXTRACT(MONTH from _P.cw_creation_date) -FROM cw_Personne AS _P''') - - def test_weekday_extraction(self): - self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D", - '''SELECT DAYOFWEEK(_P.cw_creation_date) -FROM cw_Personne AS _P''') - - def test_cast(self): - self._check("Any CAST(String, P) WHERE P is Personne", - '''SELECT CAST(_P.cw_eid AS mediumtext) -FROM cw_Personne AS _P''') - - def test_regexp(self): - self._check("Any X WHERE X login REGEXP '[0-9].*'", - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE _X.cw_login REGEXP [0-9].* -''') - - def test_from_clause_needed(self): - queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')", - '''SELECT 1 -FROM (SELECT 1) AS _T -WHERE EXISTS(SELECT 1 FROM cw_CWGroup AS _T WHERE _T.cw_name=managers)'''), - ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6', - '''SELECT 5, 6 -FROM (SELECT 1) AS _T -WHERE NOT (EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6))'''), - ] - for t in self._parse(queries): - yield t - - - def test_has_text(self): - queries = [ - ('Any X WHERE X has_text "toto tata"', - """SELECT appears0.uid -FROM appears AS appears0 -WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE)"""), - ('Personne X WHERE X has_text "toto tata"', - """SELECT _X.eid -FROM appears AS appears0, entities AS _X -WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=_X.eid AND _X.type='Personne'"""), - ('Personne X WHERE X has_text %(text)s', - """SELECT _X.eid -FROM appears AS appears0, entities AS _X -WHERE MATCH (appears0.words) AGAINST ('hip hop momo' IN BOOLEAN MODE) AND appears0.uid=_X.eid AND _X.type='Personne'"""), - ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', - """SELECT _X.cw_eid -FROM appears AS appears0, cw_Basket AS _X -WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -UNION ALL -SELECT _X.cw_eid -FROM appears AS appears0, cw_Folder AS _X -WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -""") - ] - for t in self._parse(queries): - yield t - - - def test_ambigous_exists_no_from_clause(self): - self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))', - '''SELECT COUNT(1) -FROM (SELECT 1) AS _T -WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''') - - def test_groupby_multiple_outerjoins(self): - self._check('Any A,U,P,group_concat(TN) GROUPBY A,U,P WHERE A is Affaire, A concerne N, N todo_by U?, T? 
tags A, T name TN, A todo_by P?', - '''SELECT _A.cw_eid, rel_todo_by1.eid_to, rel_todo_by3.eid_to, GROUP_CONCAT(_T.cw_name) -FROM concerne_relation AS rel_concerne0, cw_Affaire AS _A LEFT OUTER JOIN tags_relation AS rel_tags2 ON (rel_tags2.eid_to=_A.cw_eid) LEFT OUTER JOIN cw_Tag AS _T ON (rel_tags2.eid_from=_T.cw_eid) LEFT OUTER JOIN todo_by_relation AS rel_todo_by3 ON (rel_todo_by3.eid_from=_A.cw_eid), cw_Note AS _N LEFT OUTER JOIN todo_by_relation AS rel_todo_by1 ON (rel_todo_by1.eid_from=_N.cw_eid) -WHERE rel_concerne0.eid_from=_A.cw_eid AND rel_concerne0.eid_to=_N.cw_eid -GROUP BY _A.cw_eid,rel_todo_by1.eid_to,rel_todo_by3.eid_to''') - - def test_substring(self): - self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne", - '''SELECT SUBSTRING(_P.cw_nom, 1, 1) -FROM cw_Personne AS _P''') - - - def test_or_having_fake_terms_base(self): - self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL', - '''SELECT _X.cw_eid -FROM cw_CWUser AS _X -WHERE ((EXTRACT(YEAR from _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''') - - - def test_not_no_where(self): - self._check('Any 1 WHERE NOT X in_group G, X is CWUser', - '''SELECT 1 -FROM (SELECT 1) AS _T -WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''') - - def test_groupby_orderby_insertion_dont_modify_intention(self): - self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) ' - 'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 ' - 'WHERE X creation_date XSCT, X modification_date XECT, ' - 'X ordernum XCE, X is CWAttribute', - '''SELECT ((EXTRACT(YEAR from _X.cw_modification_date) * 100) + EXTRACT(MONTH from _X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date)) -FROM cw_CWAttribute AS _X -GROUP BY EXTRACT(YEAR from _X.cw_modification_date),EXTRACT(MONTH from _X.cw_modification_date) -ORDER BY 1'''), - - def test_today(self): - for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire", - '''SELECT _X.cw_eid -FROM cw_Affaire AS _X -WHERE DATE(_X.cw_creation_date)=CURRENT_DATE'''), - - ("Personne P where not P datenaiss TODAY", - '''SELECT _P.cw_eid -FROM cw_Personne AS _P -WHERE NOT (DATE(_P.cw_datenaiss)=CURRENT_DATE)'''), - ]): - yield t - -class removeUnsusedSolutionsTC(TestCase): - def test_invariant_not_varying(self): - rqlst = mock_object(defined_vars={}) - rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) - rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) - self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, - {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None), - ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, - {'A': 'FootGroup', 'B': 'FootTeam'}], - {}, set('B')) - ) - - def test_invariant_varying(self): - rqlst = mock_object(defined_vars={}) - rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) - rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) - self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, - {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None), - ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) - ) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_rqlannotation.py --- a/server/test/unittest_rqlannotation.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,445 +0,0 @@ -# -*- coding: 
iso-8859-1 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for modules cubicweb.server.rqlannotation""" - -from cubicweb.devtools import TestServerConfiguration, get_test_db_handler -from cubicweb.devtools.repotest import BaseQuerierTC - -class SQLGenAnnotatorTC(BaseQuerierTC): - - def setUp(self): - handler = get_test_db_handler(TestServerConfiguration( - 'data2', apphome=SQLGenAnnotatorTC.datadir)) - handler.build_db_cache() - repo, _cnx = handler.get_repo_and_cnx() - self.__class__.repo = repo - super(SQLGenAnnotatorTC, self).setUp() - - def get_max_eid(self): - # no need for cleanup here - return None - - def cleanup(self): - # no need for cleanup here - pass - - def test_0_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any SEN,RN,OEN WHERE X from_entity SE, ' - 'SE eid 44, X relation_type R, R eid 139, ' - 'X to_entity OE, OE eid 42, R name RN, SE name SEN, ' - 'OE name OEN') - self.assertEqual(rqlst.defined_vars['SE']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['OE']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['R']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['SE'].stinfo['attrvar'], None) - self.assertEqual(rqlst.defined_vars['OE'].stinfo['attrvar'], None) - self.assertEqual(rqlst.defined_vars['R'].stinfo['attrvar'], None) - - def test_0_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any O WHERE NOT S ecrit_par O, S eid 1, ' - 'S inline1 P, O inline2 P') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['O'].stinfo['attrvar'], None) - - def test_0_4(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any A,B,C WHERE A eid 12,A comment B, ' - 'A ?wf_info_for C') - self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) - self.assertTrue(rqlst.defined_vars['B'].stinfo['attrvar']) - self.assertEqual(rqlst.defined_vars['C']._q_invariant, False) - self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'}, - {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'}, - {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}]) - - def test_0_5(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any P WHERE N ecrit_par P, N eid 0') - self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) - - def test_0_6(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any P WHERE NOT N ecrit_par P, N eid 512') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) - - def test_0_7(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Personne X,Y where X nom NX, ' - 'Y nom NX, X eid XE, not Y eid XE') - 
self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - self.assertTrue(rqlst.defined_vars['XE'].stinfo['attrvar']) - - def test_0_8(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any P WHERE X eid 0, NOT X connait P') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) - #self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - self.assertEqual(len(rqlst.solutions), 1, rqlst.solutions) - - def test_0_10(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X concerne Y, Y is Note') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_0_11(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X todo_by Y, X is Affaire') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_0_12(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Personne P WHERE P concerne A, ' - 'A concerne S, S nom "Logilab"') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) - - def test_1_0(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,Y WHERE X created_by Y, ' - 'X eid 5, NOT Y eid 6') - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_1_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,Y WHERE X created_by Y, X eid 5, ' - 'NOT Y eid IN (6,7)') - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X identity Y, Y eid 1') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - - def test_7(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Personne X,Y where X nom NX, Y nom NX, ' - 'X eid XE, not Y eid XE') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_8(self): - with self.session.new_cnx() as cnx: - # DISTINCT Any P WHERE P require_group %(g)s, - # NOT %(u)s has_group_permission P, P is CWPermission - rqlst = self._prepare(cnx, 'DISTINCT Any X WHERE A concerne X, ' - 'NOT N migrated_from X, ' - 'X is Note, N eid 1') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - - def test_diff_scope_identity_deamb(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X concerne Y, Y is Note, ' - 'EXISTS(Y identity Z, Z migrated_from N)') - self.assertEqual(rqlst.defined_vars['Z']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_optional_inlined(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,S where X from_state S?') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) - - def test_optional_inlined_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any N,A WHERE N? 
inline1 A') - self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) - - def test_optional_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,S WHERE X travaille S?') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) - - def test_greater_eid(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X eid > 5') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_greater_eid_typed(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X eid > 5, X is Note') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_max_eid(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any MAX(X)') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_max_eid_typed(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any MAX(X) WHERE X is Note') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - - def test_all_entities(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_all_typed_entity(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X is Note') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - - def test_has_text_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X has_text "toto tata"') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, - 'has_text') - - def test_has_text_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X is Personne, ' - 'X has_text "coucou"') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, - 'has_text') - - def test_not_relation_1(self): - with self.session.new_cnx() as cnx: - # P can't be invariant since deambiguification caused by "NOT X require_permission P" - # is not considered by generated sql (NOT EXISTS(...)) - rqlst = self._prepare(cnx, 'Any P,G WHERE P require_group G, ' - 'NOT X require_permission P') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['G']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_not_relation_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'TrInfo X WHERE X eid 2, ' - 'NOT X from_state Y, Y is State') - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - - def test_not_relation_3(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X, Y WHERE X eid 1, Y eid in (2, 3)') - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_not_relation_4_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Note X WHERE NOT Y evaluee X') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_not_relation_4_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X') - 
self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_not_relation_4_3(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any Y WHERE NOT Y evaluee X') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_not_relation_4_4(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X, Y is CWUser') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_not_relation_4_5(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X, ' - 'Y eid %s, X is Note' % self.ueid) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.solutions, [{'X': 'Note'}]) - - def test_not_relation_5_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", ' - 'Y eid IN(1, 2, 3), NOT X read_permission Y') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_not_relation_5_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", ' - 'Y eid IN(1, 2, 3), NOT X read_permission Y') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_not_relation_6(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Personne P where NOT P concerne A') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) - - def test_not_relation_7(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any K,V WHERE P is CWProperty, ' - 'P pkey K, P value V, NOT P for_user U') - self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) - - def test_exists_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)') - self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_exists_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)') - self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_exists_3(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(X owned_by U, X bookmarked_by U)') - self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_exists_4(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", ' - 'Y eid IN(1, 2, 3), EXISTS(X read_permission Y)') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_exists_5(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", ' - 'Y eid IN(1, 2, 3), EXISTS(X read_permission Y)') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, 
False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_not_exists_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any U WHERE NOT EXISTS(X owned_by U, ' - 'X bookmarked_by U)') - self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_not_exists_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", ' - 'Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)') - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_not_exists_distinct_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", ' - 'Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)') - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - - def test_or_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X concerne B OR ' - 'C concerne X, B eid 12, C eid 13') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - - def test_or_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X created_by U, X concerne B OR ' - 'C concerne X, B eid 12, C eid 13') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by') - - def test_or_3(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any N WHERE A evaluee N or EXISTS(N todo_by U)') - self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) - - def test_or_exists_1(self): - with self.session.new_cnx() as cnx: - # query generated by security rewriting - rqlst = self._prepare(cnx, 'DISTINCT Any A,S WHERE A is Affaire, S nom "chouette", ' - 'S is IN(Division, Societe, SubDivision),' - '(EXISTS(A owned_by D)) ' - 'OR ((((EXISTS(E concerne C?, C owned_by D, A identity E, ' - ' C is Note, E is Affaire)) ' - 'OR (EXISTS(I concerne H?, H owned_by D, H is Societe, ' - ' A identity I, I is Affaire))) ' - 'OR (EXISTS(J concerne G?, G owned_by D, G is SubDivision, ' - ' A identity J, J is Affaire))) ' - 'OR (EXISTS(K concerne F?, F owned_by D, F is Division, ' - ' A identity K, K is Affaire)))') - self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) - - def test_or_exists_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(U in_group G, G name "managers") OR ' - 'EXISTS(X owned_by U, X bookmarked_by U)') - self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['G']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) - - def test_or_exists_3(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' - 'WHERE C is Societe, S concerne C, C nom CS, ' - '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') - self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) - rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' - 'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, ' - '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by 
N, N title "published"))') - self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) - - def test_nonregr_ambiguity(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Note N WHERE N attachment F') - # N may be an image as well, not invariant - self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['F']._q_invariant, True) - - def test_nonregr_ambiguity_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any S,SN WHERE X has_text "tot", X in_state S, S name SN, X is CWUser') - # X use has_text but should not be invariant as ambiguous, and has_text - # may not be its principal - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) - - def test_remove_from_deleted_source_1(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Note X WHERE X eid 999998, NOT X cw_source Y') - self.assertNotIn('X', rqlst.defined_vars) # simplified - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_remove_from_deleted_source_2(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - - def test_has_text_security_cache_bug(self): - with self.session.new_cnx() as cnx: - rqlst = self._prepare(cnx, 'Any X WHERE X has_text "toto" WITH X BEING ' - '(Any C WHERE C is Societe, C nom CS)') - self.assertTrue(rqlst.parent.has_text_query) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_schema2sql.py --- a/server/test/unittest_schema2sql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,285 +0,0 @@ -# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit tests for module cubicweb.server.schema2sql -""" - -import os.path as osp - -from logilab.common.testlib import TestCase, unittest_main -from logilab.database import get_db_helper - -from yams.reader import SchemaLoader -from cubicweb.server import schema2sql - -schema2sql.SET_DEFAULT = True - -DATADIR = osp.abspath(osp.join(osp.dirname(__file__), 'data-schema2sql')) - -schema = SchemaLoader().load([DATADIR]) - - -EXPECTED_DATA_NO_DROP = """ -CREATE TABLE Affaire( - sujet varchar(128), - ref varchar(12), - inline_rel integer REFERENCES entities (eid) -); -CREATE INDEX affaire_inline_rel_idx ON Affaire(inline_rel); - -CREATE TABLE Company( - name text -); - -CREATE TABLE Datetest( - dt1 timestamp, - dt2 timestamp, - d1 date, - d2 date, - t1 time, - t2 time -, CONSTRAINT cstredd407706bdfbd2285714dd689e8fcc0 CHECK(d1 <= CAST(clock_timestamp() AS DATE)) -); - -CREATE TABLE Division( - name text -); - -CREATE TABLE EPermission( - name varchar(100) NOT NULL -); -CREATE INDEX epermission_name_idx ON EPermission(name); - -CREATE TABLE Eetype( - name varchar(64) UNIQUE NOT NULL, - description text, - meta boolean, - final boolean, - initial_state integer REFERENCES entities (eid) -); -CREATE INDEX eetype_name_idx ON Eetype(name); -CREATE INDEX eetype_initial_state_idx ON Eetype(initial_state); - -CREATE TABLE Employee( -); - -CREATE TABLE Note( - date varchar(10), - type varchar(1), - para varchar(512) -); - -CREATE TABLE Person( - nom varchar(64) NOT NULL, - prenom varchar(64), - sexe varchar(1) DEFAULT 'M', - promo varchar(6), - titre varchar(128), - adel varchar(128), - ass varchar(128), - web varchar(128), - tel integer, - fax integer, - datenaiss date, - test boolean, - salary float -, CONSTRAINT cstr41fe7db9ce1d5be95de2477e26590386 CHECK(promo IN ('bon', 'pasbon')) -); -CREATE UNIQUE INDEX unique_e6c2d219772dbf1715597f7d9a6b3892 ON Person(nom,prenom); - -CREATE TABLE Salaried( - nom varchar(64) NOT NULL, - prenom varchar(64), - sexe varchar(1) DEFAULT 'M', - promo varchar(6), - titre varchar(128), - adel varchar(128), - ass varchar(128), - web varchar(128), - tel integer, - fax integer, - datenaiss date, - test boolean, - salary float -, CONSTRAINT cstrc8556fcc665865217761cdbcd220cae0 CHECK(promo IN ('bon', 'pasbon')) -); -CREATE UNIQUE INDEX unique_98da0f9de8588baa8966f0b1a6f850a3 ON Salaried(nom,prenom); - -CREATE TABLE Societe( - nom varchar(64), - web varchar(128), - tel integer, - fax integer, - rncs varchar(32), - ad1 varchar(128), - ad2 varchar(128), - ad3 varchar(128), - cp varchar(12), - ville varchar(32) -, CONSTRAINT cstrc51dd462e9f6115506a0fe468d4c8114 CHECK(fax <= tel) -); - -CREATE TABLE State( - eid integer PRIMARY KEY REFERENCES entities (eid), - name varchar(256) NOT NULL, - description text -); -CREATE INDEX state_name_idx ON State(name); - -CREATE TABLE Subcompany( - name text -); - -CREATE TABLE Subdivision( - name text -); - -CREATE TABLE pkginfo( - modname varchar(30) NOT NULL, - version varchar(10) DEFAULT '0.1' NOT NULL, - copyright text NOT NULL, - license varchar(3), - short_desc varchar(80) NOT NULL, - long_desc text NOT NULL, - author varchar(100) NOT NULL, - author_email varchar(100) NOT NULL, - mailinglist varchar(100), - debian_handler varchar(6) -, CONSTRAINT cstr70f766f834557c715815d76f0a0db956 CHECK(license IN ('GPL', 'ZPL')) -, CONSTRAINT cstr831a117424d0007ae0278cc15f344f5e CHECK(debian_handler IN ('machin', 'bidule')) -); - - -CREATE TABLE concerne_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL 
REFERENCES entities (eid), - CONSTRAINT concerne_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX concerne_relation_from_idx ON concerne_relation(eid_from); -CREATE INDEX concerne_relation_to_idx ON concerne_relation(eid_to); - -CREATE TABLE division_of_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT division_of_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX division_of_relation_from_idx ON division_of_relation(eid_from); -CREATE INDEX division_of_relation_to_idx ON division_of_relation(eid_to); - -CREATE TABLE evaluee_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT evaluee_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX evaluee_relation_from_idx ON evaluee_relation(eid_from); -CREATE INDEX evaluee_relation_to_idx ON evaluee_relation(eid_to); - -CREATE TABLE next_state_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT next_state_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX next_state_relation_from_idx ON next_state_relation(eid_from); -CREATE INDEX next_state_relation_to_idx ON next_state_relation(eid_to); - -CREATE TABLE obj_wildcard_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT obj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX obj_wildcard_relation_from_idx ON obj_wildcard_relation(eid_from); -CREATE INDEX obj_wildcard_relation_to_idx ON obj_wildcard_relation(eid_to); - -CREATE TABLE require_permission_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT require_permission_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX require_permission_relation_from_idx ON require_permission_relation(eid_from); -CREATE INDEX require_permission_relation_to_idx ON require_permission_relation(eid_to); - -CREATE TABLE state_of_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT state_of_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX state_of_relation_from_idx ON state_of_relation(eid_from); -CREATE INDEX state_of_relation_to_idx ON state_of_relation(eid_to); - -CREATE TABLE subcompany_of_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT subcompany_of_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX subcompany_of_relation_from_idx ON subcompany_of_relation(eid_from); -CREATE INDEX subcompany_of_relation_to_idx ON subcompany_of_relation(eid_to); - -CREATE TABLE subdivision_of_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT subdivision_of_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX subdivision_of_relation_from_idx ON subdivision_of_relation(eid_from); -CREATE INDEX subdivision_of_relation_to_idx ON subdivision_of_relation(eid_to); - -CREATE TABLE subj_wildcard_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT subj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX 
subj_wildcard_relation_from_idx ON subj_wildcard_relation(eid_from); -CREATE INDEX subj_wildcard_relation_to_idx ON subj_wildcard_relation(eid_to); - -CREATE TABLE sym_rel_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT sym_rel_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX sym_rel_relation_from_idx ON sym_rel_relation(eid_from); -CREATE INDEX sym_rel_relation_to_idx ON sym_rel_relation(eid_to); - -CREATE TABLE travaille_relation ( - eid_from INTEGER NOT NULL REFERENCES entities (eid), - eid_to INTEGER NOT NULL REFERENCES entities (eid), - CONSTRAINT travaille_relation_p_key PRIMARY KEY(eid_from, eid_to) -); - -CREATE INDEX travaille_relation_from_idx ON travaille_relation(eid_from); -CREATE INDEX travaille_relation_to_idx ON travaille_relation(eid_to); -""" - -class SQLSchemaTC(TestCase): - - def test_known_values(self): - dbhelper = get_db_helper('postgres') - output = schema2sql.schema2sql(dbhelper, schema, skip_relations=('works_for',)) - self.assertMultiLineEqual(EXPECTED_DATA_NO_DROP.strip(), output.strip()) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,446 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit tests for schema rql (de)serialization""" - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb import Binary -from cubicweb.schema import CubicWebSchemaLoader -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.testlib import CubicWebTC - -from cubicweb.server.schemaserial import (updateeschema2rql, updaterschema2rql, rschema2rql, - eschema2rql, rdef2rql, specialize2rql, - _erperms2rql as erperms2rql) - -from logilab.database import get_db_helper -from yams import register_base_type, unregister_base_type - -schema = config = None -def setUpModule(*args): - register_base_type('BabarTestType', ('jungle_speed',)) - helper = get_db_helper('sqlite') - helper.TYPE_MAPPING['BabarTestType'] = 'TEXT' - helper.TYPE_CONVERTERS['BabarTestType'] = lambda x: '"%s"' % x - - global schema, config - loader = CubicWebSchemaLoader() - apphome = Schema2RQLTC.datadir + '-schemaserial' - config = TestServerConfiguration('data', apphome=apphome) - config.bootstrap_cubes() - schema = loader.load(config) - - -def tearDownModule(*args): - global schema, config - schema = config = None - - unregister_base_type('BabarTestType') - helper = get_db_helper('sqlite') - helper.TYPE_MAPPING.pop('BabarTestType', None) - helper.TYPE_CONVERTERS.pop('BabarTestType', None) - -cstrtypemap = {'RQLConstraint': 'RQLConstraint_eid', - 'SizeConstraint': 'SizeConstraint_eid', - 'StaticVocabularyConstraint': 'StaticVocabularyConstraint_eid', - 'FormatConstraint': 'FormatConstraint_eid', - } - -class Schema2RQLTC(TestCase): - - def test_eschema2rql1(self): - self.assertListEqual([ - ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', - {'description': u'define a final relation: ' - 'link a final relation type from a non final entity ' - 'to a final entity type. 
used to build the instance schema', - 'name': u'CWAttribute', 'final': False})], - list(eschema2rql(schema.eschema('CWAttribute')))) - - def test_eschema2rql2(self): - self.assertListEqual([ - ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', - {'description': u'', 'final': True, 'name': u'String'})], - list(eschema2rql(schema.eschema('String')))) - - def test_eschema2rql_specialization(self): - # x: None since eschema.eid are None - self.assertListEqual([('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - {'et': None, 'x': None}), - ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - {'et': None, 'x': None})], - sorted(specialize2rql(schema))) - - def test_esche2rql_custom_type(self): - expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,' - 'X name %(name)s', - {'description': u'', - 'name': u'BabarTestType', 'final': True},)] - got = list(eschema2rql(schema.eschema('BabarTestType'))) - self.assertListEqual(expected, got) - - def test_rschema2rql1(self): - self.assertListEqual([ - ('INSERT CWRType X: X description %(description)s,X final %(final)s,' - 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' - 'X name %(name)s,X symmetric %(symmetric)s', - {'description': u'link a relation definition to its relation type', - 'symmetric': False, - 'name': u'relation_type', - 'final' : False, - 'fulltext_container': None, - 'inlined': True}), - - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'', - 'composite': u'object', - 'cardinality': u'1*', - 'ordernum': 1}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'RQLConstraint_eid', - 'value': u';O;O final TRUE\n'}), - - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'', 'composite': u'object', - 'ordernum': 1, 'cardinality': u'1*'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final FALSE\n'}), - ], - list(rschema2rql(schema.rschema('relation_type'), cstrtypemap))) - - def test_rschema2rql2(self): - self.assertListEqual([ - ('INSERT CWRType X: X description %(description)s,X final %(final)s,' - 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' - 'X name %(name)s,X symmetric %(symmetric)s', - {'description': u'', - 'symmetric': False, - 'name': u'add_permission', - 'final': False, - 'fulltext_container': None, - 'inlined': False}), - - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'groups allowed to add entities/relations of this type', - 'composite': None, - 'ordernum': 9999, - 'cardinality': u'**'}), - ('INSERT CWRelation X: X cardinality 
%(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'rql expression allowing to add entities/relations of this type', - 'composite': 'subject', - 'ordernum': 9999, - 'cardinality': u'*?'}), - - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'groups allowed to add entities/relations of this type', - 'composite': None, - 'ordernum': 9999, - 'cardinality': u'**'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'rql expression allowing to add entities/relations of this type', - 'composite': 'subject', - 'ordernum': 9999, - 'cardinality': u'*?'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'cardinality': u'**', - 'composite': None, - 'description': u'groups allowed to add entities/relations of this type', - 'oe': None, - 'ordernum': 9999, - 'rt': None, - 'se': None}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' - 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' - 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'cardinality': u'*?', - 'composite': u'subject', - 'description': u'rql expression allowing to add entities/relations of this type', - 'oe': None, - 'ordernum': 9999, - 'rt': None, - 'se': None})], - list(rschema2rql(schema.rschema('add_permission'), cstrtypemap))) - - def test_rschema2rql3(self): - self.assertListEqual([ - ('INSERT CWRType X: X description %(description)s,X final %(final)s,' - 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' - 'X name %(name)s,X symmetric %(symmetric)s', - {'description': u'', - 'symmetric': False, - 'name': u'cardinality', - 'final': True, - 'fulltext_container': None, - 'inlined': False}), - - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' - 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,' - 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' - 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' - 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'subject/object cardinality', - 'internationalizable': True, - 'fulltextindexed': False, - 'ordernum': 5, - 'defaultval': None, - 'indexed': False, - 'formula': None, - 'cardinality': u'?1'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, - 'ct': u'SizeConstraint_eid', - 'value': u'max=2'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT 
eid %(ct)s, EDEF eid %(x)s', - {'x': None, - 'ct': u'StaticVocabularyConstraint_eid', - 'value': u"u'?1', u'11'"}), - - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' - 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,' - 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' - 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE ' - 'WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'subject/object cardinality', - 'internationalizable': True, - 'fulltextindexed': False, - 'ordernum': 5, - 'defaultval': None, - 'indexed': False, - 'formula': None, - 'cardinality': u'?1'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, - 'ct': u'SizeConstraint_eid', - 'value': u'max=2'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, - 'ct': u'StaticVocabularyConstraint_eid', - 'value': (u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', " - "u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'")})], - list(rschema2rql(schema.rschema('cardinality'), cstrtypemap))) - - def test_rschema2rql_custom_type(self): - expected = [('INSERT CWRType X: X description %(description)s,X final %(final)s,' - 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' - 'X name %(name)s,X symmetric %(symmetric)s', - {'description': u'', - 'final': True, - 'fulltext_container': None, - 'inlined': False, - 'name': u'custom_field_of_jungle', - 'symmetric': False}), - ('INSERT CWAttribute X: X cardinality %(cardinality)s,' - 'X defaultval %(defaultval)s,X description %(description)s,' - 'X extra_props %(extra_props)s,X formula %(formula)s,X indexed %(indexed)s,' - 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' - 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'cardinality': u'?1', - 'defaultval': None, - 'description': u'', - 'extra_props': b'{"jungle_speed": 42}', - 'formula': None, - 'indexed': False, - 'oe': None, - 'ordernum': 4, - 'rt': None, - 'se': None})] - - got = list(rschema2rql(schema.rschema('custom_field_of_jungle'), cstrtypemap)) - self.assertEqual(2, len(got)) - # this is a custom type attribute with an extra parameter - self.assertIn('extra_props', got[1][1]) - # this extr - extra_props = got[1][1]['extra_props'] - self.assertIsInstance(extra_props, Binary) - got[1][1]['extra_props'] = got[1][1]['extra_props'].getvalue() - self.assertListEqual(expected, got) - - def test_rdef2rql(self): - self.assertListEqual([ - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' - 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,' - 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' - 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' - 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, - 'rt': None, - 'oe': None, - 'description': u'', - 'internationalizable': True, - 'fulltextindexed': False, - 'ordernum': 3, - 'defaultval': Binary.zpickle(u'text/plain'), - 'indexed': False, - 'formula': None, - 'cardinality': u'?1'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, - 'value': u'None', - 
'ct': 'FormatConstraint_eid'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, - 'value': u'max=50', - 'ct': 'SizeConstraint_eid'})], - list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], - cstrtypemap))) - - def test_updateeschema2rql1(self): - self.assertListEqual([('SET X description %(description)s,X final %(final)s,' - 'X name %(name)s WHERE X eid %(x)s', - {'description': u'define a final relation: link a final relation type from' - ' a non final entity to a final entity type. used to build the instance schema', - 'x': 1, 'final': False, 'name': u'CWAttribute'})], - list(updateeschema2rql(schema.eschema('CWAttribute'), 1))) - - def test_updateeschema2rql2(self): - self.assertListEqual([('SET X description %(description)s,X final %(final)s,' - 'X name %(name)s WHERE X eid %(x)s', - {'description': u'', 'x': 1, 'final': True, 'name': u'String'})], - list(updateeschema2rql(schema.eschema('String'), 1))) - - def test_updaterschema2rql1(self): - self.assertListEqual([ - ('SET X description %(description)s,X final %(final)s,' - 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' - 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', - {'x': 1, - 'symmetric': False, - 'description': u'link a relation definition to its relation type', - 'final': False, 'fulltext_container': None, - 'inlined': True, - 'name': u'relation_type'})], - list(updaterschema2rql(schema.rschema('relation_type'), 1))) - - def test_updaterschema2rql2(self): - expected = [ - ('SET X description %(description)s,X final %(final)s,' - 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' - 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', - {'x': 1, - 'symmetric': False, - 'description': u'', - 'final': False, - 'fulltext_container': None, - 'inlined': False, - 'name': u'add_permission'}) - ] - for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'), 1)): - yield self.assertEqual, expected[i], (rql, args) - -class Perms2RQLTC(TestCase): - GROUP_MAPPING = { - 'managers': 0, - 'users': 1, - 'guests': 2, - 'owners': 3, - } - - def test_eperms2rql1(self): - self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), - ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), - ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X update_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X delete_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0})], - [(rql, kwargs) - for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)]) - - def test_rperms2rql2(self): - self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), - ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), - ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X delete_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0})], - [(rql, kwargs) - for rql, kwargs in erperms2rql(schema.rschema('read_permission').rdef('CWEType', 'CWGroup'), - self.GROUP_MAPPING)]) - - def test_rperms2rql3(self): - self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 
1}), - ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), - ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), - ('SET X update_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0})], - [(rql, kwargs) - for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'), - self.GROUP_MAPPING)]) - - #def test_perms2rql(self): - # self.assertListEqual(perms2rql(schema, self.GROUP_MAPPING), - # ['INSERT CWEType X: X name 'Societe', X final FALSE']) - -class ComputedAttributeAndRelationTC(CubicWebTC): - appid = 'data-cwep002' - - def test(self): - # force to read schema from the database - self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) - schema = self.repo.schema - self.assertEqual([('Company', 'Person')], list(schema['has_employee'].rdefs)) - self.assertEqual(schema['has_employee'].rdef('Company', 'Person').permissions['read'], - (u'managers',)) - self.assertEqual('O works_for S', - schema['has_employee'].rule) - self.assertEqual([('Company', 'Int')], list(schema['total_salary'].rdefs)) - self.assertEqual('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', - schema['total_salary'].rdefs['Company', 'Int'].formula) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_security.py --- a/server/test/unittest_security.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,688 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""functional tests for server'security""" - -from six.moves import range - -from logilab.common.testlib import unittest_main - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb import Unauthorized, ValidationError, QueryError, Binary -from cubicweb.schema import ERQLExpression -from cubicweb.server.querier import get_local_checks, check_relations_read_access -from cubicweb.server.utils import _CRYPTO_CTX - - -class BaseSecurityTC(CubicWebTC): - - def setup_database(self): - super(BaseSecurityTC, self).setup_database() - with self.admin_access.client_cnx() as cnx: - self.create_user(cnx, u'iaminusersgrouponly') - hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt') - self.create_user(cnx, u'oldpassword', password=Binary(hash.encode('ascii'))) - -class LowLevelSecurityFunctionTC(BaseSecurityTC): - - def test_check_relation_read_access(self): - rql = u'Personne U WHERE U nom "managers"' - rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0] - nom = self.repo.schema['Personne'].rdef('nom') - with self.temporary_permissions((nom, {'read': ('users', 'managers')})): - with self.admin_access.repo_cnx() as cnx: - self.repo.vreg.solutions(cnx, rqlst, None) - check_relations_read_access(cnx, rqlst, {}) - with self.new_access(u'anon').repo_cnx() as cnx: - self.assertRaises(Unauthorized, - check_relations_read_access, - cnx, rqlst, {}) - self.assertRaises(Unauthorized, cnx.execute, rql) - - def test_get_local_checks(self): - rql = u'Personne U WHERE U nom "managers"' - rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0] - with self.temporary_permissions(Personne={'read': ('users', 'managers')}): - with self.admin_access.repo_cnx() as cnx: - self.repo.vreg.solutions(cnx, rqlst, None) - solution = rqlst.solutions[0] - localchecks = get_local_checks(cnx, rqlst, solution) - self.assertEqual({}, localchecks) - with self.new_access(u'anon').repo_cnx() as cnx: - self.assertRaises(Unauthorized, - get_local_checks, - cnx, rqlst, solution) - self.assertRaises(Unauthorized, cnx.execute, rql) - - def test_upassword_not_selectable(self): - with self.admin_access.repo_cnx() as cnx: - self.assertRaises(Unauthorized, - cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P') - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - self.assertRaises(Unauthorized, - cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P') - - def test_update_password(self): - """Ensure that if a user's password is stored with a deprecated hash, - it will be updated on next login - """ - with self.repo.internal_cnx() as cnx: - oldhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " - "WHERE cw_login = 'oldpassword'").fetchone()[0] - oldhash = self.repo.system_source.binary_to_str(oldhash) - self.repo.close(self.repo.connect('oldpassword', password='oldpassword')) - newhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " - "WHERE cw_login = 'oldpassword'").fetchone()[0] - newhash = self.repo.system_source.binary_to_str(newhash) - self.assertNotEqual(oldhash, newhash) - self.assertTrue(newhash.startswith(b'$6$')) - self.repo.close(self.repo.connect('oldpassword', password='oldpassword')) - newnewhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE " - "cw_login = 'oldpassword'").fetchone()[0] - newnewhash = self.repo.system_source.binary_to_str(newnewhash) - self.assertEqual(newhash, newnewhash) - - -class SecurityRewritingTC(BaseSecurityTC): - def hijack_source_execute(self): - def syntax_tree_search(*args, **kwargs): - self.query = (args, kwargs) - return [] - 
self.repo.system_source.syntax_tree_search = syntax_tree_search - - def tearDown(self): - self.repo.system_source.__dict__.pop('syntax_tree_search', None) - super(SecurityRewritingTC, self).tearDown() - - def test_not_relation_read_security(self): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - self.hijack_source_execute() - cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire') - self.assertEqual(self.query[0][1].as_string(), - 'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') - cnx.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') - self.assertEqual(self.query[0][1].as_string(), - 'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') - -class SecurityTC(BaseSecurityTC): - - def setUp(self): - super(SecurityTC, self).setUp() - # implicitly test manager can add some entities - with self.admin_access.repo_cnx() as cnx: - cnx.execute("INSERT Affaire X: X sujet 'cool'") - cnx.execute("INSERT Societe X: X nom 'logilab'") - cnx.execute("INSERT Personne X: X nom 'bidule'") - cnx.execute('INSERT CWGroup X: X name "staff"') - cnx.commit() - - def test_insert_security(self): - with self.new_access(u'anon').repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'bidule'") - self.assertRaises(Unauthorized, cnx.commit) - self.assertEqual(cnx.execute('Personne X').rowcount, 1) - - def test_insert_security_2(self): - with self.new_access(u'anon').repo_cnx() as cnx: - cnx.execute("INSERT Affaire X") - self.assertRaises(Unauthorized, cnx.commit) - # anon has no read permission on Affaire entities, so - # rowcount == 0 - self.assertEqual(cnx.execute('Affaire X').rowcount, 0) - - def test_insert_rql_permission(self): - # test user can only add une affaire related to a societe he owns - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("INSERT Affaire X: X sujet 'cool'") - self.assertRaises(Unauthorized, cnx.commit) - # test nothing has actually been inserted - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Affaire X').rowcount, 1) - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("INSERT Affaire X: X sujet 'cool'") - cnx.execute("INSERT Societe X: X nom 'chouette'") - cnx.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'") - cnx.commit() - - def test_update_security_1(self): - with self.new_access(u'anon').repo_cnx() as cnx: - # local security check - cnx.execute( "SET X nom 'bidulechouette' WHERE X is Personne") - self.assertRaises(Unauthorized, cnx.commit) - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) - - def test_update_security_2(self): - with self.temporary_permissions(Personne={'read': ('users', 'managers'), - 'add': ('guests', 'users', 'managers')}): - with self.new_access(u'anon').repo_cnx() as cnx: - self.assertRaises(Unauthorized, cnx.execute, - "SET X nom 'bidulechouette' WHERE X is Personne") - # test nothing has actually been inserted - with self.admin_access.repo_cnx() as cnx: - self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) - - def test_update_security_3(self): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'biduuule'") - cnx.execute("INSERT Societe X: X nom 'looogilab'") - cnx.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'") - - def test_insert_immutable_attribute_update(self): - with self.admin_access.repo_cnx() as cnx: - cnx.create_entity('Old', 
name=u'Babar') - cnx.commit() - # this should be equivalent - o = cnx.create_entity('Old') - o.cw_set(name=u'Celeste') - cnx.commit() - - def test_update_rql_permission(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") - cnx.commit() - # test user can only update une affaire related to a societe he owns - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("SET X sujet 'pascool' WHERE X is Affaire") - # this won't actually do anything since the selection query won't return anything - cnx.commit() - # to actually get Unauthorized exception, try to update an entity we can read - cnx.execute("SET X nom 'toto' WHERE X is Societe") - self.assertRaises(Unauthorized, cnx.commit) - cnx.execute("INSERT Affaire X: X sujet 'pascool'") - cnx.execute("INSERT Societe X: X nom 'chouette'") - cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'") - cnx.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'") - cnx.commit() - - def test_delete_security(self): - # FIXME: sample below fails because we don't detect "owner" can't delete - # user anyway, and since no user with login == 'bidule' exists, no - # exception is raised - #user._groups = {'guests':1} - #self.assertRaises(Unauthorized, - # self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'") - # check local security - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - self.assertRaises(Unauthorized, cnx.execute, "DELETE CWGroup Y WHERE Y name 'staff'") - - def test_delete_rql_permission(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") - cnx.commit() - # test user can only dele une affaire related to a societe he owns - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - # this won't actually do anything since the selection query won't return anything - cnx.execute("DELETE Affaire X") - cnx.commit() - # to actually get Unauthorized exception, try to delete an entity we can read - self.assertRaises(Unauthorized, cnx.execute, "DELETE Societe S") - self.assertRaises(QueryError, cnx.commit) # can't commit anymore - cnx.rollback() - cnx.execute("INSERT Affaire X: X sujet 'pascool'") - cnx.execute("INSERT Societe X: X nom 'chouette'") - cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'") - cnx.commit() -## # this one should fail since it will try to delete two affaires, one authorized -## # and the other not -## self.assertRaises(Unauthorized, cnx.execute, "DELETE Affaire X") - cnx.execute("DELETE Affaire X WHERE X sujet 'pascool'") - cnx.commit() - - def test_insert_relation_rql_permission(self): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") - # should raise Unauthorized since user don't own S though this won't - # actually do anything since the selection query won't return - # anything - cnx.commit() - # to actually get Unauthorized exception, try to insert a relation - # were we can read both entities - rset = cnx.execute('Personne P') - self.assertEqual(len(rset), 1) - ent = rset.get_entity(0, 0) - self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe')) - self.assertRaises(Unauthorized, ent.cw_check_perm, 'update') - self.assertRaises(Unauthorized, - cnx.execute, "SET P travaille S WHERE P is Personne, S is Societe") - self.assertRaises(QueryError, cnx.commit) # can't commit anymore - 
cnx.rollback() - # test nothing has actually been inserted: - self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe')) - cnx.execute("INSERT Societe X: X nom 'chouette'") - cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") - cnx.commit() - - def test_delete_relation_rql_permission(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - # this won't actually do anything since the selection query won't return anything - cnx.execute("DELETE A concerne S") - cnx.commit() - with self.admin_access.repo_cnx() as cnx: - # to actually get Unauthorized exception, try to delete a relation we can read - eid = cnx.execute("INSERT Affaire X: X sujet 'pascool'")[0][0] - cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', - {'x': eid}) - cnx.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe") - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - self.assertRaises(Unauthorized, cnx.execute, "DELETE A concerne S") - self.assertRaises(QueryError, cnx.commit) # can't commit anymore - cnx.rollback() - cnx.execute("INSERT Societe X: X nom 'chouette'") - cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") - cnx.commit() - cnx.execute("DELETE A concerne S WHERE S nom 'chouette'") - cnx.commit() - - - def test_user_can_change_its_upassword(self): - with self.admin_access.repo_cnx() as cnx: - ueid = self.create_user(cnx, u'user').eid - with self.new_access(u'user').repo_cnx() as cnx: - cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': b'newpwd'}) - cnx.commit() - self.repo.close(self.repo.connect('user', password='newpwd')) - - def test_user_cant_change_other_upassword(self): - with self.admin_access.repo_cnx() as cnx: - ueid = self.create_user(cnx, u'otheruser').eid - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': b'newpwd'}) - self.assertRaises(Unauthorized, cnx.commit) - - # read security test - - def test_read_base(self): - with self.temporary_permissions(Personne={'read': ('users', 'managers')}): - with self.new_access(u'anon').repo_cnx() as cnx: - self.assertRaises(Unauthorized, - cnx.execute, 'Personne U where U nom "managers"') - - def test_read_erqlexpr_base(self): - with self.admin_access.repo_cnx() as cnx: - eid = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - rset = cnx.execute('Affaire X') - self.assertEqual(rset.rows, []) - self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) - # cache test - self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) - aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] - cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") - cnx.commit() - rset = cnx.execute('Any X WHERE X eid %(x)s', {'x': aff2}) - self.assertEqual(rset.rows, [[aff2]]) - # more cache test w/ NOT eid - rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}) - self.assertEqual(rset.rows, [[aff2]]) - rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}) - self.assertEqual(rset.rows, []) - # test can't update an attribute of an entity that 
can't be readen - self.assertRaises(Unauthorized, cnx.execute, - 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}) - - - def test_entity_created_in_transaction(self): - affschema = self.schema['Affaire'] - with self.temporary_permissions(Affaire={'read': affschema.permissions['add']}): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - # entity created in transaction are readable *by eid* - self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2})) - # XXX would be nice if it worked - rset = cnx.execute("Affaire X WHERE X sujet 'cool'") - self.assertEqual(len(rset), 0) - self.assertRaises(Unauthorized, cnx.commit) - - def test_read_erqlexpr_has_text1(self): - with self.admin_access.repo_cnx() as cnx: - aff1 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - card1 = cnx.execute("INSERT Card X: X title 'cool'")[0][0] - cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', - {'x': card1}) - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] - cnx.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) - cnx.commit() - self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}) - self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2})) - self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':card1})) - rset = cnx.execute("Any X WHERE X has_text 'cool'") - self.assertEqual(sorted(eid for eid, in rset.rows), - [card1, aff2]) - - def test_read_erqlexpr_has_text2(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'bidule'") - cnx.execute("INSERT Societe X: X nom 'bidule'") - cnx.commit() - with self.temporary_permissions(Personne={'read': ('managers',)}): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - rset = cnx.execute('Any N WHERE N has_text "bidule"') - self.assertEqual(len(rset.rows), 1, rset.rows) - rset = cnx.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_read_erqlexpr_optional_rel(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'bidule'") - cnx.execute("INSERT Societe X: X nom 'bidule'") - cnx.commit() - with self.temporary_permissions(Personne={'read': ('managers',)}): - with self.new_access(u'anon').repo_cnx() as cnx: - rset = cnx.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') - self.assertEqual(len(rset.rows), 1, rset.rows) - - def test_read_erqlexpr_aggregat(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') - self.assertEqual(rset.rows, [[0]]) - aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] - cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") - cnx.commit() - rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') - self.assertEqual(rset.rows, [[1]]) - rset = cnx.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN') - values = dict(rset) - self.assertEqual(values['Affaire'], 1) - self.assertEqual(values['Societe'], 2) - rset = cnx.execute('Any ETN, COUNT(X) 
GROUPBY ETN WHERE X is ET, ET name ETN ' - 'WITH X BEING ((Affaire X) UNION (Societe X))') - self.assertEqual(len(rset), 2) - values = dict(rset) - self.assertEqual(values['Affaire'], 1) - self.assertEqual(values['Societe'], 2) - - - def test_attribute_security(self): - with self.admin_access.repo_cnx() as cnx: - # only managers should be able to edit the 'test' attribute of Personne entities - eid = cnx.execute("INSERT Personne X: X nom 'bidule', " - "X web 'http://www.debian.org', X test TRUE")[0][0] - cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'bidule', " - "X web 'http://www.debian.org', X test TRUE") - self.assertRaises(Unauthorized, cnx.commit) - cnx.execute("INSERT Personne X: X nom 'bidule', " - "X web 'http://www.debian.org', X test FALSE") - self.assertRaises(Unauthorized, cnx.commit) - eid = cnx.execute("INSERT Personne X: X nom 'bidule', " - "X web 'http://www.debian.org'")[0][0] - cnx.commit() - cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) - self.assertRaises(Unauthorized, cnx.commit) - cnx.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}) - self.assertRaises(Unauthorized, cnx.commit) - cnx.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute('INSERT Frozable F: F name "Foo"') - cnx.commit() - cnx.execute('SET F name "Bar" WHERE F is Frozable') - cnx.commit() - cnx.execute('SET F name "BaBar" WHERE F is Frozable') - cnx.execute('SET F frozen True WHERE F is Frozable') - with self.assertRaises(Unauthorized): - cnx.commit() - cnx.rollback() - cnx.execute('SET F frozen True WHERE F is Frozable') - cnx.commit() - cnx.execute('SET F name "Bar" WHERE F is Frozable') - with self.assertRaises(Unauthorized): - cnx.commit() - - def test_attribute_security_rqlexpr(self): - with self.admin_access.repo_cnx() as cnx: - # Note.para attribute editable by managers or if the note is in "todo" state - note = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) - cnx.commit() - note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') - cnx.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) - self.assertRaises(Unauthorized, cnx.commit) - note2 = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) - cnx.commit() - note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone') - cnx.commit() - self.assertEqual(len(cnx.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', - {'x': note2.eid})), - 0) - cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) - self.assertRaises(Unauthorized, cnx.commit) - note2.cw_adapt_to('IWorkflowable').fire_transition('redoit') - cnx.commit() - cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) - cnx.commit() - cnx.execute("INSERT Note X: X something 'A'") - self.assertRaises(Unauthorized, cnx.commit) - cnx.execute("INSERT Note X: X para 'zogzog', X something 'A'") - cnx.commit() - note = cnx.execute("INSERT Note X").get_entity(0,0) - cnx.commit() - note.cw_set(something=u'B') - cnx.commit() - note.cw_set(something=None, para=u'zogzog') - cnx.commit() - - def test_attribute_read_security(self): - # anon not allowed to see users'login, but they can 
see users - login_rdef = self.repo.schema['CWUser'].rdef('login') - with self.temporary_permissions((login_rdef, {'read': ('users', 'managers')}), - CWUser={'read': ('guests', 'users', 'managers')}): - with self.new_access(u'anon').repo_cnx() as cnx: - rset = cnx.execute('CWUser X') - self.assertTrue(rset) - x = rset.get_entity(0, 0) - x.complete() - self.assertEqual(x.login, None) - self.assertTrue(x.creation_date) - x = rset.get_entity(1, 0) - x.complete() - self.assertEqual(x.login, None) - self.assertTrue(x.creation_date) - - def test_yams_inheritance_and_security_bug(self): - with self.temporary_permissions(Division={'read': ('managers', - ERQLExpression('X owned_by U'))}): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - querier = cnx.repo.querier - rqlst = querier.parse('Any X WHERE X is_instance_of Societe') - querier.solutions(cnx, rqlst, {}) - querier._annotate(rqlst) - plan = querier.plan_factory(rqlst, {}, cnx) - plan.preprocess(rqlst) - self.assertEqual( - rqlst.as_string(), - '(Any X WHERE X is IN(Societe, SubDivision)) UNION ' - '(Any X WHERE X is Division, EXISTS(X owned_by %(B)s))') - - -class BaseSchemaSecurityTC(BaseSecurityTC): - """tests related to the base schema permission configuration""" - - def test_user_can_delete_object_he_created(self): - # even if some other user have changed object'state - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - # due to security test, affaire has to concerne a societe the user owns - cnx.execute('INSERT Societe X: X nom "ARCTIA"') - cnx.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"') - cnx.commit() - with self.admin_access.repo_cnx() as cnx: - affaire = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') - cnx.commit() - self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), - 1) - self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' - 'X owned_by U, U login "admin"')), - 1) # TrInfo at the above state change - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - cnx.execute('DELETE Affaire X WHERE X ref "ARCT01"') - cnx.commit() - self.assertFalse(cnx.execute('Affaire X')) - - def test_users_and_groups_non_readable_by_guests(self): - with self.repo.internal_cnx() as cnx: - admineid = cnx.execute('CWUser U WHERE U login "admin"').rows[0][0] - with self.new_access(u'anon').repo_cnx() as cnx: - anon = cnx.user - # anonymous user can only read itself - rset = cnx.execute('Any L WHERE X owned_by U, U login L') - self.assertEqual([['anon']], rset.rows) - rset = cnx.execute('CWUser X') - self.assertEqual([[anon.eid]], rset.rows) - # anonymous user can read groups (necessary to check allowed transitions for instance) - self.assertTrue(cnx.execute('CWGroup X')) - # should only be able to read the anonymous user, not another one - self.assertRaises(Unauthorized, - cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid}) - rset = cnx.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) - self.assertEqual([[anon.eid]], rset.rows) - # but can't modify it - cnx.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) - self.assertRaises(Unauthorized, cnx.commit) - - def test_in_group_relation(self): - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - rql = u"DELETE U in_group G WHERE U login 'admin'" - self.assertRaises(Unauthorized, cnx.execute, rql) - rql = u"SET U in_group G WHERE U login 'admin', G name 
'users'" - self.assertRaises(Unauthorized, cnx.execute, rql) - - def test_owned_by(self): - with self.admin_access.repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'bidule'") - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne" - self.assertRaises(Unauthorized, cnx.execute, rql) - - def test_bookmarked_by_guests_security(self): - with self.admin_access.repo_cnx() as cnx: - beid1 = cnx.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0] - beid2 = cnx.execute('INSERT Bookmark B: B path "?vid=index", B title "index", ' - 'B bookmarked_by U WHERE U login "anon"')[0][0] - cnx.commit() - with self.new_access(u'anon').repo_cnx() as cnx: - anoneid = cnx.user.eid - self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' - 'B bookmarked_by U, U eid %s' % anoneid).rows, - [['index', '?vid=index']]) - self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' - 'B bookmarked_by U, U eid %(x)s', {'x': anoneid}).rows, - [['index', '?vid=index']]) - # can read others bookmarks as well - self.assertEqual(cnx.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows, - [[beid1]]) - self.assertRaises(Unauthorized, cnx.execute,'DELETE B bookmarked_by U') - self.assertRaises(Unauthorized, - cnx.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s', - {'x': anoneid, 'b': beid1}) - - def test_ambigous_ordered(self): - with self.new_access(u'anon').repo_cnx() as cnx: - names = [t for t, in cnx.execute('Any N ORDERBY lower(N) WHERE X name N')] - self.assertEqual(names, sorted(names, key=lambda x: x.lower())) - - def test_in_state_without_update_perm(self): - """check a user change in_state without having update permission on the - subject - """ - with self.admin_access.repo_cnx() as cnx: - eid = cnx.execute('INSERT Affaire X: X ref "ARCT01"')[0][0] - cnx.commit() - with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: - # needed to remove rql expr granting update perm to the user - affschema = self.schema['Affaire'] - with self.temporary_permissions(Affaire={'update': affschema.get_groups('update'), - 'read': ('users',)}): - self.assertRaises(Unauthorized, - affschema.check_perm, cnx, 'update', eid=eid) - aff = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - aff.cw_adapt_to('IWorkflowable').fire_transition('abort') - cnx.commit() - # though changing a user state (even logged user) is reserved to managers - user = cnx.user - # XXX wether it should raise Unauthorized or ValidationError is not clear - # the best would probably ValidationError if the transition doesn't exist - # from the current state but Unauthorized if it exists but user can't pass it - self.assertRaises(ValidationError, - user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate') - - def test_trinfo_security(self): - with self.admin_access.repo_cnx() as cnx: - aff = cnx.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) - iworkflowable = aff.cw_adapt_to('IWorkflowable') - cnx.commit() - iworkflowable.fire_transition('abort') - cnx.commit() - # can change tr info comment - cnx.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', - {'c': u'bouh!'}) - cnx.commit() - aff.cw_clear_relation_cache('wf_info_for', 'object') - trinfo = iworkflowable.latest_trinfo() - self.assertEqual(trinfo.comment, 'bouh!') - # but not from_state/to_state - 
aff.cw_clear_relation_cache('wf_info_for', role='object') - self.assertRaises(Unauthorized, cnx.execute, - 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', - {'ti': trinfo.eid}) - self.assertRaises(Unauthorized, cnx.execute, - 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"', - {'ti': trinfo.eid}) - - def test_emailaddress_security(self): - # check for prexisting email adresse - with self.admin_access.repo_cnx() as cnx: - if cnx.execute('Any X WHERE X is EmailAddress'): - rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X') - msg = ['Preexisting email readable by anon found!'] - tmpl = ' - "%s" used by user "%s"' - for i in range(len(rset)): - email, user = rset.get_entity(i, 0), rset.get_entity(i, 1) - msg.append(tmpl % (email.dc_title(), user.dc_title())) - raise RuntimeError('\n'.join(msg)) - # actual test - cnx.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - cnx.execute('INSERT EmailAddress X: X address "anon", ' - 'U use_email X WHERE U login "anon"').get_entity(0, 0) - cnx.commit() - self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 2) - with self.new_access(u'anon').repo_cnx() as cnx: - self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 1) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_serverctl.py --- a/server/test/unittest_serverctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ -import os.path as osp -import shutil - -from cubicweb.devtools import testlib, ApptestConfiguration -from cubicweb.server.serverctl import _local_dump, DBDumpCommand -from cubicweb.server.serverconfig import ServerConfiguration - -class ServerCTLTC(testlib.CubicWebTC): - def setUp(self): - super(ServerCTLTC, self).setUp() - self.orig_config_for = ServerConfiguration.config_for - config_for = lambda appid: ApptestConfiguration(appid, apphome=self.datadir) - ServerConfiguration.config_for = staticmethod(config_for) - - def tearDown(self): - ServerConfiguration.config_for = self.orig_config_for - super(ServerCTLTC, self).tearDown() - - def test_dump(self): - DBDumpCommand(None).run([self.appid]) - shutil.rmtree(osp.join(self.config.apphome, 'backup')) - - -if __name__ == '__main__': - from unittest import main - main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_sources_native.py --- a/server/test/unittest_sources_native.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from logilab.common import tempattr - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server.sources.native import FTIndexEntityOp - -class NativeSourceTC(CubicWebTC): - - def test_index_entity_consider_do_fti(self): - source = self.repo.system_source - with tempattr(source, 'do_fti', False): - with self.admin_access.repo_cnx() as cnx: - # when do_fti is set to false, call to index_entity (as may be done from hooks) - # should have no effect - source.index_entity(cnx, cnx.user) - self.assertNotIn(cnx.user.eid, FTIndexEntityOp.get_instance(cnx).get_data()) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_sqlutils.py --- a/server/test/unittest_sqlutils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for module cubicweb.server.sqlutils -""" - -import sys - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.server.sqlutils import * - -from cubicweb.devtools.testlib import CubicWebTC - -BASE_CONFIG = { - 'db-driver' : 'Postgres', - 'db-host' : 'crater', - 'db-name' : 'cubicweb2_test', - 'db-user' : 'toto', - 'db-upassword' : 'toto', - } - -class SQLAdapterMixInTC(TestCase): - - def test_init(self): - o = SQLAdapterMixIn(BASE_CONFIG) - self.assertEqual(o.dbhelper.dbencoding, 'UTF-8') - - def test_init_encoding(self): - config = BASE_CONFIG.copy() - config['db-encoding'] = 'ISO-8859-1' - o = SQLAdapterMixIn(config) - self.assertEqual(o.dbhelper.dbencoding, 'ISO-8859-1') - - -class SQLUtilsTC(CubicWebTC): - - def test_group_concat(self): - with self.admin_access.repo_cnx() as cnx: - g = cnx.create_entity('CWGroup', name=u'héhé') - u = cnx.create_entity('CWUser', login=u'toto', upassword=u'', - in_group=g.eid) - rset = cnx.execute(u'Any L,GROUP_CONCAT(G) GROUPBY L WHERE X login L,' - u'X in_group G, G name GN, NOT G name IN ("users", "héhé")') - self.assertEqual([[u'admin', u'3'], [u'anon', u'2']], - rset.rows) - rset = cnx.execute('Any L,GROUP_CONCAT(GN) GROUPBY L WHERE X login L,' - 'X in_group G, G name GN, NOT G name "users"') - self.assertEqual([[u'admin', u'managers'], [u'anon', u'guests'], [u'toto', u'héhé']], - rset.rows) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_ssplanner.py --- a/server/test/unittest_ssplanner.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,66 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from cubicweb.devtools import TestServerConfiguration, get_test_db_handler -from cubicweb.devtools.repotest import BasePlannerTC, test_plan -from cubicweb.server.ssplanner import SSPlanner - -# keep cnx so it's not garbage collected and the associated session closed -def setUpModule(*args): - global repo, cnx - handler = get_test_db_handler(TestServerConfiguration( - 'data', apphome=SSPlannerTC.datadir)) - handler.build_db_cache() - global repo, cnx - repo, cnx = handler.get_repo_and_cnx() - -def tearDownModule(*args): - global repo, cnx - del repo, cnx - -class SSPlannerTC(BasePlannerTC): - _test = test_plan - - def setUp(self): - self.__class__.repo = repo - BasePlannerTC.setUp(self) - self.planner = SSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - self.system = self.o._repo.system_source - - def tearDown(self): - BasePlannerTC.tearDown(self) - - def test_ordered_ambigous_sol(self): - self._test('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket, State, Folder)', - [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN, X is IN(Basket, State, Folder)', - [{'X': 'Basket', 'XN': 'String'}, - {'X': 'State', 'XN': 'String'}, - {'X': 'Folder', 'XN': 'String'}])], - None, [])]) - - def test_groupeded_ambigous_sol(self): - self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, State, Folder)', - [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN(Basket, State, Folder)', - [{'X': 'Basket', 'XN': 'String'}, - {'X': 'State', 'XN': 'String'}, - {'X': 'Folder', 'XN': 'String'}])], - None, [])]) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_storage.py --- a/server/test/unittest_storage.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,354 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit tests for module cubicweb.server.sources.storages""" - -from six import PY2 - -from logilab.common.testlib import unittest_main, tag, Tags -from cubicweb.devtools.testlib import CubicWebTC - -from glob import glob -import os -import os.path as osp -import sys -import shutil -import tempfile - -from cubicweb import Binary, QueryError -from cubicweb.predicates import is_instance -from cubicweb.server.sources import storages -from cubicweb.server.hook import Hook - -class DummyBeforeHook(Hook): - __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & is_instance('File') - events = ('before_add_entity',) - - def __call__(self): - self._cw.transaction_data['orig_file_value'] = self.entity.data.getvalue() - - -class DummyAfterHook(Hook): - __regid__ = 'dummy-after-hook' - __select__ = Hook.__select__ & is_instance('File') - events = ('after_add_entity',) - - def __call__(self): - # new value of entity.data should be the same as before - oldvalue = self._cw.transaction_data['orig_file_value'] - assert oldvalue == self.entity.data.getvalue() - -class StorageTC(CubicWebTC): - tempdir = None - tags = CubicWebTC.tags | Tags('Storage', 'BFSS') - - def setup_database(self): - self.tempdir = tempfile.mkdtemp() - bfs_storage = storages.BytesFileSystemStorage(self.tempdir) - self.bfs_storage = bfs_storage - storages.set_attribute_storage(self.repo, 'File', 'data', bfs_storage) - storages.set_attribute_storage(self.repo, 'BFSSTestable', 'opt_attr', bfs_storage) - - def tearDown(self): - super(StorageTC, self).tearDown() - storages.unset_attribute_storage(self.repo, 'File', 'data') - del self.bfs_storage - shutil.rmtree(self.tempdir) - - - def create_file(self, cnx, content=b'the-data'): - return cnx.create_entity('File', data=Binary(content), - data_format=u'text/plain', - data_name=u'foo.pdf') - - def fspath(self, cnx, entity): - fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', - {'f': entity.eid})[0][0].getvalue() - return fspath if PY2 else fspath.decode('utf-8') - - def test_bfss_wrong_fspath_usage(self): - with self.admin_access.repo_cnx() as cnx: - f1 = self.create_file(cnx) - cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid}) - with self.assertRaises(NotImplementedError) as cm: - cnx.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid}) - self.assertEqual(str(cm.exception), - 'This callback is only available for BytesFileSystemStorage ' - 'managed attribute. 
Is FSPATH() argument BFSS managed?') - - def test_bfss_storage(self): - with self.admin_access.web_request() as req: - cnx = req.cnx - f1 = self.create_file(req) - filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid)) - self.assertEqual(len(filepaths), 1, filepaths) - expected_filepath = filepaths[0] - # file should be read only - self.assertFalse(os.access(expected_filepath, os.W_OK)) - self.assertEqual(open(expected_filepath).read(), 'the-data') - cnx.rollback() - self.assertFalse(osp.isfile(expected_filepath)) - filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid)) - self.assertEqual(len(filepaths), 0, filepaths) - f1 = self.create_file(req) - cnx.commit() - filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid)) - self.assertEqual(len(filepaths), 1, filepaths) - expected_filepath = filepaths[0] - self.assertEqual(open(expected_filepath).read(), 'the-data') - - # add f1 back to the entity cache with req as _cw - f1 = req.entity_from_eid(f1.eid) - f1.cw_set(data=Binary(b'the new data')) - cnx.rollback() - self.assertEqual(open(expected_filepath).read(), 'the-data') - f1.cw_delete() - self.assertTrue(osp.isfile(expected_filepath)) - cnx.rollback() - self.assertTrue(osp.isfile(expected_filepath)) - f1.cw_delete() - cnx.commit() - self.assertFalse(osp.isfile(expected_filepath)) - - def test_bfss_sqlite_fspath(self): - with self.admin_access.repo_cnx() as cnx: - f1 = self.create_file(cnx) - expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name)) - base, ext = osp.splitext(expected_filepath) - self.assertTrue(self.fspath(cnx, f1).startswith(base)) - self.assertTrue(self.fspath(cnx, f1).endswith(ext)) - - def test_bfss_fs_importing_doesnt_touch_path(self): - with self.admin_access.repo_cnx() as cnx: - cnx.transaction_data['fs_importing'] = True - filepath = osp.abspath(__file__) - f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())), - data_format=u'text/plain', data_name=u'foo') - self.assertEqual(self.fspath(cnx, f1), filepath) - - def test_source_storage_transparency(self): - with self.admin_access.repo_cnx() as cnx: - with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook): - self.create_file(cnx) - - def test_source_mapped_attribute_error_cases(self): - with self.admin_access.repo_cnx() as cnx: - with self.assertRaises(QueryError) as cm: - cnx.execute('Any X WHERE X data ~= "hop", X is File') - self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction') - with self.assertRaises(QueryError) as cm: - cnx.execute('Any X, Y WHERE X data D, Y data D, ' - 'NOT X identity Y, X is File, Y is File') - self.assertEqual(str(cm.exception), "can't use D as a restriction variable") - # query returning mix of mapped / regular attributes (only file.data - # mapped, not image.data for instance) - with self.assertRaises(QueryError) as cm: - cnx.execute('Any X WITH X BEING (' - ' (Any NULL)' - ' UNION ' - ' (Any D WHERE X data D, X is File)' - ')') - self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') - with self.assertRaises(QueryError) as cm: - cnx.execute('(Any D WHERE X data D, X is File)' - ' UNION ' - '(Any D WHERE X title D, X is Bookmark)') - self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') - - storages.set_attribute_storage(self.repo, 'State', 'name', - storages.BytesFileSystemStorage(self.tempdir)) - try: - with self.assertRaises(QueryError) as cm: - cnx.execute('Any D WHERE X name D, X is 
IN (State, Transition)') - self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') - finally: - storages.unset_attribute_storage(self.repo, 'State', 'name') - - def test_source_mapped_attribute_advanced(self): - with self.admin_access.repo_cnx() as cnx: - f1 = self.create_file(cnx) - rset = cnx.execute('Any X,D WITH D,X BEING (' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ' UNION ' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}) - self.assertEqual(len(rset), 2) - self.assertEqual(rset[0][0], f1.eid) - self.assertEqual(rset[1][0], f1.eid) - self.assertEqual(rset[0][1].getvalue(), b'the-data') - self.assertEqual(rset[1][1].getvalue(), b'the-data') - rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], f1.eid) - self.assertEqual(rset[0][1], len('the-data')) - rset = cnx.execute('Any X,LENGTH(D) WITH D,X BEING (' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ' UNION ' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}) - self.assertEqual(len(rset), 2) - self.assertEqual(rset[0][0], f1.eid) - self.assertEqual(rset[1][0], f1.eid) - self.assertEqual(rset[0][1], len('the-data')) - self.assertEqual(rset[1][1], len('the-data')) - with self.assertRaises(QueryError) as cm: - cnx.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}) - self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute') - - - def test_bfss_fs_importing_transparency(self): - with self.admin_access.repo_cnx() as cnx: - cnx.transaction_data['fs_importing'] = True - filepath = osp.abspath(__file__) - f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())), - data_format=u'text/plain', data_name=u'foo') - cw_value = f1.data.getvalue() - fs_value = open(filepath, 'rb').read() - if cw_value != fs_value: - self.fail('cw value %r is different from file content' % cw_value) - - @tag('update') - def test_bfss_update_with_existing_data(self): - with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary(b'some data'), - data_format=u'text/plain', data_name=u'foo') - # NOTE: do not use cw_set() which would automatically - # update f1's local dict. We want the pure rql version to work - cnx.execute('SET F data %(d)s WHERE F eid %(f)s', - {'d': Binary(b'some other data'), 'f': f1.eid}) - self.assertEqual(f1.data.getvalue(), b'some other data') - cnx.commit() - f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - self.assertEqual(f2.data.getvalue(), b'some other data') - - @tag('update', 'extension', 'commit') - def test_bfss_update_with_different_extension_commited(self): - with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary(b'some data'), - data_format=u'text/plain', data_name=u'foo.txt') - # NOTE: do not use cw_set() which would automatically - # update f1's local dict. 
We want the pure rql version to work - cnx.commit() - old_path = self.fspath(cnx, f1) - self.assertTrue(osp.isfile(old_path)) - self.assertEqual(osp.splitext(old_path)[1], '.txt') - cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' - 'F data_format %(df)s WHERE F eid %(f)s', - {'d': Binary(b'some other data'), 'f': f1.eid, - 'dn': u'bar.jpg', 'df': u'image/jpeg'}) - cnx.commit() - # the new file exists with correct extension - # the old file is dead - f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - new_path = self.fspath(cnx, f2) - self.assertFalse(osp.isfile(old_path)) - self.assertTrue(osp.isfile(new_path)) - self.assertEqual(osp.splitext(new_path)[1], '.jpg') - - @tag('update', 'extension', 'rollback') - def test_bfss_update_with_different_extension_rolled_back(self): - with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary(b'some data'), - data_format=u'text/plain', data_name=u'foo.txt') - # NOTE: do not use cw_set() which would automatically - # update f1's local dict. We want the pure rql version to work - cnx.commit() - old_path = self.fspath(cnx, f1) - old_data = f1.data.getvalue() - self.assertTrue(osp.isfile(old_path)) - self.assertEqual(osp.splitext(old_path)[1], '.txt') - cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' - 'F data_format %(df)s WHERE F eid %(f)s', - {'d': Binary(b'some other data'), - 'f': f1.eid, - 'dn': u'bar.jpg', - 'df': u'image/jpeg'}) - cnx.rollback() - # the new file exists with correct extension - # the old file is dead - f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', - {'f': f1.eid}).get_entity(0, 0) - new_path = self.fspath(cnx, f2) - new_data = f2.data.getvalue() - self.assertTrue(osp.isfile(new_path)) - self.assertEqual(osp.splitext(new_path)[1], '.txt') - self.assertEqual(old_path, new_path) - self.assertEqual(old_data, new_data) - - @tag('update', 'NULL') - def test_bfss_update_to_None(self): - with self.admin_access.repo_cnx() as cnx: - f = cnx.create_entity('Affaire', opt_attr=Binary(b'toto')) - cnx.commit() - f.cw_set(opt_attr=None) - cnx.commit() - - @tag('fs_importing', 'update') - def test_bfss_update_with_fs_importing(self): - with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary(b'some data'), - data_format=u'text/plain', - data_name=u'foo') - old_fspath = self.fspath(cnx, f1) - cnx.transaction_data['fs_importing'] = True - new_fspath = osp.join(self.tempdir, 'newfile.txt') - open(new_fspath, 'w').write('the new data') - cnx.execute('SET F data %(d)s WHERE F eid %(f)s', - {'d': Binary(new_fspath.encode(sys.getfilesystemencoding())), 'f': f1.eid}) - cnx.commit() - self.assertEqual(f1.data.getvalue(), b'the new data') - self.assertEqual(self.fspath(cnx, f1), new_fspath) - self.assertFalse(osp.isfile(old_fspath)) - - @tag('fsimport') - def test_clean(self): - with self.admin_access.repo_cnx() as cnx: - fsimport = storages.fsimport - td = cnx.transaction_data - self.assertNotIn('fs_importing', td) - with fsimport(cnx): - self.assertIn('fs_importing', td) - self.assertTrue(td['fs_importing']) - self.assertNotIn('fs_importing', td) - - @tag('fsimport') - def test_true(self): - with self.admin_access.repo_cnx() as cnx: - fsimport = storages.fsimport - td = cnx.transaction_data - td['fs_importing'] = True - with fsimport(cnx): - self.assertIn('fs_importing', td) - self.assertTrue(td['fs_importing']) - self.assertTrue(td['fs_importing']) - - @tag('fsimport') - def test_False(self): - with self.admin_access.repo_cnx() as cnx: - 
fsimport = storages.fsimport - td = cnx.transaction_data - td['fs_importing'] = False - with fsimport(cnx): - self.assertIn('fs_importing', td) - self.assertTrue(td['fs_importing']) - self.assertFalse(td['fs_importing']) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_tools.py --- a/server/test/unittest_tools.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from logilab.common.testlib import TestCase, unittest_main - -class ImportTC(TestCase): - def test(self): - # the minimal test: module is importable... - import cubicweb.server.checkintegrity - import cubicweb.server.serverctl - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_undo.py --- a/server/test/unittest_undo.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,471 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from six import text_type - -from cubicweb import ValidationError -from cubicweb.devtools.testlib import CubicWebTC -import cubicweb.server.session -from cubicweb.server.session import Connection as OldConnection - -from cubicweb.server.sources.native import UndoTransactionException, _UndoException - -from cubicweb.transaction import NoSuchTransaction - -class UndoableTransactionTC(CubicWebTC): - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.totoeid = self.create_user(cnx, 'toto', - password='toto', - groups=('users',), - commit=False).eid - self.txuuid = cnx.commit() - - def toto(self, cnx): - return cnx.entity_from_eid(self.totoeid) - - def setUp(self): - class Connection(OldConnection): - """Force undo feature to be turned on in all case""" - undo_actions = property(lambda tx: True, lambda x, y:None) - cubicweb.server.session.Connection = Connection - super(UndoableTransactionTC, self).setUp() - - def tearDown(self): - cubicweb.server.session.Connection = OldConnection - super(UndoableTransactionTC, self).tearDown() - - def check_transaction_deleted(self, cnx, txuuid): - # also check transaction actions have been properly deleted - cu = cnx.system_sql( - "SELECT * from tx_entity_actions WHERE tx_uuid='%s'" % txuuid) - self.assertFalse(cu.fetchall()) - cu = cnx.system_sql( - "SELECT * from tx_relation_actions WHERE tx_uuid='%s'" % txuuid) - self.assertFalse(cu.fetchall()) - - def assertUndoTransaction(self, cnx, txuuid, expected_errors=None): - if expected_errors is None : - expected_errors = [] - try: - cnx.undo_transaction(txuuid) - except UndoTransactionException as exn: - errors = exn.errors - else: - errors = [] - self.assertEqual(errors, expected_errors) - - def test_undo_api(self): - self.assertTrue(self.txuuid) - # test transaction api - with self.admin_access.client_cnx() as cnx: - tx_actions = cnx.transaction_actions(self.txuuid) - self.assertEqual(len(tx_actions), 2, tx_actions) - self.assertRaises(NoSuchTransaction, - cnx.transaction_info, 'hop') - self.assertRaises(NoSuchTransaction, - cnx.transaction_actions, 'hop') - self.assertRaises(NoSuchTransaction, - cnx.undo_transaction, 'hop') - txinfo = cnx.transaction_info(self.txuuid) - self.assertTrue(txinfo.datetime) - self.assertEqual(txinfo.user_eid, cnx.user.eid) - self.assertEqual(txinfo.user().login, 'admin') - actions = txinfo.actions_list() - self.assertEqual(len(actions), 2) - actions = txinfo.actions_list(public=False) - self.assertEqual(len(actions), 6) - a1 = actions[0] - self.assertEqual(a1.action, 'C') - self.assertEqual(a1.eid, self.totoeid) - self.assertEqual(a1.etype,'CWUser') - self.assertEqual(a1.ertype, 'CWUser') - self.assertEqual(a1.changes, None) - self.assertEqual(a1.public, True) - self.assertEqual(a1.order, 1) - a4 = actions[3] - self.assertEqual(a4.action, 'A') - self.assertEqual(a4.rtype, 'in_group') - self.assertEqual(a4.ertype, 'in_group') - self.assertEqual(a4.eid_from, self.totoeid) - self.assertEqual(a4.eid_to, self.toto(cnx).in_group[0].eid) - self.assertEqual(a4.order, 4) - for i, rtype in ((1, 'owned_by'), (2, 'owned_by')): - a = actions[i] - self.assertEqual(a.action, 'A') - self.assertEqual(a.eid_from, self.totoeid) - self.assertEqual(a.rtype, rtype) - self.assertEqual(a.order, i+1) - self.assertEqual(set((actions[4].rtype, actions[5].rtype)), - set(('in_state', 'created_by'))) - for i in (4, 5): - a = actions[i] - self.assertEqual(a.action, 'A') - self.assertEqual(a.eid_from, self.totoeid) - self.assertEqual(a.order, i+1) - - # test undoable_transactions 
- txs = cnx.undoable_transactions() - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, self.txuuid) - # test transaction_info / undoable_transactions security - with self.new_access('anon').client_cnx() as cnx: - self.assertRaises(NoSuchTransaction, - cnx.transaction_info, self.txuuid) - self.assertRaises(NoSuchTransaction, - cnx.transaction_actions, self.txuuid) - self.assertRaises(NoSuchTransaction, - cnx.undo_transaction, self.txuuid) - txs = cnx.undoable_transactions() - self.assertEqual(len(txs), 0) - - def test_undoable_transactions(self): - with self.admin_access.client_cnx() as cnx: - toto = self.toto(cnx) - e = cnx.create_entity('EmailAddress', - address=u'toto@logilab.org', - reverse_use_email=toto) - txuuid1 = cnx.commit() - toto.cw_delete() - txuuid2 = cnx.commit() - undoable_transactions = cnx.undoable_transactions - txs = undoable_transactions(action='D') - self.assertEqual(len(txs), 1, txs) - self.assertEqual(txs[0].uuid, txuuid2) - txs = undoable_transactions(action='C') - self.assertEqual(len(txs), 2, txs) - self.assertEqual(txs[0].uuid, txuuid1) - self.assertEqual(txs[1].uuid, self.txuuid) - txs = undoable_transactions(eid=toto.eid) - self.assertEqual(len(txs), 3) - self.assertEqual(txs[0].uuid, txuuid2) - self.assertEqual(txs[1].uuid, txuuid1) - self.assertEqual(txs[2].uuid, self.txuuid) - txs = undoable_transactions(etype='CWUser') - self.assertEqual(len(txs), 2) - txs = undoable_transactions(etype='CWUser', action='C') - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, self.txuuid) - txs = undoable_transactions(etype='EmailAddress', action='D') - self.assertEqual(len(txs), 0) - txs = undoable_transactions(etype='EmailAddress', action='D', - public=False) - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, txuuid2) - txs = undoable_transactions(eid=toto.eid, action='R', public=False) - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, txuuid2) - - def test_undo_deletion_base(self): - with self.admin_access.client_cnx() as cnx: - toto = self.toto(cnx) - e = cnx.create_entity('EmailAddress', - address=u'toto@logilab.org', - reverse_use_email=toto) - # entity with inlined relation - p = cnx.create_entity('CWProperty', - pkey=u'ui.default-text-format', - value=u'text/rest', - for_user=toto) - cnx.commit() - txs = cnx.undoable_transactions() - self.assertEqual(len(txs), 2) - toto.cw_delete() - txuuid = cnx.commit() - actions = cnx.transaction_info(txuuid).actions_list() - self.assertEqual(len(actions), 1) - toto.cw_clear_all_caches() - e.cw_clear_all_caches() - self.assertUndoTransaction(cnx, txuuid) - undotxuuid = cnx.commit() - self.assertEqual(undotxuuid, None) # undo not undoable - self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) - self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) - self.assertTrue(cnx.execute('Any X WHERE X has_text "toto@logilab"')) - self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated') - self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org') - self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user], - [('ui.default-text-format', 'text/rest')]) - self.assertEqual([g.name for g in toto.in_group], - ['users']) - self.assertEqual([et.name for et in toto.related('is', entities=True)], - ['CWUser']) - self.assertEqual([et.name for et in toto.is_instance_of], - ['CWUser']) - # undoing shouldn't be visble in undoable transaction, and the undone - # transaction should be removed - txs = 
cnx.undoable_transactions() - self.assertEqual(len(txs), 2) - self.assertRaises(NoSuchTransaction, - cnx.transaction_info, txuuid) - with self.admin_access.repo_cnx() as cnx: - self.check_transaction_deleted(cnx, txuuid) - # the final test: check we can login with the previously deleted user - with self.new_access('toto').client_cnx(): - pass - - def test_undo_deletion_integrity_1(self): - with self.admin_access.client_cnx() as cnx: - # 'Personne fiche Card with' '??' cardinality - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - p = cnx.create_entity('Personne', nom=u'louis', fiche=c) - cnx.commit() - c.cw_delete() - txuuid = cnx.commit() - c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') - p.cw_set(fiche=c2) - cnx.commit() - self.assertUndoTransaction(cnx, txuuid, [ - "Can't restore object relation fiche to entity " - "%s which is already linked using this relation." % p.eid]) - cnx.commit() - p.cw_clear_all_caches() - self.assertEqual(p.fiche[0].eid, c2.eid) - # we restored the card - self.assertTrue(cnx.entity_from_eid(c.eid)) - - def test_undo_deletion_integrity_2(self): - with self.admin_access.client_cnx() as cnx: - # test validation error raised if we can't restore a required relation - g = cnx.create_entity('CWGroup', name=u'staff') - cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid}) - self.toto(cnx).cw_set(in_group=g) - cnx.commit() - self.toto(cnx).cw_delete() - txuuid = cnx.commit() - g.cw_delete() - cnx.commit() - self.assertUndoTransaction(cnx, txuuid, [ - u"Can't restore relation in_group, object entity " - "%s doesn't exist anymore." % g.eid]) - with self.assertRaises(ValidationError) as cm: - cnx.commit() - cm.exception.translate(text_type) - self.assertEqual(cm.exception.entity, self.totoeid) - self.assertEqual(cm.exception.errors, - {'in_group-subject': u'at least one relation in_group is ' - 'required on CWUser (%s)' % self.totoeid}) - - def test_undo_creation_1(self): - with self.admin_access.client_cnx() as cnx: - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - p = cnx.create_entity('Personne', nom=u'louis', fiche=c) - txuuid = cnx.commit() - self.assertUndoTransaction(cnx, txuuid) - cnx.commit() - self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': c.eid})) - self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': p.eid})) - self.assertFalse(cnx.execute('Any X,Y WHERE X fiche Y')) - with self.admin_access.repo_cnx() as cnx: - for eid in (p.eid, c.eid): - self.assertFalse(cnx.system_sql( - 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall()) - self.assertFalse(cnx.system_sql( - 'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall()) - # added by sql in hooks (except when using dataimport) - self.assertFalse(cnx.system_sql( - 'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall()) - self.assertFalse(cnx.system_sql( - 'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall()) - self.check_transaction_deleted(cnx, txuuid) - - def test_undo_creation_integrity_1(self): - with self.admin_access.client_cnx() as cnx: - tutu = self.create_user(cnx, 'tutu', commit=False) - txuuid = cnx.commit() - email = cnx.create_entity('EmailAddress', address=u'tutu@cubicweb.org') - prop = cnx.create_entity('CWProperty', pkey=u'ui.default-text-format', - value=u'text/html') - tutu.cw_set(use_email=email, reverse_for_user=prop) - cnx.commit() - with self.assertRaises(ValidationError) as cm: - cnx.undo_transaction(txuuid) - self.assertEqual(cm.exception.entity, tutu.eid) 
- self.assertEqual(cm.exception.errors, - {None: 'some later transaction(s) touch entity, undo them first'}) - - def test_undo_creation_integrity_2(self): - with self.admin_access.client_cnx() as cnx: - g = cnx.create_entity('CWGroup', name=u'staff') - txuuid = cnx.commit() - cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid}) - self.toto(cnx).cw_set(in_group=g) - cnx.commit() - with self.assertRaises(ValidationError) as cm: - cnx.undo_transaction(txuuid) - self.assertEqual(cm.exception.entity, g.eid) - self.assertEqual(cm.exception.errors, - {None: 'some later transaction(s) touch entity, undo them first'}) - # self.assertEqual(errors, - # [u"Can't restore relation in_group, object entity " - # "%s doesn't exist anymore." % g.eid]) - # with self.assertRaises(ValidationError) as cm: cnx.commit() - # self.assertEqual(cm.exception.entity, self.totoeid) - # self.assertEqual(cm.exception.errors, - # {'in_group-subject': u'at least one relation in_group is ' - # 'required on CWUser (%s)' % self.totoeid}) - - # test implicit 'replacement' of an inlined relation - - def test_undo_inline_rel_remove_ok(self): - """Undo remove relation Personne (?) fiche (?) Card - - NB: processed by `_undo_r` as expected""" - with self.admin_access.client_cnx() as cnx: - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - p = cnx.create_entity('Personne', nom=u'louis', fiche=c) - cnx.commit() - p.cw_set(fiche=None) - txuuid = cnx.commit() - self.assertUndoTransaction(cnx, txuuid) - cnx.commit() - p.cw_clear_all_caches() - self.assertEqual(p.fiche[0].eid, c.eid) - - def test_undo_inline_rel_remove_ko(self): - """Restore an inlined relation to a deleted entity, with an error. - - NB: processed by `_undo_r` as expected""" - with self.admin_access.client_cnx() as cnx: - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - p = cnx.create_entity('Personne', nom=u'louis', fiche=c) - cnx.commit() - p.cw_set(fiche=None) - txuuid = cnx.commit() - c.cw_delete() - cnx.commit() - self.assertUndoTransaction(cnx, txuuid, [ - "Can't restore relation fiche, object entity %d doesn't exist anymore." % c.eid]) - cnx.commit() - p.cw_clear_all_caches() - self.assertFalse(p.fiche) - with self.admin_access.repo_cnx() as cnx: - self.assertIsNone(cnx.system_sql( - 'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0]) - - def test_undo_inline_rel_add_ok(self): - """Undo add relation Personne (?) fiche (?) Card - - Caution processed by `_undo_u`, not `_undo_a` !""" - with self.admin_access.client_cnx() as cnx: - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - p = cnx.create_entity('Personne', nom=u'louis') - cnx.commit() - p.cw_set(fiche=c) - txuuid = cnx.commit() - self.assertUndoTransaction(cnx, txuuid) - cnx.commit() - p.cw_clear_all_caches() - self.assertFalse(p.fiche) - - def test_undo_inline_rel_delete_ko(self): - with self.admin_access.client_cnx() as cnx: - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - txuuid = cnx.commit() - p = cnx.create_entity('Personne', nom=u'louis', fiche=c) - cnx.commit() - integrityerror = self.repo.sources_by_uri['system'].dbhelper.dbapi_module.IntegrityError - with self.assertRaises(integrityerror): - cnx.undo_transaction(txuuid) - - - def test_undo_inline_rel_add_ko(self): - """Undo add relation Personne (?) fiche (?) 
Card - - Caution processed by `_undo_u`, not `_undo_a` !""" - with self.admin_access.client_cnx() as cnx: - c = cnx.create_entity('Card', title=u'hop', content=u'hop') - p = cnx.create_entity('Personne', nom=u'louis') - cnx.commit() - p.cw_set(fiche=c) - txuuid = cnx.commit() - c.cw_delete() - cnx.commit() - self.assertUndoTransaction(cnx, txuuid) - - def test_undo_inline_rel_replace_ok(self): - """Undo changing relation Personne (?) fiche (?) Card - - Caution processed by `_undo_u` """ - with self.admin_access.client_cnx() as cnx: - c1 = cnx.create_entity('Card', title=u'hop', content=u'hop') - c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') - p = cnx.create_entity('Personne', nom=u'louis', fiche=c1) - cnx.commit() - p.cw_set(fiche=c2) - txuuid = cnx.commit() - self.assertUndoTransaction(cnx, txuuid) - cnx.commit() - p.cw_clear_all_caches() - self.assertEqual(p.fiche[0].eid, c1.eid) - - def test_undo_inline_rel_replace_ko(self): - """Undo changing relation Personne (?) fiche (?) Card, with an error - - Caution processed by `_undo_u` """ - with self.admin_access.client_cnx() as cnx: - c1 = cnx.create_entity('Card', title=u'hop', content=u'hop') - c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') - p = cnx.create_entity('Personne', nom=u'louis', fiche=c1) - cnx.commit() - p.cw_set(fiche=c2) - txuuid = cnx.commit() - c1.cw_delete() - cnx.commit() - self.assertUndoTransaction(cnx, txuuid, [ - "can't restore entity %s of type Personne, target of fiche (eid %s)" - " does not exist any longer" % (p.eid, c1.eid)]) - cnx.commit() - p.cw_clear_all_caches() - self.assertFalse(p.fiche) - - def test_undo_attr_update_ok(self): - with self.admin_access.client_cnx() as cnx: - p = cnx.create_entity('Personne', nom=u'toto') - cnx.commit() - p.cw_set(nom=u'titi') - txuuid = cnx.commit() - self.assertUndoTransaction(cnx, txuuid) - p.cw_clear_all_caches() - self.assertEqual(p.nom, u'toto') - - def test_undo_attr_update_ko(self): - with self.admin_access.client_cnx() as cnx: - p = cnx.create_entity('Personne', nom=u'toto') - cnx.commit() - p.cw_set(nom=u'titi') - txuuid = cnx.commit() - p.cw_delete() - cnx.commit() - self.assertUndoTransaction(cnx, txuuid, [ - u"can't restore state of entity %s, it has been deleted inbetween" % p.eid]) - - -class UndoExceptionInUnicode(CubicWebTC): - - # problem occurs in string manipulation for python < 2.6 - def test___unicode__method(self): - u = _UndoException(u"voilà") - self.assertIsInstance(text_type(u), text_type) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/test/unittest_utils.py --- a/server/test/unittest_utils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,43 +0,0 @@ -# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.server import utils - -class UtilsTC(TestCase): - def test_crypt(self): - for hash in ( - utils.crypt_password('xxx'), # default sha512 - b'ab$5UsKFxRKKN.d8iBIFBnQ80', # custom md5 - b'ab4Vlm81ZUHlg', # DES - ): - self.assertEqual(utils.crypt_password('xxx', hash), hash) - self.assertEqual(utils.crypt_password(u'xxx', hash), hash) - self.assertEqual(utils.crypt_password(u'xxx', hash.decode('ascii')), hash.decode('ascii')) - self.assertEqual(utils.crypt_password('yyy', hash), b'') - - # accept any password for empty hashes (is it a good idea?) - self.assertEqual(utils.crypt_password('xxx', ''), '') - self.assertEqual(utils.crypt_password('yyy', ''), '') - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 server/utils.py --- a/server/utils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,243 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Some utilities for the CubicWeb server.""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import logging -from threading import Timer, Thread -from getpass import getpass - -from six import PY2, text_type -from six.moves import input - -from passlib.utils import handlers as uh, to_hash_str -from passlib.context import CryptContext - -from cubicweb.md5crypt import crypt as md5crypt - - -class CustomMD5Crypt(uh.HasSalt, uh.GenericHandler): - name = 'cubicwebmd5crypt' - setting_kwds = ('salt',) - min_salt_size = 0 - max_salt_size = 8 - salt_chars = uh.H64_CHARS - - @classmethod - def from_string(cls, hash): - salt, chk = uh.parse_mc2(hash, u'') - if chk is None: - raise ValueError('missing checksum') - return cls(salt=salt, checksum=chk) - - def to_string(self): - return to_hash_str(u'%s$%s' % (self.salt, self.checksum or u'')) - - # passlib 1.5 wants calc_checksum, 1.6 wants _calc_checksum - def calc_checksum(self, secret): - return md5crypt(secret, self.salt.encode('ascii')).decode('utf-8') - _calc_checksum = calc_checksum - -_CRYPTO_CTX = CryptContext(['sha512_crypt', CustomMD5Crypt, 'des_crypt', 'ldap_salted_sha1'], - deprecated=['cubicwebmd5crypt', 'des_crypt']) -verify_and_update = _CRYPTO_CTX.verify_and_update - -def crypt_password(passwd, salt=None): - """return the encrypted password using the given salt or a generated one - """ - if salt is None: - return _CRYPTO_CTX.encrypt(passwd).encode('ascii') - # empty hash, accept any password for backwards compat - if salt == '': - return salt - try: - if _CRYPTO_CTX.verify(passwd, salt): - return salt - except ValueError: # e.g. 
couldn't identify hash - pass - # wrong password - return b'' - - -def eschema_eid(cnx, eschema): - """get eid of the CWEType entity for the given yams type. You should use - this because when schema has been loaded from the file-system, not from the - database, (e.g. during tests), eschema.eid is not set. - """ - if eschema.eid is None: - eschema.eid = cnx.execute( - 'Any X WHERE X is CWEType, X name %(name)s', - {'name': text_type(eschema)})[0][0] - return eschema.eid - - -DEFAULT_MSG = 'we need a manager connection on the repository \ -(the server doesn\'t have to run, even should better not)' - -def manager_userpasswd(user=None, msg=DEFAULT_MSG, confirm=False, - passwdmsg='password'): - if not user: - if msg: - print(msg) - while not user: - user = input('login: ') - if PY2: - user = unicode(user, sys.stdin.encoding) - passwd = getpass('%s: ' % passwdmsg) - if confirm: - while True: - passwd2 = getpass('confirm password: ') - if passwd == passwd2: - break - print('password doesn\'t match') - passwd = getpass('password: ') - # XXX decode password using stdin encoding then encode it using appl'encoding - return user, passwd - - -_MARKER = object() -def func_name(func): - name = getattr(func, '__name__', _MARKER) - if name is _MARKER: - name = getattr(func, 'func_name', _MARKER) - if name is _MARKER: - name = repr(func) - return name - -class LoopTask(object): - """threaded task restarting itself once executed""" - def __init__(self, tasks_manager, interval, func, args): - if interval < 0: - raise ValueError('Loop task interval must be >= 0 ' - '(current value: %f for %s)' % \ - (interval, func_name(func))) - self._tasks_manager = tasks_manager - self.interval = interval - def auto_restart_func(self=self, func=func, args=args): - restart = True - try: - func(*args) - except Exception: - logger = logging.getLogger('cubicweb.repository') - logger.exception('Unhandled exception in LoopTask %s', self.name) - raise - except BaseException: - restart = False - finally: - if restart and tasks_manager.running: - self.start() - self.func = auto_restart_func - self.name = func_name(func) - - def __str__(self): - return '%s (%s seconds)' % (self.name, self.interval) - - def start(self): - self._t = Timer(self.interval, self.func) - self._t.setName('%s-%s[%d]' % (self._t.getName(), self.name, self.interval)) - self._t.start() - - def cancel(self): - self._t.cancel() - - def join(self): - if self._t.isAlive(): - self._t.join() - - -class RepoThread(Thread): - """subclass of thread so it auto remove itself from a given list once - executed - """ - def __init__(self, target, running_threads): - def auto_remove_func(self=self, func=target): - try: - func() - except Exception: - logger = logging.getLogger('cubicweb.repository') - logger.exception('Unhandled exception in RepoThread %s', self._name) - raise - finally: - self.running_threads.remove(self) - Thread.__init__(self, target=auto_remove_func) - self.running_threads = running_threads - self._name = func_name(target) - - def start(self): - self.running_threads.append(self) - self.daemon = True - Thread.start(self) - - def getName(self): - return '%s(%s)' % (self._name, Thread.getName(self)) - -class TasksManager(object): - """Object dedicated manage background task""" - - def __init__(self): - self.running = False - self._tasks = [] - self._looping_tasks = [] - - def add_looping_task(self, interval, func, *args): - """register a function to be called every `interval` seconds. - - If interval is negative, no looping task is registered. 
- """ - if interval < 0: - self.debug('looping task %s ignored due to interval %f < 0', - func_name(func), interval) - return - task = LoopTask(self, interval, func, args) - if self.running: - self._start_task(task) - else: - self._tasks.append(task) - - def _start_task(self, task): - self._looping_tasks.append(task) - self.info('starting task %s with interval %.2fs', task.name, - task.interval) - task.start() - - def start(self): - """Start running looping task""" - assert self.running == False # bw compat purpose maintly - while self._tasks: - task = self._tasks.pop() - self._start_task(task) - self.running = True - - def stop(self): - """Stop all running task. - - returns when all task have been cancel and none are running anymore""" - if self.running: - while self._looping_tasks: - looptask = self._looping_tasks.pop() - self.info('canceling task %s...', looptask.name) - looptask.cancel() - looptask.join() - self.info('task %s finished', looptask.name) - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(TasksManager, getLogger('cubicweb.repository')) diff -r 058bb3dc685f -r 0b59724cb3f2 setup.py --- a/setup.py Mon Jan 04 18:40:30 2016 +0100 +++ b/setup.py Sat Jan 16 13:48:51 2016 +0100 @@ -24,7 +24,7 @@ import os import sys import shutil -from os.path import isdir, exists, join, walk +from os.path import dirname, exists, isdir, join try: if os.environ.get('NO_SETUPTOOLS'): @@ -38,30 +38,40 @@ USE_SETUPTOOLS = False from distutils.command import install_data +here = dirname(__file__) + # import required features -from __pkginfo__ import modname, version, license, description, web, \ - author, author_email +pkginfo = join(here, 'cubicweb', '__pkginfo__.py') +__pkginfo__ = {} +with open(pkginfo) as f: + exec(f.read(), __pkginfo__) +modname = __pkginfo__['modname'] +version = __pkginfo__['version'] +license = __pkginfo__['license'] +description = __pkginfo__['description'] +web = __pkginfo__['web'] +author = __pkginfo__['author'] +author_email = __pkginfo__['author_email'] long_description = open('README').read() # import optional features -import __pkginfo__ if USE_SETUPTOOLS: requires = {} for entry in ("__depends__",): # "__recommends__"): - requires.update(getattr(__pkginfo__, entry, {})) + requires.update(__pkginfo__.get(entry, {})) install_requires = [("%s %s" % (d, v and v or "")).strip() for d, v in requires.items()] else: install_requires = [] -distname = getattr(__pkginfo__, 'distname', modname) -scripts = getattr(__pkginfo__, 'scripts', ()) -include_dirs = getattr(__pkginfo__, 'include_dirs', ()) -data_files = getattr(__pkginfo__, 'data_files', None) -subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) -ext_modules = getattr(__pkginfo__, 'ext_modules', None) -package_data = getattr(__pkginfo__, 'package_data', {}) +distname = __pkginfo__.get('distname', modname) +scripts = __pkginfo__.get('scripts', ()) +include_dirs = __pkginfo__.get('include_dirs', ()) +data_files = __pkginfo__.get('data_files', None) +subpackage_of = __pkginfo__.get('subpackage_of', None) +ext_modules = __pkginfo__.get('ext_modules', None) +package_data = __pkginfo__.get('package_data', {}) BASE_BLACKLIST = ('CVS', 'dist', 'build', '__buildlog') IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc') @@ -100,33 +110,6 @@ blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS, verbose=True): - """make a mirror of from_dir in to_dir, omitting directories and files - listed in the black list - """ - def make_mirror(arg, directory, fnames): - """walk handler""" - for norecurs in 
blacklist: - try: - fnames.remove(norecurs) - except ValueError: - pass - for filename in fnames: - # don't include binary files - if filename[-4:] in ignore_ext: - continue - if filename[-1] == '~': - continue - src = '%s/%s' % (directory, filename) - dest = to_dir + src[len(from_dir):] - if verbose: - sys.stderr.write('%s -> %s\n' % (src, dest)) - if os.path.isdir(src): - if not exists(dest): - os.mkdir(dest) - else: - if exists(dest): - os.remove(dest) - shutil.copy2(src, dest) try: os.mkdir(to_dir) except OSError as ex: @@ -134,7 +117,27 @@ import errno if ex.errno != errno.EEXIST: raise - walk(from_dir, make_mirror, None) + for dirpath, dirnames, filenames in os.walk(from_dir): + for norecurs in blacklist: + try: + dirnames.remove(norecurs) + except ValueError: + pass + for dirname in dirnames: + dest = join(to_dir, dirname) + if not exists(dest): + os.mkdir(dest) + for filename in filenames: + # don't include binary files + src = join(dirpath, filename) + dest = to_dir + src[len(from_dir):] + if filename[-4:] in ignore_ext: + continue + if filename[-1] == '~': + continue + if exists(dest): + os.remove(dest) + shutil.copy2(src, dest) EMPTY_FILE = '"""generated file, don\'t modify or your data will be lost"""\n' @@ -219,8 +222,7 @@ if USE_SETUPTOOLS: kwargs['namespace_packages'] = [subpackage_of] else: - kwargs['package_dir'] = {modname : '.'} - packages = [modname] + get_packages(os.getcwd(), modname) + packages = [modname] + get_packages(join(here, modname), modname) if USE_SETUPTOOLS: kwargs['install_requires'] = install_requires kwargs['zip_safe'] = False diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/DISTNAME.spec.tmpl --- a/skeleton/DISTNAME.spec.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# for el5, force use of python2.6 -%%if 0%%{?el5} -%%define python python26 -%%define __python /usr/bin/python2.6 -%%else -%%define python python -%%define __python /usr/bin/python -%%endif -%%{!?_python_sitelib: %%define _python_sitelib %%(%%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")} - -Name: %(distname)s -Version: 0.1.0 -Release: logilab.1%%{?dist} -Summary: %(shortdesc)s -Group: Applications/Internet -License: %(license)s -Source0: %(distname)s-%%{version}.tar.gz - -BuildArch: noarch -BuildRoot: %%{_tmppath}/%%{name}-%%{version}-%%{release}-buildroot - -BuildRequires: %%{python} %%{python}-setuptools -Requires: cubicweb >= %(version)s -Requires: %%{python}-six >= 1.4.0 - -%%description -%(longdesc)s - -%%prep -%%setup -q -n %(distname)s-%%{version} -%%if 0%%{?el5} -# change the python version in shebangs -find . -name '*.py' -type f -print0 | xargs -0 sed -i '1,3s;^#!.*python.*$;#! 
/usr/bin/python2.6;' -%%endif - -%%install -NO_SETUPTOOLS=1 %%{__python} setup.py --quiet install --no-compile --prefix=%%{_prefix} --root="$RPM_BUILD_ROOT" -# remove generated .egg-info file -rm -rf $RPM_BUILD_ROOT/usr/lib/python* - - -%%clean -rm -rf $RPM_BUILD_ROOT - -%%files -%%defattr(-, root, root) -/* diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/MANIFEST.in --- a/skeleton/MANIFEST.in Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -include *.py -include */*.py -recursive-include data *.gif *.png *.ico *.css *.js -recursive-include i18n *.po -recursive-include wdoc * diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/README.tmpl --- a/skeleton/README.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -Summary -------- -%(longdesc)s diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/__init__.py.tmpl --- a/skeleton/__init__.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -"""cubicweb-%(cubename)s application package - -%(longdesc)s -""" diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/__pkginfo__.py.tmpl --- a/skeleton/__pkginfo__.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,49 +0,0 @@ -# pylint: disable=W0622 -"""%(distname)s application packaging information""" - -modname = '%(cubename)s' -distname = '%(distname)s' - -numversion = (0, 1, 0) -version = '.'.join(str(num) for num in numversion) - -license = '%(license)s' -author = '%(author)s' -author_email = '%(author-email)s' -description = '%(shortdesc)s' -web = 'http://www.cubicweb.org/project/%%s' %% distname - -__depends__ = %(dependencies)s -__recommends__ = {} - -classifiers = [ - 'Environment :: Web Environment', - 'Framework :: CubicWeb', - 'Programming Language :: Python', - 'Programming Language :: JavaScript', - ] - -from os import listdir as _listdir -from os.path import join, isdir -from glob import glob - -THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) - - -def listdir(dirpath): - return [join(dirpath, fname) for fname in _listdir(dirpath) - if fname[0] != '.' 
and not fname.endswith('.pyc') - and not fname.endswith('~') - and not isdir(join(dirpath, fname))] - -data_files = [ - # common files - [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], - ] -# check for possible extended cube layout -for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', - 'wdoc', 'i18n', 'migration'): - if isdir(dname): - data_files.append([join(THIS_CUBE_DIR, dname), listdir(dname)]) -# Note: here, you'll need to add subdirectories if you want -# them to be included in the debian package diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/data/cubes.CUBENAME.css --- a/skeleton/data/cubes.CUBENAME.css Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -/* cube-specific CSS */ diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/data/cubes.CUBENAME.js --- a/skeleton/data/cubes.CUBENAME.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -// This contains cube-specific javascript \ No newline at end of file diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/debian/changelog.tmpl --- a/skeleton/debian/changelog.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -%(distname)s (0.1.0-1) unstable; urgency=low - - * initial release - - -- - diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/debian/compat --- a/skeleton/debian/compat Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -7 diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/debian/control.tmpl --- a/skeleton/debian/control.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,24 +0,0 @@ -Source: %(distname)s -Section: web -Priority: optional -Maintainer: %(author)s <%(author-email)s> -Build-Depends: - debhelper (>= 7), - python (>= 2.6.5), -Standards-Version: 3.9.3 -X-Python-Version: >= 2.6 - -Package: %(distname)s -Architecture: all -Depends: - cubicweb-common (>= %(version)s), - python-six (>= 1.4.0), - ${python:Depends}, - ${misc:Depends}, -Description: %(shortdesc)s - CubicWeb is a semantic web application framework. - . - %(longdesc)s - . - This package will install all the components you need to run the - %(distname)s application (cube :).. diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/debian/copyright.tmpl --- a/skeleton/debian/copyright.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -Upstream Author: - - %(author)s <%(author-email)s> - -Copyright: - -Copyright (c) %(year)s %(author)s. -%(author-web-site)s -- mailto:%(author-email)s diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/debian/rules --- a/skeleton/debian/rules Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -#!/usr/bin/make -f - -export NO_SETUPTOOLS=1 - -%: - dh $@ --with python2 - -override_dh_python2: - dh_python2 -i /usr/share/cubicweb diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/entities.py.tmpl --- a/skeleton/entities.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. 
-# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s entity's classes""" diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/hooks.py.tmpl --- a/skeleton/hooks.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. -# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s specific hooks and operations""" diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/i18n/en.po --- a/skeleton/i18n/en.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -msgid "" -msgstr "" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n" - diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/i18n/es.po --- a/skeleton/i18n/es.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -msgid "" -msgstr "" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n" - diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/i18n/fr.po --- a/skeleton/i18n/fr.po Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -msgid "" -msgstr "" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n" - diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/migration/postcreate.py.tmpl --- a/skeleton/migration/postcreate.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. -# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s postcreate script, executed at instance creation time or when -the cube is added to an existing instance. - -You could setup site properties or a workflow here for example. -""" - -# Example of site property change -#set_property('ui.site-title', "") diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/migration/precreate.py.tmpl --- a/skeleton/migration/precreate.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. -# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s precreate script, executed at instance creation time or when -the cube is added to an existing instance, before the schema is serialized. - -This is typically to create groups referenced by the cube'schema. -""" diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/schema.py.tmpl --- a/skeleton/schema.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. 
-# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s schema""" diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/setup.py --- a/skeleton/setup.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,206 +0,0 @@ -#!/usr/bin/env python -# pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611 -# -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with CubicWeb. If not, see . -"""Generic Setup script, takes package info from __pkginfo__.py file -""" -__docformat__ = "restructuredtext en" - -import os -import sys -import shutil -from os.path import exists, join, walk - -try: - if os.environ.get('NO_SETUPTOOLS'): - raise ImportError() # do as there is no setuptools - from setuptools import setup - from setuptools.command import install_lib - USE_SETUPTOOLS = True -except ImportError: - from distutils.core import setup - from distutils.command import install_lib - USE_SETUPTOOLS = False -from distutils.command import install_data - -# import required features -from __pkginfo__ import modname, version, license, description, web, \ - author, author_email, classifiers - -if exists('README'): - long_description = open('README').read() -else: - long_description = '' - -# import optional features -import __pkginfo__ -if USE_SETUPTOOLS: - requires = {} - for entry in ("__depends__",): # "__recommends__"): - requires.update(getattr(__pkginfo__, entry, {})) - install_requires = [("%s %s" % (d, v and v or "")).strip() - for d, v in requires.items()] -else: - install_requires = [] - -distname = getattr(__pkginfo__, 'distname', modname) -scripts = getattr(__pkginfo__, 'scripts', ()) -include_dirs = getattr(__pkginfo__, 'include_dirs', ()) -data_files = getattr(__pkginfo__, 'data_files', None) -ext_modules = getattr(__pkginfo__, 'ext_modules', None) -dependency_links = getattr(__pkginfo__, 'dependency_links', ()) - -BASE_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') -IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~') - - -def ensure_scripts(linux_scripts): - """ - Creates the proper script names required for each platform - (taken from 4Suite) - """ - from distutils import util - if util.get_platform()[:3] == 'win': - scripts_ = [script + '.bat' for script in linux_scripts] - else: - scripts_ = linux_scripts - return scripts_ - - -def export(from_dir, to_dir, - blacklist=BASE_BLACKLIST, - ignore_ext=IGNORED_EXTENSIONS, - verbose=True): - """make a mirror of from_dir in to_dir, omitting directories and files - listed in the black list - """ - def make_mirror(arg, directory, fnames): - """walk handler""" - for norecurs in blacklist: - try: - fnames.remove(norecurs) - except ValueError: - pass - for filename in fnames: - # don't include binary files - if filename[-4:] in ignore_ext: - continue - if 
filename[-1] == '~': - continue - src = join(directory, filename) - dest = to_dir + src[len(from_dir):] - if verbose: - sys.stderr.write('%s -> %s\n' % (src, dest)) - if os.path.isdir(src): - if not exists(dest): - os.mkdir(dest) - else: - if exists(dest): - os.remove(dest) - shutil.copy2(src, dest) - try: - os.mkdir(to_dir) - except OSError as ex: - # file exists ? - import errno - if ex.errno != errno.EEXIST: - raise - walk(from_dir, make_mirror, None) - - -class MyInstallLib(install_lib.install_lib): - """extend install_lib command to handle package __init__.py and - include_dirs variable if necessary - """ - def run(self): - """overridden from install_lib class""" - install_lib.install_lib.run(self) - # manually install included directories if any - if include_dirs: - base = modname - for directory in include_dirs: - dest = join(self.install_dir, base, directory) - export(directory, dest, verbose=False) - -# re-enable copying data files in sys.prefix -old_install_data = install_data.install_data -if USE_SETUPTOOLS: - # overwrite InstallData to use sys.prefix instead of the egg directory - class MyInstallData(old_install_data): - """A class that manages data files installation""" - def run(self): - _old_install_dir = self.install_dir - if self.install_dir.endswith('egg'): - self.install_dir = sys.prefix - old_install_data.run(self) - self.install_dir = _old_install_dir - try: - # only if easy_install available - import setuptools.command.easy_install # noqa - # monkey patch: Crack SandboxViolation verification - from setuptools.sandbox import DirectorySandbox as DS - old_ok = DS._ok - - def _ok(self, path): - """Return True if ``path`` can be written during installation.""" - out = old_ok(self, path) # here for side effect from setuptools - realpath = os.path.normcase(os.path.realpath(path)) - allowed_path = os.path.normcase(sys.prefix) - if realpath.startswith(allowed_path): - out = True - return out - DS._ok = _ok - except ImportError: - pass - - -def install(**kwargs): - """setup entry point""" - if USE_SETUPTOOLS: - if '--force-manifest' in sys.argv: - sys.argv.remove('--force-manifest') - # install-layout option was introduced in 2.5.3-1~exp1 - elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv: - sys.argv.remove('--install-layout=deb') - cmdclass = {'install_lib': MyInstallLib} - if USE_SETUPTOOLS: - kwargs['install_requires'] = install_requires - kwargs['dependency_links'] = dependency_links - kwargs['zip_safe'] = False - cmdclass['install_data'] = MyInstallData - - return setup(name=distname, - version=version, - license=license, - description=description, - long_description=long_description, - author=author, - author_email=author_email, - url=web, - scripts=ensure_scripts(scripts), - data_files=data_files, - ext_modules=ext_modules, - cmdclass=cmdclass, - classifiers=classifiers, - **kwargs - ) - - -if __name__ == '__main__': - install() diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/sobjects.py.tmpl --- a/skeleton/sobjects.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. 
-# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s repository side views, usually for notification""" diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/test/data/bootstrap_cubes.tmpl --- a/skeleton/test/data/bootstrap_cubes.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -%(cubename)s diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/test/pytestconf.py --- a/skeleton/test/pytestconf.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with CubicWeb. If not, see . -""" - -""" -import os -import sys - -from logilab.common.pytest import PyTester - - -def getlogin(): - """avoid usinng os.getlogin() because of strange tty / stdin problems - (man 3 getlogin) - Another solution would be to use $LOGNAME, $USER or $USERNAME - """ - if sys.platform == 'win32': - return os.environ.get('USERNAME') or 'cubicweb' - import pwd - return pwd.getpwuid(os.getuid())[0] - - -def update_parser(parser): - login = getlogin() - parser.add_option('-r', '--rebuild-database', dest='rebuild_db', - default=False, action="store_true", - help="remove tmpdb and rebuilds the test database") - parser.add_option('-u', '--dbuser', dest='dbuser', action='store', - default=login, help="database user") - parser.add_option('-w', '--dbpassword', dest='dbpassword', action='store', - default=login, help="database user's password") - parser.add_option('-n', '--dbname', dest='dbname', action='store', - default=None, help="database name") - parser.add_option('--euser', dest='euser', action='store', - default=login, help="euser name") - parser.add_option('--epassword', dest='epassword', action='store', - default=login, help="euser's password' name") - return parser - - -class CustomPyTester(PyTester): - def __init__(self, cvg, options): - super(CustomPyTester, self).__init__(cvg, options) - if options.rebuild_db: - os.unlink('tmpdb') - os.unlink('tmpdb-template') diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/test/realdb_test_CUBENAME.py --- a/skeleton/test/realdb_test_CUBENAME.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with CubicWeb. If not, see . -""" - -""" -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.realdbtest import buildconfig, loadconfig - - -def setUpModule(options): - if options.source: - configcls = loadconfig(options.source) - elif options.dbname is None: - raise Exception('either or options are required') - else: - configcls = buildconfig(options.dbuser, options.dbpassword, - options.dbname, - options.euser, options.epassword) - RealDatabaseTC.configcls = configcls - - -class RealDatabaseTC(CubicWebTC): - configcls = None # set by setUpModule() - - def test_all_primaries(self): - for rset in self.iter_individual_rsets(limit=50): - yield self.view, 'primary', rset, rset.req.reset_headers() - - ## startup views - def test_startup_views(self): - for vid in self.list_startup_views(): - req = self.request() - yield self.view, vid, None, req - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/test/test_CUBENAME.py.tmpl --- a/skeleton/test/test_CUBENAME.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -# copyright %(year)s %(author)s, all rights reserved. -# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s automatic tests - - -uncomment code below if you want to activate automatic test for your cube: - -.. sourcecode:: python - - from cubicweb.devtools.testlib import AutomaticWebTest - - class AutomaticWebTest(AutomaticWebTest): - '''provides `to_test_etypes` and/or `list_startup_views` implementation - to limit test scope - ''' - - def to_test_etypes(self): - '''only test views for entities of the returned types''' - return set(('My', 'Cube', 'Entity', 'Types')) - - def list_startup_views(self): - '''only test startup views of the returned identifiers''' - return ('some', 'startup', 'views') -""" - -from cubicweb.devtools import testlib - - -class DefaultTC(testlib.CubicWebTC): - def test_something(self): - self.skipTest('this cube has no test') - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/uiprops.py.tmpl --- a/skeleton/uiprops.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################### -# -# Put here information about external resources / styles used by your cube, -# or to overides existing UI properties. -# -# Existing properties are available through the `sheet` dictionary available -# in the global namespace. You also have access to a `data` function which -# will return proper url for resources in the 'data' directory. 
-# -# /!\ this file should not be imported /!\ -############################################################################### - -# CSS stylesheets to include in HTML headers -# uncomment the line below to use template specific stylesheet -# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')] diff -r 058bb3dc685f -r 0b59724cb3f2 skeleton/views.py.tmpl --- a/skeleton/views.py.tmpl Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright %(year)s %(author)s, all rights reserved. -# contact %(author-web-site)s -- mailto:%(author-email)s -# -%(long-license)s -"""%(distname)s views/forms/actions/components for web ui""" diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/__init__.py --- a/sobjects/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""server side objects""" - -import os.path as osp - -def registration_callback(vreg): - vreg.register_all(globals().values(), __name__) - global URL_MAPPING - URL_MAPPING = {} - if vreg.config.apphome: - url_mapping_file = osp.join(vreg.config.apphome, 'urlmapping.py') - if osp.exists(url_mapping_file): - URL_MAPPING = eval(open(url_mapping_file).read()) - vreg.info('using url mapping %s from %s', URL_MAPPING, url_mapping_file) diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/cwxmlparser.py --- a/sobjects/cwxmlparser.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,500 +0,0 @@ -# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""datafeed parser for xml generated by cubicweb - -Example of mapping for CWEntityXMLParser:: - - {u'CWUser': { # EntityType - (u'in_group', u'subject', u'link'): [ # (rtype, role, action) - (u'CWGroup', {u'linkattr': u'name'})], # -> rules = [(EntityType, options), ...] - (u'tags', u'object', u'link-or-create'): [ # (...) - (u'Tag', {u'linkattr': u'name'})], # -> ... - (u'use_email', u'subject', u'copy'): [ # (...) 
- (u'EmailAddress', {})] # -> ... - } - } - -""" - -from datetime import datetime, time -import urllib - -from six import text_type -from six.moves.urllib.parse import urlparse, urlunparse, parse_qs, urlencode - -import pytz -from logilab.common.date import todate, totime -from logilab.common.textutils import splitstrip, text_to_dict -from logilab.common.decorators import classproperty - -from yams.constraints import BASE_CONVERTERS -from yams.schema import role_name as rn - -from cubicweb import ValidationError, RegistryException -from cubicweb.view import Component -from cubicweb.server.sources import datafeed -from cubicweb.server.hook import match_rtype - -# XXX see cubicweb.cwvreg.YAMS_TO_PY -# XXX see cubicweb.web.views.xmlrss.SERIALIZERS -DEFAULT_CONVERTERS = BASE_CONVERTERS.copy() -DEFAULT_CONVERTERS['String'] = text_type -DEFAULT_CONVERTERS['Password'] = lambda x: x.encode('utf8') -def convert_date(ustr): - return todate(datetime.strptime(ustr, '%Y-%m-%d')) -DEFAULT_CONVERTERS['Date'] = convert_date -def convert_datetime(ustr): - if '.' in ustr: # assume %Y-%m-%d %H:%M:%S.mmmmmm - ustr = ustr.split('.', 1)[0] - return datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S') -DEFAULT_CONVERTERS['Datetime'] = convert_datetime -# XXX handle timezone, though this will be enough as TZDatetime are -# serialized without time zone by default (UTC time). See -# cw.web.views.xmlrss.SERIALIZERS. -def convert_tzdatetime(ustr): - date = convert_datetime(ustr) - date = date.replace(tzinfo=pytz.utc) - return date -DEFAULT_CONVERTERS['TZDatetime'] = convert_tzdatetime -def convert_time(ustr): - return totime(datetime.strptime(ustr, '%H:%M:%S')) -DEFAULT_CONVERTERS['Time'] = convert_time -DEFAULT_CONVERTERS['TZTime'] = convert_time -def convert_interval(ustr): - return time(seconds=int(ustr)) -DEFAULT_CONVERTERS['Interval'] = convert_interval - -def extract_typed_attrs(eschema, stringdict, converters=DEFAULT_CONVERTERS): - typeddict = {} - for rschema in eschema.subject_relations(): - if rschema.final and rschema in stringdict: - if rschema in ('eid', 'cwuri', 'cwtype', 'cwsource'): - continue - attrtype = eschema.destination(rschema) - value = stringdict[rschema] - if value is not None: - value = converters[attrtype](value) - typeddict[rschema.type] = value - return typeddict - -def rtype_role_rql(rtype, role): - if role == 'object': - return 'Y %s X WHERE X eid %%(x)s' % rtype - else: - return 'X %s Y WHERE X eid %%(x)s' % rtype - - -class CWEntityXMLParser(datafeed.DataFeedXMLParser): - """datafeed parser for the 'xml' entity view - - Most of the logic is delegated to the following components: - - * an "item builder" component, turning an etree xml node into a specific - python dictionary representing an entity - - * "action" components, selected given an entity, a relation and its role in - the relation, and responsible to link the entity to given related items - (eg dictionary) - - So the parser is only doing the gluing service and the connection to the - source. 
- """ - __regid__ = 'cw.entityxml' - - def __init__(self, *args, **kwargs): - super(CWEntityXMLParser, self).__init__(*args, **kwargs) - self._parsed_urls = {} - self._processed_entities = set() - - def select_linker(self, action, rtype, role, entity=None): - try: - return self._cw.vreg['components'].select( - 'cw.entityxml.action.%s' % action, self._cw, entity=entity, - rtype=rtype, role=role, parser=self) - except RegistryException: - raise RegistryException('Unknown action %s' % action) - - def list_actions(self): - reg = self._cw.vreg['components'] - return sorted(clss[0].action for rid, clss in reg.items() - if rid.startswith('cw.entityxml.action.')) - - # mapping handling ######################################################### - - def add_schema_config(self, schemacfg, checkonly=False): - """added CWSourceSchemaConfig, modify mapping accordingly""" - _ = self._cw._ - try: - rtype = schemacfg.schema.rtype.name - except AttributeError: - msg = _("entity and relation types can't be mapped, only attributes " - "or relations") - raise ValidationError(schemacfg.eid, {rn('cw_for_schema', 'subject'): msg}) - if schemacfg.options: - options = text_to_dict(schemacfg.options) - else: - options = {} - try: - role = options.pop('role') - if role not in ('subject', 'object'): - raise KeyError - except KeyError: - msg = _('"role=subject" or "role=object" must be specified in options') - raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg}) - try: - action = options.pop('action') - linker = self.select_linker(action, rtype, role) - linker.check_options(options, schemacfg.eid) - except KeyError: - msg = _('"action" must be specified in options; allowed values are ' - '%s') % ', '.join(self.list_actions()) - raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg}) - except RegistryException: - msg = _('allowed values for "action" are %s') % ', '.join(self.list_actions()) - raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg}) - if not checkonly: - if role == 'subject': - etype = schemacfg.schema.stype.name - ttype = schemacfg.schema.otype.name - else: - etype = schemacfg.schema.otype.name - ttype = schemacfg.schema.stype.name - etyperules = self.source.mapping.setdefault(etype, {}) - etyperules.setdefault((rtype, role, action), []).append( - (ttype, options)) - self.source.mapping_idx[schemacfg.eid] = ( - etype, rtype, role, action, ttype) - - def del_schema_config(self, schemacfg, checkonly=False): - """deleted CWSourceSchemaConfig, modify mapping accordingly""" - etype, rtype, role, action, ttype = self.source.mapping_idx[schemacfg.eid] - rules = self.source.mapping[etype][(rtype, role, action)] - rules = [x for x in rules if not x[0] == ttype] - if not rules: - del self.source.mapping[etype][(rtype, role, action)] - - # import handling ########################################################## - - def process(self, url, raise_on_error=False): - """IDataFeedParser main entry point""" - if url.startswith('http'): # XXX similar loose test as in parse of sources.datafeed - url = self.complete_url(url) - super(CWEntityXMLParser, self).process(url, raise_on_error) - - def parse_etree(self, parent): - for node in list(parent): - builder = self._cw.vreg['components'].select( - 'cw.entityxml.item-builder', self._cw, node=node, - parser=self) - yield builder.build_item() - - def process_item(self, item, rels, raise_on_error=False): - """ - item and rels are what's returned by the item builder `build_item` method: - - * `item` is an {attribute: value} dictionary 
- * `rels` is for relations and structured as - {role: {relation: [(related item, related rels)...]} - """ - entity = self.extid2entity(item['cwuri'].encode('ascii'), item['cwtype'], - cwsource=item['cwsource'], item=item, - raise_on_error=raise_on_error) - if entity is None: - return None - if entity.eid in self._processed_entities: - return entity - self._processed_entities.add(entity.eid) - if not (self.created_during_pull(entity) or self.updated_during_pull(entity)): - attrs = extract_typed_attrs(entity.e_schema, item) - self.update_if_necessary(entity, attrs) - self.process_relations(entity, rels) - return entity - - def process_relations(self, entity, rels): - etype = entity.cw_etype - for (rtype, role, action), rules in self.source.mapping.get(etype, {}).items(): - try: - related_items = rels[role][rtype] - except KeyError: - self.import_log.record_error('relation %s-%s not found in xml export of %s' - % (rtype, role, etype)) - continue - try: - linker = self.select_linker(action, rtype, role, entity) - except RegistryException: - self.import_log.record_error('no linker for action %s' % action) - else: - linker.link_items(related_items, rules) - - def before_entity_copy(self, entity, sourceparams): - """IDataFeedParser callback""" - attrs = extract_typed_attrs(entity.e_schema, sourceparams['item']) - entity.cw_edited.update(attrs) - - def normalize_url(self, url): - """overridden to add vid=xml if vid is not set in the qs""" - url = super(CWEntityXMLParser, self).normalize_url(url) - purl = urlparse(url) - if purl.scheme in ('http', 'https'): - params = parse_qs(purl.query) - if 'vid' not in params: - params['vid'] = ['xml'] - purl = list(purl) - purl[4] = urlencode(params, doseq=True) - return urlunparse(purl) - return url - - def complete_url(self, url, etype=None, known_relations=None): - """append to the url's query string information about relation that should - be included in the resulting xml, according to source mapping. - - If etype is not specified, try to guess it using the last path part of - the url, i.e. the format used by default in cubicweb to map all entities - of a given type as in 'http://mysite.org/EntityType'. - - If `known_relations` is given, it should be a dictionary of already - known relations, so they don't get queried again. 
- """ - purl = urlparse(url) - params = parse_qs(purl.query) - if etype is None: - etype = purl.path.split('/')[-1] - try: - etype = self._cw.vreg.case_insensitive_etypes[etype.lower()] - except KeyError: - return url - relations = params['relation'] = set(params.get('relation', ())) - for rtype, role, _ in self.source.mapping.get(etype, ()): - if known_relations and rtype in known_relations.get('role', ()): - continue - relations.add('%s-%s' % (rtype, role)) - purl = list(purl) - purl[4] = urlencode(params, doseq=True) - return urlunparse(purl) - - def complete_item(self, item, rels): - try: - return self._parsed_urls[item['cwuri']] - except KeyError: - itemurl = self.complete_url(item['cwuri'], item['cwtype'], rels) - item_rels = list(self.parse(itemurl)) - assert len(item_rels) == 1, 'url %s expected to bring back one '\ - 'and only one entity, got %s' % (itemurl, len(item_rels)) - self._parsed_urls[item['cwuri']] = item_rels[0] - if rels: - # XXX (do it better) merge relations - new_rels = item_rels[0][1] - new_rels.get('subject', {}).update(rels.get('subject', {})) - new_rels.get('object', {}).update(rels.get('object', {})) - return item_rels[0] - - -class CWEntityXMLItemBuilder(Component): - __regid__ = 'cw.entityxml.item-builder' - - def __init__(self, _cw, parser, node, **kwargs): - super(CWEntityXMLItemBuilder, self).__init__(_cw, **kwargs) - self.parser = parser - self.node = node - - def build_item(self): - """parse a XML document node and return two dictionaries defining (part - of) an entity: - - - {attribute: value} - - {role: {relation: [(related item, related rels)...]} - """ - node = self.node - item = dict(node.attrib.items()) - item['cwtype'] = text_type(node.tag) - item.setdefault('cwsource', None) - try: - item['eid'] = int(item['eid']) - except KeyError: - # cw < 3.11 compat mode XXX - item['eid'] = int(node.find('eid').text) - item['cwuri'] = node.find('cwuri').text - rels = {} - for child in node: - role = child.get('role') - if role: - # relation - related = rels.setdefault(role, {}).setdefault(child.tag, []) - related += self.parser.parse_etree(child) - elif child.text: - # attribute - item[child.tag] = text_type(child.text) - else: - # None attribute (empty tag) - item[child.tag] = None - return item, rels - - -class CWEntityXMLActionCopy(Component): - """implementation of cubicweb entity xml parser's'copy' action - - Takes no option. 
- """ - __regid__ = 'cw.entityxml.action.copy' - - def __init__(self, _cw, parser, rtype, role, entity=None, **kwargs): - super(CWEntityXMLActionCopy, self).__init__(_cw, **kwargs) - self.parser = parser - self.rtype = rtype - self.role = role - self.entity = entity - - @classproperty - def action(cls): - return cls.__regid__.rsplit('.', 1)[-1] - - def check_options(self, options, eid): - self._check_no_options(options, eid) - - def _check_no_options(self, options, eid, msg=None): - if options: - if msg is None: - msg = self._cw._("'%s' action doesn't take any options") % self.action - raise ValidationError(eid, {rn('options', 'subject'): msg}) - - def link_items(self, others, rules): - assert not any(x[1] for x in rules), "'copy' action takes no option" - ttypes = frozenset([x[0] for x in rules]) - eids = [] # local eids - for item, rels in others: - if item['cwtype'] in ttypes: - item, rels = self.parser.complete_item(item, rels) - other_entity = self.parser.process_item(item, rels) - if other_entity is not None: - eids.append(other_entity.eid) - if eids: - self._set_relation(eids) - else: - self._clear_relation(ttypes) - - def _clear_relation(self, ttypes): - if not self.parser.created_during_pull(self.entity): - if len(ttypes) > 1: - typerestr = ', Y is IN(%s)' % ','.join(ttypes) - else: - typerestr = ', Y is %s' % ','.join(ttypes) - self._cw.execute('DELETE ' + rtype_role_rql(self.rtype, self.role) + typerestr, - {'x': self.entity.eid}) - - def _set_relation(self, eids): - assert eids - rtype = self.rtype - rqlbase = rtype_role_rql(rtype, self.role) - eidstr = ','.join(str(eid) for eid in eids) - self._cw.execute('DELETE %s, NOT Y eid IN (%s)' % (rqlbase, eidstr), - {'x': self.entity.eid}) - if self.role == 'object': - rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rqlbase, eidstr, rtype) - else: - rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rqlbase, eidstr, rtype) - self._cw.execute(rql, {'x': self.entity.eid}) - - -class CWEntityXMLActionLink(CWEntityXMLActionCopy): - """implementation of cubicweb entity xml parser's'link' action - - requires a 'linkattr' option to control search of the linked entity. 
- """ - __regid__ = 'cw.entityxml.action.link' - - def check_options(self, options, eid): - if not 'linkattr' in options: - msg = self._cw._("'%s' action requires 'linkattr' option") % self.action - raise ValidationError(eid, {rn('options', 'subject'): msg}) - - create_when_not_found = False - - def link_items(self, others, rules): - for ttype, options in rules: - searchattrs = splitstrip(options.get('linkattr', '')) - self._related_link(ttype, others, searchattrs) - - def _related_link(self, ttype, others, searchattrs): - def issubset(x, y): - return all(z in y for z in x) - eids = [] # local eids - log = self.parser.import_log - for item, rels in others: - if item['cwtype'] != ttype: - continue - if not issubset(searchattrs, item): - item, rels = self.parser.complete_item(item, rels) - if not issubset(searchattrs, item): - log.record_error('missing attribute, got %s expected keys %s' - % (item, searchattrs)) - continue - # XXX str() needed with python < 2.6 - kwargs = dict((str(attr), item[attr]) for attr in searchattrs) - targets = self._find_entities(item, kwargs) - if len(targets) == 1: - entity = targets[0] - elif not targets and self.create_when_not_found: - entity = self._cw.create_entity(item['cwtype'], **kwargs) - else: - if len(targets) > 1: - log.record_error('ambiguous link: found %s entity %s with attributes %s' - % (len(targets), item['cwtype'], kwargs)) - else: - log.record_error('can not find %s entity with attributes %s' - % (item['cwtype'], kwargs)) - continue - eids.append(entity.eid) - self.parser.process_relations(entity, rels) - if eids: - self._set_relation(eids) - else: - self._clear_relation((ttype,)) - - def _find_entities(self, item, kwargs): - return tuple(self._cw.find(item['cwtype'], **kwargs).entities()) - - -class CWEntityXMLActionLinkInState(CWEntityXMLActionLink): - """custom implementation of cubicweb entity xml parser's'link' action for - in_state relation - """ - __select__ = match_rtype('in_state') - - def check_options(self, options, eid): - super(CWEntityXMLActionLinkInState, self).check_options(options, eid) - if not 'name' in options['linkattr']: - msg = self._cw._("'%s' action for in_state relation should at least have 'linkattr=name' option") % self.action - raise ValidationError(eid, {rn('options', 'subject'): msg}) - - def _find_entities(self, item, kwargs): - assert 'name' in item # XXX else, complete_item - state_name = item['name'] - wf = self.entity.cw_adapt_to('IWorkflowable').current_workflow - state = wf.state_by_name(state_name) - if state is None: - return () - return (state,) - - -class CWEntityXMLActionLinkOrCreate(CWEntityXMLActionLink): - """implementation of cubicweb entity xml parser's'link-or-create' action - - requires a 'linkattr' option to control search of the linked entity. - """ - __regid__ = 'cw.entityxml.action.link-or-create' - create_when_not_found = True diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/ldapparser.py --- a/sobjects/ldapparser.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,255 +0,0 @@ -# copyright 2011-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb ldap feed source - -unlike ldapuser source, this source is copy based and will import ldap content -(beside passwords for authentication) into the system source. -""" -from six.moves import map, filter - -from logilab.common.decorators import cached, cachedproperty -from logilab.common.shellutils import generate_password - -from cubicweb import Binary, ConfigurationError -from cubicweb.server.utils import crypt_password -from cubicweb.server.sources import datafeed -from cubicweb.dataimport import stores, importer - - -class UserMetaGenerator(stores.MetaGenerator): - """Specific metadata generator, used to see newly created user into their initial state. - """ - @cached - def base_etype_dicts(self, entity): - entity, rels = super(UserMetaGenerator, self).base_etype_dicts(entity) - if entity.cw_etype == 'CWUser': - wf_state = self._cnx.execute('Any S WHERE ET default_workflow WF, ET name %(etype)s, ' - 'WF initial_state S', {'etype': entity.cw_etype}).one() - rels['in_state'] = wf_state.eid - return entity, rels - - -class DataFeedLDAPAdapter(datafeed.DataFeedParser): - __regid__ = 'ldapfeed' - # attributes that may appears in source user_attrs dict which are not - # attributes of the cw user - non_attribute_keys = set(('email', 'eid', 'member', 'modification_date')) - - @cachedproperty - def searchfilterstr(self): - """ ldap search string, including user-filter """ - return '(&%s)' % ''.join(self.source.base_filters) - - @cachedproperty - def searchgroupfilterstr(self): - """ ldap search string, including user-filter """ - return '(&%s)' % ''.join(self.source.group_base_filters) - - @cachedproperty - def user_source_entities_by_extid(self): - source = self.source - if source.user_base_dn.strip(): - attrs = list(map(str, source.user_attrs.keys())) - return dict((userdict['dn'].encode('ascii'), userdict) - for userdict in source._search(self._cw, - source.user_base_dn, - source.user_base_scope, - self.searchfilterstr, - attrs)) - return {} - - @cachedproperty - def group_source_entities_by_extid(self): - source = self.source - if source.group_base_dn.strip(): - attrs = list(map(str, ['modifyTimestamp'] + list(source.group_attrs.keys()))) - return dict((groupdict['dn'].encode('ascii'), groupdict) - for groupdict in source._search(self._cw, - source.group_base_dn, - source.group_base_scope, - self.searchgroupfilterstr, - attrs)) - return {} - - def process(self, url, raise_on_error=False): - """IDataFeedParser main entry point""" - self.debug('processing ldapfeed source %s %s', self.source, self.searchfilterstr) - self._group_members = {} - eeimporter = self.build_importer(raise_on_error) - for name in self.source.user_default_groups: - geid = self._get_group(name) - eeimporter.extid2eid[geid] = geid - entities = self.extentities_generator() - set_cwuri = importer.use_extid_as_cwuri(eeimporter.extid2eid) - eeimporter.import_entities(set_cwuri(entities)) - self.stats['created'] = eeimporter.created - self.stats['updated'] = eeimporter.updated - # handle in_group relation - for group, members in self._group_members.items(): - self._cw.execute('DELETE U in_group G WHERE G name %(g)s', {'g': group}) - if members: - members 
= ["'%s'" % e for e in members] - rql = 'SET U in_group G WHERE G name %%(g)s, U login IN (%s)' % ','.join(members) - self._cw.execute(rql, {'g': group}) - # ensure updated users are activated - for eid in eeimporter.updated: - entity = self._cw.entity_from_eid(eid) - if entity.cw_etype == 'CWUser': - self.ensure_activated(entity) - # manually set primary email if necessary, it's not handled automatically since hooks are - # deactivated - self._cw.execute('SET X primary_email E WHERE NOT X primary_email E, X use_email E, ' - 'X cw_source S, S eid %(s)s, X in_state ST, TS name "activated"', - {'s': self.source.eid}) - - def build_importer(self, raise_on_error): - """Instantiate and configure an importer""" - etypes = ('CWUser', 'EmailAddress', 'CWGroup') - extid2eid = dict((self.source.decode_extid(x), y) for x, y in - self._cw.system_sql('select extid, eid from entities where asource = %(s)s', {'s': self.source.uri})) - existing_relations = {} - for rtype in ('in_group', 'use_email', 'owned_by'): - rql = 'Any S,O WHERE S {} O, S cw_source SO, SO eid %(s)s'.format(rtype) - rset = self._cw.execute(rql, {'s': self.source.eid}) - existing_relations[rtype] = set(tuple(x) for x in rset) - return importer.ExtEntitiesImporter(self._cw.vreg.schema, self.build_store(), - extid2eid=extid2eid, - existing_relations=existing_relations, - etypes_order_hint=etypes, - import_log=self.import_log, - raise_on_error=raise_on_error) - - def build_store(self): - """Instantiate and configure a store""" - metagenerator = UserMetaGenerator(self._cw, source=self.source) - return stores.NoHookRQLObjectStore(self._cw, metagenerator) - - def extentities_generator(self): - self.debug('processing ldapfeed source %s %s', self.source, self.searchgroupfilterstr) - # generate users and email addresses - for userdict in self.user_source_entities_by_extid.values(): - attrs = self.ldap2cwattrs(userdict, 'CWUser') - pwd = attrs.get('upassword') - if not pwd: - # generate a dumb password if not fetched from ldap (see - # userPassword) - pwd = crypt_password(generate_password()) - attrs['upassword'] = set([Binary(pwd)]) - extuser = importer.ExtEntity('CWUser', userdict['dn'].encode('ascii'), attrs) - extuser.values['owned_by'] = set([extuser.extid]) - for extemail in self._process_email(extuser, userdict): - yield extemail - groups = list(filter(None, [self._get_group(name) - for name in self.source.user_default_groups])) - if groups: - extuser.values['in_group'] = groups - yield extuser - # generate groups - for groupdict in self.group_source_entities_by_extid.values(): - attrs = self.ldap2cwattrs(groupdict, 'CWGroup') - extgroup = importer.ExtEntity('CWGroup', groupdict['dn'].encode('ascii'), attrs) - yield extgroup - # record group membership for later insertion - members = groupdict.get(self.source.group_rev_attrs['member'], ()) - self._group_members[attrs['name']] = members - - def _process_email(self, extuser, userdict): - try: - emailaddrs = userdict.pop(self.source.user_rev_attrs['email']) - except KeyError: - return # no email for that user, nothing to do - if not isinstance(emailaddrs, list): - emailaddrs = [emailaddrs] - for emailaddr in emailaddrs: - # search for existing email first, may be coming from another source - rset = self._cw.execute('EmailAddress X WHERE X address %(addr)s', - {'addr': emailaddr}) - emailextid = (userdict['dn'] + '@@' + emailaddr).encode('ascii') - if not rset: - # not found, create it. 
first forge an external id - extuser.values.setdefault('use_email', []).append(emailextid) - yield importer.ExtEntity('EmailAddress', emailextid, dict(address=[emailaddr])) - elif self.sourceuris: - # pop from sourceuris anyway, else email may be removed by the - # source once import is finished - self.sourceuris.pop(emailextid, None) - # XXX else check use_email relation? - - def handle_deletion(self, config, cnx, myuris): - if config['delete-entities']: - super(DataFeedLDAPAdapter, self).handle_deletion(config, cnx, myuris) - return - if myuris: - for extid, (eid, etype) in myuris.items(): - if etype != 'CWUser' or not self.is_deleted(extid, etype, eid): - continue - self.info('deactivate user %s', eid) - wf = cnx.entity_from_eid(eid).cw_adapt_to('IWorkflowable') - wf.fire_transition_if_possible('deactivate') - cnx.commit() - - def ensure_activated(self, entity): - if entity.cw_etype == 'CWUser': - wf = entity.cw_adapt_to('IWorkflowable') - if wf.state == 'deactivated': - wf.fire_transition('activate') - self.info('user %s reactivated', entity.login) - - def ldap2cwattrs(self, sdict, etype): - """Transform dictionary of LDAP attributes to CW. - - etype must be CWUser or CWGroup - """ - assert etype in ('CWUser', 'CWGroup'), etype - tdict = {} - if etype == 'CWUser': - items = self.source.user_attrs.items() - elif etype == 'CWGroup': - items = self.source.group_attrs.items() - for sattr, tattr in items: - if tattr not in self.non_attribute_keys: - try: - value = sdict[sattr] - except KeyError: - raise ConfigurationError( - 'source attribute %s has not been found in the source, ' - 'please check the %s-attrs-map field and the permissions of ' - 'the LDAP binding user' % (sattr, etype[2:].lower())) - if not isinstance(value, list): - value = [value] - tdict[tattr] = value - return tdict - - def is_deleted(self, extidplus, etype, eid): - try: - extid = extidplus.rsplit(b'@@', 1)[0] - except ValueError: - # for some reason extids here tend to come in both forms, e.g: - # dn, dn@@Babar - extid = extidplus - return extid not in self.user_source_entities_by_extid - - @cached - def _get_group(self, name): - try: - return self._cw.execute('Any X WHERE X is CWGroup, X name %(name)s', - {'name': name})[0][0] - except IndexError: - self.error('group %r referenced by source configuration %r does not exist', - name, self.source.uri) - return None diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/notification.py --- a/sobjects/notification.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,322 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""some views to handle notification on data changes""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from itertools import repeat - -from six import text_type - -from logilab.common.textutils import normalize_text -from logilab.common.deprecation import class_renamed, class_moved, deprecated -from logilab.common.registry import yes - -from cubicweb.entity import Entity -from cubicweb.view import Component, EntityView -from cubicweb.server.hook import SendMailOp -from cubicweb.mail import construct_message_id, format_mail -from cubicweb.server.session import Session, InternalManager - - -class RecipientsFinder(Component): - """this component is responsible to find recipients of a notification - - by default user's with their email set are notified if any, else the default - email addresses specified in the configuration are used - """ - __regid__ = 'recipients_finder' - __select__ = yes() - user_rql = ('Any X,E,A WHERE X is CWUser, X in_state S, S name "activated",' - 'X primary_email E, E address A') - - def recipients(self): - mode = self._cw.vreg.config['default-recipients-mode'] - if mode == 'users': - execute = self._cw.execute - dests = [(u.cw_adapt_to('IEmailable').get_email(), - u.property_value('ui.language')) - for u in execute(self.user_rql, build_descr=True).entities()] - elif mode == 'default-dest-addrs': - lang = self._cw.vreg.property_value('ui.language') - dests = zip(self._cw.vreg.config['default-dest-addrs'], repeat(lang)) - else: # mode == 'none' - dests = [] - return dests - - -# abstract or deactivated notification views and mixin ######################## - - -class SkipEmail(Exception): - """raise this if you decide to skip an email during its generation""" - - -class NotificationView(EntityView): - """abstract view implementing the "email" API (eg to simplify sending - notification) - """ - # XXX refactor this class to work with len(rset) > 1 - - msgid_timestamp = True - - # to be defined on concrete sub-classes - content = None # body of the mail - message = None # action verb of the subject - - # this is usually the method to call - def render_and_send(self, **kwargs): - """generate and send email messages for this view""" - # render_emails changes self._cw so cache it here so all mails are sent - # after we commit our transaction. 
- cnx = self._cw - for msg, recipients in self.render_emails(**kwargs): - SendMailOp(cnx, recipients=recipients, msg=msg) - - def cell_call(self, row, col=0, **kwargs): - self.w(self._cw._(self.content) % self.context(**kwargs)) - - def render_emails(self, **kwargs): - """generate and send emails for this view (one per recipient)""" - self._kwargs = kwargs - recipients = self.recipients() - if not recipients: - self.info('skipping %s notification, no recipients', self.__regid__) - return - if self.cw_rset is not None: - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - # if the view is using timestamp in message ids, no way to reference - # previous email - if not self.msgid_timestamp: - refs = [self.construct_message_id(eid) - for eid in entity.cw_adapt_to('INotifiable').notification_references(self)] - else: - refs = () - msgid = self.construct_message_id(entity.eid) - else: - refs = () - msgid = None - req = self._cw - self.user_data = req.user_data() - for something in recipients: - if isinstance(something, tuple): - emailaddr, lang = something - user = InternalManager(lang=lang) - else: - emailaddr = something.cw_adapt_to('IEmailable').get_email() - user = something - # hi-jack self._cw to get a session for the returned user - session = Session(user, self._cw.repo) - with session.new_cnx() as cnx: - self._cw = cnx - try: - # since the same view (eg self) may be called multiple time and we - # need a fresh stream at each iteration, reset it explicitly - self.w = None - try: - # XXX forcing the row & col here may make the content and - # subject inconsistent because subject will depend on - # self.cw_row & self.cw_col if they are set. - content = self.render(row=0, col=0, **kwargs) - subject = self.subject() - except SkipEmail: - continue - except Exception as ex: - # shouldn't make the whole transaction fail because of rendering - # error (unauthorized or such) XXX check it doesn't actually - # occurs due to rollback on such error - self.exception(str(ex)) - continue - msg = format_mail(self.user_data, [emailaddr], content, subject, - config=self._cw.vreg.config, msgid=msgid, references=refs) - yield msg, [emailaddr] - finally: - self._cw = req - - # recipients handling ###################################################### - - def recipients(self): - """return a list of either 2-uple (email, language) or user entity to - whom this email should be sent - """ - finder = self._cw.vreg['components'].select( - 'recipients_finder', self._cw, rset=self.cw_rset, - row=self.cw_row or 0, col=self.cw_col or 0) - return finder.recipients() - - # email generation helpers ################################################# - - def construct_message_id(self, eid): - return construct_message_id(self._cw.vreg.config.appid, eid, - self.msgid_timestamp) - - def format_field(self, attr, value): - return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} - - def format_section(self, attr, value): - return '%(attr)s\n%(ul)s\n%(value)s\n' % { - 'attr': attr, 'ul': '-'*len(attr), 'value': value} - - def subject(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - subject = self._cw._(self.message) - etype = entity.dc_type() - eid = entity.eid - login = self.user_data['login'] - return self._cw._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals() - - def context(self, **kwargs): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - for key, val in kwargs.items(): - if val and isinstance(val, text_type) and val.strip(): - kwargs[key] = 
self._cw._(val) - kwargs.update({'user': self.user_data['login'], - 'eid': entity.eid, - 'etype': entity.dc_type(), - 'url': entity.absolute_url(__secure__=True), - 'title': entity.dc_long_title(),}) - return kwargs - - -class StatusChangeMixIn(object): - __regid__ = 'notif_status_change' - msgid_timestamp = True - message = _('status changed') - content = _(""" -%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for entity -'%(title)s' - -%(comment)s - -url: %(url)s -""") - - -############################################################################### -# Actual notification views. # -# # -# disable them at the recipients_finder level if you don't want them # -############################################################################### - -# XXX should be based on dc_title/dc_description, no? - -class ContentAddedView(NotificationView): - """abstract class for notification on entity/relation - - all you have to do by default is : - * set id and __select__ attributes to match desired events and entity types - * set a content attribute to define the content of the email (unless you - override call) - """ - __abstract__ = True - __regid__ = 'notif_after_add_entity' - msgid_timestamp = False - message = _('new') - content = """ -%(title)s - -%(content)s - -url: %(url)s -""" - # to be defined on concrete sub-classes - content_attr = None - - def context(self, **kwargs): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - content = entity.printable_value(self.content_attr, format='text/plain') - if content: - contentformat = getattr(entity, self.content_attr + '_format', - 'text/rest') - # XXX don't try to wrap rest until we've a proper transformation (see - # #103822) - if contentformat != 'text/rest': - content = normalize_text(content, 80) - return super(ContentAddedView, self).context(content=content, **kwargs) - - def subject(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return u'%s #%s (%s)' % (self._cw.__('New %s' % entity.e_schema), - entity.eid, self.user_data['login']) - - -def format_value(value): - if isinstance(value, text_type): - return u'"%s"' % value - return value - - -class EntityUpdatedNotificationView(NotificationView): - """abstract class for notification on entity/relation - - all you have to do by default is : - * set id and __select__ attributes to match desired events and entity types - * set a content attribute to define the content of the email (unless you - override call) - """ - __abstract__ = True - __regid__ = 'notif_entity_updated' - msgid_timestamp = True - message = _('updated') - no_detailed_change_attrs = () - content = """ -Properties have been updated by %(user)s: - -%(changes)s - -url: %(url)s -""" - - def context(self, changes=(), **kwargs): - context = super(EntityUpdatedNotificationView, self).context(**kwargs) - _ = self._cw._ - formatted_changes = [] - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - for attr, oldvalue, newvalue in sorted(changes): - # check current user has permission to see the attribute - rschema = self._cw.vreg.schema[attr] - if rschema.final: - rdef = entity.e_schema.rdef(rschema) - if not rdef.has_perm(self._cw, 'read', eid=self.cw_rset[0][0]): - continue - # XXX suppose it's a subject relation... 
- elif not rschema.has_perm(self._cw, 'read', - fromeid=self.cw_rset[0][0]): - continue - if attr in self.no_detailed_change_attrs: - msg = _('%s updated') % _(attr) - elif oldvalue not in (None, ''): - msg = _('%(attr)s updated from %(oldvalue)s to %(newvalue)s') % { - 'attr': _(attr), - 'oldvalue': format_value(oldvalue), - 'newvalue': format_value(newvalue)} - else: - msg = _('%(attr)s set to %(newvalue)s') % { - 'attr': _(attr), 'newvalue': format_value(newvalue)} - formatted_changes.append('* ' + msg) - if not formatted_changes: - # current user isn't allowed to see changes, skip this notification - raise SkipEmail() - context['changes'] = '\n'.join(formatted_changes) - return context - - def subject(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return u'%s #%s (%s)' % (self._cw.__('Updated %s' % entity.e_schema), - entity.eid, self.user_data['login']) diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/services.py --- a/sobjects/services.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,174 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Define server side service provided by cubicweb""" - -import threading - -from six import text_type - -from yams.schema import role_name - -from cubicweb import ValidationError -from cubicweb.server import Service -from cubicweb.predicates import match_user_groups, match_kwargs - -class StatsService(Service): - """Return a dictionary containing some statistics about the repository - resources usage. - """ - - __regid__ = 'repo_stats' - __select__ = match_user_groups('managers', 'users') - - def call(self): - repo = self._cw.repo # Service are repo side only. 
- results = {} - querier = repo.querier - source = repo.system_source - for size, maxsize, hits, misses, title in ( - (len(querier._rql_cache), repo.config['rql-cache-size'], - querier.cache_hit, querier.cache_miss, 'rqlt_st'), - (len(source._cache), repo.config['rql-cache-size'], - source.cache_hit, source.cache_miss, 'sql'), - ): - results['%s_cache_size' % title] = {'size': size, 'maxsize': maxsize} - results['%s_cache_hit' % title] = hits - results['%s_cache_miss' % title] = misses - results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses) - results['type_source_cache_size'] = len(repo._type_source_cache) - results['extid_cache_size'] = len(repo._extid_cache) - results['sql_no_cache'] = repo.system_source.no_cache - results['nb_open_sessions'] = len(repo._sessions) - results['nb_active_threads'] = threading.activeCount() - looping_tasks = repo._tasks_manager._looping_tasks - results['looping_tasks'] = [(t.name, t.interval) for t in looping_tasks] - results['available_cnxsets'] = repo._cnxsets_pool.qsize() - results['threads'] = [t.name for t in threading.enumerate()] - return results - - -class GcStatsService(Service): - """Return a dictionary containing some statistics about the repository - resources usage. - """ - - __regid__ = 'repo_gc_stats' - __select__ = match_user_groups('managers') - - def call(self, nmax=20): - """Return a dictionary containing some statistics about the repository - memory usage. - - This is a public method, not requiring a session id. - - nmax is the max number of (most) referenced object returned as - the 'referenced' result - """ - - from cubicweb._gcdebug import gc_info - from cubicweb.appobject import AppObject - from cubicweb.rset import ResultSet - from cubicweb.web.request import CubicWebRequestBase - from rql.stmts import Union - - lookupclasses = (AppObject, - Union, ResultSet, - CubicWebRequestBase) - try: - from cubicweb.server.session import Session, InternalSession - lookupclasses += (InternalSession, Session) - except ImportError: - pass # no server part installed - - results = {} - counters, ocounters, garbage = gc_info(lookupclasses, - viewreferrersclasses=()) - values = sorted(counters.items(), key=lambda x: x[1], reverse=True) - results['lookupclasses'] = values - values = sorted(ocounters.items(), key=lambda x: x[1], reverse=True)[:nmax] - results['referenced'] = values - results['unreachable'] = garbage - return results - - -class RegisterUserService(Service): - """check if a user with the given login exists, if not create it with the - given password. This service is designed to be used for anonymous - registration on public web sites. 
- - To use it, do: - with self.appli.repo.internal_cnx() as cnx: - cnx.call_service('register_user', - login=login, - password=password, - **cwuserkwargs) - """ - __regid__ = 'register_user' - __select__ = Service.__select__ & match_kwargs('login', 'password') - default_groups = ('users',) - - def call(self, login, password, email=None, groups=None, **cwuserkwargs): - cnx = self._cw - errmsg = cnx._('the value "%s" is already used, use another one') - - if (cnx.execute('CWUser X WHERE X login %(login)s', {'login': login}, - build_descr=False) - or cnx.execute('CWUser X WHERE X use_email C, C address %(login)s', - {'login': login}, build_descr=False)): - qname = role_name('login', 'subject') - raise ValidationError(None, {qname: errmsg % login}) - - if isinstance(password, text_type): - # password should *always* be utf8 encoded - password = password.encode('UTF8') - cwuserkwargs['login'] = login - cwuserkwargs['upassword'] = password - # we have to create the user - user = cnx.create_entity('CWUser', **cwuserkwargs) - if groups is None: - groups = self.default_groups - assert groups, "CWUsers must belong to at least one CWGroup" - group_names = ', '.join('%r' % group for group in groups) - cnx.execute('SET X in_group G WHERE X eid %%(x)s, G name IN (%s)' % group_names, - {'x': user.eid}) - - if email or '@' in login: - d = {'login': login, 'email': email or login} - if cnx.execute('EmailAddress X WHERE X address %(email)s', d, - build_descr=False): - qname = role_name('address', 'subject') - raise ValidationError(None, {qname: errmsg % d['email']}) - cnx.execute('INSERT EmailAddress X: X address %(email)s, ' - 'U primary_email X, U use_email X ' - 'WHERE U login %(login)s', d, build_descr=False) - - return user - - -class SourceSynchronizationService(Service): - """Force synchronization of a datafeed source""" - __regid__ = 'source-sync' - __select__ = Service.__select__ & match_user_groups('managers') - - def call(self, source_eid): - source_entity = self._cw.entity_from_eid(source_eid) - repo = self._cw.repo # Service are repo side only. - with repo.internal_cnx() as cnx: - source = repo.sources_by_uri[source_entity.name] - source.pull_data(cnx) - diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/supervising.py --- a/sobjects/supervising.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,191 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
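The services defined in sobjects/services.py above are all invoked through cnx.call_service() with their __regid__ and keyword arguments. A minimal sketch, assuming an already configured repository object named repo (the login, password and email values are illustrative):

    with repo.internal_cnx() as cnx:
        # anonymous account creation, as provided by RegisterUserService
        user = cnx.call_service('register_user',
                                login=u'jdoe', password=u'secret',
                                email=u'jdoe@example.org')
        # repository resource usage, as provided by StatsService
        stats = cnx.call_service('repo_stats')
        cnx.commit()

'repo_stats' and 'repo_gc_stats' return plain dictionaries, so their results can be logged or handed to a monitoring system as-is.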
-"""some hooks and views to handle supervising of any data changes""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from cubicweb import UnknownEid -from cubicweb.predicates import none_rset -from cubicweb.schema import display_name -from cubicweb.view import Component -from cubicweb.mail import format_mail -from cubicweb.server.hook import SendMailOp - - -def filter_changes(changes): - """ - * when an entity has been deleted: - * don't show deletion of its relations - * don't show related TrInfo deletion if any - * when an entity has been added don't show owned_by relation addition - * don't show new TrInfo entities if any - """ - # first build an index of changes - index = {} - added, deleted = set(), set() - for change in changes[:]: - event, changedescr = change - if event == 'add_entity': - entity = changedescr.entity - added.add(entity.eid) - if entity.e_schema == 'TrInfo': - changes.remove(change) - event = 'change_state' - change = (event, - (entity.wf_info_for[0], - entity.from_state[0], entity.to_state[0])) - changes.append(change) - elif event == 'delete_entity': - deleted.add(changedescr[0]) - index.setdefault(event, set()).add(change) - for key in ('delete_relation', 'add_relation'): - for change in index.get(key, {}).copy(): - if change[1].rtype == 'in_state': - index[key].remove(change) - # filter changes - for eid in added: - try: - for change in index['add_relation'].copy(): - changedescr = change[1] - # skip meta-relations which are set automatically - # XXX generate list below using rtags (category = 'generated') - if changedescr.rtype in ('created_by', 'owned_by', 'is', 'is_instance_of', - 'from_state', 'to_state', 'by_transition', - 'wf_info_for') \ - and changedescr.eidfrom == eid: - index['add_relation'].remove(change) - except KeyError: - break - for eid in deleted: - try: - for change in index['delete_relation'].copy(): - if change[1].eidfrom == eid: - index['delete_relation'].remove(change) - elif change[1].eidto == eid: - index['delete_relation'].remove(change) - if change[1].rtype == 'wf_info_for': - for change_ in index['delete_entity'].copy(): - if change_[1].eidfrom == change[1].eidfrom: - index['delete_entity'].remove(change_) - except KeyError: - break - for change in changes: - event, changedescr = change - if change in index[event]: - yield change - - -class SupervisionEmailView(Component): - """view implementing the email API for data changes supervision notification - """ - __regid__ = 'supervision_notif' - __select__ = none_rset() - - def recipients(self): - return self._cw.vreg.config['supervising-addrs'] - - def subject(self): - return self._cw._('[%s supervision] changes summary') % self._cw.vreg.config.appid - - def call(self, changes): - user = self._cw.user - self.w(self._cw._('user %s has made the following change(s):\n\n') - % user.login) - for event, changedescr in filter_changes(changes): - self.w(u'* ') - getattr(self, event)(changedescr) - self.w(u'\n\n') - - def _entity_context(self, entity): - return {'eid': entity.eid, - 'etype': entity.dc_type().lower(), - 'title': entity.dc_title()} - - def add_entity(self, changedescr): - msg = self._cw._('added %(etype)s #%(eid)s (%(title)s)') - self.w(u'%s\n' % (msg % self._entity_context(changedescr.entity))) - self.w(u' %s' % changedescr.entity.absolute_url()) - - def update_entity(self, changedescr): - msg = self._cw._('updated %(etype)s #%(eid)s (%(title)s)') - self.w(u'%s\n' % (msg % self._entity_context(changedescr.entity))) - # XXX print changes - self.w(u' %s' % 
changedescr.entity.absolute_url()) - - def delete_entity(self, args): - eid, etype, title = args - msg = self._cw._('deleted %(etype)s #%(eid)s (%(title)s)') - etype = display_name(self._cw, etype).lower() - self.w(msg % locals()) - - def change_state(self, args): - _ = self._cw._ - entity, fromstate, tostate = args - msg = _('changed state of %(etype)s #%(eid)s (%(title)s)') - self.w(u'%s\n' % (msg % self._entity_context(entity))) - self.w(_(' from state %(fromstate)s to state %(tostate)s\n' % - {'fromstate': _(fromstate.name), 'tostate': _(tostate.name)})) - self.w(u' %s' % entity.absolute_url()) - - def _relation_context(self, changedescr): - cnx = self._cw - def describe(eid): - try: - return cnx._(cnx.entity_metas(eid)['type']).lower() - except UnknownEid: - # may occurs when an entity has been deleted from an external - # source and we're cleaning its relation - return cnx._('unknown external entity') - eidfrom, rtype, eidto = changedescr.eidfrom, changedescr.rtype, changedescr.eidto - return {'rtype': cnx._(rtype), - 'eidfrom': eidfrom, - 'frometype': describe(eidfrom), - 'eidto': eidto, - 'toetype': describe(eidto)} - - def add_relation(self, changedescr): - msg = self._cw._('added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%(eidto)s') - self.w(msg % self._relation_context(changedescr)) - - def delete_relation(self, changedescr): - msg = self._cw._('deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%(eidto)s') - self.w(msg % self._relation_context(changedescr)) - - -class SupervisionMailOp(SendMailOp): - """special send email operation which should be done only once for a bunch - of changes - """ - def _get_view(self): - return self.cnx.vreg['components'].select('supervision_notif', self.cnx) - - def _prepare_email(self): - cnx = self.cnx - config = cnx.vreg.config - uinfo = {'email': config['sender-addr'], - 'name': config['sender-name']} - view = self._get_view() - content = view.render(changes=cnx.transaction_data.get('pendingchanges')) - recipients = view.recipients() - msg = format_mail(uinfo, recipients, content, view.subject(), config=config) - self.to_send = [(msg, recipients)] - - def postcommit_event(self): - self._prepare_email() - SendMailOp.postcommit_event(self) diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/data/bootstrap_cubes --- a/sobjects/test/data/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card,comment diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/data/schema.py --- a/sobjects/test/data/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,28 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-from yams.buildobjs import EntityType, RelationDefinition, String, SubjectRelation - -class comments(RelationDefinition): - subject = 'Comment' - object = 'Card' - cardinality='1*' - composite='object' - -class Tag(EntityType): - name = String(unique=True) - tags = SubjectRelation(('CWUser', 'CWGroup', 'EmailAddress')) diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/data/sobjects/__init__.py --- a/sobjects/test/data/sobjects/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from cubicweb.predicates import is_instance -from cubicweb.sobjects.notification import StatusChangeMixIn, NotificationView - -class UserStatusChangeView(StatusChangeMixIn, NotificationView): - __select__ = NotificationView.__select__ & is_instance('CWUser') diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/requirements.txt --- a/sobjects/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -cubicweb-card -cubicweb-comment diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/unittest_cwxmlparser.py --- a/sobjects/test/unittest_cwxmlparser.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,338 +0,0 @@ -# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from datetime import datetime - -from six.moves.urllib.parse import urlsplit, parse_qsl - -import pytz -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser - -orig_parse = CWEntityXMLParser.parse - -def parse(self, url): - try: - url = RELATEDXML[url.split('?')[0]] - except KeyError: - pass - return orig_parse(self, url) - -def setUpModule(): - CWEntityXMLParser.parse = parse - -def tearDownModule(): - CWEntityXMLParser.parse = orig_parse - - -BASEXML = ''.join(u''' - - - sthenault - toto - 2011-01-25 14:14:06 - 2010-01-22 10:27:59 - 2011-01-25 14:14:06 - - - - - - - - - - - - - - - - -'''.splitlines()) - -RELATEDXML = { - 'http://pouet.org/6': u''' - - -
      syt@logilab.fr
      - 2010-04-13 14:35:56 - 2010-04-13 14:35:56 - - - -
      -
      -''', - 'http://pouet.org/7': u''' - - - users - - - - - -''', - 'http://pouet.org/8': u''' - - - unknown - - -''', - 'http://pouet.org/9': u''' - - - hop - - -''', - 'http://pouet.org/10': u''' - - - unknown - - -''', - } - - -OTHERXML = ''.join(u''' - - - sthenault - toto - 2011-01-25 14:14:06 - 2010-01-22 10:27:59 - 2011-01-25 14:14:06 - - - - - -'''.splitlines() -) - - -class CWEntityXMLParserTC(CubicWebTC): - """/!\ this test use a pre-setup database /!\, if you modify above xml, - REMOVE THE DATABASE TEMPLATE else it won't be considered - """ - test_db_id = 'xmlparser' - - def assertURLEquiv(self, first, second): - # ignore ordering differences in query params - parsed_first = urlsplit(first) - parsed_second = urlsplit(second) - self.assertEqual(parsed_first.scheme, parsed_second.scheme) - self.assertEqual(parsed_first.netloc, parsed_second.netloc) - self.assertEqual(parsed_first.path, parsed_second.path) - self.assertEqual(parsed_first.fragment, parsed_second.fragment) - self.assertCountEqual(parse_qsl(parsed_first.query), parse_qsl(parsed_second.query)) - - @classmethod - def pre_setup_database(cls, cnx, config): - myfeed = cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', - parser=u'cw.entityxml', url=BASEXML) - myotherfeed = cnx.create_entity('CWSource', name=u'myotherfeed', type=u'datafeed', - parser=u'cw.entityxml', url=OTHERXML) - cnx.commit() - myfeed.init_mapping([(('CWUser', 'use_email', '*'), - u'role=subject\naction=copy'), - (('CWUser', 'in_group', '*'), - u'role=subject\naction=link\nlinkattr=name'), - (('CWUser', 'in_state', '*'), - u'role=subject\naction=link\nlinkattr=name'), - (('*', 'tags', '*'), - u'role=object\naction=link-or-create\nlinkattr=name'), - ]) - myotherfeed.init_mapping([(('CWUser', 'in_group', '*'), - u'role=subject\naction=link\nlinkattr=name'), - (('CWUser', 'in_state', '*'), - u'role=subject\naction=link\nlinkattr=name'), - ]) - cnx.create_entity('Tag', name=u'hop') - cnx.commit() - - def test_complete_url(self): - dfsource = self.repo.sources_by_uri['myfeed'] - with self.admin_access.repo_cnx() as cnx: - parser = dfsource._get_parser(cnx) - self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/CWUser'), - 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') - self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser'), - 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') - self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'), - 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf') - self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'), - 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf') - self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'), - 'http://www.cubicweb.org/?rql=cwuser&relation=hop') - - - def test_actions(self): - dfsource = self.repo.sources_by_uri['myfeed'] - self.assertEqual(dfsource.mapping, - {u'CWUser': { - (u'in_group', u'subject', u'link'): [ - (u'CWGroup', {u'linkattr': u'name'})], - (u'in_state', u'subject', u'link'): [ - (u'State', {u'linkattr': u'name'})], - (u'tags', u'object', u'link-or-create'): [ - (u'Tag', {u'linkattr': u'name'})], - (u'use_email', u'subject', u'copy'): [ - 
(u'EmailAddress', {})] - }, - u'CWGroup': { - (u'tags', u'object', u'link-or-create'): [ - (u'Tag', {u'linkattr': u'name'})], - }, - u'EmailAddress': { - (u'tags', u'object', u'link-or-create'): [ - (u'Tag', {u'linkattr': u'name'})], - }, - }) - with self.repo.internal_cnx() as cnx: - stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) - self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) - self.assertEqual(len(stats['created']), 2) - self.assertEqual(stats['updated'], set()) - - with self.admin_access.web_request() as req: - user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) - self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59, tzinfo=pytz.utc)) - self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)) - self.assertEqual(user.cwuri, 'http://pouet.org/5') - self.assertEqual(user.cw_source[0].name, 'myfeed') - self.assertEqual(user.absolute_url(), 'http://pouet.org/5') - self.assertEqual(len(user.use_email), 1) - # copy action - email = user.use_email[0] - self.assertEqual(email.address, 'syt@logilab.fr') - self.assertEqual(email.cwuri, 'http://pouet.org/6') - self.assertEqual(email.absolute_url(), 'http://pouet.org/6') - self.assertEqual(email.cw_source[0].name, 'myfeed') - self.assertEqual(len(email.reverse_tags), 1) - self.assertEqual(email.reverse_tags[0].name, 'hop') - # link action - self.assertFalse(req.execute('CWGroup X WHERE X name "unknown"')) - groups = sorted([g.name for g in user.in_group]) - self.assertEqual(groups, ['users']) - group = user.in_group[0] - self.assertEqual(len(group.reverse_tags), 1) - self.assertEqual(group.reverse_tags[0].name, 'hop') - # link or create action - tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name) - for t in user.reverse_tags]) - self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'), - ('unknown', 'http://testing.fr/cubicweb/', 'system'))) - ) - with self.repo.internal_cnx() as cnx: - stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(len(stats['updated']), 0) - self.assertEqual(len(stats['checked']), 2) - self.repo._type_source_cache.clear() - self.repo._extid_cache.clear() - stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(len(stats['updated']), 0) - self.assertEqual(len(stats['checked']), 2) - - # test move to system source - cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid}) - cnx.commit() - rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') - self.assertEqual(len(rset), 1) - e = rset.get_entity(0, 0) - self.assertEqual(e.eid, email.eid) - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'EmailAddress', - 'extid': None}) - self.assertEqual(e.cw_source[0].name, 'system') - self.assertEqual(e.reverse_use_email[0].login, 'sthenault') - # test everything is still fine after source synchronization - # clear caches to make sure we look at the moved_entities table - self.repo._type_source_cache.clear() - self.repo._extid_cache.clear() - stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) - self.assertEqual(stats['updated'], set((email.eid,))) - rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') - self.assertEqual(len(rset), 1) - e = rset.get_entity(0, 0) - self.assertEqual(e.eid, 
email.eid) - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'EmailAddress', - 'extid': None}) - self.assertEqual(e.cw_source[0].name, 'system') - self.assertEqual(e.reverse_use_email[0].login, 'sthenault') - cnx.commit() - - # test delete entity - e.cw_delete() - cnx.commit() - # test everything is still fine after source synchronization - stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) - rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') - self.assertEqual(len(rset), 0) - rset = cnx.execute('Any X WHERE X use_email E, X login "sthenault"') - self.assertEqual(len(rset), 0) - - def test_external_entity(self): - dfsource = self.repo.sources_by_uri['myotherfeed'] - with self.repo.internal_cnx() as cnx: - stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) - user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) - self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59, tzinfo=pytz.utc)) - self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)) - self.assertEqual(user.cwuri, 'http://pouet.org/5') - self.assertEqual(user.cw_source[0].name, 'myfeed') - - def test_noerror_missing_fti_attribute(self): - dfsource = self.repo.sources_by_uri['myfeed'] - with self.repo.internal_cnx() as cnx: - parser = dfsource._get_parser(cnx) - dfsource.process_urls(parser, [''' - - - how-to - - -'''], raise_on_error=True) - - def test_noerror_unspecified_date(self): - dfsource = self.repo.sources_by_uri['myfeed'] - with self.repo.internal_cnx() as cnx: - parser = dfsource._get_parser(cnx) - dfsource.process_urls(parser, [''' - - - how-to - how-to - how-to - - - -'''], raise_on_error=True) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/unittest_email.py --- a/sobjects/test/unittest_email.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,68 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from cubicweb import Unauthorized -from cubicweb.devtools.testlib import CubicWebTC - -class EmailAddressHooksTC(CubicWebTC): - - def test_use_email_set_primary_email(self): - with self.admin_access.client_cnx() as cnx: - cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"') - self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows, - []) - cnx.commit() - self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], - 'admin@logilab.fr') - # having another email should'nt change anything - cnx.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"') - cnx.commit() - self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], - 'admin@logilab.fr') - - def test_primary_email_set_use_email(self): - with self.admin_access.client_cnx() as cnx: - cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"') - self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A').rows, - []) - cnx.commit() - self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0], - 'admin@logilab.fr') - - def test_cardinality_check(self): - with self.admin_access.client_cnx() as cnx: - email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] - cnx.commit() - cnx.execute('SET U primary_email E WHERE U login "anon", E address "client@client.com"') - cnx.commit() - rset = cnx.execute('Any X WHERE X use_email E, E eid %(e)s', {'e': email1}) - self.assertFalse(rset.rowcount != 1, rset) - - def test_security_check(self): - with self.admin_access.client_cnx() as cnx: - self.create_user(cnx, 'toto') - email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] - cnx.commit() - with self.new_access('toto').client_cnx() as cnx: - self.assertRaises(Unauthorized, - cnx.execute, 'SET U primary_email E WHERE E eid %(e)s, U login "toto"', - {'e': email1}) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,103 +0,0 @@ -# -*- coding: iso-8859-1 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from socket import gethostname - -from logilab.common.testlib import unittest_main, TestCase -from cubicweb.devtools.testlib import CubicWebTC, MAILBOX - -from cubicweb.mail import construct_message_id, parse_message_id - -class MessageIdTC(TestCase): - def test_base(self): - msgid1 = construct_message_id('testapp', 21) - msgid2 = construct_message_id('testapp', 21) - self.assertNotEqual(msgid1, msgid2) - self.assertNotIn('&', msgid1) - self.assertNotIn('=', msgid1) - self.assertNotIn('/', msgid1) - self.assertNotIn('+', msgid1) - values = parse_message_id(msgid1, 'testapp') - self.assertTrue(values) - # parse_message_id should work with or without surrounding <> - self.assertEqual(values, parse_message_id(msgid1[1:-1], 'testapp')) - self.assertEqual(values['eid'], '21') - self.assertIn('timestamp', values) - self.assertEqual(parse_message_id(msgid1[1:-1], 'anotherapp'), None) - - def test_notimestamp(self): - msgid1 = construct_message_id('testapp', 21, False) - msgid2 = construct_message_id('testapp', 21, False) - values = parse_message_id(msgid1, 'testapp') - self.assertEqual(values, {'eid': '21'}) - - def test_parse_message_doesnt_raise(self): - self.assertEqual(parse_message_id('oijioj@bla.bla', 'tesapp'), None) - self.assertEqual(parse_message_id('oijioj@bla', 'tesapp'), None) - self.assertEqual(parse_message_id('oijioj', 'tesapp'), None) - - - def test_nonregr_empty_message_id(self): - for eid in (1, 12, 123, 1234): - msgid1 = construct_message_id('testapp', eid, 12) - self.assertNotEqual(msgid1, '<@testapp.%s>' % gethostname()) - -class NotificationTC(CubicWebTC): - - def test_recipients_finder(self): - with self.admin_access.web_request() as req: - urset = req.execute('CWUser X WHERE X login "admin"') - req.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X ' - 'WHERE U eid %(x)s', {'x': urset[0][0]}) - req.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U ' - 'WHERE U eid %(x)s', {'x': urset[0][0]}) - req.cnx.commit() # commit so that admin get its properties updated - finder = self.vreg['components'].select('recipients_finder', - req, rset=urset) - self.set_option('default-recipients-mode', 'none') - self.assertEqual(finder.recipients(), []) - self.set_option('default-recipients-mode', 'users') - self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) - self.set_option('default-recipients-mode', 'default-dest-addrs') - self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr') - self.assertEqual(list(finder.recipients()), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) - - def test_status_change_view(self): - with self.admin_access.web_request() as req: - u = self.create_user(req, 'toto') - iwfable = u.cw_adapt_to('IWorkflowable') - iwfable.fire_transition('deactivate', comment=u'yeah') - self.assertFalse(MAILBOX) - req.cnx.commit() - self.assertEqual(len(MAILBOX), 1) - email = MAILBOX[0] - self.assertEqual(email.content, - ''' -admin changed status from to for entity -'toto' - -yeah - -url: http://testing.fr/cubicweb/cwuser/toto -''') - self.assertEqual(email.subject, - 'status changed CWUser #%s (admin)' % u.eid) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/unittest_register_user.py --- a/sobjects/test/unittest_register_user.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,95 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unittest for cubicweb.dbapi""" - -from cubicweb import ValidationError -from cubicweb.web import Unauthorized -from cubicweb.devtools.testlib import CubicWebTC - - -class RegisterUserTC(CubicWebTC): - - def test_register_user_service(self): - acc = self.admin_access - with acc.client_cnx() as cnx: - cnx.call_service('register_user', login=u'foo1', password=u'bar1', - email=u'foo1@bar1.com', firstname=u'Foo1', - surname=u'Bar1') - - acc = self.new_access('anon') - with acc.client_cnx() as cnx: - self.assertRaises(Unauthorized, cnx.call_service, 'register_user', - login=u'foo2', password=u'bar2', - email=u'foo2@bar2.com', firstname=u'Foo2', surname=u'Bar2') - - with self.repo.internal_cnx() as cnx: - cnx.call_service('register_user', login=u'foo3', - password=u'bar3', email=u'foo3@bar3.com', - firstname=u'Foo3', surname=u'Bar3') - # same login - with self.assertRaises(ValidationError): - cnx.call_service('register_user', login=u'foo3', - password=u'bar3') - - def test_register_user_attributes(self): - with self.repo.internal_cnx() as cnx: - cnx.call_service('register_user', login=u'foo3', - password=u'bar3', email=u'foo3@bar3.com', - firstname=u'Foo3', surname=u'Bar3') - cnx.commit() - - with self.admin_access.client_cnx() as cnx: - user = cnx.find('CWUser', login=u'foo3').one() - self.assertEqual(user.firstname, u'Foo3') - self.assertEqual(user.use_email[0].address, u'foo3@bar3.com') - - def test_register_user_groups(self): - with self.repo.internal_cnx() as cnx: - # default - cnx.call_service('register_user', login=u'foo_user', - password=u'bar_user', email=u'foo_user@bar_user.com', - firstname=u'Foo_user', surname=u'Bar_user') - - # group kwarg - cnx.call_service('register_user', login=u'foo_admin', - password=u'bar_admin', email=u'foo_admin@bar_admin.com', - firstname=u'Foo_admin', surname=u'Bar_admin', - groups=('managers', 'users')) - - # class attribute - from cubicweb.sobjects import services - services.RegisterUserService.default_groups = ('guests',) - cnx.call_service('register_user', login=u'foo_guest', - password=u'bar_guest', email=u'foo_guest@bar_guest.com', - firstname=u'Foo_guest', surname=u'Bar_guest') - cnx.commit() - - with self.admin_access.client_cnx() as cnx: - user = cnx.find('CWUser', login=u'foo_user').one() - self.assertEqual([g.name for g in user.in_group], ['users']) - - admin = cnx.find('CWUser', login=u'foo_admin').one() - self.assertEqual(sorted(g.name for g in admin.in_group), ['managers', 'users']) - - guest = cnx.find('CWUser', login=u'foo_guest').one() - self.assertEqual([g.name for g in guest.in_group], ['guests']) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Mon Jan 
04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,109 +0,0 @@ -# -*- coding: iso-8859-1 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -import re - -from logilab.common.testlib import unittest_main -from cubicweb.devtools.testlib import CubicWebTC - -from cubicweb.sobjects.supervising import SendMailOp, SupervisionMailOp - - -class SupervisingTC(CubicWebTC): - - def setup_database(self): - with self.admin_access.client_cnx() as cnx: - cnx.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau") - card = cnx.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau") - cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") - cnx.create_entity('Comment', content=u"Yo !", comments=card) - cnx.commit() - self.vreg.config.global_set_option('supervising-addrs', 'test@logilab.fr') - - - def test_supervision(self): - # do some modification - with self.admin_access.repo_cnx() as cnx: - user = cnx.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' - 'WHERE G name "users"').get_entity(0, 0) - cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}) - cnx.execute('DELETE Card B WHERE B title "une news !"') - cnx.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}) - cnx.execute('SET X content "duh?" WHERE X is Comment') - cnx.execute('DELETE Comment C WHERE C comments Y, Y is Card, Y title "une autre news !"') - # check only one supervision email operation - sentops = [op for op in cnx.pending_operations - if isinstance(op, SupervisionMailOp)] - self.assertEqual(len(sentops), 1) - # check view content - op = sentops[0] - view = sentops[0]._get_view() - self.assertEqual(view.recipients(), ['test@logilab.fr']) - self.assertEqual(view.subject(), '[data supervision] changes summary') - data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip() - data = re.sub('#\d+', '#EID', data) - data = re.sub('/\d+', '/EID', data) - self.assertMultiLineEqual('''user admin has made the following change(s): - -* added cwuser #EID (toto) - http://testing.fr/cubicweb/cwuser/toto - -* added relation in_group from cwuser #EID to cwgroup #EID - -* deleted card #EID (une news !) - -* added relation bookmarked_by from bookmark #EID to cwuser #EID - -* updated comment #EID (duh?) 
- http://testing.fr/cubicweb/comment/EID - -* deleted comment #EID (duh?)''', - data) - # check prepared email - op._prepare_email() - self.assertEqual(len(op.to_send), 1) - self.assertTrue(op.to_send[0][0]) - self.assertEqual(op.to_send[0][1], ['test@logilab.fr']) - cnx.commit() - # some other changes ####### - user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') - sentops = [op for op in cnx.pending_operations - if isinstance(op, SupervisionMailOp)] - self.assertEqual(len(sentops), 1) - # check view content - op = sentops[0] - view = sentops[0]._get_view() - data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip() - data = re.sub('#\d+', '#EID', data) - data = re.sub('/\d+', '/EID', data) - self.assertMultiLineEqual('''user admin has made the following change(s): - -* changed state of cwuser #EID (toto) - from state activated to state deactivated - http://testing.fr/cubicweb/cwuser/toto''', - data) - - def test_nonregr1(self): - with self.admin_access.repo_cnx() as cnx: - # do some unlogged modification - cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': cnx.user.eid}) - cnx.commit() # no crash - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 sobjects/textparsers.py --- a/sobjects/textparsers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,90 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Some parsers to detect action to do from text - -Currently only a parser to look for state change instruction is provided. -Take care to security when you're using it, think about the user that -will provide the text to analyze... -""" - -__docformat__ = "restructuredtext en" - -import re - -from cubicweb import UnknownEid -from cubicweb.view import Component - - -class TextAnalyzer(Component): - """analyze and extract information from plain text by calling registered - text parsers - """ - __regid__ = 'textanalyzer' - - def parse(self, caller, text): - for parsercls in self._cw.vreg['components'].get('textparser', ()): - parsercls(self._cw).parse(caller, text) - - -class TextParser(Component): - """base class for text parser, responsible to extract some information - from plain text. When something is done, it usually call the - - .fire_event(something, {event args}) - - method on the caller. - """ - __regid__ = 'textparser' - __abstract__ = True - - def parse(self, caller, text): - raise NotImplementedError - - -class ChangeStateTextParser(TextParser): - """search some text for change state instruction in the form - - :: #? 
- """ - instr_rgx = re.compile(':(\w+):\s*#?(\d+)', re.U) - - def parse(self, caller, text): - for trname, eid in self.instr_rgx.findall(text): - try: - entity = self._cw.entity_from_eid(int(eid)) - except UnknownEid: - self.error("can't get entity with eid %s", eid) - continue - if not hasattr(entity, 'in_state'): - self.error('bad change state instruction for eid %s', eid) - continue - iworkflowable = entity.cw_adapt_to('IWorkflowable') - if iworkflowable.current_workflow: - tr = iworkflowable.current_workflow.transition_by_name(trname) - else: - tr = None - if tr and tr.may_be_fired(entity.eid): - try: - trinfo = iworkflowable.fire_transition(tr) - caller.fire_event('state-changed', {'trinfo': trinfo, - 'entity': entity}) - except Exception: - self.exception('while changing state of %s', entity) - else: - self.error("can't pass transition %s on entity %s", - trname, entity) diff -r 058bb3dc685f -r 0b59724cb3f2 spa2rql.py --- a/spa2rql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,220 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""SPARQL -> RQL translator""" - -from logilab.common import make_domains -from rql import TypeResolverException -from fyzz.yappsparser import parse -from fyzz import ast - -from cubicweb.xy import xy - - -class UnsupportedQuery(Exception): pass - -def order_limit_offset(sparqlst): - addons = '' - if sparqlst.orderby: - sortterms = ', '.join('%s %s' % (var.name.upper(), ascdesc.upper()) - for var, ascdesc in sparqlst.orderby) - addons += ' ORDERBY %s' % sortterms - if sparqlst.limit: - addons += ' LIMIT %s' % sparqlst.limit - if sparqlst.offset: - addons += ' OFFSET %s' % sparqlst.offset - return addons - - -class QueryInfo(object): - """wrapper class containing necessary information to generate a RQL query - from a sparql syntax tree - """ - def __init__(self, sparqlst): - self.sparqlst = sparqlst - if sparqlst.selected == ['*']: - self.selection = [var.upper() for var in sparqlst.variables] - else: - self.selection = [var.name.upper() for var in sparqlst.selected] - self.possible_types = {} - self.infer_types_info = [] - self.union_params = [] - self.restrictions = [] - self.literals = {} - self._litcount = 0 - - def add_literal(self, value): - key = chr(ord('a') + self._litcount) - self._litcount += 1 - self.literals[key] = value - return key - - def set_possible_types(self, var, varpossibletypes): - """set/restrict possible types for the given variable. - - :return: True if something changed, else false. 
- :raise: TypeResolverException if no more type allowed - """ - varpossibletypes = set(varpossibletypes) - try: - ctypes = self.possible_types[var] - nbctypes = len(ctypes) - ctypes &= varpossibletypes - if not ctypes: - raise TypeResolverException('No possible type') - return len(ctypes) != nbctypes - except KeyError: - self.possible_types[var] = varpossibletypes - return True - - def infer_types(self): - # XXX should use something similar to rql.analyze for proper type inference - modified = True - # loop to infer types until nothing changed - while modified: - modified = False - for yams_predicates, subjvar, obj in self.infer_types_info: - nbchoices = len(yams_predicates) - # get possible types for the subject variable, according to the - # current predicate - svptypes = set(s for s, r, o in yams_predicates) - if not '*' in svptypes: - if self.set_possible_types(subjvar, svptypes): - modified = True - # restrict predicates according to allowed subject var types - if subjvar in self.possible_types: - yams_predicates[:] = [(s, r, o) for s, r, o in yams_predicates - if s == '*' or s in self.possible_types[subjvar]] - if isinstance(obj, ast.SparqlVar): - # make a valid rql var name - objvar = obj.name.upper() - # get possible types for the object variable, according to - # the current predicate - ovptypes = set(o for s, r, o in yams_predicates) - if not '*' in ovptypes: - if self.set_possible_types(objvar, ovptypes): - modified = True - # restrict predicates according to allowed object var types - if objvar in self.possible_types: - yams_predicates[:] = [(s, r, o) for s, r, o in yams_predicates - if o == '*' or o in self.possible_types[objvar]] - # ensure this still make sense - if not yams_predicates: - raise TypeResolverException('No yams predicate') - if len(yams_predicates) != nbchoices: - modified = True - - def build_restrictions(self): - # now, for each predicate - for yams_predicates, subjvar, obj in self.infer_types_info: - rel = yams_predicates[0] - # if there are several yams relation type equivalences, we will have - # to generate several unioned rql queries - for s, r, o in yams_predicates[1:]: - if r != rel[1]: - self.union_params.append((yams_predicates, subjvar, obj)) - break - # else we can simply add it to base rql restrictions - else: - restr = self.build_restriction(subjvar, rel[1], obj) - self.restrictions.append(restr) - - def build_restriction(self, subjvar, rtype, obj): - if isinstance(obj, ast.SparqlLiteral): - key = self.add_literal(obj.value) - objvar = '%%(%s)s' % key - else: - assert isinstance(obj, ast.SparqlVar) - # make a valid rql var name - objvar = obj.name.upper() - # else we can simply add it to base rql restrictions - return '%s %s %s' % (subjvar, rtype, objvar) - - def finalize(self): - """return corresponding rql query (string) / args (dict)""" - for varname, ptypes in self.possible_types.items(): - if len(ptypes) == 1: - self.restrictions.append('%s is %s' % (varname, next(iter(ptypes)))) - unions = [] - for releq, subjvar, obj in self.union_params: - thisunions = [] - for st, rt, ot in releq: - thisunions.append([self.build_restriction(subjvar, rt, obj)]) - if st != '*': - thisunions[-1].append('%s is %s' % (subjvar, st)) - if isinstance(obj, ast.SparqlVar) and ot != '*': - objvar = obj.name.upper() - thisunions[-1].append('%s is %s' % (objvar, objvar)) - if not unions: - unions = thisunions - else: - unions = zip(*make_domains([unions, thisunions])) - selection = 'Any ' + ', '.join(self.selection) - sparqlst = self.sparqlst - if sparqlst.distinct: 
- selection = 'DISTINCT ' + selection - if unions: - baserql = '%s WHERE %s' % (selection, ', '.join(self.restrictions)) - rqls = ['(%s, %s)' % (baserql, ', '.join(unionrestrs)) - for unionrestrs in unions] - rql = ' UNION '.join(rqls) - if sparqlst.orderby or sparqlst.limit or sparqlst.offset: - rql = '%s%s WITH %s BEING (%s)' % ( - selection, order_limit_offset(sparqlst), - ', '.join(self.selection), rql) - else: - rql = '%s%s WHERE %s' % (selection, order_limit_offset(sparqlst), - ', '.join(self.restrictions)) - return rql, self.literals - - -class Sparql2rqlTranslator(object): - def __init__(self, yschema): - self.yschema = yschema - - def translate(self, sparql): - sparqlst = parse(sparql) - if sparqlst.type != 'select': - raise UnsupportedQuery() - qi = QueryInfo(sparqlst) - for subj, predicate, obj in sparqlst.where: - if not isinstance(subj, ast.SparqlVar): - raise UnsupportedQuery() - # make a valid rql var name - subjvar = subj.name.upper() - if predicate in [('', 'a'), - ('http://www.w3.org/1999/02/22-rdf-syntax-ns#', 'type')]: - # special 'is' relation - if not isinstance(obj, tuple): - raise UnsupportedQuery() - # restrict possible types for the subject variable - qi.set_possible_types( - subjvar, xy.yeq(':'.join(obj), isentity=True)) - else: - # 'regular' relation (eg not 'is') - if not isinstance(predicate, tuple): - raise UnsupportedQuery() - # list of 3-uple - # (yams etype (subject), yams rtype, yams etype (object)) - # where subject / object entity type may '*' if not specified - yams_predicates = xy.yeq(':'.join(predicate)) - qi.infer_types_info.append((yams_predicates, subjvar, obj)) - if not isinstance(obj, (ast.SparqlLiteral, ast.SparqlVar)): - raise UnsupportedQuery() - qi.infer_types() - qi.build_restrictions() - return qi diff -r 058bb3dc685f -r 0b59724cb3f2 statsd_logger.py --- a/statsd_logger.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,135 +0,0 @@ -# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -"""Simple statsd_ logger for cubicweb. - -This module is meant to be configured by setting a couple of global variables: - -- ``bucket`` global variable will be used as statsd bucket in every -statsd_ UDP sent packet. 
- -`- `address`` is a pair (IP, port) specifying the address of the -statsd_ server - - -There are 3 kinds of statds_ message:: - -- ``statsd_c(context, n)`` is a simple function to send statsd_ - counter-type of messages like:: - - .:|c\n - -- ``statsd_g(context, value)`` to send statsd_ gauge-type of messages - like:: - - .:|g\n - -- ``statsd_t(context, ms)`` to send statsd_ time-type of messages - like:: - - .:|ms\n - -There is also a decorator (``statsd_timeit``) that may be used to -measure and send to the statsd_ server the time passed in a function -or a method and the number of calls. It will send a message like:: - - .:|ms\n.:1|c\n - - -.. _statsd: https://github.com/etsy/statsd - -""" - -__docformat__ = "restructuredtext en" - -import time -import socket - -_bucket = 'cubicweb' -_address = None -_socket = None - - -def setup(bucket, address): - """Configure the statsd endpoint - - :param bucket: the name of the statsd bucket that will be used to - build messages. - - :param address: the UDP endpoint of the statsd server. Must a - couple (ip, port). - """ - global _bucket, _address, _socket - packed = None - for family in (socket.AF_INET6, socket.AF_INET): - try: - packed = socket.inet_pton(family, address[0]) - break - except socket.error: - continue - if packed is None: - return - _bucket, _address = bucket, address - _socket = socket.socket(family, socket.SOCK_DGRAM) - - -def statsd_c(context, n=1): - if _address is not None: - _socket.sendto('{0}.{1}:{2}|c\n'.format(_bucket, context, n), _address) - - -def statsd_g(context, value): - if _address is not None: - _socket.sendto('{0}.{1}:{2}|g\n'.format(_bucket, context, value), _address) - - -def statsd_t(context, value): - if _address is not None: - _socket.sendto('{0}.{1}:{2:.4f}|ms\n'.format(_bucket, context, value), _address) - - -class statsd_timeit(object): - __slots__ = ('callable',) - - def __init__(self, callableobj): - self.callable = callableobj - - @property - def __doc__(self): - return self.callable.__doc__ - @property - def __name__(self): - return self.callable.__name__ - - def __call__(self, *args, **kw): - if _address is None: - return self.callable(*args, **kw) - t0 = time.time() - try: - return self.callable(*args, **kw) - finally: - dt = 1000*(time.time()-t0) - msg = '{0}.{1}:{2:.4f}|ms\n{0}.{1}:1|c\n'.format(_bucket, self.__name__, dt) - _socket.sendto(msg, _address) - - def __get__(self, obj, objtype): - """Support instance methods.""" - if obj is None: # class method or some already wrapped method - return self - import functools - return functools.partial(self.__call__, obj) diff -r 058bb3dc685f -r 0b59724cb3f2 tags.py --- a/tags.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. 
If not, see . -"""helper classes to generate simple (X)HTML tags""" - -__docformat__ = "restructuredtext en" - -from cubicweb.uilib import simple_sgml_tag, sgml_attributes - -class tag(object): - def __init__(self, name, escapecontent=True): - self.name = name - self.escapecontent = escapecontent - - def __call__(self, __content=None, **attrs): - attrs.setdefault('escapecontent', self.escapecontent) - return simple_sgml_tag(self.name, __content, **attrs) - -button = tag('button') -input = tag('input') -textarea = tag('textarea') -a = tag('a') -span = tag('span') -div = tag('div', False) -img = tag('img') -label = tag('label') -option = tag('option') -h1 = tag('h1') -h2 = tag('h2') -h3 = tag('h3') -h4 = tag('h4') -h5 = tag('h5') -tr = tag('tr') -th = tag('th') -td = tag('td') -iframe = tag('iframe') - -def select(name, id=None, multiple=False, options=[], **attrs): - if multiple: - attrs['multiple'] = 'multiple' - if id: - attrs['id'] = id - attrs['name'] = name - html = [u'') - return u'\n'.join(html) diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/bootstrap_cubes --- a/test/data/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card, tag, localperms diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/comment/__init__.py --- a/test/data/cubes/comment/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/comment/__pkginfo__.py --- a/test/data/cubes/comment/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,25 +0,0 @@ -# pylint: disable=W0622 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
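statsd_logger.py above exposes setup() plus three send helpers and a timing decorator. A minimal usage sketch (the bucket name, UDP endpoint and decorated function are illustrative, and the cubicweb.statsd_logger import path assumes the new package layout introduced by this changeset):

    from cubicweb import statsd_logger as statsd

    statsd.setup('myapp', ('127.0.0.1', 8125))   # bucket and statsd UDP endpoint

    @statsd.statsd_timeit
    def render_page():
        pass                                     # each call sends a ms timer and a counter

    statsd.statsd_c('hits')                      # counter: myapp.hits:1|c
    statsd.statsd_g('sessions', 42)              # gauge:   myapp.sessions:42|g
    statsd.statsd_t('render', 12.5)              # timer:   myapp.render:12.5000|ms

Similarly, the tag helpers defined in tags.py above are thin wrappers around uilib.simple_sgml_tag; a rough sketch (rendered strings in the comments are approximate):

    from cubicweb import tags

    tags.a(u'CubicWeb', href=u'http://www.cubicweb.org')
    # -> u'<a href="http://www.cubicweb.org">CubicWeb</a>'
    tags.img(src=u'logo.png', alt=u'logo')       # content-less tags are self-closed
    tags.div(tags.span(u'hello'), id=u'box')     # div() does not escape its content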
-"""cubicweb-comment packaging information""" - -distname = "cubicweb-comment" -modname = distname.split('-', 1)[1] - -numversion = (1, 4, 3) -version = '.'.join(str(num) for num in numversion) diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/email/__init__.py --- a/test/data/cubes/email/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/email/__pkginfo__.py --- a/test/data/cubes/email/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -# pylint: disable=W0622 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb-email packaging information""" - -distname = "cubicweb-email" -modname = distname.split('-', 1)[1] - -numversion = (1, 4, 3) -version = '.'.join(str(num) for num in numversion) - - -__depends__ = {'cubicweb': None, - 'cubicweb-file': None} -__recommends__ = {'cubicweb-comment': None} diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/email/entities.py --- a/test/data/cubes/email/entities.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -"test" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/email/hooks.py --- a/test/data/cubes/email/hooks.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -"test" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/email/views/__init__.py --- a/test/data/cubes/email/views/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -"test" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/file/__init__.py --- a/test/data/cubes/file/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/file/__pkginfo__.py --- a/test/data/cubes/file/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,25 +0,0 @@ -# pylint: disable=W0622 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb-file packaging information""" - -distname = "cubicweb-file" -modname = distname.split('-', 1)[1] - -numversion = (1, 4, 3) -version = '.'.join(str(num) for num in numversion) diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/file/entities/__init__.py --- a/test/data/cubes/file/entities/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -"test" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/file/hooks/__init__.py --- a/test/data/cubes/file/hooks/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -"test" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/file/views.py --- a/test/data/cubes/file/views.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -"test" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/forge/__init__.py --- a/test/data/cubes/forge/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/forge/__pkginfo__.py --- a/test/data/cubes/forge/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,32 +0,0 @@ -# pylint: disable=W0622 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb-forge packaging information""" - -distname = "cubicweb-forge" -modname = distname.split('-', 1)[1] - -numversion = (1, 4, 3) -version = '.'.join(str(num) for num in numversion) - - -__depends__ = {'cubicweb': None, - 'cubicweb-file': None, - 'cubicweb-email': None, - 'cubicweb-comment': None, - } diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/mycube/__init__.py --- a/test/data/cubes/mycube/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""mycube's __init__ - -""" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/cubes/mycube/__pkginfo__.py --- a/test/data/cubes/mycube/__pkginfo__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" - -""" -distname = 'cubicweb-mycube' diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/entities.py --- a/test/data/entities.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,42 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from cubicweb.entities import AnyEntity, fetch_config, adapters -from cubicweb.predicates import is_instance - - -class Societe(AnyEntity): - __regid__ = 'Societe' - fetch_attrs = ('nom',) - -class Personne(Societe): - """customized class forne Person entities""" - __regid__ = 'Personne' - fetch_attrs, cw_fetch_order = fetch_config(['nom', 'prenom']) - rest_attr = 'nom' - -class Ami(Societe): - __regid__ = 'Ami' - rest_attr = 'nom' - -class Note(AnyEntity): - __regid__ = 'Note' - - -class FakeFileIDownloadableAdapter(adapters.IDownloadableAdapter): - __select__ = is_instance('FakeFile') diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/erqlexpr_on_ertype.py --- a/test/data/erqlexpr_on_ertype.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" - -""" -from yams.buildobjs import EntityType, RelationType, SubjectRelation -from cubicweb.schema import ERQLExpression - -class ToTo(EntityType): - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - toto = SubjectRelation('TuTu') - -class TuTu(EntityType): - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - -class toto(RelationType): - __permissions__ = { - 'read': ('managers', ), - 'add': ('managers', ERQLExpression('S bla Y'),), - 'delete': ('managers',), - } diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/lowered_etype.py --- a/test/data/lowered_etype.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ - -from yams.buildobjs import EntityType - -class my_etype(EntityType): - pass diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/migration/0.0.3_Any.py --- a/test/data/migration/0.0.3_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -coucou diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/migration/0.0.4_Any.py --- a/test/data/migration/0.0.4_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -coucou diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/migration/0.1.0_Any.py --- a/test/data/migration/0.1.0_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -coucou diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/migration/0.1.0_common.py --- a/test/data/migration/0.1.0_common.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""common to all configuration - -""" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/migration/0.1.0_repository.py --- a/test/data/migration/0.1.0_repository.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""repository specific - -""" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/migration/0.1.2_Any.py --- a/test/data/migration/0.1.2_Any.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -coucou diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rewrite/__init__.py --- a/test/data/rewrite/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rewrite/bootstrap_cubes --- a/test/data/rewrite/bootstrap_cubes Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card,localperms diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rewrite/schema.py --- a/test/data/rewrite/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,124 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-from yams.buildobjs import (EntityType, RelationDefinition, String, SubjectRelation, - ComputedRelation, Int) -from cubicweb.schema import ERQLExpression - - -class Person(EntityType): - name = String() - - -class Affaire(EntityType): - __permissions__ = { - 'read': ('managers', - ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), - 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), - 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), - 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), - } - ref = String(fulltextindexed=True, indexed=True, maxsize=16) - documented_by = SubjectRelation('Card', cardinality='1*') - concerne = SubjectRelation(('Societe', 'Note'), cardinality='1*') - - -class Societe(EntityType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'add': ('managers', 'users',) - } - nom = String() - - -class Division(Societe): - __specializes_schema__ = True - - -class Note(EntityType): - pass - - -class require_permission(RelationDefinition): - subject = ('Card', 'Note') - object = 'CWPermission' - - -class require_state(RelationDefinition): - subject = 'CWPermission' - object = 'State' - - -class inlined_card(RelationDefinition): - subject = 'Affaire' - object = 'Card' - inlined = True - cardinality = '?*' - -class inlined_note(RelationDefinition): - subject = 'Card' - object = 'Note' - inlined = True - cardinality = '?*' - -class inlined_affaire(RelationDefinition): - subject = 'Note' - object = 'Affaire' - inlined = True - cardinality = '?*' - -class responsable(RelationDefinition): - subject = 'Societe' - object = 'CWUser' - inlined = True - cardinality = '1*' - -class Contribution(EntityType): - code = Int() - -class ArtWork(EntityType): - name = String() - -class Role(EntityType): - name = String() - -class contributor(RelationDefinition): - subject = 'Contribution' - object = 'Person' - cardinality = '1*' - inlined = True - -class manifestation(RelationDefinition): - subject = 'Contribution' - object = 'ArtWork' - -class role(RelationDefinition): - subject = 'Contribution' - object = 'Role' - -class illustrator_of(ComputedRelation): - rule = ('C is Contribution, C contributor S, C manifestation O, ' - 'C role R, R name "illustrator"') - -class participated_in(ComputedRelation): - rule = 'S contributor O' - -class match(RelationDefinition): - subject = 'ArtWork' - object = 'Note' diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rqlexpr_on_computedrel.py --- a/test/data/rqlexpr_on_computedrel.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -from yams.buildobjs import ComputedRelation, EntityType, RelationDefinition -from cubicweb.schema import RRQLExpression - -class Subject(EntityType): - pass - -class Object(EntityType): - pass - -class relation(RelationDefinition): - subject = 'Subject' - object = 'Object' - -class computed(ComputedRelation): - rule = 'S relation O' - __permissions__ = {'read': (RRQLExpression('S is ET'),)} diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rqlexpr_on_ertype_read.py --- a/test/data/rqlexpr_on_ertype_read.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from yams.buildobjs import EntityType, RelationType, SubjectRelation -from cubicweb.schema import RRQLExpression - -class ToTo(EntityType): - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - toto = SubjectRelation('TuTu') - -class TuTu(EntityType): - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - -class toto(RelationType): - __permissions__ = { - 'read': ('managers', RRQLExpression('S bla Y'), ), - 'add': ('managers',), - 'delete': ('managers',), - } diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rrqlexpr_on_attr.py --- a/test/data/rrqlexpr_on_attr.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,36 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from yams.buildobjs import EntityType, RelationType, String -from cubicweb.schema import RRQLExpression - -class ToTo(EntityType): - __permissions__ = { - 'read': ('managers',), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - attr = String() - -class attr(RelationType): - __permissions__ = { - 'read': ('managers', ), - 'update': ('managers', RRQLExpression('S bla Y'),), - 'add': ('managers', RRQLExpression('S bla Y'),), - } diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/rrqlexpr_on_eetype.py --- a/test/data/rrqlexpr_on_eetype.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from yams.buildobjs import EntityType, String -from cubicweb.schema import RRQLExpression - -class ToTo(EntityType): - __permissions__ = { - 'read': ('managers', RRQLExpression('S bla Y'),), - 'add': ('managers',), - 'update': ('managers',), - 'delete': ('managers',), - } - attr = String() diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/schema.py --- a/test/data/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,113 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from yams.buildobjs import (EntityType, String, RichString, Bytes, - SubjectRelation, RelationDefinition) - -from cubicweb.schema import (WorkflowableEntityType, - RQLConstraint, RQLVocabularyConstraint) - - -from cubicweb import _ - - -class Personne(EntityType): - nom = String(required=True) - prenom = String() - type = String() - travaille = SubjectRelation('Societe') - evaluee = SubjectRelation(('Note', 'Personne')) - connait = SubjectRelation( - 'Personne', symmetric=True, - constraints=[ - RQLConstraint('NOT S identity O'), - # conflicting constraints, see cw_unrelated_rql tests in - # unittest_entity.py - RQLVocabularyConstraint('NOT (S connait P, P nom "toto")'), - RQLVocabularyConstraint('S travaille P, P nom "tutu"')]) - actionnaire = SubjectRelation('Societe', cardinality='??', - constraints=[RQLConstraint('NOT EXISTS(O contrat_exclusif S)')]) - dirige = SubjectRelation('Societe', cardinality='??', - constraints=[RQLConstraint('S actionnaire O')]) - associe = SubjectRelation('Personne', cardinality='?*', - constraints=[RQLConstraint('S actionnaire SOC, O actionnaire SOC')]) - -class Ami(EntityType): - """A Person, for which surname is not required""" - prenom = String() - nom = String() - -class Societe(EntityType): - nom = String() - evaluee = SubjectRelation('Note') - fournit = SubjectRelation(('Service', 'Produit'), cardinality='1*') - contrat_exclusif = SubjectRelation('Personne', cardinality='??') - -class Service(EntityType): - fabrique_par = SubjectRelation('Personne', cardinality='1*') - - -class Produit(EntityType): - fabrique_par = SubjectRelation('Usine', cardinality='1*', inlined=True) - - -class Usine(EntityType): - lieu = String(required=True) - - -class Note(EntityType): - type = String() - ecrit_par = SubjectRelation('Personne') - - -class SubNote(Note): - __specializes_schema__ = True - description = String() - - -class 
tags(RelationDefinition): - subject = 'Tag' - object = ('Personne', 'Note') - - -class evaluee(RelationDefinition): - subject = 'CWUser' - object = 'Note' - - -class StateFull(WorkflowableEntityType): - name = String() - - -class Reference(EntityType): - nom = String(unique=True) - ean = String(unique=True, required=True) - - -class FakeFile(EntityType): - title = String(fulltextindexed=True, maxsize=256) - data = Bytes(required=True, fulltextindexed=True, description=_('file to upload')) - data_format = String(required=True, maxsize=128, - description=_('MIME type of the file. Should be dynamically set at upload time.')) - data_encoding = String(maxsize=32, - description=_('encoding of the file when it applies (e.g. text). ' - 'Should be dynamically set at upload time.')) - data_name = String(required=True, fulltextindexed=True, - description=_('name of the file. Should be dynamically set at upload time.')) - description = RichString(fulltextindexed=True, internationalizable=True, - default_format='text/rest') diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/scripts/script1.py --- a/test/data/scripts/script1.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -from os.path import join -assert __file__.endswith(join('scripts', 'script1.py')), __file__ -assert '__main__' == __name__, __name__ -assert [] == __args__, __args__ diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/scripts/script2.py --- a/test/data/scripts/script2.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -from os.path import join -assert __file__.endswith(join('scripts', 'script2.py')), __file__ -assert '__main__' == __name__, __name__ -assert ['-v'] == __args__, __args__ diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/scripts/script3.py --- a/test/data/scripts/script3.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -from os.path import join -assert __file__.endswith(join('scripts', 'script3.py')), __file__ -assert '__main__' == __name__, __name__ -assert ['-vd', '-f', 'FILE.TXT'] == __args__, __args__ diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/server_migration/2.10.2_Any.sql diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/server_migration/2.5.0_Any.sql diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/server_migration/2.6.0_Any.sql diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/server_migration/bootstrapmigration_repository.py --- a/test/data/server_migration/bootstrapmigration_repository.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""allways executed before all others in server migration - -""" diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/uppered_rtype.py --- a/test/data/uppered_rtype.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ - -from yams.buildobjs import RelationDefinition - -class ARelation(RelationDefinition): - subject = 'CWUser' - object = 'CWGroup' diff -r 058bb3dc685f -r 0b59724cb3f2 test/data/views.py --- a/test/data/views.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,33 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from cubicweb.web.views import xmlrss -xmlrss.RSSIconBox.visible = True - - -from cubicweb.predicates import match_user_groups -from cubicweb.server import Service - - -class TestService(Service): - __regid__ = 'test_service' - __select__ = Service.__select__ & match_user_groups('managers') - passed_here = [] - - def call(self, msg): - self.passed_here.append(msg) - return 'babar' diff -r 058bb3dc685f -r 0b59724cb3f2 test/data_schemareader/schema.py --- a/test/data_schemareader/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -from cubicweb.schemas.base import in_group, CWSourceSchemaConfig -# copy __permissions__ to avoid modifying a shared dictionary -in_group.__permissions__ = in_group.__permissions__.copy() -in_group.__permissions__['read'] = ('managers',) - -cw_for_source = CWSourceSchemaConfig.get_relation('cw_for_source') -cw_for_source.__permissions__ = {'read': ('managers', 'users'), - 'add': ('managers',), - 'delete': ('managers',)} diff -r 058bb3dc685f -r 0b59724cb3f2 test/requirements.txt --- a/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -Pygments -#fyzz XXX pip install fails -cubicweb-card -cubicweb-file -cubicweb-localperms -cubicweb-tag diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_binary.py --- a/test/unittest_binary.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,54 +0,0 @@ -from six import PY2 - -from unittest import TestCase -from tempfile import NamedTemporaryFile -import os.path as osp - -from logilab.common.shellutils import tempdir -from cubicweb import Binary - - -class BinaryTC(TestCase): - def test_init(self): - Binary() - Binary(b'toto') - Binary(bytearray(b'toto')) - if PY2: - Binary(buffer('toto')) - else: - Binary(memoryview(b'toto')) - with self.assertRaises((AssertionError, TypeError)): - # TypeError is raised by BytesIO if python runs with -O - Binary(u'toto') - - def test_write(self): - b = Binary() - b.write(b'toto') - b.write(bytearray(b'toto')) - if PY2: - b.write(buffer('toto')) - else: - b.write(memoryview(b'toto')) - with 
self.assertRaises((AssertionError, TypeError)): - # TypeError is raised by BytesIO if python runs with -O - b.write(u'toto') - - def test_gzpickle_roundtrip(self): - old = (u'foo', b'bar', 42, {}) - new = Binary.zpickle(old).unzpickle() - self.assertEqual(old, new) - self.assertIsNot(old, new) - - def test_from_file_to_file(self): - with tempdir() as dpath: - fpath = osp.join(dpath, 'binary.bin') - with open(fpath, 'wb') as fobj: - Binary(b'binaryblob').to_file(fobj) - - bobj = Binary.from_file(fpath) - self.assertEqual(bobj.getvalue(), b'binaryblob') - - -if __name__ == '__main__': - from unittest import main - main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,222 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb.cwconfig unit tests""" - -import sys -import os -import tempfile -from os.path import dirname, join, abspath - -from logilab.common.modutils import cleanup_sys_modules -from logilab.common.testlib import (TestCase, unittest_main, - with_tempdir) -from logilab.common.changelog import Version - -from cubicweb.devtools import ApptestConfiguration -from cubicweb.cwconfig import _find_prefix - -def unabsolutize(path): - parts = path.split(os.sep) - for i, part in reversed(tuple(enumerate(parts))): - if part.startswith('cubicweb') or part == 'cubes': - return '/'.join(parts[i+1:]) - raise Exception('duh? 
%s' % path) - -CUSTOM_CUBES_DIR = abspath(join(dirname(__file__), 'data', 'cubes')) - - -class CubicWebConfigurationTC(TestCase): - def setUp(self): - cleanup_sys_modules([CUSTOM_CUBES_DIR, ApptestConfiguration.CUBES_DIR]) - self.config = ApptestConfiguration('data', apphome=self.datadir) - self.config._cubes = ('email', 'file') - - def tearDown(self): - ApptestConfiguration.CUBES_PATH = [] - - def test_reorder_cubes(self): - self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] - self.config.adjust_sys_path() - # forge depends on email and file and comment - # email depends on file - self.assertEqual(self.config.reorder_cubes(['file', 'email', 'forge']), - ('forge', 'email', 'file')) - self.assertEqual(self.config.reorder_cubes(['email', 'file', 'forge']), - ('forge', 'email', 'file')) - self.assertEqual(self.config.reorder_cubes(['email', 'forge', 'file']), - ('forge', 'email', 'file')) - self.assertEqual(self.config.reorder_cubes(['file', 'forge', 'email']), - ('forge', 'email', 'file')) - self.assertEqual(self.config.reorder_cubes(['forge', 'file', 'email']), - ('forge', 'email', 'file')) - self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file')), - ('forge', 'email', 'file')) - - def test_reorder_cubes_recommends(self): - self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] - self.config.adjust_sys_path() - from cubes.comment import __pkginfo__ as comment_pkginfo - comment_pkginfo.__recommends_cubes__ = {'file': None} - try: - # email recommends comment - # comment recommends file - self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file', 'comment')), - ('forge', 'email', 'comment', 'file')) - self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'comment', 'file')), - ('forge', 'email', 'comment', 'file')) - self.assertEqual(self.config.reorder_cubes(('forge', 'comment', 'email', 'file')), - ('forge', 'email', 'comment', 'file')) - self.assertEqual(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')), - ('forge', 'email', 'comment', 'file')) - finally: - comment_pkginfo.__recommends_cubes__ = {} - - def test_expand_cubes(self): - self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] - self.config.adjust_sys_path() - self.assertEqual(self.config.expand_cubes(('email', 'comment')), - ['email', 'comment', 'file']) - - def test_appobjects_path(self): - self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] - self.config.adjust_sys_path() - path = [unabsolutize(p) for p in self.config.appobjects_path()] - self.assertEqual(path[0], 'entities') - self.assertCountEqual(path[1:4], ['web/views', 'sobjects', 'hooks']) - self.assertEqual(path[4], 'file/entities') - self.assertCountEqual(path[5:7], ['file/views.py', 'file/hooks']) - self.assertEqual(path[7], 'email/entities.py') - self.assertCountEqual(path[8:10], ['email/views', 'email/hooks.py']) - self.assertEqual(path[10:], ['test/data/entities.py', 'test/data/views.py']) - - def test_cubes_path(self): - # make sure we don't import the email cube, but the stdlib email package - import email - self.assertNotEqual(dirname(email.__file__), self.config.CUBES_DIR) - self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] - self.assertEqual(self.config.cubes_search_path(), - [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) - self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR, - self.config.CUBES_DIR, 'unexistant'] - # filter out unexistant and duplicates - self.assertEqual(self.config.cubes_search_path(), - [CUSTOM_CUBES_DIR, - self.config.CUBES_DIR]) - self.assertIn('mycube', self.config.available_cubes()) 
- # test cubes python path - self.config.adjust_sys_path() - import cubes - self.assertEqual(cubes.__path__, self.config.cubes_search_path()) - # this import should succeed once path is adjusted - from cubes import mycube - self.assertEqual(mycube.__path__, [join(CUSTOM_CUBES_DIR, 'mycube')]) - # file cube should be overriden by the one found in data/cubes - sys.modules.pop('cubes.file', None) - del cubes.file - from cubes import file - self.assertEqual(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')]) - - -class FindPrefixTC(TestCase): - def make_dirs(self, *args): - path = join(tempfile.tempdir, *args) - if not os.path.exists(path): - os.makedirs(path) - return path - - def make_file(self, *args): - self.make_dirs(*args[: -1]) - file_path = join(tempfile.tempdir, *args) - file_obj = open(file_path, 'w') - file_obj.write('""" None """') - file_obj.close() - return file_path - - @with_tempdir - def test_samedir(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - self.assertEqual(_find_prefix(prefix), prefix) - - @with_tempdir - def test_samedir_filepath(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - file_path = self.make_file('bob.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_dir_inside_prefix(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - dir_path = self.make_dirs('bob') - self.assertEqual(_find_prefix(dir_path), prefix) - - @with_tempdir - def test_file_in_dir_inside_prefix(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - file_path = self.make_file('bob', 'toto.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_file_in_deeper_dir_inside_prefix(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_multiple_candidate_prefix(self): - self.make_dirs('share', 'cubicweb') - prefix = self.make_dirs('bob') - self.make_dirs('bob', 'share', 'cubicweb') - file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_sister_candidate_prefix(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - self.make_dirs('bob', 'share', 'cubicweb') - file_path = self.make_file('bell', 'toto.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_multiple_parent_candidate_prefix(self): - self.make_dirs('share', 'cubicweb') - prefix = self.make_dirs('share', 'cubicweb', 'bob') - self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb') - file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_upper_candidate_prefix(self): - prefix = tempfile.tempdir - self.make_dirs('share', 'cubicweb') - self.make_dirs('bell','bob', 'share', 'cubicweb') - file_path = self.make_file('bell', 'toto.py') - self.assertEqual(_find_prefix(file_path), prefix) - - @with_tempdir - def test_no_prefix(self): - prefix = tempfile.tempdir - self.assertEqual(_find_prefix(prefix), sys.prefix) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_cwctl.py --- a/test/unittest_cwctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,68 +0,0 @@ -# 
copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -import sys -import os -from os.path import join -from io import StringIO, BytesIO - -from six import PY2 - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.cwconfig import CubicWebConfiguration -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server.migractions import ServerMigrationHelper - -CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? - - -class CubicWebCtlTC(TestCase): - def setUp(self): - self.stream = BytesIO() if PY2 else StringIO() - sys.stdout = self.stream - def tearDown(self): - sys.stdout = sys.__stdout__ - - def test_list(self): - from cubicweb.cwctl import ListCommand - ListCommand(None).run([]) - - -class CubicWebShellTC(CubicWebTC): - - def test_process_script_args_context(self): - repo = self.repo - with self.admin_access.repo_cnx() as cnx: - mih = ServerMigrationHelper(None, repo=repo, cnx=cnx, - interactive=False, - # hack so it don't try to load fs schema - schema=1) - scripts = {'script1.py': list(), - 'script2.py': ['-v'], - 'script3.py': ['-vd', '-f', 'FILE.TXT'], - } - mih.cmd_process_script(join(self.datadir, 'scripts', 'script1.py'), - funcname=None) - for script, args in scripts.items(): - scriptname = os.path.join(self.datadir, 'scripts', script) - self.assertTrue(os.path.exists(scriptname)) - mih.cmd_process_script(scriptname, None, scriptargs=args) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_entity.py --- a/test/unittest_entity.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,848 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unit tests for cubicweb.web.views.entities module""" - -from datetime import datetime - -from six import text_type - -from logilab.common import tempattr -from logilab.common.decorators import clear_cache - -from cubicweb import Binary -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.mttransforms import HAS_TAL -from cubicweb.entity import can_use_rest_path -from cubicweb.entities import fetch_config -from cubicweb.uilib import soup2xhtml -from cubicweb.schema import RRQLExpression - -class EntityTC(CubicWebTC): - - def setUp(self): - super(EntityTC, self).setUp() - self.backup_dict = {} - for cls in self.vreg['etypes'].iter_classes(): - self.backup_dict[cls] = (cls.fetch_attrs, cls.cw_fetch_order) - - def tearDown(self): - super(EntityTC, self).tearDown() - for cls in self.vreg['etypes'].iter_classes(): - cls.fetch_attrs, cls.cw_fetch_order = self.backup_dict[cls] - - def test_no_prefill_related_cache_bug(self): - with self.admin_access.repo_cnx() as cnx: - usine = cnx.create_entity('Usine', lieu=u'Montbeliard') - produit = cnx.create_entity('Produit') - # usine was prefilled in glob_add_entity - # let's simulate produit creation without prefill - produit._cw_related_cache.clear() - # use add_relations - cnx.add_relations([('fabrique_par', [(produit.eid, usine.eid)])]) - self.assertEqual(1, len(usine.reverse_fabrique_par)) - self.assertEqual(1, len(produit.fabrique_par)) - - def test_boolean_value(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - self.assertTrue(e) - - def test_yams_inheritance(self): - from entities import Note - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('SubNote')(req) - self.assertIsInstance(e, Note) - e2 = self.vreg['etypes'].etype_class('SubNote')(req) - self.assertIs(e.__class__, e2.__class__) - - def test_has_eid(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - self.assertEqual(e.eid, None) - self.assertEqual(e.has_eid(), False) - e.eid = 'X' - self.assertEqual(e.has_eid(), False) - e.eid = 0 - self.assertEqual(e.has_eid(), True) - e.eid = 2 - self.assertEqual(e.has_eid(), True) - - def test_copy(self): - with self.admin_access.web_request() as req: - req.create_entity('Tag', name=u'x') - p = req.create_entity('Personne', nom=u'toto') - oe = req.create_entity('Note', type=u'x') - req.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}) - req.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) - e = req.create_entity('Note', type=u'z') - e.copy_relations(oe.eid) - self.assertEqual(len(e.ecrit_par), 1) - self.assertEqual(e.ecrit_par[0].eid, p.eid) - self.assertEqual(len(e.reverse_tags), 1) - # check meta-relations are not copied, set on commit - self.assertEqual(len(e.created_by), 0) - - def test_copy_with_nonmeta_composite_inlined(self): - with self.admin_access.web_request() as req: - p = req.create_entity('Personne', nom=u'toto') - oe = req.create_entity('Note', type=u'x') - self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' - req.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}) - e = req.create_entity('Note', type=u'z') - e.copy_relations(oe.eid) - self.assertFalse(e.ecrit_par) - self.assertTrue(oe.ecrit_par) - - def test_copy_with_composite(self): - with self.admin_access.web_request() as req: - adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X 
WHERE U login "admin"')[0][0] - e = req.execute('Any X WHERE X eid %(x)s', {'x': req.user.eid}).get_entity(0, 0) - self.assertEqual(e.use_email[0].address, "toto@logilab.org") - self.assertEqual(e.use_email[0].eid, adeleid) - usereid = req.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' - 'WHERE G name "users"')[0][0] - e = req.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) - e.copy_relations(req.user.eid) - self.assertFalse(e.use_email) - self.assertFalse(e.primary_email) - - def test_copy_with_non_initial_state(self): - with self.admin_access.web_request() as req: - user = req.execute('INSERT CWUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"', - {'pwd': 'toto'}).get_entity(0, 0) - req.cnx.commit() - user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') - req.cnx.commit() - eid2 = req.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] - e = req.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) - e.copy_relations(user.eid) - req.cnx.commit() - e.cw_clear_relation_cache('in_state', 'subject') - self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') - - def test_related_cache_both(self): - with self.admin_access.web_request() as req: - user = req.execute('Any X WHERE X eid %(x)s', {'x':req.user.eid}).get_entity(0, 0) - adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] - req.cnx.commit() - self.assertEqual(user._cw_related_cache, {}) - email = user.primary_email[0] - self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject']) - self.assertEqual(list(email._cw_related_cache), ['primary_email_object']) - groups = user.in_group - self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) - for group in groups: - self.assertNotIn('in_group_subject', group._cw_related_cache) - user.cw_clear_all_caches() - user.related('in_group', entities=True) - self.assertIn('in_group_subject', user._cw_related_cache) - user.cw_clear_all_caches() - user.related('in_group', targettypes=('CWGroup',), entities=True) - self.assertNotIn('in_group_subject', user._cw_related_cache) - - def test_related_limit(self): - with self.admin_access.web_request() as req: - p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - for tag in u'abcd': - req.create_entity('Tag', name=tag) - req.execute('SET X tags Y WHERE X is Tag, Y is Personne') - self.assertEqual(len(p.related('tags', 'object', limit=2)), 2) - self.assertEqual(len(p.related('tags', 'object')), 4) - p.cw_clear_all_caches() - self.assertEqual(len(p.related('tags', 'object', entities=True, limit=2)), 2) - self.assertEqual(len(p.related('tags', 'object', entities=True)), 4) - - def test_related_targettypes(self): - with self.admin_access.web_request() as req: - p = req.create_entity('Personne', nom=u'Loxodonta', prenom=u'Babar') - n = req.create_entity('Note', type=u'scratch', ecrit_par=p) - t = req.create_entity('Tag', name=u'a tag', tags=(p, n)) - req.cnx.commit() - with self.admin_access.web_request() as req: - t = req.entity_from_eid(t.eid) - self.assertEqual(2, t.related('tags').rowcount) - self.assertEqual(1, t.related('tags', targettypes=('Personne',)).rowcount) - self.assertEqual(1, t.related('tags', targettypes=('Note',)).rowcount) - - def test_cw_instantiate_relation(self): - with self.admin_access.web_request() as req: - p1 = req.create_entity('Personne', nom=u'di') - 
p2 = req.create_entity('Personne', nom=u'mascio') - t = req.create_entity('Tag', name=u't0', tags=[]) - self.assertCountEqual(t.tags, []) - t = req.create_entity('Tag', name=u't1', tags=p1) - self.assertCountEqual(t.tags, [p1]) - t = req.create_entity('Tag', name=u't2', tags=p1.eid) - self.assertCountEqual(t.tags, [p1]) - t = req.create_entity('Tag', name=u't3', tags=[p1, p2.eid]) - self.assertCountEqual(t.tags, [p1, p2]) - - def test_cw_instantiate_reverse_relation(self): - with self.admin_access.web_request() as req: - t1 = req.create_entity('Tag', name=u't1') - t2 = req.create_entity('Tag', name=u't2') - p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1) - self.assertCountEqual(p.reverse_tags, [t1]) - p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1.eid) - self.assertCountEqual(p.reverse_tags, [t1]) - p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=[t1, t2.eid]) - self.assertCountEqual(p.reverse_tags, [t1, t2]) - - def test_fetch_rql(self): - Personne = self.vreg['etypes'].etype_class('Personne') - Societe = self.vreg['etypes'].etype_class('Societe') - Note = self.vreg['etypes'].etype_class('Note') - peschema = Personne.e_schema - seschema = Societe.e_schema - torestore = [] - for rdef, card in [(peschema.subjrels['travaille'].rdef(peschema, seschema), '1*'), - (peschema.subjrels['connait'].rdef(peschema, peschema), '11'), - (peschema.subjrels['evaluee'].rdef(peschema, Note.e_schema), '1*'), - (seschema.subjrels['evaluee'].rdef(seschema, Note.e_schema), '1*')]: - cm = tempattr(rdef, 'cardinality', card) - cm.__enter__() - torestore.append(cm) - try: - with self.admin_access.web_request() as req: - user = req.user - # testing basic fetch_attrs attribute - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC ORDERBY AB ' - 'WHERE X is_instance_of Personne, X modification_date AA, X nom AB, X prenom AC') - # testing unknown attributes - Personne.fetch_attrs = ('bloug', 'beep') - self.assertEqual(Personne.fetch_rql(user), 'Any X WHERE X is_instance_of Personne') - # testing one non final relation - Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD ORDERBY AA ' - 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') - # testing two non final relations - Personne.fetch_attrs = ('nom', 'prenom', 'travaille', 'evaluee') - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD,AE ORDERBY AB ' - 'WHERE X is_instance_of Personne, X evaluee AA?, X nom AB, X prenom AC, X travaille AD?, ' - 'AD nom AE') - # testing one non final relation with recursion - Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - Societe.fetch_attrs = ('nom', 'evaluee') - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ' - 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, ' - 'AC evaluee AD?, AD modification_date AE, AC nom AF') - # testing symmetric relation - Personne.fetch_attrs = ('nom', 'connait') - self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AB ' - 'WHERE X is_instance_of Personne, X connait AA?, X nom AB') - # testing optional relation - peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '?*' - Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - Societe.fetch_attrs = ('nom',) - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') - # testing 
relation with cardinality > 1 - peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '**' - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB') - # XXX test unauthorized attribute - finally: - # fetch_attrs restored by generic tearDown - for cm in torestore: - cm.__exit__(None, None, None) - - def test_related_rql_base(self): - Personne = self.vreg['etypes'].etype_class('Personne') - Note = self.vreg['etypes'].etype_class('Note') - SubNote = self.vreg['etypes'].etype_class('SubNote') - self.assertTrue(issubclass(self.vreg['etypes'].etype_class('SubNote'), Note)) - Personne.fetch_attrs, Personne.cw_fetch_order = fetch_config(('nom', 'type')) - Note.fetch_attrs, Note.cw_fetch_order = fetch_config(('type',)) - SubNote.fetch_attrs, SubNote.cw_fetch_order = fetch_config(('type',)) - with self.admin_access.web_request() as req: - p = req.create_entity('Personne', nom=u'pouet') - self.assertEqual(p.cw_related_rql('evaluee'), - 'Any X,AA,AB ORDERBY AB WHERE E eid %(x)s, E evaluee X, ' - 'X modification_date AA, X type AB') - n = req.create_entity('Note') - self.assertEqual(n.cw_related_rql('evaluee', role='object', - targettypes=('Societe', 'Personne')), - "Any X,AA ORDERBY AB DESC WHERE E eid %(x)s, X evaluee E, " - "X is IN(Personne, Societe), X nom AA, " - "X modification_date AB") - Personne.fetch_attrs, Personne.cw_fetch_order = fetch_config(('nom', )) - # XXX - self.assertEqual(p.cw_related_rql('evaluee'), - 'Any X,AA ORDERBY AA DESC ' - 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') - - tag = self.vreg['etypes'].etype_class('Tag')(req) - self.assertEqual(tag.cw_related_rql('tags', 'subject'), - 'Any X,AA ORDERBY AA DESC ' - 'WHERE E eid %(x)s, E tags X, X modification_date AA') - self.assertEqual(tag.cw_related_rql('tags', 'subject', ('Personne',)), - 'Any X,AA,AB ORDERBY AB ' - 'WHERE E eid %(x)s, E tags X, X is Personne, X modification_date AA, ' - 'X nom AB') - - def test_related_rql_ambiguous_cant_use_fetch_order(self): - with self.admin_access.web_request() as req: - tag = self.vreg['etypes'].etype_class('Tag')(req) - for ttype in self.schema['tags'].objects(): - self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) - self.assertEqual(tag.cw_related_rql('tags', 'subject'), - 'Any X,AA ORDERBY AA DESC ' - 'WHERE E eid %(x)s, E tags X, X modification_date AA') - - def test_related_rql_fetch_ambiguous_rtype(self): - etvreg = self.vreg['etypes'] - soc_etype = etvreg.etype_class('Societe') - with self.admin_access.web_request() as req: - soc = soc_etype(req) - soc_etype.fetch_attrs = ('fournit',) - etvreg.etype_class('Service').fetch_attrs = ('fabrique_par',) - etvreg.etype_class('Produit').fetch_attrs = ('fabrique_par',) - etvreg.etype_class('Usine').fetch_attrs = ('lieu',) - etvreg.etype_class('Personne').fetch_attrs = ('nom',) - self.assertEqual(soc.cw_related_rql('fournit', 'subject'), - 'Any X,A WHERE E eid %(x)s, E fournit X, X fabrique_par A') - - def test_unrelated_rql_security_1_manager(self): - with self.admin_access.web_request() as req: - user = req.user - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, - 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is_instance_of EmailAddress, O address AA, O alias AB, ' - 'O modification_date AC') - - def test_unrelated_rql_security_1_user(self): - with self.admin_access.web_request() as req: - self.create_user(req, 'toto') - with 
self.new_access('toto').web_request() as req: - user = req.user # XXX - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, - 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is_instance_of EmailAddress, O address AA, O alias AB, O modification_date AC') - user = req.execute('Any X WHERE X login "admin"').get_entity(0, 0) - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' - 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') - - def test_unrelated_rql_security_1_anon(self): - with self.new_access('anon').web_request() as req: - user = req.user - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' - 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') - - def test_unrelated_rql_security_2(self): - with self.admin_access.web_request() as req: - email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' - 'WHERE NOT S use_email O, O eid %(x)s, S is_instance_of CWUser, ' - 'S firstname AA, S login AB, S modification_date AC, S surname AD') - req.cnx.commit() - rperms = self.schema['EmailAddress'].permissions['read'] - clear_cache(self.schema['EmailAddress'], 'get_groups') - clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') - self.schema['EmailAddress'].permissions['read'] = ('managers', 'users', 'guests',) - try: - with self.new_access('anon').web_request() as req: - email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' - 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, ' - 'S firstname AA, S login AB, S modification_date AC, S surname AD, ' - 'AE eid %(AF)s, EXISTS(S identity AE, NOT AE in_group AG, AG name "guests", AG is CWGroup)') - finally: - clear_cache(self.schema['EmailAddress'], 'get_groups') - clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') - self.schema['EmailAddress'].permissions['read'] = rperms - - def test_cw_linkable_rql(self): - with self.admin_access.web_request() as req: - email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rql = email.cw_linkable_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' - 'WHERE O eid %(x)s, S is_instance_of CWUser, ' - 'S firstname AA, S login AB, S modification_date AC, S surname AD') - - def test_unrelated_rql_security_nonexistant(self): - with self.new_access('anon').web_request() as req: - email = self.vreg['etypes'].etype_class('EmailAddress')(req) - rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AB ' - 'WHERE S is CWUser, ' - 'S firstname AA, S login AB, S modification_date AC, S surname AD, ' - 'AE eid %(AF)s, EXISTS(S identity AE, NOT AE in_group AG, AG name "guests", AG is CWGroup)') - - def 
test_unrelated_rql_constraints_creation_subject(self): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] - self.assertEqual( - rql, 'Any O,AA,AB,AC ORDERBY AA DESC WHERE ' - 'O is_instance_of Personne, O modification_date AA, O nom AB, O prenom AC') - - def test_unrelated_rql_constraints_creation_object(self): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] - self.assertEqual( - rql, 'Any S,AA,AB,AC ORDERBY AA DESC WHERE ' - 'S is Personne, S modification_date AA, S nom AB, S prenom AC, ' - 'NOT (S connait AD, AD nom "toto"), AD is Personne, ' - 'EXISTS(S travaille AE, AE nom "tutu")') - - def test_unrelated_rql_security_rel_perms(self): - '''check `connait` add permission has no effect for a new entity on the - unrelated rql''' - rdef = self.schema['Personne'].rdef('connait') - perm_rrqle = RRQLExpression('U has_update_permission S') - with self.temporary_permissions((rdef, {'add': (perm_rrqle,)})): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AA DESC WHERE ' - 'O is_instance_of Personne, O modification_date AA, O nom AB, ' - 'O prenom AC') - - def test_unrelated_rql_constraints_edition_subject(self): - with self.admin_access.web_request() as req: - person = req.create_entity('Personne', nom=u'sylvain') - rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] - self.assertEqual( - rql, 'Any O,AA,AB,AC ORDERBY AA DESC WHERE ' - 'NOT S connait O, S eid %(x)s, O is Personne, ' - 'O modification_date AA, O nom AB, O prenom AC, ' - 'NOT S identity O') - - def test_unrelated_rql_constraints_edition_object(self): - with self.admin_access.web_request() as req: - person = req.create_entity('Personne', nom=u'sylvain') - rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] - self.assertEqual( - rql, 'Any S,AA,AB,AC ORDERBY AA DESC WHERE ' - 'NOT S connait O, O eid %(x)s, S is Personne, ' - 'S modification_date AA, S nom AB, S prenom AC, ' - 'NOT S identity O, NOT (S connait AD, AD nom "toto"), ' - 'EXISTS(S travaille AE, AE nom "tutu")') - - def test_unrelated_rql_s_linkto_s(self): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - soc = req.create_entity('Societe', nom=u'logilab') - lt_infos = {('actionnaire', 'subject'): [soc.eid]} - rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', - lt_infos=lt_infos) - self.assertEqual(u'Any O ORDERBY O WHERE O is Personne, ' - u'EXISTS(AA eid %(SOC)s, O actionnaire AA)', rql) - self.assertEqual({'SOC': soc.eid}, args) - - def test_unrelated_rql_s_linkto_o(self): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Societe').fetch_attrs = () - soc = req.create_entity('Societe', nom=u'logilab') - lt_infos = {('contrat_exclusif', 'object'): [soc.eid]} - rql, args = person.cw_unrelated_rql('actionnaire', 'Societe', 'subject', - lt_infos=lt_infos) - self.assertEqual(u'Any O ORDERBY O WHERE NOT A actionnaire O, ' - u'O is_instance_of Societe, NOT EXISTS(O eid %(O)s), ' - u'A is Personne', 
rql) - self.assertEqual({'O': soc.eid}, args) - - def test_unrelated_rql_o_linkto_s(self): - with self.admin_access.web_request() as req: - soc = self.vreg['etypes'].etype_class('Societe')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - person = req.create_entity('Personne', nom=u'florent') - lt_infos = {('contrat_exclusif', 'subject'): [person.eid]} - rql, args = soc.cw_unrelated_rql('actionnaire', 'Personne', 'object', - lt_infos=lt_infos) - self.assertEqual(u'Any S ORDERBY S WHERE NOT S actionnaire A, ' - u'S is_instance_of Personne, NOT EXISTS(S eid %(S)s), ' - u'A is Societe', rql) - self.assertEqual({'S': person.eid}, args) - - def test_unrelated_rql_o_linkto_o(self): - with self.admin_access.web_request() as req: - soc = self.vreg['etypes'].etype_class('Societe')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - person = req.create_entity('Personne', nom=u'florent') - lt_infos = {('actionnaire', 'object'): [person.eid]} - rql, args = soc.cw_unrelated_rql('dirige', 'Personne', 'object', - lt_infos=lt_infos) - self.assertEqual(u'Any S ORDERBY S WHERE NOT S dirige A, ' - u'S is_instance_of Personne, EXISTS(S eid %(S)s), ' - u'A is Societe', rql) - self.assertEqual({'S': person.eid}, args) - - def test_unrelated_rql_s_linkto_s_no_info(self): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - soc = req.create_entity('Societe', nom=u'logilab') - rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject') - self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) - self.assertEqual({}, args) - - def test_unrelated_rql_s_linkto_s_unused_info(self): - with self.admin_access.web_request() as req: - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - other_p = req.create_entity('Personne', nom=u'titi') - lt_infos = {('dirige', 'subject'): [other_p.eid]} - rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', - lt_infos=lt_infos) - self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) - - def test_unrelated_base(self): - with self.admin_access.web_request() as req: - p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - e = req.create_entity('Tag', name=u'x') - related = [r.eid for r in e.tags] - self.assertEqual(related, []) - unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] - self.assertIn(p.eid, unrelated) - req.execute('SET X tags Y WHERE X is Tag, Y is Personne') - e = req.execute('Any X WHERE X is Tag').get_entity(0, 0) - unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] - self.assertNotIn(p.eid, unrelated) - - def test_unrelated_limit(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Tag', name=u'x') - req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - req.create_entity('Personne', nom=u'thenault', prenom=u'sylvain') - self.assertEqual(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), - 1) - - def test_unrelated_security(self): - rperms = self.schema['EmailAddress'].permissions['read'] - clear_cache(self.schema['EmailAddress'], 'get_groups') - clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') - self.schema['EmailAddress'].permissions['read'] = ('managers', 'users', 'guests',) - try: - with self.admin_access.web_request() as req: - email = req.execute('INSERT EmailAddress X: X 
address "hop"').get_entity(0, 0) - rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEqual([x.login for x in rset.entities()], [u'admin', u'anon']) - user = req.user - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], [u'hop']) - self.create_user(req, 'toto') - with self.new_access('toto').web_request() as req: - email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEqual([x.login for x in rset.entities()], ['toto']) - user = req.user - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], ['hop']) - user = req.execute('Any X WHERE X login "admin"').get_entity(0, 0) - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], []) - with self.new_access('anon').web_request() as req: - email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEqual([x.login for x in rset.entities()], []) - user = req.user - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], []) - finally: - clear_cache(self.schema['EmailAddress'], 'get_groups') - clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') - self.schema['EmailAddress'].permissions['read'] = rperms - - def test_unrelated_new_entity(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - unrelated = [r[0] for r in e.unrelated('in_group', 'CWGroup', 'subject')] - # should be default groups but owners, i.e. managers, users, guests - self.assertEqual(len(unrelated), 3) - - def test_markdown_printable_value_string(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Card', title=u'rest markdown', - content=u'This is [an example](http://example.com/ "Title") inline link`', - content_format=u'text/markdown') - self.assertEqual( - u'

<p>This is <a href="http://example.com/" title="Title">an example</a> inline link`</p>
      ', - e.printable_value('content')) - - def test_printable_value_string(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Card', title=u'rest test', - content=u'du :eid:`1:*ReST*`', - content_format=u'text/rest') - self.assertEqual(e.printable_value('content'), - '

du *ReST*
      ') - e.cw_attr_cache['content'] = 'du html users' - e.cw_attr_cache['content_format'] = 'text/html' - self.assertEqual(e.printable_value('content'), - 'du html users') - e.cw_attr_cache['content'] = 'du *texte*' - e.cw_attr_cache['content_format'] = 'text/plain' - self.assertEqual(e.printable_value('content').replace("\n", ""), - '

du *texte*
      ') - e.cw_attr_cache['title'] = 'zou' - e.cw_attr_cache['content'] = '''\ -a title -======= -du :eid:`1:*ReST*`''' - e.cw_attr_cache['content_format'] = 'text/rest' - self.assertEqual(e.printable_value('content', format='text/plain'), - e.cw_attr_cache['content']) - - e.cw_attr_cache['content'] = u'yo (zou éà ;)' - e.cw_attr_cache['content_format'] = 'text/html' - self.assertEqual(e.printable_value('content', format='text/plain').strip(), - u'**yo (zou éà ;)**') - if HAS_TAL: - e.cw_attr_cache['content'] = '

titre
      ' - e.cw_attr_cache['content_format'] = 'text/cubicweb-page-template' - self.assertEqual(e.printable_value('content'), - '

zou
      ') - - - def test_printable_value_bytes(self): - with self.admin_access.web_request() as req: - e = req.create_entity('FakeFile', data=Binary(b'lambda x: 1'), data_format=u'text/x-python', - data_encoding=u'ascii', data_name=u'toto.py') - from cubicweb import mttransforms - if mttransforms.HAS_PYGMENTS_TRANSFORMS: - import pygments - if tuple(int(i) for i in pygments.__version__.split('.')[:2]) >= (1, 3): - self.assertEqual(e.printable_value('data'), - '''
      lambda x: 1
      -
      ''') - else: - self.assertEqual(e.printable_value('data'), - '''
      lambda x: 1
      -
      ''') - else: - self.assertEqual(e.printable_value('data'), - '''
      -    lambda x: 1
      -
      ''') - - e = req.create_entity('FakeFile', - data=Binary(u'*héhéhé*'.encode('utf-8')), - data_format=u'text/rest', - data_encoding=u'utf-8', data_name=u'toto.txt') - self.assertEqual(e.printable_value('data'), - u'

<p><em>héhéhé</em></p>
      ') - - def test_printable_value_bad_html(self): - """make sure we don't crash if we try to render invalid XHTML strings""" - with self.admin_access.web_request() as req: - e = req.create_entity('Card', title=u'bad html', content=u'
      R&D
      ', - content_format=u'text/html') - tidy = lambda x: x.replace('\n', '') - self.assertEqual(tidy(e.printable_value('content')), - '
      R&D
      ') - e.cw_attr_cache['content'] = u'yo !! R&D
      pas fermé' - self.assertEqual(tidy(e.printable_value('content')), - u'yo !! R&D
      pas fermé
      ') - e.cw_attr_cache['content'] = u'R&D' - self.assertEqual(tidy(e.printable_value('content')), u'R&D') - e.cw_attr_cache['content'] = u'R&D;' - self.assertEqual(tidy(e.printable_value('content')), u'R&D;') - e.cw_attr_cache['content'] = u'yo !! R&D
      pas fermé' - self.assertEqual(tidy(e.printable_value('content')), - u'yo !! R&D
      pas fermé
      ') - e.cw_attr_cache['content'] = u'été
      été' - self.assertEqual(tidy(e.printable_value('content')), - u'été
      été
      ') - e.cw_attr_cache['content'] = u'C'est un exemple sérieux' - self.assertEqual(tidy(e.printable_value('content')), - u"C'est un exemple sérieux") - # make sure valid xhtml is left untouched - e.cw_attr_cache['content'] = u'
      R&D
      ' - self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content']) - e.cw_attr_cache['content'] = u'
      été
      ' - self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content']) - e.cw_attr_cache['content'] = u'été' - self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content']) - e.cw_attr_cache['content'] = u'hop\r\nhop\nhip\rmomo' - self.assertEqual(e.printable_value('content'), u'hop\nhop\nhip\nmomo') - - def test_printable_value_bad_html_ms(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Card', title=u'bad html', content=u'
      R&D
      ', - content_format=u'text/html') - tidy = lambda x: x.replace('\n', '') - e.cw_attr_cache['content'] = u'
      ms orifice produces weird html
      ' - # Caution! current implementation of soup2xhtml strips first div element - content = soup2xhtml(e.printable_value('content'), 'utf-8') - self.assertMultiLineEqual(content, u'
      ms orifice produces weird html
      ') - - def test_fulltextindex(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('FakeFile')(req) - e.cw_attr_cache['description'] = 'du html' - e.cw_attr_cache['description_format'] = 'text/html' - e.cw_attr_cache['data'] = Binary(b'some data') - e.cw_attr_cache['data_name'] = 'an html file' - e.cw_attr_cache['data_format'] = 'text/html' - e.cw_attr_cache['data_encoding'] = 'ascii' - e._cw.transaction_data.clear() - words = e.cw_adapt_to('IFTIndexable').get_words() - words['C'].sort() - self.assertEqual({'C': sorted(['an', 'html', 'file', 'du', 'html', 'some', 'data'])}, - words) - - - def test_nonregr_relation_cache(self): - with self.admin_access.web_request() as req: - p1 = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - p2 = req.create_entity('Personne', nom=u'toto') - req.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"') - self.assertEqual(p1.evaluee[0].nom, "toto") - self.assertFalse(p1.reverse_evaluee) - - def test_complete_relation(self): - with self.admin_access.repo_cnx() as cnx: - eid = cnx.execute( - 'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 ' - 'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0] - trinfo = cnx.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) - trinfo.complete() - self.assertIsInstance(trinfo.cw_attr_cache['creation_date'], datetime) - self.assertTrue(trinfo.cw_relation_cached('from_state', 'subject')) - self.assertTrue(trinfo.cw_relation_cached('to_state', 'subject')) - self.assertTrue(trinfo.cw_relation_cached('wf_info_for', 'subject')) - self.assertEqual(trinfo.by_transition, ()) - - def test_request_cache(self): - with self.admin_access.web_request() as req: - user = req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) - state = user.in_state[0] - samestate = req.execute('State X WHERE X name "activated"').get_entity(0, 0) - self.assertIs(state, samestate) - - def test_rest_path(self): - with self.admin_access.web_request() as req: - note = req.create_entity('Note', type=u'z') - self.assertEqual(note.rest_path(), 'note/%s' % note.eid) - # unique attr - tag = req.create_entity('Tag', name=u'x') - self.assertEqual(tag.rest_path(), 'tag/x') - # test explicit rest_attr - person = req.create_entity('Personne', prenom=u'john', nom=u'doe') - self.assertEqual(person.rest_path(), 'personne/doe') - # ambiguity test - person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe') - person.cw_clear_all_caches() - self.assertEqual(person.rest_path(), text_type(person.eid)) - self.assertEqual(person2.rest_path(), text_type(person2.eid)) - # unique attr with None value (nom in this case) - friend = req.create_entity('Ami', prenom=u'bob') - self.assertEqual(friend.rest_path(), text_type(friend.eid)) - # 'ref' below is created without the unique but not required - # attribute, make sur that the unique _and_ required 'ean' is used - # as the rest attribute - ref = req.create_entity('Reference', ean=u'42-1337-42') - self.assertEqual(ref.rest_path(), 'reference/42-1337-42') - - def test_can_use_rest_path(self): - self.assertTrue(can_use_rest_path(u'zobi')) - # don't use rest if we have /, ? 
or & in the path (breaks mod_proxy) - self.assertFalse(can_use_rest_path(u'zo/bi')) - self.assertFalse(can_use_rest_path(u'zo&bi')) - self.assertFalse(can_use_rest_path(u'zo?bi')) - - def test_cw_set_attributes(self): - with self.admin_access.web_request() as req: - person = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - self.assertEqual(person.prenom, u'adrien') - self.assertEqual(person.nom, u'di mascio') - person.cw_set(prenom=u'sylvain', nom=u'thénault') - person = req.execute('Personne P').get_entity(0, 0) # XXX retreival needed ? - self.assertEqual(person.prenom, u'sylvain') - self.assertEqual(person.nom, u'thénault') - - def test_cw_set_relations(self): - with self.admin_access.web_request() as req: - person = req.create_entity('Personne', nom=u'chauvat', prenom=u'nicolas') - note = req.create_entity('Note', type=u'x') - note.cw_set(ecrit_par=person) - note = req.create_entity('Note', type=u'y') - note.cw_set(ecrit_par=person.eid) - self.assertEqual(len(person.reverse_ecrit_par), 2) - - def test_metainformation_and_external_absolute_url(self): - with self.admin_access.web_request() as req: - note = req.create_entity('Note', type=u'z') - metainf = note.cw_metainformation() - self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system', - 'use-cwuri-as-url': False}, - 'type': u'Note', 'extid': None}) - self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) - metainf['source'] = metainf['source'].copy() - metainf['source']['base-url'] = 'http://cubicweb2.com/' - metainf['extid'] = 1234 - self.assertEqual(note.absolute_url(), 'http://cubicweb2.com/note/1234') - - def test_absolute_url_empty_field(self): - with self.admin_access.web_request() as req: - card = req.create_entity('Card', wikiid=u'', title=u'test') - self.assertEqual(card.absolute_url(), - 'http://testing.fr/cubicweb/%s' % card.eid) - - def test_create_and_compare_entity(self): - access = self.admin_access - with access.web_request() as req: - p1 = req.create_entity('Personne', nom=u'fayolle', prenom=u'alexandre') - p2 = req.create_entity('Personne', nom=u'campeas', prenom=u'aurelien') - note = req.create_entity('Note', type=u'z') - p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien', - connait=p1, evaluee=[p1, p2], - reverse_ecrit_par=note) - self.assertEqual(p.nom, 'di mascio') - self.assertEqual([c.nom for c in p.connait], ['fayolle']) - self.assertEqual(sorted([c.nom for c in p.evaluee]), ['campeas', 'fayolle']) - self.assertEqual([c.type for c in p.reverse_ecrit_par], ['z']) - req.cnx.commit() - with access.web_request() as req: - auc = req.execute('Personne P WHERE P prenom "aurelien"').get_entity(0,0) - persons = set() - persons.add(p1) - persons.add(p2) - persons.add(auc) - self.assertEqual(2, len(persons)) - self.assertNotEqual(p1, p2) - self.assertEqual(p2, auc) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_mail.py --- a/test/unittest_mail.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for module cubicweb.mail - -""" - -import os -import re -import sys - -from logilab.common.testlib import unittest_main -from logilab.common.umessage import message_from_string - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.mail import format_mail - - -def getlogin(): - """avoid using os.getlogin() because of strange tty / stdin problems - (man 3 getlogin) - Another solution would be to use $LOGNAME, $USER or $USERNAME - """ - if sys.platform != 'win32': - import pwd - return pwd.getpwuid(os.getuid())[0] - else: - return os.environ.get('USERNAME') - - -class EmailTC(CubicWebTC): - - def test_format_mail(self): - self.set_option('sender-addr', 'bim@boum.fr') - self.set_option('sender-name', 'BimBam') - - mail = format_mail({'name': 'oim', 'email': 'oim@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou', u'bïjour', - config=self.config) - result = mail.as_string() - result = re.sub('^Date: .*$', 'Date: now', result, flags=re.MULTILINE) - self.assertMultiLineEqual(result, """\ -MIME-Version: 1.0 -Content-Type: text/plain; charset="utf-8" -Content-Transfer-Encoding: base64 -Subject: =?utf-8?q?b=C3=AFjour?= -From: =?utf-8?q?oim?= -Reply-to: =?utf-8?q?oim?= , =?utf-8?q?BimBam?= -X-CW: data -To: test@logilab.fr -Date: now - -dW4gcGV0aXQgY8O2dWNvdQ== -""") - msg = message_from_string(mail.as_string()) - self.assertEqual(msg.get('subject'), u'bïjour') - self.assertEqual(msg.get('from'), u'oim ') - self.assertEqual(msg.get('to'), u'test@logilab.fr') - self.assertEqual(msg.get('reply-to'), u'oim , BimBam ') - self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou') - - - def test_format_mail_euro(self): - mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') - result = mail.as_string() - result = re.sub('^Date: .*$', 'Date: now', result, flags=re.MULTILINE) - self.assertMultiLineEqual(result, """\ -MIME-Version: 1.0 -Content-Type: text/plain; charset="utf-8" -Content-Transfer-Encoding: base64 -Subject: =?utf-8?b?YsOvam91ciDigqw=?= -From: =?utf-8?q?o=C3=AEm?= -Reply-to: =?utf-8?q?o=C3=AEm?= -To: test@logilab.fr -Date: now - -dW4gcGV0aXQgY8O2dWNvdSDigqw= -""") - msg = message_from_string(mail.as_string()) - self.assertEqual(msg.get('subject'), u'bïjour €') - self.assertEqual(msg.get('from'), u'oîm ') - self.assertEqual(msg.get('to'), u'test@logilab.fr') - self.assertEqual(msg.get('reply-to'), u'oîm ') - self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou €') - - - def test_format_mail_from_reply_to(self): - # no sender-name, sender-addr in the configuration - self.set_option('sender-name', '') - self.set_option('sender-addr', '') - msg = format_mail({'name': u'', 'email': u''}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - self.assertEqual(msg.get('from'), u'') - self.assertEqual(msg.get('reply-to'), None) - msg = 
format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEqual(msg.get('from'), u'tutu ') - self.assertEqual(msg.get('reply-to'), u'tutu ') - msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') - msg = message_from_string(msg.as_string()) - self.assertEqual(msg.get('from'), u'tutu ') - self.assertEqual(msg.get('reply-to'), u'tutu ') - # set sender name and address as expected - self.set_option('sender-name', 'cubicweb-test') - self.set_option('sender-addr', 'cubicweb-test@logilab.fr') - # anonymous notification: no name and no email specified - msg = format_mail({'name': u'', 'email': u''}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEqual(msg.get('from'), u'cubicweb-test ') - self.assertEqual(msg.get('reply-to'), u'cubicweb-test ') - # anonymous notification: only email specified - msg = format_mail({'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEqual(msg.get('from'), u'cubicweb-test ') - self.assertEqual(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') - # anonymous notification: only name specified - msg = format_mail({'name': u'tutu'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEqual(msg.get('from'), u'tutu ') - self.assertEqual(msg.get('reply-to'), u'tutu ') - - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_migration.py --- a/test/unittest_migration.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,116 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""cubicweb.migration unit tests""" - -from os.path import abspath, dirname, join -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.devtools import TestServerConfiguration -from cubicweb.cwconfig import CubicWebConfiguration -from cubicweb.migration import MigrationHelper, filter_scripts, version_strictly_lower -from cubicweb.server.migractions import ServerMigrationHelper - - -class Schema(dict): - def has_entity(self, e_type): - return e_type in self - -SMIGRDIR = join(dirname(__file__), 'data', 'server_migration') + '/' -TMIGRDIR = join(dirname(__file__), 'data', 'migration') + '/' - -class MigrTestConfig(TestServerConfiguration): - verbosity = 0 - def migration_scripts_dir(cls): - return SMIGRDIR - - def cube_migration_scripts_dir(cls, cube): - return TMIGRDIR - -class MigrationToolsTC(TestCase): - def setUp(self): - self.config = MigrTestConfig('data') - from yams.schema import Schema - self.config.load_schema = lambda expand_cubes=False: Schema('test') - self.config.__class__.cubicweb_appobject_path = frozenset() - self.config.__class__.cube_appobject_path = frozenset() - - def test_filter_scripts_base(self): - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)), - []) - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)), - [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')]) - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)), - [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)), - [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'), - ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)), - []) - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)), - [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'), - ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')]) - self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)), - [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - - self.assertListEqual(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)), - [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')]) - self.assertListEqual(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)), - [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'), - ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')]) - - def test_filter_scripts_for_mode(self): - config = CubicWebConfiguration('data') - config.verbosity = 0 - config = self.config - config.__class__.name = 'repository' - self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), - [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) - config.__class__.name = 'all-in-one' - self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), - [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) - config.__class__.name = 'repository' - - def test_version_strictly_lower(self): - self.assertTrue(version_strictly_lower(None, '1.0.0')) - self.assertFalse(version_strictly_lower('1.0.0', None)) - - -from cubicweb.devtools import ApptestConfiguration, get_test_db_handler - -class BaseCreationTC(TestCase): - - def test_db_creation(self): - """make sure database can be created""" - config = ApptestConfiguration('data', apphome=self.datadir) - source = config.system_source_config - self.assertEqual(source['db-driver'], 'sqlite') - handler = get_test_db_handler(config) - handler.init_test_database() - 
handler.build_db_cache() - repo, cnx = handler.get_repo_and_cnx() - with cnx: - self.assertEqual(cnx.execute('Any SN WHERE X is CWUser, X login "admin", X in_state S, S name SN').rows, - [['activated']]) - repo.shutdown() - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_predicates.py --- a/test/unittest_predicates.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,524 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for selectors mechanism""" - -from operator import eq, lt, le, gt -from contextlib import contextmanager - -from six.moves import range - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.decorators import clear_cache - -from cubicweb import Binary -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.predicates import (is_instance, adaptable, match_kwargs, match_user_groups, - multi_lines_rset, score_entity, is_in_state, - rql_condition, relation_possible, match_form_params, - paginated_rset) -from cubicweb.selectors import on_transition # XXX on_transition is deprecated -from cubicweb.view import EntityAdapter -from cubicweb.web import action - - - -class ImplementsTC(CubicWebTC): - def test_etype_priority(self): - with self.admin_access.web_request() as req: - f = req.create_entity('FakeFile', data_name=u'hop.txt', data=Binary(b'hop'), - data_format=u'text/plain') - rset = f.as_rset() - anyscore = is_instance('Any')(f.__class__, req, rset=rset) - idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset) - self.assertTrue(idownscore > anyscore, (idownscore, anyscore)) - filescore = is_instance('FakeFile')(f.__class__, req, rset=rset) - self.assertTrue(filescore > idownscore, (filescore, idownscore)) - - def test_etype_inheritance_no_yams_inheritance(self): - cls = self.vreg['etypes'].etype_class('Personne') - with self.admin_access.web_request() as req: - self.assertFalse(is_instance('Societe').score_class(cls, req)) - - def test_yams_inheritance(self): - cls = self.vreg['etypes'].etype_class('Transition') - with self.admin_access.web_request() as req: - self.assertEqual(is_instance('BaseTransition').score_class(cls, req), - 3) - - def test_outer_join(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any U,B WHERE B? 
bookmarked_by U, U login "anon"') - self.assertEqual(is_instance('Bookmark')(None, req, rset=rset, row=0, col=1), - 0) - - -class WorkflowSelectorTC(CubicWebTC): - - def setUp(self): - super(WorkflowSelectorTC, self).setUp() - # enable debug mode to state/transition validation on the fly - self.vreg.config.debugmode = True - - def tearDown(self): - self.vreg.config.debugmode = False - super(WorkflowSelectorTC, self).tearDown() - - def setup_database(self): - with self.admin_access.shell() as shell: - wf = shell.add_workflow("wf_test", 'StateFull', default=True) - created = wf.add_state('created', initial=True) - validated = wf.add_state('validated') - abandoned = wf.add_state('abandoned') - wf.add_transition('validate', created, validated, ('managers',)) - wf.add_transition('forsake', (created, validated,), abandoned, ('managers',)) - - @contextmanager - def statefull_stuff(self): - with self.admin_access.web_request() as req: - wf_entity = req.create_entity('StateFull', name=u'') - rset = wf_entity.as_rset() - adapter = wf_entity.cw_adapt_to('IWorkflowable') - req.cnx.commit() - self.assertEqual(adapter.state, 'created') - yield req, wf_entity, rset, adapter - - def test_is_in_state(self): - with self.statefull_stuff() as (req, wf_entity, rset, adapter): - for state in ('created', 'validated', 'abandoned'): - selector = is_in_state(state) - self.assertEqual(selector(None, req, rset=rset), - state=="created") - - adapter.fire_transition('validate') - req.cnx.commit(); wf_entity.cw_clear_all_caches() - self.assertEqual(adapter.state, 'validated') - - clear_cache(rset, 'get_entity') - - selector = is_in_state('created') - self.assertEqual(selector(None, req, rset=rset), 0) - selector = is_in_state('validated') - self.assertEqual(selector(None, req, rset=rset), 1) - selector = is_in_state('validated', 'abandoned') - self.assertEqual(selector(None, req, rset=rset), 1) - selector = is_in_state('abandoned') - self.assertEqual(selector(None, req, rset=rset), 0) - - adapter.fire_transition('forsake') - req.cnx.commit(); wf_entity.cw_clear_all_caches() - self.assertEqual(adapter.state, 'abandoned') - - clear_cache(rset, 'get_entity') - - selector = is_in_state('created') - self.assertEqual(selector(None, req, rset=rset), 0) - selector = is_in_state('validated') - self.assertEqual(selector(None, req, rset=rset), 0) - selector = is_in_state('validated', 'abandoned') - self.assertEqual(selector(None, req, rset=rset), 1) - self.assertEqual(adapter.state, 'abandoned') - self.assertEqual(selector(None, req, rset=rset), 1) - - def test_is_in_state_unvalid_names(self): - with self.statefull_stuff() as (req, wf_entity, rset, adapter): - selector = is_in_state("unknown") - with self.assertRaises(ValueError) as cm: - selector(None, req, rset=rset) - self.assertEqual(str(cm.exception), - "wf_test: unknown state(s): unknown") - selector = is_in_state("weird", "unknown", "created", "weird") - with self.assertRaises(ValueError) as cm: - selector(None, req, rset=rset) - self.assertEqual(str(cm.exception), - "wf_test: unknown state(s): unknown,weird") - - def test_on_transition(self): - with self.statefull_stuff() as (req, wf_entity, rset, adapter): - for transition in ('validate', 'forsake'): - selector = on_transition(transition) - self.assertEqual(selector(None, req, rset=rset), 0) - - adapter.fire_transition('validate') - req.cnx.commit(); wf_entity.cw_clear_all_caches() - self.assertEqual(adapter.state, 'validated') - - clear_cache(rset, 'get_entity') - - selector = on_transition("validate") - 
self.assertEqual(selector(None, req, rset=rset), 1) - selector = on_transition("validate", "forsake") - self.assertEqual(selector(None, req, rset=rset), 1) - selector = on_transition("forsake") - self.assertEqual(selector(None, req, rset=rset), 0) - - adapter.fire_transition('forsake') - req.cnx.commit(); wf_entity.cw_clear_all_caches() - self.assertEqual(adapter.state, 'abandoned') - - clear_cache(rset, 'get_entity') - - selector = on_transition("validate") - self.assertEqual(selector(None, req, rset=rset), 0) - selector = on_transition("validate", "forsake") - self.assertEqual(selector(None, req, rset=rset), 1) - selector = on_transition("forsake") - self.assertEqual(selector(None, req, rset=rset), 1) - - def test_on_transition_unvalid_names(self): - with self.statefull_stuff() as (req, wf_entity, rset, adapter): - selector = on_transition("unknown") - with self.assertRaises(ValueError) as cm: - selector(None, req, rset=rset) - self.assertEqual(str(cm.exception), - "wf_test: unknown transition(s): unknown") - selector = on_transition("weird", "unknown", "validate", "weird") - with self.assertRaises(ValueError) as cm: - selector(None, req, rset=rset) - self.assertEqual(str(cm.exception), - "wf_test: unknown transition(s): unknown,weird") - - def test_on_transition_with_no_effect(self): - """selector will not be triggered with `change_state()`""" - with self.statefull_stuff() as (req, wf_entity, rset, adapter): - adapter.change_state('validated') - req.cnx.commit(); wf_entity.cw_clear_all_caches() - self.assertEqual(adapter.state, 'validated') - - selector = on_transition("validate") - self.assertEqual(selector(None, req, rset=rset), 0) - selector = on_transition("validate", "forsake") - self.assertEqual(selector(None, req, rset=rset), 0) - selector = on_transition("forsake") - self.assertEqual(selector(None, req, rset=rset), 0) - - -class RelationPossibleTC(CubicWebTC): - - def test_rqlst_1(self): - with self.admin_access.web_request() as req: - selector = relation_possible('in_group') - select = self.vreg.parse(req, 'Any X WHERE X is CWUser').children[0] - score = selector(None, req, rset=1, - select=select, filtered_variable=select.defined_vars['X']) - self.assertEqual(score, 1) - - def test_rqlst_2(self): - with self.admin_access.web_request() as req: - selector = relation_possible('in_group') - select = self.vreg.parse(req, 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, ' - 'Y creation_date YD, Y is CWGroup ' - 'HAVING DAY(XD)=DAY(YD)').children[0] - score = selector(None, req, rset=1, - select=select, filtered_variable=select.defined_vars['X']) - self.assertEqual(score, 1) - - def test_ambiguous(self): - # Ambiguous relations are : - # (Service, fabrique_par, Personne) and (Produit, fabrique_par, Usine) - # There used to be a crash here with a bad rdef choice in the strict - # checking case. 
- selector = relation_possible('fabrique_par', role='object', - target_etype='Personne', strict=True) - with self.admin_access.web_request() as req: - usine = req.create_entity('Usine', lieu=u'here') - score = selector(None, req, rset=usine.as_rset()) - self.assertEqual(0, score) - - -class MatchUserGroupsTC(CubicWebTC): - def test_owners_group(self): - """tests usage of 'owners' group with match_user_group""" - class SomeAction(action.Action): - __regid__ = 'yo' - category = 'foo' - __select__ = match_user_groups('owners') - self.vreg._loadedmods[__name__] = {} - self.vreg.register(SomeAction) - SomeAction.__registered__(self.vreg['actions']) - self.assertTrue(SomeAction in self.vreg['actions']['yo'], self.vreg['actions']) - try: - with self.admin_access.web_request() as req: - self.create_user(req, 'john') - # login as a simple user - john_access = self.new_access('john') - with john_access.web_request() as req: - # it should not be possible to use SomeAction not owned objects - rset = req.execute('Any G WHERE G is CWGroup, G name "managers"') - self.assertFalse('yo' in dict(self.pactions(req, rset))) - # insert a new card, and check that we can use SomeAction on our object - req.execute('INSERT Card C: C title "zoubidou"') - req.cnx.commit() - with john_access.web_request() as req: - rset = req.execute('Card C WHERE C title "zoubidou"') - self.assertTrue('yo' in dict(self.pactions(req, rset)), self.pactions(req, rset)) - # make sure even managers can't use the action - with self.admin_access.web_request() as req: - rset = req.execute('Card C WHERE C title "zoubidou"') - self.assertFalse('yo' in dict(self.pactions(req, rset))) - finally: - del self.vreg[SomeAction.__registry__][SomeAction.__regid__] - - -class MultiLinesRsetTC(CubicWebTC): - def setup_database(self): - with self.admin_access.web_request() as req: - req.execute('INSERT CWGroup G: G name "group1"') - req.execute('INSERT CWGroup G: G name "group2"') - req.cnx.commit() - - def test_default_op_in_selector(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any G WHERE G is CWGroup') - expected = len(rset) - selector = multi_lines_rset(expected) - self.assertEqual(selector(None, req, rset=rset), 1) - self.assertEqual(selector(None, req, None), 0) - selector = multi_lines_rset(expected + 1) - self.assertEqual(selector(None, req, rset=rset), 0) - self.assertEqual(selector(None, req, None), 0) - selector = multi_lines_rset(expected - 1) - self.assertEqual(selector(None, req, rset=rset), 0) - self.assertEqual(selector(None, req, None), 0) - - def test_without_rset(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any G WHERE G is CWGroup') - expected = len(rset) - selector = multi_lines_rset(expected) - self.assertEqual(selector(None, req, None), 0) - selector = multi_lines_rset(expected + 1) - self.assertEqual(selector(None, req, None), 0) - selector = multi_lines_rset(expected - 1) - self.assertEqual(selector(None, req, None), 0) - - def test_with_operators(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any G WHERE G is CWGroup') - expected = len(rset) - - # Format 'expected', 'operator', 'assert' - testdata = (( expected, eq, 1), - ( expected+1, eq, 0), - ( expected-1, eq, 0), - ( expected, le, 1), - ( expected+1, le, 1), - ( expected-1, le, 0), - ( expected-1, gt, 1), - ( expected, gt, 0), - ( expected+1, gt, 0), - ( expected+1, lt, 1), - ( expected, lt, 0), - ( expected-1, lt, 0)) - - for (expected, operator, assertion) in testdata: - selector = 
multi_lines_rset(expected, operator) - yield self.assertEqual, selector(None, req, rset=rset), assertion - - -class MatchKwargsTC(TestCase): - - def test_match_kwargs_default(self): - selector = match_kwargs( set( ('a', 'b') ) ) - self.assertEqual(selector(None, None, a=1, b=2), 2) - self.assertEqual(selector(None, None, a=1), 0) - self.assertEqual(selector(None, None, c=1), 0) - self.assertEqual(selector(None, None, a=1, c=1), 0) - - def test_match_kwargs_any(self): - selector = match_kwargs( set( ('a', 'b') ), mode='any') - self.assertEqual(selector(None, None, a=1, b=2), 2) - self.assertEqual(selector(None, None, a=1), 1) - self.assertEqual(selector(None, None, c=1), 0) - self.assertEqual(selector(None, None, a=1, c=1), 1) - - -class ScoreEntityTC(CubicWebTC): - - def test_intscore_entity_selector(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any E WHERE E eid 1') - selector = score_entity(lambda x: None) - self.assertEqual(selector(None, req, rset=rset), 0) - selector = score_entity(lambda x: "something") - self.assertEqual(selector(None, req, rset=rset), 1) - selector = score_entity(lambda x: object) - self.assertEqual(selector(None, req, rset=rset), 1) - rset = req.execute('Any G LIMIT 2 WHERE G is CWGroup') - selector = score_entity(lambda x: 10) - self.assertEqual(selector(None, req, rset=rset), 20) - selector = score_entity(lambda x: 10, mode='any') - self.assertEqual(selector(None, req, rset=rset), 10) - - def test_rql_condition_entity(self): - with self.admin_access.web_request() as req: - selector = rql_condition('X identity U') - rset = req.user.as_rset() - self.assertEqual(selector(None, req, rset=rset), 1) - self.assertEqual(selector(None, req, entity=req.user), 1) - self.assertEqual(selector(None, req), 0) - - def test_rql_condition_user(self): - with self.admin_access.web_request() as req: - selector = rql_condition('U login "admin"', user_condition=True) - self.assertEqual(selector(None, req), 1) - selector = rql_condition('U login "toto"', user_condition=True) - self.assertEqual(selector(None, req), 0) - - -class AdaptablePredicateTC(CubicWebTC): - - def test_multiple_entity_types_rset(self): - class CWUserIWhatever(EntityAdapter): - __regid__ = 'IWhatever' - __select__ = is_instance('CWUser') - class CWGroupIWhatever(EntityAdapter): - __regid__ = 'IWhatever' - __select__ = is_instance('CWGroup') - with self.temporary_appobjects(CWUserIWhatever, CWGroupIWhatever): - with self.admin_access.web_request() as req: - selector = adaptable('IWhatever') - rset = req.execute('Any X WHERE X is IN(CWGroup, CWUser)') - self.assertTrue(selector(None, req, rset=rset)) - - -class MatchFormParamsTC(CubicWebTC): - """tests for match_form_params predicate""" - - def test_keyonly_match(self): - """test standard usage: ``match_form_params('param1', 'param2')`` - - ``param1`` and ``param2`` must be specified in request's form. 
- """ - web_request = self.admin_access.web_request - vid_selector = match_form_params('vid') - vid_subvid_selector = match_form_params('vid', 'subvid') - # no parameter => KO,KO - with web_request() as req: - self.assertEqual(vid_selector(None, req), 0) - self.assertEqual(vid_subvid_selector(None, req), 0) - # one expected parameter found => OK,KO - with web_request(vid='foo') as req: - self.assertEqual(vid_selector(None, req), 1) - self.assertEqual(vid_subvid_selector(None, req), 0) - # all expected parameters found => OK,OK - with web_request(vid='foo', subvid='bar') as req: - self.assertEqual(vid_selector(None, req), 1) - self.assertEqual(vid_subvid_selector(None, req), 2) - - def test_keyvalue_match_one_parameter(self): - """test dict usage: ``match_form_params(param1=value1)`` - - ``param1`` must be specified in the request's form and its value - must be ``value1``. - """ - web_request = self.admin_access.web_request - # test both positional and named parameters - vid_selector = match_form_params(vid='foo') - # no parameter => should fail - with web_request() as req: - self.assertEqual(vid_selector(None, req), 0) - # expected parameter found with expected value => OK - with web_request(vid='foo', subvid='bar') as req: - self.assertEqual(vid_selector(None, req), 1) - # expected parameter found but value is incorrect => KO - with web_request(vid='bar') as req: - self.assertEqual(vid_selector(None, req), 0) - - def test_keyvalue_match_two_parameters(self): - """test dict usage: ``match_form_params(param1=value1, param2=value2)`` - - ``param1`` and ``param2`` must be specified in the request's form and - their respective value must be ``value1`` and ``value2``. - """ - web_request = self.admin_access.web_request - vid_subvid_selector = match_form_params(vid='list', subvid='tsearch') - # missing one expected parameter => KO - with web_request(vid='list') as req: - self.assertEqual(vid_subvid_selector(None, req), 0) - # expected parameters found but values are incorrect => KO - with web_request(vid='list', subvid='foo') as req: - self.assertEqual(vid_subvid_selector(None, req), 0) - # expected parameters found and values are correct => OK - with web_request(vid='list', subvid='tsearch') as req: - self.assertEqual(vid_subvid_selector(None, req), 2) - - def test_keyvalue_multiple_match(self): - """test dict usage with multiple values - - i.e. as in ``match_form_params(param1=('value1', 'value2'))`` - - ``param1`` must be specified in the request's form and its value - must be either ``value1`` or ``value2``. 
- """ - web_request = self.admin_access.web_request - vid_subvid_selector = match_form_params(vid='list', subvid=('tsearch', 'listitem')) - # expected parameters found and values correct => OK - with web_request(vid='list', subvid='tsearch') as req: - self.assertEqual(vid_subvid_selector(None, req), 2) - with web_request(vid='list', subvid='listitem') as req: - self.assertEqual(vid_subvid_selector(None, req), 2) - # expected parameters found but values are incorrect => OK - with web_request(vid='list', subvid='foo') as req: - self.assertEqual(vid_subvid_selector(None, req), 0) - - def test_invalid_calls(self): - """checks invalid calls raise a ValueError""" - # mixing named and positional arguments should fail - with self.assertRaises(ValueError) as cm: - match_form_params('list', x='1', y='2') - self.assertEqual(str(cm.exception), - "match_form_params() can't be called with both " - "positional and named arguments") - # using a dict as first and unique argument should fail - with self.assertRaises(ValueError) as cm: - match_form_params({'x': 1}) - self.assertEqual(str(cm.exception), - "match_form_params() positional arguments must be strings") - - -class PaginatedTC(CubicWebTC): - """tests for paginated_rset predicate""" - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - for i in range(30): - cnx.create_entity('CWGroup', name=u"group%d" % i) - cnx.commit() - - def test_paginated_rset(self): - default_nb_pages = 1 - web_request = self.admin_access.web_request - with web_request() as req: - rset = req.execute('Any G WHERE G is CWGroup') - self.assertEqual(len(rset), 34) - with web_request(vid='list', page_size='10') as req: - self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages) - with web_request(vid='list', page_size='20') as req: - self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages) - with web_request(vid='list', page_size='50') as req: - self.assertEqual(paginated_rset()(None, req, rset), 0) - with web_request(vid='list', page_size='10/') as req: - self.assertEqual(paginated_rset()(None, req, rset), 0) - with web_request(vid='list', page_size='.1') as req: - self.assertEqual(paginated_rset()(None, req, rset), 0) - with web_request(vid='list', page_size='not_an_int') as req: - self.assertEqual(paginated_rset()(None, req, rset), 0) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_repoapi.py --- a/test/unittest_repoapi.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,89 +0,0 @@ -# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unittest for cubicweb.repoapi""" - - -from cubicweb.devtools.testlib import CubicWebTC - -from cubicweb import ProgrammingError -from cubicweb.repoapi import Connection, connect, anonymous_cnx - - -class REPOAPITC(CubicWebTC): - - def test_cnx_basic_usage(self): - """Test that a client connection can be used to access the database""" - with self.admin_access.client_cnx() as cltcnx: - # (1) some RQL request - rset = cltcnx.execute('Any X WHERE X is CWUser') - self.assertTrue(rset) - # (2) ORM usage - random_user = rset.get_entity(0, 0) - # (3) Write operation - random_user.cw_set(surname=u'babar') - # (4) commit - cltcnx.commit() - rset = cltcnx.execute('''Any X WHERE X is CWUser, - X surname "babar" - ''') - self.assertTrue(rset) - # prepare test for implicit rollback - random_user = rset.get_entity(0, 0) - random_user.cw_set(surname=u'celestine') - # implicit rollback on exit - with self.admin_access.client_cnx() as cltcnx: - rset = cltcnx.execute('''Any X WHERE X is CWUser, - X surname "babar" - ''') - self.assertTrue(rset) - - def test_cnx_life_cycle(self): - """Check that ClientConnection requires explicit open and close - """ - access = self.admin_access - cltcnx = Connection(access._session) - # connection not open yet - with self.assertRaises(ProgrammingError): - cltcnx.execute('Any X WHERE X is CWUser') - # connection open and working - with cltcnx: - cltcnx.execute('Any X WHERE X is CWUser') - # connection closed - with self.assertRaises(ProgrammingError): - cltcnx.execute('Any X WHERE X is CWUser') - - def test_connect(self): - """check that repoapi.connect works and returns a usable connection""" - cnx = connect(self.repo, login='admin', password='gingkow') - self.assertEqual('admin', cnx.user.login) - with cnx: - rset = cnx.execute('Any X WHERE X is CWUser') - self.assertTrue(rset) - - def test_anonymous_connect(self): - """check that you can get anonymous connection when the data exist""" - cnx = anonymous_cnx(self.repo) - self.assertEqual('anon', cnx.user.login) - with cnx: - rset = cnx.execute('Any X WHERE X is CWUser') - self.assertTrue(rset) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_req.py --- a/test/unittest_req.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,153 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -from logilab.common.testlib import TestCase, unittest_main -from cubicweb import ObjectNotFound -from cubicweb.req import RequestSessionBase, FindEntityError -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb import Unauthorized - -class RequestTC(TestCase): - def test_rebuild_url(self): - rebuild_url = RequestSessionBase(None).rebuild_url - self.assertEqual(rebuild_url('http://logilab.fr?__message=pouet', __message='hop'), - 'http://logilab.fr?__message=hop') - self.assertEqual(rebuild_url('http://logilab.fr', __message='hop'), - 'http://logilab.fr?__message=hop') - self.assertEqual(rebuild_url('http://logilab.fr?vid=index', __message='hop'), - 'http://logilab.fr?__message=hop&vid=index') - - def test_build_url(self): - req = RequestSessionBase(None) - req.from_controller = lambda : 'view' - req.relative_path = lambda includeparams=True: None - req.base_url = lambda secure=None: 'http://testing.fr/cubicweb/' - self.assertEqual(req.build_url(), u'http://testing.fr/cubicweb/view') - self.assertEqual(req.build_url(None), u'http://testing.fr/cubicweb/view') - self.assertEqual(req.build_url('one'), u'http://testing.fr/cubicweb/one') - self.assertEqual(req.build_url(param='ok'), u'http://testing.fr/cubicweb/view?param=ok') - self.assertRaises(AssertionError, req.build_url, 'one', 'two not allowed') - self.assertRaises(AssertionError, req.build_url, 'view', test=None) - - def test_ensure_no_rql(self): - req = RequestSessionBase(None) - self.assertEqual(req.ensure_ro_rql('Any X WHERE X is CWUser'), None) - self.assertEqual(req.ensure_ro_rql(' Any X WHERE X is CWUser '), None) - self.assertRaises(Unauthorized, req.ensure_ro_rql, 'SET X login "toto" WHERE X is CWUser') - self.assertRaises(Unauthorized, req.ensure_ro_rql, ' SET X login "toto" WHERE X is CWUser ') - - -class RequestCWTC(CubicWebTC): - - def test_base_url(self): - base_url = self.config['base-url'] - with self.admin_access.repo_cnx() as session: - self.assertEqual(session.base_url(), base_url) - assert 'https-url' not in self.config - self.assertEqual(session.base_url(secure=True), base_url) - secure_base_url = base_url.replace('http', 'https') - self.config.global_set_option('https-url', secure_base_url) - self.assertEqual(session.base_url(secure=True), secure_base_url) - - def test_view_catch_ex(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X WHERE X login "hop"') - self.assertEqual(req.view('oneline', rset, 'null'), '') - self.assertRaises(ObjectNotFound, req.view, 'onelinee', rset, 'null') - - def test_find_one_entity(self): - with self.admin_access.web_request() as req: - req.create_entity( - 'CWUser', login=u'cdevienne', upassword=u'cdevienne', - surname=u'de Vienne', firstname=u'Christophe', - in_group=req.find('CWGroup', name=u'users').one()) - - req.create_entity( - 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', - firstname=u'adrien', - in_group=req.find('CWGroup', name=u'users').one()) - - u = req.find_one_entity('CWUser', login=u'cdevienne') - self.assertEqual(u.firstname, u"Christophe") - - with self.assertRaises(FindEntityError): - req.find_one_entity('CWUser', login=u'patanok') - - with self.assertRaises(FindEntityError): - req.find_one_entity('CWUser') - - def test_find_entities(self): - with self.admin_access.web_request() as req: - req.create_entity( - 'CWUser', login=u'cdevienne', upassword=u'cdevienne', - surname=u'de Vienne', firstname=u'Christophe', - in_group=req.find('CWGroup', name=u'users').one()) - - req.create_entity( - 'CWUser', 
login=u'adim', upassword='adim', surname=u'di mascio', - firstname=u'adrien', - in_group=req.find('CWGroup', name=u'users').one()) - - l = list(req.find_entities('CWUser', login=u'cdevienne')) - self.assertEqual(1, len(l)) - self.assertEqual(l[0].firstname, u"Christophe") - - l = list(req.find_entities('CWUser', login=u'patanok')) - self.assertEqual(0, len(l)) - - l = list(req.find_entities('CWUser')) - self.assertEqual(4, len(l)) - - def test_find(self): - with self.admin_access.web_request() as req: - req.create_entity( - 'CWUser', login=u'cdevienne', upassword=u'cdevienne', - surname=u'de Vienne', firstname=u'Christophe', - in_group=req.find('CWGroup', name=u'users').one()) - - req.create_entity( - 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', - firstname=u'adrien', - in_group=req.find('CWGroup', name=u'users').one()) - - u = req.find('CWUser', login=u'cdevienne').one() - self.assertEqual(u.firstname, u"Christophe") - - users = list(req.find('CWUser').entities()) - self.assertEqual(len(users), 4) - - groups = list( - req.find('CWGroup', reverse_in_group=u).entities()) - self.assertEqual(len(groups), 1) - self.assertEqual(groups[0].name, u'users') - - users = req.find('CWUser', in_group=groups[0]).entities() - users = list(users) - self.assertEqual(len(users), 2) - - with self.assertRaises(AssertionError): - req.find('CWUser', chapeau=u"melon") - - with self.assertRaises(AssertionError): - req.find('CWUser', reverse_buddy=users[0]) - - with self.assertRaises(NotImplementedError): - req.find('CWUser', in_group=[1, 2]) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_rqlrewrite.py --- a/test/unittest_rqlrewrite.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,816 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
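The RequestTC.test_rebuild_url assertions removed above fix the expected behaviour of `rebuild_url`: keyword arguments override the query string, existing parameters are preserved, and the rebuilt query comes out in sorted key order. The following standalone sketch (Python 3 stdlib only) reproduces just those expectations; it is not `RequestSessionBase.rebuild_url`.

    # Sketch of the URL-rebuilding behaviour pinned down by test_rebuild_url.
    from urllib.parse import urlsplit, urlunsplit, parse_qs, urlencode

    def rebuild_url(url, **newparams):
        scheme, netloc, path, query, fragment = urlsplit(url)
        # keep the last value of each existing parameter, then override
        params = {key: values[-1] for key, values in parse_qs(query).items()}
        params.update(newparams)
        query = urlencode(sorted(params.items()))
        return urlunsplit((scheme, netloc, path, query, fragment))

    assert rebuild_url('http://logilab.fr?__message=pouet', __message='hop') == \
        'http://logilab.fr?__message=hop'
    assert rebuild_url('http://logilab.fr', __message='hop') == \
        'http://logilab.fr?__message=hop'
    assert rebuild_url('http://logilab.fr?vid=index', __message='hop') == \
        'http://logilab.fr?__message=hop&vid=index'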
- -from six import string_types - -from logilab.common.testlib import unittest_main, TestCase -from logilab.common.testlib import mock_object -from yams import BadSchemaDefinition -from yams.buildobjs import RelationDefinition -from rql import parse, nodes, RQLHelper - -from cubicweb import Unauthorized, rqlrewrite -from cubicweb.schema import RRQLExpression, ERQLExpression -from cubicweb.devtools import repotest, TestServerConfiguration, BaseApptestConfiguration - - -def setUpModule(*args): - global rqlhelper, schema - config = TestServerConfiguration(RQLRewriteTC.datapath('rewrite')) - config.bootstrap_cubes() - schema = config.load_schema() - schema.add_relation_def(RelationDefinition(subject='Card', name='in_state', - object='State', cardinality='1*')) - rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid', - 'has_text': 'fti'}) - repotest.do_monkey_patch() - -def tearDownModule(*args): - repotest.undo_monkey_patch() - global rqlhelper, schema - del rqlhelper, schema - -def eid_func_map(eid): - return {1: 'CWUser', - 2: 'Card', - 3: 'Affaire'}[eid] - -def _prepare_rewriter(rewriter_cls, kwargs): - class FakeVReg: - schema = schema - @staticmethod - def solutions(sqlcursor, rqlst, kwargs): - rqlhelper.compute_solutions(rqlst, {'eid': eid_func_map}, kwargs=kwargs) - class rqlhelper: - @staticmethod - def annotate(rqlst): - rqlhelper.annotate(rqlst) - @staticmethod - def simplify(mainrqlst, needcopy=False): - rqlhelper.simplify(rqlst, needcopy) - return rewriter_cls(mock_object(vreg=FakeVReg, user=(mock_object(eid=1)))) - -def rewrite(rqlst, snippets_map, kwargs, existingvars=None): - rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs) - snippets = [] - for v, exprs in sorted(snippets_map.items()): - rqlexprs = [isinstance(snippet, string_types) - and mock_object(snippet_rqlst=parse(u'Any X WHERE '+snippet).children[0], - expression=u'Any X WHERE '+snippet) - or snippet - for snippet in exprs] - snippets.append((dict([v]), rqlexprs)) - rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, kwargs=kwargs) - rewriter.rewrite(rqlst.children[0], snippets, kwargs, existingvars) - test_vrefs(rqlst.children[0]) - return rewriter.rewritten - -def test_vrefs(node): - vrefmaps = {} - selects = [] - for vref in node.iget_nodes(nodes.VariableRef): - stmt = vref.stmt - try: - vrefmaps[stmt].setdefault(vref.name, set()).add(vref) - except KeyError: - vrefmaps[stmt] = {vref.name: set( (vref,) )} - selects.append(stmt) - assert node in selects, (node, selects) - for stmt in selects: - for var in stmt.defined_vars.values(): - assert var.stinfo['references'] - vrefmap = vrefmaps[stmt] - assert not (var.stinfo['references'] ^ vrefmap[var.name]), (node.as_string(), var, var.stinfo['references'], vrefmap[var.name]) - - -class RQLRewriteTC(TestCase): - """a faire: - - * optimisation: detecter les relations utilisees dans les rqlexpressions qui - sont presentes dans la requete de depart pour les reutiliser si possible - - * "has__permission" ? 
- """ - - def test_base_var(self): - constraint = ('X in_state S, U in_group G, P require_state S,' - 'P name "read", P require_group G') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) - self.assertEqual(rqlst.as_string(), - u'Any C WHERE C is Card, B eid %(D)s, ' - 'EXISTS(C in_state A, B in_group E, F require_state A, ' - 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission)') - - def test_multiple_var(self): - card_constraint = ('X in_state S, U in_group G, P require_state S,' - 'P name "read", P require_group G') - affaire_constraints = ('X ref LIKE "PUBLIC%"', 'U in_group G, G name "public"') - kwargs = {'u':2} - rqlst = parse(u'Any S WHERE S documented_by C, C eid %(u)s') - rewrite(rqlst, {('C', 'X'): (card_constraint,), ('S', 'X'): affaire_constraints}, - kwargs) - self.assertMultiLineEqual( - rqlst.as_string(), - u'Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, ' - 'EXISTS(C in_state A, B in_group E, F require_state A, ' - 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission), ' - '(EXISTS(S ref LIKE "PUBLIC%")) OR (EXISTS(B in_group G, G name "public", G is CWGroup)), ' - 'S is Affaire') - self.assertIn('D', kwargs) - - def test_or(self): - constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")' - rqlst = parse(u'Any S WHERE S owned_by C, C eid %(u)s, S is in (CWUser, CWGroup)') - rewrite(rqlst, {('C', 'X'): (constraint,)}, {'u':1}) - self.assertEqual(rqlst.as_string(), - 'Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, ' - 'EXISTS((C identity A) OR (C in_state D, E identity A, ' - 'E in_state D, D name "subscribed"), D is State, E is CWUser)') - - def test_simplified_rqlst(self): - constraint = ('X in_state S, U in_group G, P require_state S,' - 'P name "read", P require_group G') - rqlst = parse(u'Any 2') # this is the simplified rql st for Any X WHERE X eid 12 - rewrite(rqlst, {('2', 'X'): (constraint,)}, {}) - self.assertEqual(rqlst.as_string(), - u'Any 2 WHERE B eid %(C)s, ' - 'EXISTS(2 in_state A, B in_group D, E require_state A, ' - 'E name "read", E require_group D, A is State, D is CWGroup, E is CWPermission)') - - def test_optional_var_1(self): - constraint = ('X in_state S, U in_group G, P require_state S,' - 'P name "read", P require_group G') - rqlst = parse(u'Any A,C WHERE A documented_by C?') - rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) - self.assertEqual(rqlst.as_string(), - u'Any A,C WHERE A documented_by C?, A is Affaire ' - 'WITH C BEING ' - '(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name "read", ' - 'G require_group F), D eid %(A)s, C is Card)') - - def test_optional_var_2(self): - constraint = ('X in_state S, U in_group G, P require_state S,' - 'P name "read", P require_group G') - rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T') - rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) - self.assertEqual(rqlst.as_string(), - u'Any A,C,T WHERE A documented_by C?, A is Affaire ' - 'WITH C,T BEING ' - '(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, ' - 'G require_state B, G name "read", G require_group F), ' - 'D eid %(A)s, C is Card)') - - def test_optional_var_3(self): - constraint1 = ('X in_state S, U in_group G, P require_state S,' - 'P name "read", P require_group G') - constraint2 = 'X in_state S, S name "public"' - rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T') - rewrite(rqlst, {('C', 'X'): (constraint1, constraint2)}, 
{}) - self.assertEqual(rqlst.as_string(), - u'Any A,C,T WHERE A documented_by C?, A is Affaire ' - 'WITH C,T BEING (Any C,T WHERE C title T, ' - '(EXISTS(C in_state B, D in_group F, G require_state B, G name "read", G require_group F)) ' - 'OR (EXISTS(C in_state E, E name "public")), ' - 'D eid %(A)s, C is Card)') - - def test_optional_var_4(self): - constraint1 = 'A created_by U, X documented_by A' - constraint2 = 'A created_by U, X concerne A' - constraint3 = 'X created_by U' - rqlst = parse(u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y') - rewrite(rqlst, {('LA', 'X'): (constraint1, constraint2), - ('X', 'X'): (constraint3,), - ('Y', 'X'): (constraint3,)}, {}) - self.assertEqual(rqlst.as_string(), - u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y, B eid %(C)s, ' - 'EXISTS(X created_by B), EXISTS(Y created_by B), ' - 'X is Card, Y is IN(Division, Note, Societe) ' - 'WITH LA BEING (Any LA WHERE (EXISTS(A created_by B, LA documented_by A)) OR (EXISTS(E created_by B, LA concerne E)), ' - 'B eid %(D)s, LA is Affaire)') - - - def test_ambiguous_optional_same_exprs(self): - """See #3013535""" - # see test of the same name in RewriteFullTC: original problem is - # unreproducible here because it actually lies in - # RQLRewriter.insert_local_checks - rqlst = parse(u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD') - rewrite(rqlst, {('X', 'X'): ('X created_by U',),}, {'a': 3}) - self.assertEqual(rqlst.as_string(), - u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s WITH X,CD BEING (Any X,CD WHERE X creation_date CD, EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))') - - def test_optional_var_inlined(self): - c1 = ('X require_permission P') - c2 = ('X inlined_card O, O require_permission P') - rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R') - rewrite(rqlst, {('C', 'X'): (c1,), - ('A', 'X'): (c2,), - }, {}) - # XXX suboptimal - self.assertEqual(rqlst.as_string(), - "Any C,A,R WITH A,C,R BEING " - "(Any A,C,R WHERE A? inlined_card C, A ref R, " - "(A is NULL) OR (EXISTS(A inlined_card B, B require_permission D, " - "B is Card, D is CWPermission)), " - "A is Affaire, C is Card, EXISTS(C require_permission E, E is CWPermission))") - - # def test_optional_var_inlined_has_perm(self): - # c1 = ('X require_permission P') - # c2 = ('X inlined_card O, U has_read_permission O') - # rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R') - # rewrite(rqlst, {('C', 'X'): (c1,), - # ('A', 'X'): (c2,), - # }, {}) - # self.assertEqual(rqlst.as_string(), - # "") - - def test_optional_var_inlined_imbricated_error(self): - c1 = ('X require_permission P') - c2 = ('X inlined_card O, O require_permission P') - rqlst = parse(u'Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? 
inlined_card C, A2 ref R2') - self.assertRaises(BadSchemaDefinition, - rewrite, rqlst, {('C', 'X'): (c1,), - ('A', 'X'): (c2,), - ('A2', 'X'): (c2,), - }, {}) - - def test_optional_var_inlined_linked(self): - c1 = ('X require_permission P') - c2 = ('X inlined_card O, O require_permission P') - rqlst = parse(u'Any A,W WHERE A inlined_card C?, C inlined_note N, ' - 'N inlined_affaire W') - rewrite(rqlst, {('C', 'X'): (c1,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any A,W WHERE A inlined_card C?, A is Affaire ' - 'WITH C,N,W BEING (Any C,N,W WHERE C inlined_note N, ' - 'N inlined_affaire W, EXISTS(C require_permission B), ' - 'C is Card, N is Note, W is Affaire)') - - def test_relation_optimization_1_lhs(self): - # since Card in_state State as monovalued cardinality, the in_state - # relation used in the rql expression can be ignored and S replaced by - # the variable from the incoming query - snippet = ('X in_state S, S name "hop"') - rqlst = parse(u'Card C WHERE C in_state STATE') - rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C in_state STATE, C is Card, ' - 'EXISTS(STATE name "hop"), STATE is State') - - def test_relation_optimization_1_rhs(self): - snippet = ('TW subworkflow_exit X, TW name "hop"') - rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT') - rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, ' - 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint') - - def test_relation_optimization_2_lhs(self): - # optional relation can be shared if also optional in the snippet - snippet = ('X in_state S?, S name "hop"') - rqlst = parse(u'Card C WHERE C in_state STATE?') - rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C in_state STATE?, C is Card, ' - 'EXISTS(STATE name "hop"), STATE is State') - def test_relation_optimization_2_rhs(self): - snippet = ('TW? subworkflow_exit X, TW name "hop"') - rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') - rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, ' - 'EXISTS(C name "hop"), C is WorkflowTransition') - - def test_relation_optimization_3_lhs(self): - # optional relation in the snippet but not in the orig tree can be shared - snippet = ('X in_state S?, S name "hop"') - rqlst = parse(u'Card C WHERE C in_state STATE') - rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C in_state STATE, C is Card, ' - 'EXISTS(STATE name "hop"), STATE is State') - - def test_relation_optimization_3_rhs(self): - snippet = ('TW? 
subworkflow_exit X, TW name "hop"') - rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT') - rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, ' - 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint') - - def test_relation_non_optimization_1_lhs(self): - # but optional relation in the orig tree but not in the snippet can't be shared - snippet = ('X in_state S, S name "hop"') - rqlst = parse(u'Card C WHERE C in_state STATE?') - rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C in_state STATE?, C is Card, ' - 'EXISTS(C in_state A, A name "hop", A is State), STATE is State') - - def test_relation_non_optimization_1_rhs(self): - snippet = ('TW subworkflow_exit X, TW name "hop"') - rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') - rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, ' - 'EXISTS(A subworkflow_exit EXIT, A name "hop", A is WorkflowTransition), ' - 'C is WorkflowTransition') - - def test_relation_non_optimization_2(self): - """See #3024730""" - # 'X inlined_note N' must not be shared with 'C inlined_note N' - # previously inserted, else this may introduce duplicated results, as N - # will then be shared by multiple EXISTS and so at SQL generation time, - # the table will be in the FROM clause of the outermost query - rqlst = parse(u'Any A,C WHERE A inlined_card C') - rewrite(rqlst, {('A', 'X'): ('X inlined_card C, C inlined_note N, N owned_by U',), - ('C', 'X'): ('X inlined_note N, N owned_by U',)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any A,C WHERE A inlined_card C, D eid %(E)s, ' - 'EXISTS(C inlined_note B, B owned_by D, B is Note), ' - 'EXISTS(C inlined_note F, F owned_by D, F is Note), ' - 'A is Affaire, C is Card') - - def test_unsupported_constraint_1(self): - # CWUser doesn't have require_permission - trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') - rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U') - self.assertRaises(Unauthorized, rewrite, rqlst, {('T', 'X'): (trinfo_constraint,)}, {}) - - def test_unsupported_constraint_2(self): - trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') - rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U') - rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X wf_info_for Y, Y in_group G, G name "managers"')}, {}) - self.assertEqual(rqlst.as_string(), - u'Any U,T WHERE U is CWUser, T wf_info_for U, ' - 'EXISTS(U in_group B, B name "managers", B is CWGroup), T is TrInfo') - - def test_unsupported_constraint_3(self): - self.skipTest('raise unauthorized for now') - trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') - rqlst = parse(u'Any T WHERE T wf_info_for X') - rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X in_group G, G name "managers"')}, {}) - self.assertEqual(rqlst.as_string(), - u'XXX dunno what should be generated') - - def test_add_ambiguity_exists(self): - constraint = ('X concerne Y') - rqlst = parse(u'Affaire X') - rewrite(rqlst, {('X', 'X'): (constraint,)}, {}) - self.assertEqual(rqlst.as_string(), - u"Any X WHERE X is Affaire, ((EXISTS(X concerne A, A is Division)) OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))") - - def test_add_ambiguity_outerjoin(self): - constraint = ('X concerne Y') - 
rqlst = parse(u'Any X,C WHERE X? documented_by C') - rewrite(rqlst, {('X', 'X'): (constraint,)}, {}) - # ambiguity are kept in the sub-query, no need to be resolved using OR - self.assertEqual(rqlst.as_string(), - u"Any X,C WHERE X? documented_by C, C is Card WITH X BEING (Any X WHERE EXISTS(X concerne A), X is Affaire)") - - - def test_rrqlexpr_nonexistant_subject_1(self): - constraint = RRQLExpression('S owned_by U') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') - self.assertEqual(rqlst.as_string(), - u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)") - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') - self.assertEqual(rqlst.as_string(), - u"Any C WHERE C is Card") - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU') - self.assertEqual(rqlst.as_string(), - u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)") - - def test_rrqlexpr_nonexistant_subject_2(self): - constraint = RRQLExpression('S owned_by U, O owned_by U, O is Card') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C is Card, B eid %(D)s, EXISTS(A owned_by B, A is Card)') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU') - self.assertEqual(rqlst.as_string(), - 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A, D owned_by A, D is Card)') - - def test_rrqlexpr_nonexistant_subject_3(self): - constraint = RRQLExpression('U in_group G, G name "users"') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') - self.assertEqual(rqlst.as_string(), - u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)') - - def test_rrqlexpr_nonexistant_subject_4(self): - constraint = RRQLExpression('U in_group G, G name "users", S owned_by U') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') - self.assertEqual(rqlst.as_string(), - u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", C owned_by A, D is CWGroup)') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') - self.assertEqual(rqlst.as_string(), - u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)') - - def test_rrqlexpr_nonexistant_subject_5(self): - constraint = RRQLExpression('S owned_by Z, O owned_by Z, O is Card') - rqlst = parse(u'Card C') - rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'S') - self.assertEqual(rqlst.as_string(), - u"Any C WHERE C is Card, EXISTS(C owned_by A, A is CWUser)") - - def test_rqlexpr_not_relation_1_1(self): - constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)') - rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X') - self.assertEqual(rqlst.as_string(), - u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire') - - def test_rqlexpr_not_relation_1_2(self): - constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)') - rewrite(rqlst, {('A', 'X'): (constraint,)}, {}, 'X') - self.assertEqual(rqlst.as_string(), - u'Any A WHERE NOT EXISTS(A 
documented_by C, C is Card), A is Affaire, EXISTS(A owned_by B, B login "hop", B is CWUser)') - - def test_rqlexpr_not_relation_2(self): - constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - rqlst = rqlhelper.parse(u'Affaire A WHERE NOT A documented_by C', annotate=False) - rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X') - self.assertEqual(rqlst.as_string(), - u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire') - - def test_rqlexpr_multiexpr_outerjoin(self): - c1 = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - c2 = ERQLExpression('X owned_by Z, Z login "hip"', 'X') - c3 = ERQLExpression('X owned_by Z, Z login "momo"', 'X') - rqlst = rqlhelper.parse(u'Any A WHERE A documented_by C?', annotate=False) - rewrite(rqlst, {('C', 'X'): (c1, c2, c3)}, {}, 'X') - self.assertEqual(rqlst.as_string(), - u'Any A WHERE A documented_by C?, A is Affaire ' - 'WITH C BEING (Any C WHERE ((EXISTS(C owned_by B, B login "hop")) ' - 'OR (EXISTS(C owned_by D, D login "momo"))) ' - 'OR (EXISTS(C owned_by A, A login "hip")), C is Card)') - - def test_multiple_erql_one_bad(self): - #: reproduce bug #2236985 - #: (rqlrewrite fails to remove rewritten entry for unsupported constraint and then crash) - #: - #: This check a very rare code path triggered by the four condition below - - # 1. c_ok introduce an ambiguity - c_ok = ERQLExpression('X concerne R') - # 2. c_bad is just plain wrong and won't be kept - # 3. but it declare a new variable - # 4. this variable require a rewrite - c_bad = ERQLExpression('X documented_by R, A in_state R') - - rqlst = parse(u'Any A, R WHERE A ref R, S is Affaire') - rewrite(rqlst, {('A', 'X'): (c_ok, c_bad)}, {}) - - def test_nonregr_is_instance_of(self): - user_expr = ERQLExpression('NOT X in_group AF, AF name "guests"') - rqlst = parse(u'Any O WHERE S use_email O, S is CWUser, O is_instance_of EmailAddress') - rewrite(rqlst, {('S', 'X'): (user_expr,)}, {}) - self.assertEqual(rqlst.as_string(), - 'Any O WHERE S use_email O, S is CWUser, O is EmailAddress, ' - 'EXISTS(NOT S in_group A, A name "guests", A is CWGroup)') - -from cubicweb.devtools.testlib import CubicWebTC -from logilab.common.decorators import classproperty - -class RewriteFullTC(CubicWebTC): - @classproperty - def config(cls): - return BaseApptestConfiguration(apphome=cls.datapath('rewrite')) - - def process(self, rql, args=None): - if args is None: - args = {} - querier = self.repo.querier - union = querier.parse(rql) - with self.admin_access.repo_cnx() as cnx: - querier.solutions(cnx, union, args) - querier._annotate(union) - plan = querier.plan_factory(union, args, cnx) - plan.preprocess(union) - return union - - def test_ambiguous_optional_same_exprs(self): - """See #3013535""" - edef1 = self.schema['Societe'] - edef2 = self.schema['Division'] - edef3 = self.schema['Note'] - with self.temporary_permissions((edef1, {'read': (ERQLExpression('X owned_by U'),)}), - (edef2, {'read': (ERQLExpression('X owned_by U'),)}), - (edef3, {'read': (ERQLExpression('X owned_by U'),)})): - union = self.process('Any A,AR,X,CD WHERE A concerne X?, A ref AR, X creation_date CD') - self.assertEqual('Any A,AR,X,CD WHERE A concerne X?, A ref AR, A is Affaire ' - 'WITH X,CD BEING (Any X,CD WHERE X creation_date CD, ' - 'EXISTS(X owned_by %(A)s), X is IN(Division, Note, Societe))', - union.as_string()) - - def test_ambiguous_optional_diff_exprs(self): - """See #3013554""" - self.skipTest('bad request generated (may generate duplicated results)') - 
edef1 = self.schema['Societe'] - edef2 = self.schema['Division'] - edef3 = self.schema['Note'] - with self.temporary_permissions((edef1, {'read': (ERQLExpression('X created_by U'),)}), - (edef2, {'read': ('users',)}), - (edef3, {'read': (ERQLExpression('X owned_by U'),)})): - union = self.process('Any A,AR,X,CD WHERE A concerne X?, A ref AR, X creation_date CD') - self.assertEqual(union.as_string(), 'not generated today') - - - def test_xxxx(self): - edef1 = self.schema['Societe'] - edef2 = self.schema['Division'] - read_expr = ERQLExpression('X responsable E, U has_read_permission E') - with self.temporary_permissions((edef1, {'read': (read_expr,)}), - (edef2, {'read': (read_expr,)})): - union = self.process('Any X,AA,AC,AD ORDERBY AD DESC ' - 'WHERE X responsable E, X nom AA, ' - 'X responsable AC?, AC modification_date AD') - self.assertEqual('Any X,AA,AC,AD ORDERBY AD DESC ' - 'WHERE X responsable E, X nom AA, ' - 'X responsable AC?, AC modification_date AD, ' - 'AC is CWUser, E is CWUser, X is IN(Division, Societe)', - union.as_string()) - - def test_question_mark_attribute_snippet(self): - # see #3661918 - from cubicweb.rqlrewrite import RQLRewriter - from logilab.common.decorators import monkeypatch - repotest.undo_monkey_patch() - orig_insert_snippets = RQLRewriter.insert_snippets - # patch insert_snippets and not rewrite, insert_snippets is already - # monkey patches (see above setupModule/repotest) - @monkeypatch(RQLRewriter) - def insert_snippets(self, snippets, varexistsmap=None): - # crash occurs if snippets are processed in a specific order, force - # destiny - if snippets[0][0] != {u'N': 'X'}: - snippets = list(reversed(snippets)) - return orig_insert_snippets(self, snippets, varexistsmap) - try: - with self.temporary_permissions( - (self.schema['Affaire'], - {'read': (ERQLExpression('X ref "blah"'), )}), - (self.schema['Note'], - {'read': (ERQLExpression( - 'EXISTS(X inlined_affaire Z), EXISTS(Z owned_by U)'), )}), - ): - union = self.process( - 'Any A,COUNT(N) GROUPBY A ' - 'WHERE A is Affaire, N? inlined_affaire A') - self.assertEqual('Any A,COUNT(N) GROUPBY A WHERE A is Affaire ' - 'WITH N,A BEING (Any N,A WHERE N? 
inlined_affaire A, ' - '(N is NULL) OR (EXISTS(EXISTS(N inlined_affaire B), ' - 'EXISTS(B owned_by %(E)s), B is Affaire)), ' - 'A is Affaire, N is Note, EXISTS(A ref "blah"))', - union.as_string()) - finally: - RQLRewriter.insert_snippets = orig_insert_snippets - - -class RQLRelationRewriterTC(TestCase): - # XXX valid rules: S and O specified, not in a SET, INSERT, DELETE scope - # valid uses: no outer join - - # Basic tests - def test_base_rule(self): - rules = {'participated_in': 'S contributor O'} - rqlst = rqlhelper.parse(u'Any X WHERE X participated_in S') - rule_rewrite(rqlst, rules) - self.assertEqual('Any X WHERE X contributor S', - rqlst.as_string()) - - def test_complex_rule_1(self): - rules = {'illustrator_of': ('C is Contribution, C contributor S, ' - 'C manifestation O, C role R, ' - 'R name "illustrator"')} - rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE C is Contribution, ' - 'C contributor A, C manifestation B, ' - 'C role D, D name "illustrator"', - rqlst.as_string()) - - def test_complex_rule_2(self): - rules = {'illustrator_of': ('C is Contribution, C contributor S, ' - 'C manifestation O, C role R, ' - 'R name "illustrator"')} - rqlst = rqlhelper.parse(u'Any A WHERE EXISTS(A illustrator_of B)') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A WHERE EXISTS(C is Contribution, ' - 'C contributor A, C manifestation B, ' - 'C role D, D name "illustrator")', - rqlst.as_string()) - - - def test_rewrite2(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B, C require_permission R, S' - 'require_state O') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, ' - 'D is Contribution, D contributor A, D manifestation B, D role E, ' - 'E name "illustrator"', - rqlst.as_string()) - - def test_rewrite3(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'Any A,B WHERE E require_permission T, A illustrator_of B') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE E require_permission T, ' - 'C is Contribution, C contributor A, C manifestation B, ' - 'C role D, D name "illustrator"', - rqlst.as_string()) - - def test_rewrite4(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE C require_permission R, ' - 'D is Contribution, D contributor A, D manifestation B, ' - 'D role E, E name "illustrator"', - rqlst.as_string()) - - def test_rewrite5(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B, ' - 'S require_state O') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, ' - 'D is Contribution, D contributor A, D manifestation B, D role E, ' - 'E name "illustrator"', - rqlst.as_string()) - - # Tests for the with clause - def test_rewrite_with(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - 
rqlst = rqlhelper.parse(u'Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WITH A,B BEING ' - '(Any X,Y WHERE A is Contribution, A contributor X, ' - 'A manifestation Y, A role B, B name "illustrator")', - rqlst.as_string()) - - def test_rewrite_with2(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE T require_permission C ' - 'WITH A,B BEING (Any X,Y WHERE A is Contribution, ' - 'A contributor X, A manifestation Y, A role B, B name "illustrator")', - rqlst.as_string()) - - def test_rewrite_with3(self): - rules = {'participated_in': 'S contributor O'} - rqlst = rqlhelper.parse(u'Any A,B WHERE A participated_in B ' - 'WITH A, B BEING(Any X,Y WHERE X contributor Y)') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE A contributor B WITH A,B BEING ' - '(Any X,Y WHERE X contributor Y)', - rqlst.as_string()) - - def test_rewrite_with4(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B ' - 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE C is Contribution, ' - 'C contributor A, C manifestation B, C role D, ' - 'D name "illustrator" WITH A,B BEING ' - '(Any X,Y WHERE A is Contribution, A contributor X, ' - 'A manifestation Y, A role B, B name "illustrator")', - rqlst.as_string()) - - # Tests for the union - def test_rewrite_union(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B) UNION' - '(Any X,Y WHERE X is CWUser, Z manifestation Y)') - rule_rewrite(rqlst, rules) - self.assertEqual('(Any A,B WHERE C is Contribution, ' - 'C contributor A, C manifestation B, C role D, ' - 'D name "illustrator") UNION (Any X,Y WHERE X is CWUser, Z manifestation Y)', - rqlst.as_string()) - - def test_rewrite_union2(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'(Any Y WHERE Y match W) UNION ' - '(Any A WHERE A illustrator_of B) UNION ' - '(Any Y WHERE Y is ArtWork)') - rule_rewrite(rqlst, rules) - self.assertEqual('(Any Y WHERE Y match W) ' - 'UNION (Any A WHERE C is Contribution, C contributor A, ' - 'C manifestation B, C role D, D name "illustrator") ' - 'UNION (Any Y WHERE Y is ArtWork)', - rqlst.as_string()) - - # Tests for the exists clause - def test_rewrite_exists(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, ' - 'EXISTS(B is ArtWork))') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE EXISTS(B is ArtWork), ' - 'C is Contribution, C contributor A, C manifestation B, C role D, ' - 'D name "illustrator"', - rqlst.as_string()) - - def test_rewrite_exists2(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'(Any A,B WHERE B contributor A, EXISTS(A 
illustrator_of W))') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE B contributor A, ' - 'EXISTS(C is Contribution, C contributor A, C manifestation W, ' - 'C role D, D name "illustrator")', - rqlst.as_string()) - - def test_rewrite_exists3(self): - rules = {'illustrator_of': 'C is Contribution, C contributor S, ' - 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))') - rule_rewrite(rqlst, rules) - self.assertEqual('Any A,B WHERE EXISTS(C is Contribution, C contributor A, ' - 'C manifestation W, C role D, D name "illustrator"), ' - 'E is Contribution, E contributor A, E manifestation B, E role F, ' - 'F name "illustrator"', - rqlst.as_string()) - - # Test for GROUPBY - def test_rewrite_groupby(self): - rules = {'participated_in': 'S contributor O'} - rqlst = rqlhelper.parse(u'Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA') - rule_rewrite(rqlst, rules) - self.assertEqual('Any SUM(SA) GROUPBY S WHERE P manifestation SA, P contributor S', - rqlst.as_string()) - - -class RQLRelationRewriterTC(CubicWebTC): - - appid = 'data/rewrite' - - def test_base_rule(self): - with self.admin_access.client_cnx() as cnx: - art = cnx.create_entity('ArtWork', name=u'Les travailleurs de la Mer') - role = cnx.create_entity('Role', name=u'illustrator') - vic = cnx.create_entity('Person', name=u'Victor Hugo') - contrib = cnx.create_entity('Contribution', code=96, contributor=vic, - manifestation=art, role=role) - rset = cnx.execute('Any X WHERE X illustrator_of S') - self.assertEqual([u'Victor Hugo'], - [result.name for result in rset.entities()]) - rset = cnx.execute('Any S WHERE X illustrator_of S, X eid %(x)s', - {'x': vic.eid}) - self.assertEqual([u'Les travailleurs de la Mer'], - [result.name for result in rset.entities()]) - - -def rule_rewrite(rqlst, kwargs=None): - rewriter = _prepare_rewriter(rqlrewrite.RQLRelationRewriter, kwargs) - rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, - kwargs=kwargs) - rewriter.rewrite(rqlst) - for select in rqlst.children: - test_vrefs(select) - return rewriter.rewritten - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_rset.py --- a/test/unittest_rset.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,594 +0,0 @@ -# coding: utf-8 -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
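The RQLRelationRewriterTC tests removed above show how a computed relation such as `illustrator_of` is expanded: the rule's `S` and `O` variables are bound to the subject and object of the original query, and the rule's remaining variables are renamed so they do not clash with variables already in use. The sketch below illustrates that substitution at the string level only, under the assumption that variables are single upper-case letters; the real RQLRelationRewriter works on the parsed RQL syntax tree, not on strings.

    # String-level illustration of the rule expansion exercised above.
    import re

    RULES = {
        'illustrator_of': ('C is Contribution, C contributor S, '
                           'C manifestation O, C role R, R name "illustrator"'),
        'participated_in': 'S contributor O',
    }

    def expand_rule(rule, subject, obj, used_vars):
        mapping = {'S': subject, 'O': obj}
        fresh = (c for c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' if c not in used_vars)

        def rename(match):
            var = match.group(0)
            if var not in mapping:
                mapping[var] = next(fresh)   # pick an unused variable name
            return mapping[var]

        # standalone upper-case letters are the rule's variables
        return re.sub(r'\b[A-Z]\b', rename, rule)

    # 'Any A,B WHERE A illustrator_of B' expands its restriction to:
    assert expand_rule(RULES['illustrator_of'], 'A', 'B', {'A', 'B'}) == (
        'C is Contribution, C contributor A, C manifestation B, '
        'C role D, D name "illustrator"')
    # 'Any X WHERE X participated_in S' simply becomes 'X contributor S':
    assert expand_rule(RULES['participated_in'], 'X', 'S', {'X', 'S'}) == 'X contributor S'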
-"""unit tests for module cubicweb.utils""" - -from six import string_types -from six.moves import cPickle as pickle -from six.moves.urllib.parse import urlsplit - -from rql import parse - -from logilab.common.testlib import TestCase, unittest_main, mock_object - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.rset import NotAnEntity, ResultSet, attr_desc_iterator -from cubicweb import NoResultError, MultipleResultsError - - -def pprelcachedict(d): - res = {} - for k, (rset, related) in d.items(): - res[k] = sorted(v.eid for v in related) - return sorted(res.items()) - - -class AttrDescIteratorTC(TestCase): - """TestCase for cubicweb.rset.attr_desc_iterator""" - - def test_relations_description(self): - """tests relations_description() function""" - queries = { - 'Any U,L,M where U is CWUser, U login L, U mail M' : [(1, 'login', 'subject'), (2, 'mail', 'subject')], - 'Any U,L,M where U is CWUser, L is Foo, U mail M' : [(2, 'mail', 'subject')], - 'Any C,P where C is Company, C employs P' : [(1, 'employs', 'subject')], - 'Any C,P where C is Company, P employed_by P' : [], - 'Any C where C is Company, C employs P' : [], - } - for rql, relations in queries.items(): - result = list(attr_desc_iterator(parse(rql).children[0], 0, 0)) - self.assertEqual((rql, result), (rql, relations)) - - def test_relations_description_indexed(self): - """tests relations_description() function""" - queries = { - 'Any C,U,P,L,M where C is Company, C employs P, U is CWUser, U login L, U mail M' : - {0: [(2,'employs', 'subject')], 1: [(3,'login', 'subject'), (4,'mail', 'subject')]}, - } - for rql, results in queries.items(): - for idx, relations in results.items(): - result = list(attr_desc_iterator(parse(rql).children[0], idx, idx)) - self.assertEqual(result, relations) - - def test_subquery_callfunc(self): - rql = ('Any A,B,C,COUNT(D) GROUPBY A,B,C WITH A,B,C,D BEING ' - '(Any YEAR(CD), MONTH(CD), S, X WHERE X is CWUser, X creation_date CD, X in_state S)') - rqlst = parse(rql) - select, col = rqlst.locate_subquery(2, 'CWUser', None) - result = list(attr_desc_iterator(select, col, 2)) - self.assertEqual(result, []) - - def test_subquery_callfunc_2(self): - rql = ('Any X,S,L WHERE X in_state S WITH X, L BEING (Any X,MAX(L) GROUPBY X WHERE X is CWUser, T wf_info_for X, T creation_date L)') - rqlst = parse(rql) - select, col = rqlst.locate_subquery(0, 'CWUser', None) - result = list(attr_desc_iterator(select, col, 0)) - self.assertEqual(result, [(1, 'in_state', 'subject')]) - - -class ResultSetTC(CubicWebTC): - - def setUp(self): - super(ResultSetTC, self).setUp() - self.rset = ResultSet([[12, 'adim'], [13, 'syt']], - 'Any U,L where U is CWUser, U login L', - description=[['CWUser', 'String'], ['Bar', 'String']]) - self.rset.req = mock_object(vreg=self.vreg) - - def compare_urls(self, url1, url2): - info1 = urlsplit(url1) - info2 = urlsplit(url2) - self.assertEqual(info1[:3], info2[:3]) - if info1[3] != info2[3]: - params1 = dict(pair.split('=') for pair in info1[3].split('&')) - params2 = dict(pair.split('=') for pair in info1[3].split('&')) - self.assertDictEqual(params1, params2) - - def test_pickle(self): - del self.rset.req - rs2 = pickle.loads(pickle.dumps(self.rset)) - self.assertEqual(self.rset.rows, rs2.rows) - self.assertEqual(self.rset.rowcount, rs2.rowcount) - self.assertEqual(self.rset.rql, rs2.rql) - self.assertEqual(self.rset.description, rs2.description) - - def test_build_url(self): - with self.admin_access.web_request() as req: - baseurl = req.base_url() - 
self.compare_urls(req.build_url('view', vid='foo', rql='yo'), - '%sview?vid=foo&rql=yo' % baseurl) - self.compare_urls(req.build_url('view', _restpath='task/title/go'), - '%stask/title/go' % baseurl) - #self.compare_urls(req.build_url('view', _restpath='/task/title/go'), - # '%stask/title/go' % baseurl) - # empty _restpath should not crash - self.compare_urls(req.build_url('view', _restpath=''), baseurl) - self.assertNotIn('https', req.build_url('view', vid='foo', rql='yo', - __secure__=True)) - try: - self.config.global_set_option('https-url', 'https://testing.fr/') - self.assertTrue('https', req.build_url('view', vid='foo', rql='yo', - __secure__=True)) - self.compare_urls(req.build_url('view', vid='foo', rql='yo', - __secure__=True), - '%sview?vid=foo&rql=yo' % req.base_url(secure=True)) - finally: - self.config.global_set_option('https-url', None) - - - def test_build(self): - """test basic build of a ResultSet""" - rs = ResultSet([1,2,3], 'CWGroup X', description=['CWGroup', 'CWGroup', 'CWGroup']) - self.assertEqual(rs.rowcount, 3) - self.assertEqual(rs.rows, [1,2,3]) - self.assertEqual(rs.description, ['CWGroup', 'CWGroup', 'CWGroup']) - - - def test_limit(self): - rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], - 'Any U,L where U is CWUser, U login L', - description=[['CWUser', 'String']] * 3) - with self.admin_access.web_request() as req: - rs.req = req - rs.vreg = self.vreg - self.assertEqual(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']]) - rs2 = rs.limit(2, offset=1) - self.assertEqual(rs2.rows, [[13000, 'syt'], [14000, 'nico']]) - self.assertEqual(rs2.get_entity(0, 0).cw_row, 0) - self.assertEqual(rs.limit(2, offset=2).rows, [[14000, 'nico']]) - self.assertEqual(rs.limit(2, offset=3).rows, []) - - def test_limit_2(self): - with self.admin_access.web_request() as req: - # drop user from cache for the sake of this test - req.drop_entity_cache(req.user.eid) - rs = req.execute('Any E,U WHERE E is CWEType, E created_by U') - # get entity on row 9. This will fill its created_by relation cache, - # with cwuser on row 9 as well - e1 = rs.get_entity(9, 0) - # get entity on row 10. 
This will fill its created_by relation cache, - # with cwuser built on row 9 - e2 = rs.get_entity(10, 0) - # limit result set from row 10 - rs.limit(1, 10, inplace=True) - # get back eid - e = rs.get_entity(0, 0) - self.assertTrue(e2 is e) - # rs.limit has properly removed cwuser for request cache, but it's - # still referenced by e/e2 relation cache - u = e.created_by[0] - # now ensure this doesn't trigger IndexError because cwuser.cw_row is 9 - # while now rset has only one row - u.cw_rset[u.cw_row] - - def test_filter(self): - rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], - 'Any U,L where U is CWUser, U login L', - description=[['CWUser', 'String']] * 3) - with self.admin_access.web_request() as req: - rs.req = req - rs.vreg = self.vreg - def test_filter(entity): - return entity.login != 'nico' - - rs2 = rs.filtered_rset(test_filter) - self.assertEqual(len(rs2), 2) - self.assertEqual([login for _, login in rs2], ['adim', 'syt']) - self.assertEqual(rs2.description, rs.description[1:]) - - def test_transform(self): - rs = ResultSet([[12, 'adim'], [13, 'syt'], [14, 'nico']], - 'Any U,L where U is CWUser, U login L', - description=[['CWUser', 'String']] * 3) - with self.admin_access.web_request() as req: - rs.req = req - def test_transform(row, desc): - return row[1:], desc[1:] - rs2 = rs.transformed_rset(test_transform) - - self.assertEqual(len(rs2), 3) - self.assertEqual(list(rs2), [['adim'],['syt'],['nico']]) - - def test_sort(self): - rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], - 'Any U,L where U is CWUser, U login L', - description=[['CWUser', 'String']] * 3) - with self.admin_access.web_request() as req: - rs.req = req - rs.vreg = self.vreg - - rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login']) - self.assertEqual(len(rs2), 3) - self.assertEqual([login for _, login in rs2], ['adim', 'nico', 'syt']) - # make sure rs is unchanged - self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) - - rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'], reverse=True) - self.assertEqual(len(rs2), 3) - self.assertEqual([login for _, login in rs2], ['syt', 'nico', 'adim']) - # make sure rs is unchanged - self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) - - rs3 = rs.sorted_rset(lambda row: row[1], col=-1) - self.assertEqual(len(rs3), 3) - self.assertEqual([login for _, login in rs3], ['adim', 'nico', 'syt']) - # make sure rs is unchanged - self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) - - def test_split(self): - rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'], - [12000, 'adim', u'Jardiner facile'], - [13000, 'syt', u'Le carrelage en 42 leçons'], - [14000, 'nico', u'La tarte tatin en 15 minutes'], - [14000, 'nico', u"L'épluchage du castor commun"]], - 'Any U, L, T WHERE U is CWUser, U login L,'\ - 'D created_by U, D title T', - description=[['CWUser', 'String', 'String']] * 5) - with self.admin_access.web_request() as req: - rs.req = req - rs.vreg = self.vreg - rsets = rs.split_rset(lambda e:e.cw_attr_cache['login']) - self.assertEqual(len(rsets), 3) - self.assertEqual([login for _, login,_ in rsets[0]], ['adim', 'adim']) - self.assertEqual([login for _, login,_ in rsets[1]], ['syt']) - self.assertEqual([login for _, login,_ in rsets[2]], ['nico', 'nico']) - # make sure rs is unchanged - self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) - - rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'], return_dict=True) - self.assertEqual(len(rsets), 
3) - self.assertEqual([login for _, login,_ in rsets['nico']], ['nico', 'nico']) - self.assertEqual([login for _, login,_ in rsets['adim']], ['adim', 'adim']) - self.assertEqual([login for _, login,_ in rsets['syt']], ['syt']) - # make sure rs is unchanged - self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) - - rsets = rs.split_rset(lambda s: s.count('d'), col=2) - self.assertEqual(len(rsets), 2) - self.assertEqual([title for _, _, title in rsets[0]], - [u"Adim chez les pinguins", - u"Jardiner facile", - u"L'épluchage du castor commun",]) - self.assertEqual([title for _, _, title in rsets[1]], - [u"Le carrelage en 42 leçons", - u"La tarte tatin en 15 minutes",]) - # make sure rs is unchanged - self.assertEqual([title for _, _, title in rs], - [u'Adim chez les pinguins', - u'Jardiner facile', - u'Le carrelage en 42 leçons', - u'La tarte tatin en 15 minutes', - u"L'épluchage du castor commun"]) - - def test_cached_syntax_tree(self): - """make sure syntax tree is cached""" - rqlst1 = self.rset.syntax_tree() - rqlst2 = self.rset.syntax_tree() - self.assertIs(rqlst1, rqlst2) - - def test_get_entity_simple(self): - with self.admin_access.web_request() as req: - req.create_entity('CWUser', login=u'adim', upassword='adim', - surname=u'di mascio', firstname=u'adrien') - req.drop_entity_cache() - e = req.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0) - self.assertEqual(e.cw_attr_cache['surname'], 'di mascio') - self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') - self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'creation_date') - self.assertEqual(pprelcachedict(e._cw_related_cache), []) - e.complete() - self.assertEqual(e.cw_attr_cache['firstname'], 'adrien') - self.assertEqual(pprelcachedict(e._cw_related_cache), []) - - def test_get_entity_advanced(self): - with self.admin_access.web_request() as req: - req.create_entity('Bookmark', title=u'zou', path=u'/view') - req.drop_entity_cache() - req.execute('SET X bookmarked_by Y WHERE X is Bookmark, Y login "anon"') - rset = req.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN') - - e = rset.get_entity(0, 0) - self.assertEqual(e.cw_row, 0) - self.assertEqual(e.cw_col, 0) - self.assertEqual(e.cw_attr_cache['title'], 'zou') - self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'path') - self.assertEqual(e.view('text'), 'zou') - self.assertEqual(pprelcachedict(e._cw_related_cache), []) - - e = rset.get_entity(0, 1) - self.assertEqual(e.cw_row, 0) - self.assertEqual(e.cw_col, 1) - self.assertEqual(e.cw_attr_cache['login'], 'anon') - self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') - self.assertEqual(pprelcachedict(e._cw_related_cache), - []) - e.complete() - self.assertEqual(e.cw_attr_cache['firstname'], None) - self.assertEqual(e.view('text'), 'anon') - self.assertEqual(pprelcachedict(e._cw_related_cache), - []) - - self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) - self.assertRaises(NotAnEntity, rset.get_entity, 0, 3) - - def test_get_entity_relation_cache_compt(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X,S WHERE X in_state S, X login "anon"') - e = rset.get_entity(0, 0) - seid = req.execute('State X WHERE X name "activated"')[0][0] - # for_user / in_group are prefetched in CWUser __init__, in_state should - # be filed from our query rset - self.assertEqual(pprelcachedict(e._cw_related_cache), - [('in_state_subject', [seid])]) - - def 
test_get_entity_advanced_prefilled_cache(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Bookmark', title=u'zou', path=u'path') - req.cnx.commit() - rset = req.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, ' - 'X title XT, S name SN, U login UL, X eid %s' % e.eid) - e = rset.get_entity(0, 0) - self.assertEqual(e.cw_attr_cache['title'], 'zou') - self.assertEqual(pprelcachedict(e._cw_related_cache), - [('created_by_subject', [req.user.eid])]) - # first level of recursion - u = e.created_by[0] - self.assertEqual(u.cw_attr_cache['login'], 'admin') - self.assertRaises(KeyError, u.cw_attr_cache.__getitem__, 'firstname') - # second level of recursion - s = u.in_state[0] - self.assertEqual(s.cw_attr_cache['name'], 'activated') - self.assertRaises(KeyError, s.cw_attr_cache.__getitem__, 'description') - - - def test_get_entity_cache_with_left_outer_join(self): - with self.admin_access.web_request() as req: - eid = req.execute('INSERT CWUser E: E login "joe", E upassword "joe", E in_group G ' - 'WHERE G name "users"')[0][0] - rset = req.execute('Any X,E WHERE X eid %(x)s, X primary_email E?', {'x': eid}) - e = rset.get_entity(0, 0) - # if any of the assertion below fails with a KeyError, the relation is not cached - # related entities should be an empty list - self.assertEqual(e._cw_related_cache['primary_email_subject'][True], ()) - # related rset should be an empty rset - cached = e._cw_related_cache['primary_email_subject'][False] - self.assertIsInstance(cached, ResultSet) - self.assertEqual(cached.rowcount, 0) - - - def test_get_entity_union(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Bookmark', title=u'manger', path=u'path') - req.drop_entity_cache() - rset = req.execute('Any X,N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X is Bookmark, X title N)' - ' UNION ' - ' (Any X,N WHERE X is CWGroup, X name N))') - expected = (('CWGroup', 'guests'), ('CWGroup', 'managers'), - ('Bookmark', 'manger'), ('CWGroup', 'owners'), - ('CWGroup', 'users')) - for entity in rset.entities(): # test get_entity for each row actually - etype, n = expected[entity.cw_row] - self.assertEqual(entity.cw_etype, etype) - attr = etype == 'Bookmark' and 'title' or 'name' - self.assertEqual(entity.cw_attr_cache[attr], n) - - def test_one(self): - with self.admin_access.web_request() as req: - req.create_entity('CWUser', login=u'cdevienne', - upassword=u'cdevienne', - surname=u'de Vienne', - firstname=u'Christophe') - e = req.execute('Any X WHERE X login "cdevienne"').one() - - self.assertEqual(e.surname, u'de Vienne') - - e = req.execute( - 'Any X, N WHERE X login "cdevienne", X surname N').one() - self.assertEqual(e.surname, u'de Vienne') - - e = req.execute( - 'Any N, X WHERE X login "cdevienne", X surname N').one(col=1) - self.assertEqual(e.surname, u'de Vienne') - - def test_one_no_rows(self): - with self.admin_access.web_request() as req: - with self.assertRaises(NoResultError): - req.execute('Any X WHERE X login "patanok"').one() - - def test_one_multiple_rows(self): - with self.admin_access.web_request() as req: - req.create_entity( - 'CWUser', login=u'cdevienne', upassword=u'cdevienne', - surname=u'de Vienne', firstname=u'Christophe') - - req.create_entity( - 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', - firstname=u'adrien') - - with self.assertRaises(MultipleResultsError): - req.execute('Any X WHERE X is CWUser').one() - - def test_related_entity_optional(self): - with self.admin_access.web_request() as req: - e = 
req.create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = req.execute('Any B,U,L WHERE B bookmarked_by U?, U login L') - entity, rtype = rset.related_entity(0, 2) - self.assertEqual(entity, None) - self.assertEqual(rtype, None) - - def test_related_entity_union_subquery_1(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = req.execute('Any X,N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any X,N WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 1) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') - entity, rtype = rset.related_entity(1, 1) - self.assertEqual(entity.cw_etype, 'CWGroup') - self.assertEqual(rtype, 'name') - self.assertEqual(entity.name, 'guests') - - def test_related_entity_union_subquery_2(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = req.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING ' - '((Any X,N WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any X,N WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 1) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') - - def test_related_entity_union_subquery_3(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = req.execute('Any X,N ORDERBY N WITH N,X BEING ' - '((Any N,X WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any N,X WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 1) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') - - def test_related_entity_union_subquery_4(self): - with self.admin_access.web_request() as req: - e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = req.execute('Any X,X, N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any X,N WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 2) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') - - def test_related_entity_trap_subquery(self): - with self.admin_access.web_request() as req: - req.create_entity('Bookmark', title=u'test bookmark', path=u'') - req.execute('SET B bookmarked_by U WHERE U login "admin"') - rset = req.execute('Any B,T,L WHERE B bookmarked_by U, U login L ' - 'WITH B,T BEING (Any B,T WHERE B is Bookmark, B title T)') - rset.related_entity(0, 2) - - def test_related_entity_subquery_outerjoin(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X,S,L WHERE X in_state S ' - 'WITH X, L BEING (Any X,MAX(L) GROUPBY X ' - 'WHERE X is CWUser, T? 
wf_info_for X, T creation_date L)') - self.assertEqual(len(rset), 2) - rset.related_entity(0, 1) - rset.related_entity(0, 2) - - def test_entities(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any U,G WHERE U in_group G') - # make sure we have at least one element - self.assertTrue(rset) - self.assertEqual(set(e.e_schema.type for e in rset.entities(0)), - set(['CWUser',])) - self.assertEqual(set(e.e_schema.type for e in rset.entities(1)), - set(['CWGroup',])) - - def test_iter_rows_with_entities(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any U,UN,G,GN WHERE U in_group G, U login UN, G name GN') - # make sure we have at least one element - self.assertTrue(rset) - out = list(rset.iter_rows_with_entities())[0] - self.assertEqual( out[0].login, out[1] ) - self.assertEqual( out[2].name, out[3] ) - - def test_printable_rql(self): - with self.admin_access.web_request() as req: - rset = req.execute(u'CWEType X WHERE X final FALSE') - self.assertEqual(rset.printable_rql(), - 'Any X WHERE X final FALSE, X is CWEType') - - def test_searched_text(self): - with self.admin_access.web_request() as req: - rset = req.execute(u'Any X WHERE X has_text "foobar"') - self.assertEqual(rset.searched_text(), 'foobar') - rset = req.execute(u'Any X WHERE X has_text %(text)s', {'text' : 'foo'}) - self.assertEqual(rset.searched_text(), 'foo') - - def test_union_limited_rql(self): - with self.admin_access.web_request() as req: - rset = req.execute('(Any X,N WHERE X is Bookmark, X title N)' - ' UNION ' - '(Any X,N WHERE X is CWGroup, X name N)') - rset.limit(2, 10, inplace=True) - self.assertEqual(rset.limited_rql(), - 'Any A,B LIMIT 2 OFFSET 10 ' - 'WITH A,B BEING (' - '(Any X,N WHERE X is Bookmark, X title N) ' - 'UNION ' - '(Any X,N WHERE X is CWGroup, X name N)' - ')') - - def test_count_users_by_date(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') - self.assertEqual(rset.related_entity(0,0), (None, None)) - - def test_str(self): - with self.admin_access.web_request() as req: - rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') - self.assertIsInstance(str(rset), string_types) - self.assertEqual(len(str(rset).splitlines()), 1) - - def test_repr(self): - with self.admin_access.web_request() as req: - rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') - self.assertIsInstance(repr(rset), string_types) - self.assertTrue(len(repr(rset).splitlines()) > 1) - - rset = req.execute('(Any X WHERE X is CWGroup, X name "managers")') - self.assertIsInstance(str(rset), string_types) - self.assertEqual(len(str(rset).splitlines()), 1) - - def test_slice(self): - rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'], - [12000, 'adim', u'Jardiner facile'], - [13000, 'syt', u'Le carrelage en 42 leçons'], - [14000, 'nico', u'La tarte tatin en 15 minutes'], - [14000, 'nico', u"L'épluchage du castor commun"]], - 'Any U, L, T WHERE U is CWUser, U login L,'\ - 'D created_by U, D title T', - description=[['CWUser', 'String', 'String']] * 5) - self.assertEqual(rs[1::2], - [[12000, 'adim', u'Jardiner facile'], - [14000, 'nico', u'La tarte tatin en 15 minutes']]) - - def test_nonregr_symmetric_relation(self): - # see https://www.cubicweb.org/ticket/4739253 - with self.admin_access.client_cnx() as cnx: - p1 = cnx.create_entity('Personne', nom=u'sylvain') - cnx.create_entity('Personne', nom=u'denis', connait=p1) - cnx.commit() - rset = cnx.execute('Any X,Y WHERE X 
connait Y')
-            rset.get_entity(0, 1) # used to raise KeyError
-
-if __name__ == '__main__':
-    unittest_main()
diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_rtags.py
--- a/test/unittest_rtags.py Mon Jan 04 18:40:30 2016 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,95 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-from logilab.common.testlib import TestCase, unittest_main
-from cubicweb.rtags import RelationTags, RelationTagsSet, RelationTagsDict
-
-class RelationTagsTC(TestCase):
-
-    def test_rtags_expansion(self):
-        rtags = RelationTags()
-        rtags.tag_subject_of(('Societe', 'travaille', '*'), 'primary')
-        rtags.tag_subject_of(('*', 'evaluee', '*'), 'secondary')
-        rtags.tag_object_of(('*', 'tags', '*'), 'generated')
-        self.assertEqual(rtags.get('Note', 'evaluee', '*', 'subject'),
-                         'secondary')
-        self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'),
-                         'primary')
-        self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'),
-                         None)
-        self.assertEqual(rtags.get('Note', 'tags', '*', 'subject'),
-                         None)
-        self.assertEqual(rtags.get('*', 'tags', 'Note', 'object'),
-                         'generated')
-        self.assertEqual(rtags.get('Tag', 'tags', '*', 'object'),
-                         'generated')
-
-# self.assertEqual(rtags.rtag('evaluee', 'Note', 'subject'), set(('secondary', 'link')))
-# self.assertEqual(rtags.is_inlined('evaluee', 'Note', 'subject'), False)
-# self.assertEqual(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link')))
-# self.assertEqual(rtags.is_inlined('evaluee', 'Personne', 'subject'), False)
-# self.assertEqual(rtags.rtag('ecrit_par', 'Note', 'object'), set(('inlineview', 'link')))
-# self.assertEqual(rtags.is_inlined('ecrit_par', 'Note', 'object'), True)
-# class Personne2(Personne):
-#     id = 'Personne'
-#     __rtags__ = {
-#         ('evaluee', 'Note', 'subject') : set(('inlineview',)),
-#     }
-# self.vreg.register(Personne2)
-# rtags = Personne2.rtags
-# self.assertEqual(rtags.rtag('evaluee', 'Note', 'subject'), set(('inlineview', 'link')))
-# self.assertEqual(rtags.is_inlined('evaluee', 'Note', 'subject'), True)
-# self.assertEqual(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link')))
-# self.assertEqual(rtags.is_inlined('evaluee', 'Personne', 'subject'), False)
-
-
-    def test_rtagset_expansion(self):
-        rtags = RelationTagsSet()
-        rtags.tag_subject_of(('Societe', 'travaille', '*'), 'primary')
-        rtags.tag_subject_of(('*', 'travaille', '*'), 'secondary')
-        self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'),
-                         set(('primary', 'secondary')))
-        self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'),
-                         set(('secondary',)))
-        self.assertEqual(rtags.get('Note', 'tags', "*", 'subject'),
-                         set())
-
-    def test_rtagdict_expansion(self):
-        rtags = RelationTagsDict()
-        rtags.tag_subject_of(('Societe', 'travaille', '*'),
-                             {'key1': 'val1', 'key2': 'val1'})
-        rtags.tag_subject_of(('*', 'travaille', '*'),
-                             {'key1': 'val0', 'key3': 'val0'})
-        rtags.tag_subject_of(('Societe', 'travaille', '*'),
-                             {'key2': 'val2'})
-        self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'),
-                         {'key1': 'val1', 'key2': 'val2', 'key3': 'val0'})
-        self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'),
-                         {'key1': 'val0', 'key3': 'val0'})
-        self.assertEqual(rtags.get('Note', 'tags', "*", 'subject'),
-                         {})
-
-        rtags.setdefault(('Societe', 'travaille', '*', 'subject'), 'key1', 'val4')
-        rtags.setdefault(('Societe', 'travaille', '*', 'subject'), 'key4', 'val4')
-        self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'),
-                         {'key1': 'val1', 'key2': 'val2', 'key3': 'val0', 'key4': 'val4'})
-
-if __name__ == '__main__':
-    unittest_main()
diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_schema.py
--- a/test/unittest_schema.py Mon Jan 04 18:40:30 2016 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,563 +0,0 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for module cubicweb.schema""" - -import sys -from os.path import join, isabs, basename, dirname - -from logilab.common.testlib import TestCase, unittest_main - -from rql import RQLSyntaxError - -from yams import ValidationError, BadSchemaDefinition -from yams.constraints import SizeConstraint, StaticVocabularyConstraint -from yams.buildobjs import (RelationDefinition, EntityType, RelationType, - Int, String, SubjectRelation, ComputedRelation) -from yams.reader import fill_schema - -from cubicweb.schema import ( - CubicWebSchema, CubicWebEntitySchema, CubicWebSchemaLoader, - RQLConstraint, RQLUniqueConstraint, RQLVocabularyConstraint, - RQLExpression, ERQLExpression, RRQLExpression, - normalize_expression, order_eschemas, guess_rrqlexpr_mainvars, - build_schema_from_namespace) -from cubicweb.devtools import TestServerConfiguration as TestConfiguration -from cubicweb.devtools.testlib import CubicWebTC - -DATADIR = join(dirname(__file__), 'data') - -# build a dummy schema ######################################################## - - -PERSONNE_PERMISSIONS = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', 'owners'), - 'add': ('managers', ERQLExpression('X travaille S, S owned_by U')), - 'delete': ('managers', 'owners',), - } - -CONCERNE_PERMISSIONS = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - -schema = CubicWebSchema('Test Schema') -enote = schema.add_entity_type(EntityType('Note')) -eaffaire = schema.add_entity_type(EntityType('Affaire')) -eperson = schema.add_entity_type(EntityType('Personne', __permissions__=PERSONNE_PERMISSIONS)) -esociete = schema.add_entity_type(EntityType('Societe')) - -RELS = ( - # attribute relations - ('Note date String'), - ('Note type String'), - ('Affaire sujet String'), - ('Affaire ref String'), - ('Personne nom String'), - ('Personne prenom String'), - ('Personne sexe String'), - ('Personne tel Int'), - ('Personne fax Int'), - ('Personne datenaiss Date'), - ('Personne promo String'), - # real relations - ('Personne travaille Societe'), - ('Personne evaluee Note'), - ('Societe evaluee Note'), - ('Personne concerne Affaire'), - ('Personne concerne Societe'), - ('Affaire concerne Societe'), - ) -done = {} -for rel in RELS: - _from, _type, _to = rel.split() - if not _type.lower() in done: - schema.add_relation_type(RelationType(_type)) - done[_type.lower()] = True - if _type == 'concerne': - schema.add_relation_def(RelationDefinition(_from, _type, _to, - __permissions__=CONCERNE_PERMISSIONS)) - else: - schema.add_relation_def(RelationDefinition(_from, _type, _to)) - -class CubicWebSchemaTC(TestCase): - - def test_rql_constraints_inheritance(self): - # isinstance(cstr, RQLVocabularyConstraint) - # -> expected to return RQLVocabularyConstraint and RQLConstraint - # instances but not RQLUniqueConstraint - # - # isinstance(cstr, RQLConstraint) - # -> expected to return RQLConstraint instances but not - # RQLVocabularyConstraint and RQLUniqueConstraint - self.assertFalse(issubclass(RQLUniqueConstraint, RQLVocabularyConstraint)) - self.assertFalse(issubclass(RQLUniqueConstraint, RQLConstraint)) - - def test_entity_perms(self): - self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests'))) - self.assertEqual(eperson.get_groups('update'), set(('managers', 'owners',))) - self.assertEqual(eperson.get_groups('delete'), set(('managers', 'owners'))) - 
self.assertEqual(eperson.get_groups('add'), set(('managers',))) - self.assertEqual([str(e) for e in eperson.get_rqlexprs('add')], - ['Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s']) - eperson.set_action_permissions('read', ('managers',)) - self.assertEqual(eperson.get_groups('read'), set(('managers',))) - - def test_relation_perms(self): - rconcerne = schema.rschema('concerne').rdef('Personne', 'Societe') - self.assertEqual(rconcerne.get_groups('read'), set(('managers', 'users', 'guests'))) - self.assertEqual(rconcerne.get_groups('delete'), set(('managers',))) - self.assertEqual(rconcerne.get_groups('add'), set(('managers', ))) - rconcerne.set_action_permissions('read', ('managers',)) - self.assertEqual(rconcerne.get_groups('read'), set(('managers',))) - self.assertEqual([str(e) for e in rconcerne.get_rqlexprs('add')], - ['Any S,U WHERE U has_update_permission S, S eid %(s)s, U eid %(u)s']) - - def test_erqlexpression(self): - self.assertRaises(RQLSyntaxError, ERQLExpression, '1') - expr = ERQLExpression('X travaille S, S owned_by U') - self.assertEqual(str(expr), 'Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s') - expr = ERQLExpression('X foo S, S bar U, X baz XE, S quux SE HAVING XE > SE') - self.assertEqual(str(expr), 'Any X WHERE X foo S, S bar U, X baz XE, S quux SE, X eid %(x)s, U eid %(u)s HAVING XE > SE') - - def test_rrqlexpression(self): - self.assertRaises(Exception, RRQLExpression, '1') - self.assertRaises(RQLSyntaxError, RRQLExpression, 'O X Y') - expr = RRQLExpression('U has_update_permission O') - self.assertEqual(str(expr), 'Any O,U WHERE U has_update_permission O, O eid %(o)s, U eid %(u)s') - -loader = CubicWebSchemaLoader() -config = TestConfiguration('data', apphome=DATADIR) -config.bootstrap_cubes() - -class SchemaReaderClassTest(TestCase): - - def test_order_eschemas(self): - schema = loader.load(config) - self.assertEqual(order_eschemas([schema['Note'], schema['SubNote']]), - [schema['Note'], schema['SubNote']]) - self.assertEqual(order_eschemas([schema['SubNote'], schema['Note']]), - [schema['Note'], schema['SubNote']]) - - def test_knownValues_load_schema(self): - schema = loader.load(config) - self.assertIsInstance(schema, CubicWebSchema) - self.assertEqual(schema.name, 'data') - entities = sorted([str(e) for e in schema.entities()]) - expected_entities = ['Ami', 'BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card', - 'Date', 'Datetime', 'Decimal', - 'CWCache', 'CWComputedRType', 'CWConstraint', - 'CWConstraintType', 'CWDataImport', 'CWEType', - 'CWAttribute', 'CWGroup', 'EmailAddress', - 'CWRelation', 'CWPermission', 'CWProperty', 'CWRType', - 'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig', - 'CWUniqueTogetherConstraint', 'CWUser', - 'ExternalUri', 'FakeFile', 'Float', 'Int', 'Interval', 'Note', - 'Password', 'Personne', 'Produit', - 'RQLExpression', 'Reference', - 'Service', 'Societe', 'State', 'StateFull', 'String', 'SubNote', 'SubWorkflowExitPoint', - 'Tag', 'TZDatetime', 'TZTime', 'Time', 'Transition', 'TrInfo', - 'Usine', - 'Workflow', 'WorkflowTransition'] - self.assertListEqual(sorted(expected_entities), entities) - relations = sorted([str(r) for r in schema.relations()]) - expected_relations = ['actionnaire', 'add_permission', 'address', 'alias', 'allowed_transition', 'associe', - 'bookmarked_by', 'by_transition', - - 'cardinality', 'comment', 'comment_format', - 'composite', 'condition', 'config', 'connait', - 'constrained_by', 'constraint_of', - 'content', 'content_format', 
'contrat_exclusif', - 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', - 'cwuri', 'cw_for_source', 'cw_import_of', 'cw_host_config_of', 'cw_schema', 'cw_source', - - 'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission', - 'description', 'description_format', 'destination_state', 'dirige', - - 'ean', 'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype', 'extra_props', - - 'fabrique_par', 'final', 'firstname', 'for_user', 'formula', 'fournit', - 'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed', - - 'has_group_permission', 'has_text', - 'identity', 'in_group', 'in_state', 'in_synchronization', 'indexed', - 'initial_state', 'inlined', 'internationalizable', 'is', 'is_instance_of', - - 'label', 'last_login_time', 'latest_retrieval', 'lieu', 'log', 'login', - - 'mainvars', 'match_host', 'modification_date', - - 'name', 'nom', - - 'options', 'ordernum', 'owned_by', - - 'parser', 'path', 'pkey', 'prefered_form', 'prenom', 'primary_email', - - 'read_permission', 'relation_type', 'relations', 'require_group', 'rule', - - 'specializes', 'start_timestamp', 'state_of', 'status', 'subworkflow', 'subworkflow_exit', 'subworkflow_state', 'surname', 'symmetric', 'synopsis', - - 'tags', 'timestamp', 'title', 'to_entity', 'to_state', 'transition_of', 'travaille', 'type', - - 'upassword', 'update_permission', 'url', 'uri', 'use_email', - - 'value', - - 'wf_info_for', 'wikiid', 'workflow_of', 'tr_count'] - - self.assertListEqual(sorted(expected_relations), relations) - - eschema = schema.eschema('CWUser') - rels = sorted(str(r) for r in eschema.subject_relations()) - self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow', - 'cw_source', 'cwuri', 'eid', - 'evaluee', 'firstname', 'has_group_permission', - 'has_text', 'identity', - 'in_group', 'in_state', 'is', - 'is_instance_of', 'last_login_time', - 'login', 'modification_date', 'owned_by', - 'primary_email', 'surname', 'upassword', - 'use_email']) - rels = sorted(r.type for r in eschema.object_relations()) - self.assertListEqual(rels, ['bookmarked_by', 'created_by', 'for_user', - 'identity', 'owned_by', 'wf_info_for']) - rschema = schema.rschema('relation_type') - properties = rschema.rdef('CWAttribute', 'CWRType') - self.assertEqual(properties.cardinality, '1*') - constraints = properties.constraints - self.assertEqual(len(constraints), 1, constraints) - constraint = constraints[0] - self.assertTrue(isinstance(constraint, RQLConstraint)) - self.assertEqual(constraint.expression, 'O final TRUE') - - def test_fulltext_container(self): - schema = loader.load(config) - self.assertIn('has_text', schema['CWUser'].subject_relations()) - self.assertNotIn('has_text', schema['EmailAddress'].subject_relations()) - - def test_permission_settings(self): - schema = loader.load(config) - aschema = schema['TrInfo'].rdef('comment') - self.assertEqual(aschema.get_groups('read'), - set(('managers', 'users', 'guests'))) - self.assertEqual(aschema.get_rqlexprs('read'), - ()) - self.assertEqual(aschema.get_groups('update'), - set(('managers',))) - self.assertEqual([x.expression for x in aschema.get_rqlexprs('update')], - ['U has_update_permission X']) - - def test_nonregr_allowed_type_names(self): - schema = CubicWebSchema('Test Schema') - schema.add_entity_type(EntityType('NaN')) - - def test_relation_perm_overriding(self): - loader = CubicWebSchemaLoader() - config = TestConfiguration('data', apphome=join(dirname(__file__), 'data_schemareader')) - 
config.bootstrap_cubes() - schema = loader.load(config) - rdef = next(iter(schema['in_group'].rdefs.values())) - self.assertEqual(rdef.permissions, - {'read': ('managers',), - 'add': ('managers',), - 'delete': ('managers',)}) - rdef = next(iter(schema['cw_for_source'].rdefs.values())) - self.assertEqual(rdef.permissions, - {'read': ('managers', 'users'), - 'add': ('managers',), - 'delete': ('managers',)}) - - def test_computed_attribute(self): - """Check schema finalization for computed attributes.""" - class Person(EntityType): - salary = Int() - - class works_for(RelationDefinition): - subject = 'Person' - object = 'Company' - cardinality = '?*' - - class Company(EntityType): - total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE ' - 'P works_for X, P salary SA') - good_schema = build_schema_from_namespace(vars().items()) - rdef = good_schema['Company'].rdef('total_salary') - # ensure 'X is Company' is added to the rqlst to avoid ambiguities, see #4901163 - self.assertEqual(str(rdef.formula_select), - 'Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA, X is Company') - # check relation definition permissions - self.assertEqual(rdef.permissions, - {'add': (), 'update': (), - 'read': ('managers', 'users', 'guests')}) - - class Company(EntityType): - total_salary = String(formula='Any SUM(SA) GROUPBY X WHERE ' - 'P works_for X, P salary SA') - - with self.assertRaises(BadSchemaDefinition) as exc: - bad_schema = build_schema_from_namespace(vars().items()) - - self.assertEqual(str(exc.exception), - 'computed attribute total_salary on Company: ' - 'computed attribute type (Int) mismatch with ' - 'specified type (String)') - - -class SchemaReaderComputedRelationAndAttributesTest(TestCase): - - def test_infer_computed_relation(self): - class Person(EntityType): - name = String() - - class Company(EntityType): - name = String() - - class Service(EntityType): - name = String() - - class works_for(RelationDefinition): - subject = 'Person' - object = 'Company' - - class produce(RelationDefinition): - subject = ('Person', 'Company') - object = 'Service' - - class achete(RelationDefinition): - subject = 'Person' - object = 'Service' - - class produces_and_buys(ComputedRelation): - rule = 'S produce O, S achete O' - - class produces_and_buys2(ComputedRelation): - rule = 'S works_for SO, SO produce O' - - class reproduce(ComputedRelation): - rule = 'S produce O' - - schema = build_schema_from_namespace(vars().items()) - - # check object/subject type - self.assertEqual([('Person','Service')], - list(schema['produces_and_buys'].rdefs.keys())) - self.assertEqual([('Person','Service')], - list(schema['produces_and_buys2'].rdefs.keys())) - self.assertCountEqual([('Company', 'Service'), ('Person', 'Service')], - list(schema['reproduce'].rdefs.keys())) - # check relation definitions are marked infered - rdef = schema['produces_and_buys'].rdefs[('Person','Service')] - self.assertTrue(rdef.infered) - # and have no add/delete permissions - self.assertEqual(rdef.permissions, - {'add': (), - 'delete': (), - 'read': ('managers', 'users', 'guests')}) - - class autoname(ComputedRelation): - rule = 'S produce X, X name O' - - with self.assertRaises(BadSchemaDefinition) as cm: - build_schema_from_namespace(vars().items()) - self.assertEqual(str(cm.exception), 'computed relations cannot be final') - - -class BadSchemaTC(TestCase): - def setUp(self): - self.loader = CubicWebSchemaLoader() - self.loader.defined = {} - self.loader.loaded_files = [] - self.loader.post_build_callbacks = [] - - def _test(self, 
schemafile, msg): - self.loader.handle_file(join(DATADIR, schemafile)) - sch = self.loader.schemacls('toto') - with self.assertRaises(BadSchemaDefinition) as cm: - fill_schema(sch, self.loader.defined, False) - self.assertEqual(str(cm.exception), msg) - - def test_lowered_etype(self): - self._test('lowered_etype.py', - "'my_etype' is not a valid name for an entity type. It should " - "start with an upper cased letter and be followed by at least " - "a lower cased letter") - - def test_uppered_rtype(self): - self._test('uppered_rtype.py', - "'ARelation' is not a valid name for a relation type. It should be lower cased") - - def test_rrqlexpr_on_etype(self): - self._test('rrqlexpr_on_eetype.py', - "can't use RRQLExpression on ToTo, use an ERQLExpression") - - def test_erqlexpr_on_rtype(self): - self._test('erqlexpr_on_ertype.py', - "can't use ERQLExpression on relation ToTo toto TuTu, use a RRQLExpression") - - def test_rqlexpr_on_rtype_read(self): - self._test('rqlexpr_on_ertype_read.py', - "can't use rql expression for read permission of relation ToTo toto TuTu") - - def test_rrqlexpr_on_attr(self): - self._test('rrqlexpr_on_attr.py', - "can't use RRQLExpression on attribute ToTo.attr[String], use an ERQLExpression") - - def test_rqlexpr_on_computedrel(self): - self._test('rqlexpr_on_computedrel.py', - "can't use rql expression for read permission of relation Subject computed Object") - - -class NormalizeExpressionTC(TestCase): - - def test(self): - self.assertEqual(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), - 'X bla Y, Y blur Z, Z zigoulou X') - self.assertEqual(normalize_expression('X bla Y, Y name "x,y"'), - 'X bla Y, Y name "x,y"') - - -class RQLExpressionTC(TestCase): - def test_comparison(self): - self.assertEqual(ERQLExpression('X is CWUser', 'X', 0), - ERQLExpression('X is CWUser', 'X', 0)) - self.assertNotEqual(ERQLExpression('X is CWUser', 'X', 0), - ERQLExpression('X is CWGroup', 'X', 0)) - - -class GuessRrqlExprMainVarsTC(TestCase): - def test_exists(self): - mainvars = guess_rrqlexpr_mainvars(normalize_expression('NOT EXISTS(O team_competition C, C level < 3, C concerns S)')) - self.assertEqual(mainvars, set(['S', 'O'])) - - -class RQLConstraintTC(CubicWebTC): - def test_user_constraint(self): - cstr = RQLConstraint('U identity O') - with self.admin_access.repo_cnx() as cnx: - anoneid = cnx.execute('Any X WHERE X login "anon"')[0][0] - self.assertRaises(ValidationError, - cstr.repo_check, cnx, 1, 'rel', anoneid) - self.assertEqual(cstr.repo_check(cnx, 1, cnx.user.eid), - None) # no validation error, constraint checked - - -class WorkflowShemaTC(CubicWebTC): - def test_trinfo_default_format(self): - with self.admin_access.web_request() as req: - tr = req.user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') - self.assertEqual(tr.comment_format, 'text/plain') - - -class CompositeSchemaTC(CubicWebTC): - composites = { - 'BaseTransition': [('condition', 'BaseTransition', 'RQLExpression', 'subject')], - 'CWAttribute': [('add_permission', 'CWAttribute', 'RQLExpression', 'subject'), - ('constrained_by', 'CWAttribute', 'CWConstraint', 'subject'), - ('read_permission', 'CWAttribute', 'RQLExpression', 'subject'), - ('update_permission', 'CWAttribute', 'RQLExpression', 'subject')], - 'CWEType': [('add_permission', 'CWEType', 'RQLExpression', 'subject'), - ('constraint_of', 'CWUniqueTogetherConstraint', 'CWEType', 'object'), - ('cw_schema', 'CWSourceSchemaConfig', 'CWEType', 'object'), - ('delete_permission', 'CWEType', 'RQLExpression', 'subject'), - 
('from_entity', 'CWAttribute', 'CWEType', 'object'), - ('from_entity', 'CWRelation', 'CWEType', 'object'), - ('read_permission', 'CWEType', 'RQLExpression', 'subject'), - ('to_entity', 'CWAttribute', 'CWEType', 'object'), - ('to_entity', 'CWRelation', 'CWEType', 'object'), - ('update_permission', 'CWEType', 'RQLExpression', 'subject')], - 'CWRType': [('cw_schema', 'CWSourceSchemaConfig', 'CWRType', 'object'), - ('relation_type', 'CWAttribute', 'CWRType', 'object'), - ('relation_type', 'CWRelation', 'CWRType', 'object')], - 'CWRelation': [('add_permission', 'CWRelation', 'RQLExpression', 'subject'), - ('constrained_by', 'CWRelation', 'CWConstraint', 'subject'), - ('cw_schema', 'CWSourceSchemaConfig', 'CWRelation', 'object'), - ('delete_permission', 'CWRelation', 'RQLExpression', 'subject'), - ('read_permission', 'CWRelation', 'RQLExpression', 'subject')], - 'CWComputedRType': [('read_permission', 'CWComputedRType', 'RQLExpression', 'subject')], - 'CWSource': [('cw_for_source', 'CWSourceSchemaConfig', 'CWSource', 'object'), - ('cw_host_config_of', 'CWSourceHostConfig', 'CWSource', 'object'), - ('cw_import_of', 'CWDataImport', 'CWSource', 'object'), - ('cw_source', 'Ami', 'CWSource', 'object'), - ('cw_source', 'BaseTransition', 'CWSource', 'object'), - ('cw_source', 'Bookmark', 'CWSource', 'object'), - ('cw_source', 'CWAttribute', 'CWSource', 'object'), - ('cw_source', 'CWCache', 'CWSource', 'object'), - ('cw_source', 'CWComputedRType', 'CWSource', 'object'), - ('cw_source', 'CWConstraint', 'CWSource', 'object'), - ('cw_source', 'CWConstraintType', 'CWSource', 'object'), - ('cw_source', 'CWDataImport', 'CWSource', 'object'), - ('cw_source', 'CWEType', 'CWSource', 'object'), - ('cw_source', 'CWGroup', 'CWSource', 'object'), - ('cw_source', 'CWPermission', 'CWSource', 'object'), - ('cw_source', 'CWProperty', 'CWSource', 'object'), - ('cw_source', 'CWRType', 'CWSource', 'object'), - ('cw_source', 'CWRelation', 'CWSource', 'object'), - ('cw_source', 'CWSource', 'CWSource', 'object'), - ('cw_source', 'CWSourceHostConfig', 'CWSource', 'object'), - ('cw_source', 'CWSourceSchemaConfig', 'CWSource', 'object'), - ('cw_source', 'CWUniqueTogetherConstraint', 'CWSource', 'object'), - ('cw_source', 'CWUser', 'CWSource', 'object'), - ('cw_source', 'Card', 'CWSource', 'object'), - ('cw_source', 'EmailAddress', 'CWSource', 'object'), - ('cw_source', 'ExternalUri', 'CWSource', 'object'), - ('cw_source', 'FakeFile', 'CWSource', 'object'), - ('cw_source', 'Note', 'CWSource', 'object'), - ('cw_source', 'Personne', 'CWSource', 'object'), - ('cw_source', 'Produit', 'CWSource', 'object'), - ('cw_source', 'RQLExpression', 'CWSource', 'object'), - ('cw_source', 'Reference', 'CWSource', 'object'), - ('cw_source', 'Service', 'CWSource', 'object'), - ('cw_source', 'Societe', 'CWSource', 'object'), - ('cw_source', 'State', 'CWSource', 'object'), - ('cw_source', 'StateFull', 'CWSource', 'object'), - ('cw_source', 'SubNote', 'CWSource', 'object'), - ('cw_source', 'SubWorkflowExitPoint', 'CWSource', 'object'), - ('cw_source', 'Tag', 'CWSource', 'object'), - ('cw_source', 'TrInfo', 'CWSource', 'object'), - ('cw_source', 'Transition', 'CWSource', 'object'), - ('cw_source', 'Usine', 'CWSource', 'object'), - ('cw_source', 'Workflow', 'CWSource', 'object'), - ('cw_source', 'WorkflowTransition', 'CWSource', 'object')], - 'CWUser': [('for_user', 'CWProperty', 'CWUser', 'object'), - ('use_email', 'CWUser', 'EmailAddress', 'subject'), - ('wf_info_for', 'TrInfo', 'CWUser', 'object')], - 'StateFull': [('wf_info_for', 'TrInfo', 
'StateFull', 'object')], - 'Transition': [('condition', 'Transition', 'RQLExpression', 'subject')], - 'Workflow': [('state_of', 'State', 'Workflow', 'object'), - ('transition_of', 'BaseTransition', 'Workflow', 'object'), - ('transition_of', 'Transition', 'Workflow', 'object'), - ('transition_of', 'WorkflowTransition', 'Workflow', 'object')], - 'WorkflowTransition': [('condition', 'WorkflowTransition', 'RQLExpression', 'subject'), - ('subworkflow_exit', 'WorkflowTransition', 'SubWorkflowExitPoint', 'subject')] - } - - def test_composite_entities(self): - schema = self.vreg.schema - self.assertEqual(sorted(self.composites), - [eschema.type for eschema in sorted(schema.entities()) - if eschema.is_composite]) - for etype in self.composites: - self.set_description('composite rdefs for %s' % etype) - yield self.assertEqual, self.composites[etype], \ - sorted([(r.rtype.type, r.subject.type, r.object.type, role) - for r, role in schema[etype].composite_rdef_roles]) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_spa2rql.py --- a/test/unittest_spa2rql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,224 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -import unittest - -from logilab.common.testlib import TestCase, unittest_main -from cubicweb.devtools import TestServerConfiguration -from cubicweb.xy import xy - -SKIPCAUSE = None -try: - from cubicweb.spa2rql import Sparql2rqlTranslator -except ImportError as exc: - SKIPCAUSE = str(exc) - -xy.add_equivalence('Project', 'doap:Project') -xy.add_equivalence('Project creation_date', 'doap:Project doap:created') -xy.add_equivalence('Project name', 'doap:Project doap:name') -xy.add_equivalence('Project name', 'doap:Project dc:title') - - -config = TestServerConfiguration('data') -config.bootstrap_cubes() -schema = config.load_schema() - - -@unittest.skipIf(SKIPCAUSE, SKIPCAUSE) -class XYTC(TestCase): - def setUp(self): - self.tr = Sparql2rqlTranslator(schema) - - def _test(self, sparql, rql, args={}): - qi = self.tr.translate(sparql) - self.assertEqual(qi.finalize(), (rql, args)) - - def XXX_test_base_01(self): - self._test('SELECT * WHERE { }', 'Any X') - - - def test_base_is(self): - self._test(''' - PREFIX doap: - SELECT ?project - WHERE { - ?project a doap:Project; - }''', 'Any PROJECT WHERE PROJECT is Project') - - def test_base_rdftype(self): - self._test(''' - PREFIX doap: - PREFIX rdf: - SELECT ?project - WHERE { - ?project rdf:type doap:Project. - }''', 'Any PROJECT WHERE PROJECT is Project') - - def test_base_attr_sel(self): - self._test(''' - PREFIX doap: - SELECT ?created - WHERE { - ?project a doap:Project; - doap:created ?created. 
- }''', 'Any CREATED WHERE PROJECT creation_date CREATED, PROJECT is Project') - - - def test_base_attr_sel_distinct(self): - self._test(''' - PREFIX doap: - SELECT DISTINCT ?name - WHERE { - ?project a doap:Project; - doap:name ?name. - }''', 'DISTINCT Any NAME WHERE PROJECT name NAME, PROJECT is Project') - - - def test_base_attr_sel_reduced(self): - self._test(''' - PREFIX doap: - SELECT REDUCED ?name - WHERE { - ?project a doap:Project; - doap:name ?name. - }''', 'Any NAME WHERE PROJECT name NAME, PROJECT is Project') - - - def test_base_attr_sel_limit_offset(self): - self._test(''' - PREFIX doap: - SELECT ?name - WHERE { - ?project a doap:Project; - doap:name ?name. - } - LIMIT 20''', 'Any NAME LIMIT 20 WHERE PROJECT name NAME, PROJECT is Project') - self._test(''' - PREFIX doap: - SELECT ?name - WHERE { - ?project a doap:Project; - doap:name ?name. - } - LIMIT 20 OFFSET 10''', 'Any NAME LIMIT 20 OFFSET 10 WHERE PROJECT name NAME, PROJECT is Project') - - - def test_base_attr_sel_orderby(self): - self._test(''' - PREFIX doap: - SELECT ?name - WHERE { - ?project a doap:Project; - doap:name ?name; - doap:created ?created. - } - ORDER BY ?name DESC(?created)''', 'Any NAME ORDERBY NAME ASC, CREATED DESC WHERE PROJECT name NAME, PROJECT creation_date CREATED, PROJECT is Project') - - - def test_base_any_attr_sel(self): - self._test(''' - PREFIX dc: - SELECT ?x ?cd - WHERE { - ?x dc:date ?cd; - }''', 'Any X, CD WHERE X creation_date CD') - - - def test_base_any_attr_sel_amb(self): - xy.add_equivalence('Version publication_date', 'doap:Version dc:date') - try: - self._test(''' - PREFIX dc: - SELECT ?x ?cd - WHERE { - ?x dc:date ?cd; - }''', '(Any X, CD WHERE , X creation_date CD) UNION (Any X, CD WHERE , X publication_date CD, X is Version)') - finally: - xy.remove_equivalence('Version publication_date', 'doap:Version dc:date') - - - def test_base_any_attr_sel_amb_limit_offset(self): - xy.add_equivalence('Version publication_date', 'doap:Version dc:date') - try: - self._test(''' - PREFIX dc: - SELECT ?x ?cd - WHERE { - ?x dc:date ?cd; - } - LIMIT 20 OFFSET 10''', 'Any X, CD LIMIT 20 OFFSET 10 WITH X, CD BEING ((Any X, CD WHERE , X creation_date CD) UNION (Any X, CD WHERE , X publication_date CD, X is Version))') - finally: - xy.remove_equivalence('Version publication_date', 'doap:Version dc:date') - - - def test_base_any_attr_sel_amb_orderby(self): - xy.add_equivalence('Version publication_date', 'doap:Version dc:date') - try: - self._test(''' - PREFIX dc: - SELECT ?x ?cd - WHERE { - ?x dc:date ?cd; - } - ORDER BY DESC(?cd)''', 'Any X, CD ORDERBY CD DESC WITH X, CD BEING ((Any X, CD WHERE , X creation_date CD) UNION (Any X, CD WHERE , X publication_date CD, X is Version))') - finally: - xy.remove_equivalence('Version publication_date', 'doap:Version dc:date') - - - def test_restr_attr(self): - self._test(''' - PREFIX doap: - SELECT ?project - WHERE { - ?project a doap:Project; - doap:name "cubicweb". - }''', 'Any PROJECT WHERE PROJECT name %(a)s, PROJECT is Project', {'a': 'cubicweb'}) - - def test_dctitle_both_project_cwuser(self): - self._test(''' - PREFIX doap: - PREFIX dc: - SELECT ?project ?title - WHERE { - ?project a doap:Project; - dc:title ?title. 
- }''', 'Any PROJECT, TITLE WHERE PROJECT name TITLE, PROJECT is Project') - -# # Two elements in the group -# PREFIX : -# SELECT * -# { :p :q :r OPTIONAL { :a :b :c } -# :p :q :r OPTIONAL { :a :b :c } -# } - -# PREFIX : -# SELECT * -# { -# { ?s ?p ?o } UNION { ?a ?b ?c } -# } - -# PREFIX dob: -# PREFIX time: -# PREFIX dc: -# SELECT ?desc -# WHERE { -# dob:1D a time:ProperInterval; -# dc:description ?desc. -# } - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_toolsutils.py --- a/test/unittest_toolsutils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,57 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.toolsutils import RQLExecuteMatcher - - -class RQLExecuteMatcherTests(TestCase): - def matched_query(self, text): - match = RQLExecuteMatcher.match(text) - if match is None: - return None - return match['rql_query'] - - def test_unknown_function_dont_match(self): - self.assertIsNone(self.matched_query('foo')) - self.assertIsNone(self.matched_query('rql(')) - self.assertIsNone(self.matched_query('hell("")')) - self.assertIsNone(self.matched_query('eval("rql(\'bla\'')) - - def test_rql_other_parameters_dont_match(self): - self.assertIsNone(self.matched_query('rql("Any X WHERE X eid %(x)s")')) - self.assertIsNone(self.matched_query('rql("Any X WHERE X eid %(x)s", {')) - self.assertIsNone(self.matched_query('session.execute("Any X WHERE X eid %(x)s")')) - self.assertIsNone(self.matched_query('session.execute("Any X WHERE X eid %(x)s", {')) - - def test_rql_function_match(self): - for func_expr in ('rql', 'session.execute'): - query = self.matched_query('%s("Any X WHERE X is ' % func_expr) - self.assertEqual(query, 'Any X WHERE X is ') - - def test_offseted_rql_function_match(self): - """check indentation is allowed""" - for func_expr in (' rql', ' session.execute'): - query = self.matched_query('%s("Any X WHERE X is ' % func_expr) - self.assertEqual(query, 'Any X WHERE X is ') - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_uilib.py --- a/test/unittest_uilib.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,202 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unittests for cubicweb.uilib""" - -__docformat__ = "restructuredtext en" - - -import pkg_resources - -try: - from unittest import skipIf -except ImportError: - from unittest2 import skipIf - -from logilab.common.testlib import DocTest, TestCase, unittest_main - -from cubicweb import uilib, utils as cwutils - -lxml_version = pkg_resources.get_distribution('lxml').version.split('.') - -class UILIBTC(TestCase): - - def test_remove_tags(self): - """make sure remove_tags remove all tags""" - data = [ - ('

      Hello

      ', 'Hello'), - ('

      Hello spam

      ', 'Hello spam'), - ('
      Hello', 'Hello'), - ('

      ', ''), - ] - for text, expected in data: - got = uilib.remove_html_tags(text) - self.assertEqual(got, expected) - - def test_fallback_safe_cut(self): - self.assertEqual(uilib.fallback_safe_cut(u'ab cd', 4), u'ab c...') - self.assertEqual(uilib.fallback_safe_cut(u'ab cd', 5), u'ab cd') - self.assertEqual(uilib.fallback_safe_cut(u'ab &d', 4), u'ab &...') - self.assertEqual(uilib.fallback_safe_cut(u'ab &d ef', 5), u'ab &d...') - self.assertEqual(uilib.fallback_safe_cut(u'ab ìd', 4), u'ab ì...') - self.assertEqual(uilib.fallback_safe_cut(u'& &d ef', 4), u'& &d...') - - def test_lxml_safe_cut(self): - self.assertEqual(uilib.safe_cut(u'aaa
      aaad
      ef', 4), u'

      aaa

      a...
      ') - self.assertEqual(uilib.safe_cut(u'aaa
      aaad
      ef', 7), u'

      aaa

      aaad
      ...') - self.assertEqual(uilib.safe_cut(u'aaa
      aaad
      ', 7), u'

      aaa

      aaad
      ') - # Missing ellipsis due to space management but we don't care - self.assertEqual(uilib.safe_cut(u'ab &d', 4), u'

      ab &...

      ') - - def test_cut(self): - """tests uilib.cut() behaviour""" - data = [ - ('hello', 'hello'), - ('hello world', 'hello wo...'), - ("hellO' world", "hellO..."), - ] - for text, expected in data: - got = uilib.cut(text, 8) - self.assertEqual(got, expected) - - def test_text_cut(self): - """tests uilib.text_cut() behaviour with no text""" - data = [('',''), - ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod -tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, -quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo -consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse -cillum dolore eu fugiat nulla pariatur.""", - "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ -tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, \ -quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo \ -consequat."), - ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod -tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, -quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo -consequat Duis aute irure dolor in reprehenderit in voluptate velit esse -cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non -proident, sunt in culpa qui officia deserunt mollit anim id est laborum -""", - "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ -tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, \ -quis nostrud exercitation ullamco laboris nisi"), - ] - for text, expected in data: - got = uilib.text_cut(text, 30) - self.assertEqual(got, expected) - - def test_soup2xhtml_0(self): - self.assertEqual(uilib.soup2xhtml('hop\r\nhop', 'ascii'), - 'hop\nhop') - - def test_soup2xhtml_1_1(self): - self.assertEqual(uilib.soup2xhtml('hop', 'ascii'), - 'hop') - self.assertEqual(uilib.soup2xhtml('hop
      ', 'ascii'), - 'hop
      ') - self.assertEqual(uilib.soup2xhtml('hop
      ', 'ascii'), - 'hop
      ') - self.assertEqual(uilib.soup2xhtml('
      hop', 'ascii'), - '
      hop
      ') - self.assertEqual(uilib.soup2xhtml('hop
      hop', 'ascii'), - 'hop
      hop
      ') - - def test_soup2xhtml_1_2(self): - self.assertEqual(uilib.soup2xhtml('hop
      ', 'ascii'), - 'hop ') - self.assertEqual(uilib.soup2xhtml('
      hop', 'ascii'), - '
      hop') - self.assertEqual(uilib.soup2xhtml('hop
      hop', 'ascii'), - '
      hop
      hop') - - def test_soup2xhtml_2_1(self): - self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), - 'hop ') - self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), - ' hop') - self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), - 'hop hop') - - def test_soup2xhtml_2_2a(self): - self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), - 'hop ') - self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), - ' hop') - - @skipIf(lxml_version < ['2', '2'], 'expected behaviour on recent version of lxml only') - def test_soup2xhtml_2_2b(self): - self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), - 'hop hop') - - def test_soup2xhtml_3_1(self): - self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), - 'hop ') - self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), - ' hop') - self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), - 'hop hop') - - def test_soup2xhtml_3_2(self): - self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), - 'hop ') - self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), - ' hop') - self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), - 'hop hop') - - def test_soup2xhtml_3_3(self): - self.assertEqual(uilib.soup2xhtml(' hop ', 'ascii'), - ' hop ') - - def test_js(self): - self.assertEqual(str(uilib.js.pouet(1, "2")), - 'pouet(1,"2")') - self.assertEqual(str(uilib.js.cw.pouet(1, "2")), - 'cw.pouet(1,"2")') - self.assertEqual(str(uilib.js.cw.pouet(1, "2").pouet(None)), - 'cw.pouet(1,"2").pouet(null)') - self.assertEqual(str(uilib.js.cw.pouet(1, cwutils.JSString("$")).pouet(None)), - 'cw.pouet(1,$).pouet(null)') - self.assertEqual(str(uilib.js.cw.pouet(1, {'callback': cwutils.JSString("cw.cb")}).pouet(None)), - 'cw.pouet(1,{callback: cw.cb}).pouet(null)') - - - def test_embedded_css(self): - incoming = u"""voir le ticket

      text

      """ - expected = 'voir le ticket

      text

      ' - self.assertMultiLineEqual(uilib.soup2xhtml(incoming, 'ascii'), expected) - - def test_unknown_namespace(self): - incoming = '''\ -\ -\ -
      XXXXXXX
      ''' - expected = '''\ -\ -\ -
      XXXXXXX
      ''' - self.assertMultiLineEqual(uilib.soup2xhtml(incoming, 'ascii'), expected) - - -class DocTest(DocTest): - module = uilib - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_utils.py --- a/test/unittest_utils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,319 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unit tests for module cubicweb.utils""" - -import re -import decimal -import datetime - -from six.moves import range - -from logilab.common.testlib import TestCase, DocTest, unittest_main - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.utils import (make_uid, UStringIO, RepeatList, HTMLHead, - QueryCache, parse_repo_uri) -from cubicweb.entity import Entity - -try: - from cubicweb.utils import CubicWebJsonEncoder, json -except ImportError: - json = None - -class MakeUidTC(TestCase): - def test_1(self): - self.assertNotEqual(make_uid('xyz'), make_uid('abcd')) - self.assertNotEqual(make_uid('xyz'), make_uid('xyz')) - - def test_2(self): - d = set() - while len(d)<10000: - uid = make_uid('xyz') - if uid in d: - self.fail(len(d)) - if re.match('\d', uid): - self.fail('make_uid must not return something begining with ' - 'some numeric character, got %s' % uid) - d.add(uid) - - -class TestParseRepoUri(TestCase): - - def test_parse_repo_uri(self): - self.assertEqual(('inmemory', None, 'myapp'), - parse_repo_uri('myapp')) - self.assertEqual(('inmemory', None, 'myapp'), - parse_repo_uri('inmemory://myapp')) - with self.assertRaises(NotImplementedError): - parse_repo_uri('foo://bar') - - - -class TestQueryCache(TestCase): - def test_querycache(self): - c = QueryCache(ceiling=20) - # write only - for x in range(10): - c[x] = x - self.assertEqual(c._usage_report(), - {'transientcount': 0, - 'itemcount': 10, - 'permanentcount': 0}) - c = QueryCache(ceiling=10) - # we should also get a warning - for x in range(20): - c[x] = x - self.assertEqual(c._usage_report(), - {'transientcount': 0, - 'itemcount': 10, - 'permanentcount': 0}) - # write + reads - c = QueryCache(ceiling=20) - for n in range(4): - for x in range(10): - c[x] = x - c[x] - self.assertEqual(c._usage_report(), - {'transientcount': 10, - 'itemcount': 10, - 'permanentcount': 0}) - c = QueryCache(ceiling=20) - for n in range(17): - for x in range(10): - c[x] = x - c[x] - self.assertEqual(c._usage_report(), - {'transientcount': 0, - 'itemcount': 10, - 'permanentcount': 10}) - c = QueryCache(ceiling=20) - for n in range(17): - for x in range(10): - c[x] = x - if n % 2: - c[x] - if x % 2: - c[x] - self.assertEqual(c._usage_report(), - {'transientcount': 5, - 'itemcount': 10, - 'permanentcount': 5}) - -class UStringIOTC(TestCase): - def test_boolean_value(self): - 
self.assertTrue(UStringIO()) - - -class RepeatListTC(TestCase): - - def test_base(self): - l = RepeatList(3, (1, 3)) - self.assertEqual(l[0], (1, 3)) - self.assertEqual(l[2], (1, 3)) - self.assertEqual(l[-1], (1, 3)) - self.assertEqual(len(l), 3) - # XXX - self.assertEqual(l[4], (1, 3)) - - self.assertFalse(RepeatList(0, None)) - - def test_slice(self): - l = RepeatList(3, (1, 3)) - self.assertEqual(l[0:1], [(1, 3)]) - self.assertEqual(l[0:4], [(1, 3)]*3) - self.assertEqual(l[:], [(1, 3)]*3) - - def test_iter(self): - self.assertEqual(list(RepeatList(3, (1, 3))), - [(1, 3)]*3) - - def test_add(self): - l = RepeatList(3, (1, 3)) - self.assertEqual(l + [(1, 4)], [(1, 3)]*3 + [(1, 4)]) - self.assertEqual([(1, 4)] + l, [(1, 4)] + [(1, 3)]*3) - self.assertEqual(l + RepeatList(2, (2, 3)), [(1, 3)]*3 + [(2, 3)]*2) - - x = l + RepeatList(2, (1, 3)) - self.assertIsInstance(x, RepeatList) - self.assertEqual(len(x), 5) - self.assertEqual(x[0], (1, 3)) - - x = l + [(1, 3)] * 2 - self.assertEqual(x, [(1, 3)] * 5) - - def test_eq(self): - self.assertEqual(RepeatList(3, (1, 3)), - [(1, 3)]*3) - - def test_pop(self): - l = RepeatList(3, (1, 3)) - l.pop(2) - self.assertEqual(l, [(1, 3)]*2) - - -class JSONEncoderTC(TestCase): - def setUp(self): - if json is None: - self.skipTest('json not available') - - def encode(self, value): - return json.dumps(value, cls=CubicWebJsonEncoder) - - def test_encoding_dates(self): - self.assertEqual(self.encode(datetime.datetime(2009, 9, 9, 20, 30)), - '"2009/09/09 20:30:00"') - self.assertEqual(self.encode(datetime.date(2009, 9, 9)), - '"2009/09/09"') - self.assertEqual(self.encode(datetime.time(20, 30)), - '"20:30:00"') - - def test_encoding_decimal(self): - self.assertEqual(self.encode(decimal.Decimal('1.2')), '1.2') - - def test_encoding_bare_entity(self): - e = Entity(None) - e.cw_attr_cache['pouet'] = 'hop' - e.eid = 2 - self.assertEqual(json.loads(self.encode(e)), - {'pouet': 'hop', 'eid': 2}) - - def test_encoding_entity_in_list(self): - e = Entity(None) - e.cw_attr_cache['pouet'] = 'hop' - e.eid = 2 - self.assertEqual(json.loads(self.encode([e])), - [{'pouet': 'hop', 'eid': 2}]) - - def test_encoding_unknown_stuff(self): - self.assertEqual(self.encode(TestCase), 'null') - -class HTMLHeadTC(CubicWebTC): - - def htmlhead(self, datadir_url): - with self.admin_access.web_request() as req: - base_url = u'http://test.fr/data/' - req.datadir_url = base_url - head = HTMLHead(req) - return head - - def test_concat_urls(self): - base_url = u'http://test.fr/data/' - head = self.htmlhead(base_url) - urls = [base_url + u'bob1.js', - base_url + u'bob2.js', - base_url + u'bob3.js'] - result = head.concat_urls(urls) - expected = u'http://test.fr/data/??bob1.js,bob2.js,bob3.js' - self.assertEqual(result, expected) - - def test_group_urls(self): - base_url = u'http://test.fr/data/' - head = self.htmlhead(base_url) - urls_spec = [(base_url + u'bob0.js', None), - (base_url + u'bob1.js', None), - (u'http://ext.com/bob2.js', None), - (u'http://ext.com/bob3.js', None), - (base_url + u'bob4.css', 'all'), - (base_url + u'bob5.css', 'all'), - (base_url + u'bob6.css', 'print'), - (base_url + u'bob7.css', 'print'), - (base_url + u'bob8.css', ('all', u'[if IE 8]')), - (base_url + u'bob9.css', ('print', u'[if IE 8]')) - ] - result = head.group_urls(urls_spec) - expected = [(base_url + u'??bob0.js,bob1.js', None), - (u'http://ext.com/bob2.js', None), - (u'http://ext.com/bob3.js', None), - (base_url + u'??bob4.css,bob5.css', 'all'), - (base_url + u'??bob6.css,bob7.css', 'print'), - (base_url + 
u'bob8.css', ('all', u'[if IE 8]')), - (base_url + u'bob9.css', ('print', u'[if IE 8]')) - ] - self.assertEqual(list(result), expected) - - def test_getvalue_with_concat(self): - self.config.global_set_option('concat-resources', True) - base_url = u'http://test.fr/data/' - head = self.htmlhead(base_url) - head.add_js(base_url + u'bob0.js') - head.add_js(base_url + u'bob1.js') - head.add_js(u'http://ext.com/bob2.js') - head.add_js(u'http://ext.com/bob3.js') - head.add_css(base_url + u'bob4.css') - head.add_css(base_url + u'bob5.css') - head.add_css(base_url + u'bob6.css', 'print') - head.add_css(base_url + u'bob7.css', 'print') - head.add_ie_css(base_url + u'bob8.css') - head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]') - result = head.getvalue() - expected = u""" - - - - - - - -""" - self.assertEqual(result, expected) - - def test_getvalue_without_concat(self): - self.config.global_set_option('concat-resources', False) - try: - base_url = u'http://test.fr/data/' - head = self.htmlhead(base_url) - head.add_js(base_url + u'bob0.js') - head.add_js(base_url + u'bob1.js') - head.add_js(u'http://ext.com/bob2.js') - head.add_js(u'http://ext.com/bob3.js') - head.add_css(base_url + u'bob4.css') - head.add_css(base_url + u'bob5.css') - head.add_css(base_url + u'bob6.css', 'print') - head.add_css(base_url + u'bob7.css', 'print') - head.add_ie_css(base_url + u'bob8.css') - head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]') - result = head.getvalue() - expected = u""" - - - - - - - - - - -""" - self.assertEqual(result, expected) - finally: - self.config.global_set_option('concat-resources', True) - -class DocTest(DocTest): - from cubicweb import utils as module - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 test/unittest_vregistry.py --- a/test/unittest_vregistry.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,90 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
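# --- illustrative aside (sketch, not one of the patch hunks) ---------------
# A minimal, self-contained approximation of the mod_concat-style URL joining
# that test_concat_urls()/test_group_urls() above exercise; it mirrors
# HTMLHead.concat_urls() from utils.py, which this changeset also relocates.
# A single URL is returned unchanged, several local URLs are collapsed into
# one "base??name1,name2,..." URL.
def concat_urls(datadir_url, urls):
    if len(urls) == 1:
        return urls[0]
    names = u','.join(url[len(datadir_url):] for url in urls)
    return u'%s??%s' % (datadir_url, names)

base = u'http://test.fr/data/'
assert concat_urls(base, [base + u'bob1.js']) == base + u'bob1.js'
assert concat_urls(base, [base + u'bob1.js', base + u'bob2.js', base + u'bob3.js']) == \
    u'http://test.fr/data/??bob1.js,bob2.js,bob3.js'
# --- end of aside -----------------------------------------------------------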
- -from logilab.common.testlib import unittest_main, TestCase - -from os.path import join - -from cubicweb import CW_SOFTWARE_ROOT as BASE -from cubicweb.appobject import AppObject -from cubicweb.cwvreg import CWRegistryStore, UnknownProperty -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.view import EntityAdapter - -from cubes.card.entities import Card - -class YesSchema: - def __contains__(self, something): - return True - -WEBVIEWSDIR = join(BASE, 'web', 'views') - -class VRegistryTC(TestCase): - - def setUp(self): - config = TestServerConfiguration('data') - self.vreg = CWRegistryStore(config) - config.bootstrap_cubes() - self.vreg.schema = config.load_schema() - - def test_load_interface_based_vojects(self): - self.vreg.init_registration([WEBVIEWSDIR]) - self.vreg.load_file(join(BASE, 'entities', '__init__.py'), 'cubicweb.entities.__init__') - self.vreg.load_file(join(WEBVIEWSDIR, 'idownloadable.py'), 'cubicweb.web.views.idownloadable') - self.vreg.load_file(join(WEBVIEWSDIR, 'primary.py'), 'cubicweb.web.views.primary') - self.assertEqual(len(self.vreg['views']['primary']), 2) - self.vreg.initialization_completed() - self.assertEqual(len(self.vreg['views']['primary']), 1) - - - def test_load_subinterface_based_appobjects(self): - self.vreg.register_objects([join(BASE, 'web', 'views', 'idownloadable.py')]) - # check downloadlink was kicked - self.assertFalse(self.vreg['views'].get('downloadlink')) - # we've to emulate register_objects to add custom MyCard objects - path = [join(BASE, 'entities', '__init__.py'), - join(BASE, 'entities', 'adapters.py'), - join(BASE, 'web', 'views', 'idownloadable.py')] - filemods = self.vreg.init_registration(path, None) - for filepath, modname in filemods: - self.vreg.load_file(filepath, modname) - class CardIDownloadableAdapter(EntityAdapter): - __regid__ = 'IDownloadable' - self.vreg._loadedmods[__name__] = {} - self.vreg.register(CardIDownloadableAdapter) - self.vreg.initialization_completed() - # check progressbar isn't kicked - self.assertEqual(len(self.vreg['views']['downloadlink']), 1) - - def test_properties(self): - self.vreg.reset() - self.assertNotIn('system.version.cubicweb', self.vreg['propertydefs']) - self.assertTrue(self.vreg.property_info('system.version.cubicweb')) - self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key') - - -class CWVregTC(CubicWebTC): - - def test_property_default_overriding(self): - # see data/views.py - from cubicweb.web.views.xmlrss import RSSIconBox - self.assertEqual(self.vreg.property_info(RSSIconBox._cwpropkey('visible'))['default'], True) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 toolsutils.py --- a/toolsutils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,415 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""some utilities for cubicweb command line tools""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -# XXX move most of this in logilab.common (shellutils ?) - -import io -import os, sys -import subprocess -from os import listdir, makedirs, environ, chmod, walk, remove -from os.path import exists, join, abspath, normpath -import re -from rlcompleter import Completer -try: - import readline -except ImportError: # readline not available, no completion - pass -try: - from os import symlink -except ImportError: - def symlink(*args): - raise NotImplementedError - -from six import add_metaclass - -from logilab.common.clcommands import Command as BaseCommand -from logilab.common.shellutils import ASK - -from cubicweb import warning # pylint: disable=E0611 -from cubicweb import ConfigurationError, ExecutionError - -def underline_title(title, car='-'): - return title+'\n'+(car*len(title)) - -def iter_dir(directory, condition_file=None, ignore=()): - """iterate on a directory""" - for sub in listdir(directory): - if sub in ('CVS', '.svn', '.hg'): - continue - if condition_file is not None and \ - not exists(join(directory, sub, condition_file)): - continue - if sub in ignore: - continue - yield sub - -def create_dir(directory): - """create a directory if it doesn't exist yet""" - try: - makedirs(directory) - print('-> created directory %s' % directory) - except OSError as ex: - import errno - if ex.errno != errno.EEXIST: - raise - print('-> no need to create existing directory %s' % directory) - -def create_symlink(source, target): - """create a symbolic link""" - if exists(target): - remove(target) - symlink(source, target) - print('[symlink] %s <-- %s' % (target, source)) - -def create_copy(source, target): - import shutil - print('[copy] %s <-- %s' % (target, source)) - shutil.copy2(source, target) - -def rm(whatever): - import shutil - shutil.rmtree(whatever) - print('-> removed %s' % whatever) - -def show_diffs(appl_file, ref_file, askconfirm=True): - """interactivly replace the old file with the new file according to - user decision - """ - import shutil - pipe = subprocess.Popen(['diff', '-u', appl_file, ref_file], stdout=subprocess.PIPE) - diffs = pipe.stdout.read() - if diffs: - if askconfirm: - print() - print(diffs) - action = ASK.ask('Replace ?', ('Y', 'n', 'q'), 'Y').lower() - else: - action = 'y' - if action == 'y': - try: - shutil.copyfile(ref_file, appl_file) - except IOError: - os.system('chmod a+w %s' % appl_file) - shutil.copyfile(ref_file, appl_file) - print('replaced') - elif action == 'q': - sys.exit(0) - else: - copy_file = appl_file + '.default' - copy = open(copy_file, 'w') - copy.write(open(ref_file).read()) - copy.close() - print('keep current version, the new file has been written to', copy_file) - else: - print('no diff between %s and %s' % (appl_file, ref_file)) - -SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py') -def copy_skeleton(skeldir, targetdir, context, - exclude=SKEL_EXCLUDE, askconfirm=False): - import shutil - from fnmatch import fnmatch - skeldir = normpath(skeldir) - targetdir = normpath(targetdir) - for dirpath, dirnames, filenames in walk(skeldir): - tdirpath = dirpath.replace(skeldir, targetdir) - create_dir(tdirpath) - for fname in filenames: - if any(fnmatch(fname, pat) for pat in exclude): - continue - fpath = join(dirpath, 
fname) - if 'CUBENAME' in fname: - tfpath = join(tdirpath, fname.replace('CUBENAME', context['cubename'])) - elif 'DISTNAME' in fname: - tfpath = join(tdirpath, fname.replace('DISTNAME', context['distname'])) - else: - tfpath = join(tdirpath, fname) - if fname.endswith('.tmpl'): - tfpath = tfpath[:-5] - if not askconfirm or not exists(tfpath) or \ - ASK.confirm('%s exists, overwrite?' % tfpath): - fill_templated_file(fpath, tfpath, context) - print('[generate] %s <-- %s' % (tfpath, fpath)) - elif exists(tfpath): - show_diffs(tfpath, fpath, askconfirm) - else: - shutil.copyfile(fpath, tfpath) - -def fill_templated_file(fpath, tfpath, context): - with io.open(fpath, encoding='ascii') as fobj: - template = fobj.read() - with io.open(tfpath, 'w', encoding='ascii') as fobj: - fobj.write(template % context) - -def restrict_perms_to_user(filepath, log=None): - """set -rw------- permission on the given file""" - if log: - log('set permissions to 0600 for %s', filepath) - else: - print('-> set permissions to 0600 for %s' % filepath) - chmod(filepath, 0o600) - -def read_config(config_file, raise_if_unreadable=False): - """read some simple configuration from `config_file` and return it as a - dictionary. If `raise_if_unreadable` is false (the default), an empty - dictionary will be returned if the file is inexistant or unreadable, else - :exc:`ExecutionError` will be raised. - """ - from logilab.common.fileutils import lines - config = current = {} - try: - for line in lines(config_file, comments='#'): - try: - option, value = line.split('=', 1) - except ValueError: - option = line.strip().lower() - if option[0] == '[': - # start a section - section = option[1:-1] - assert section not in config, \ - 'Section %s is defined more than once' % section - config[section] = current = {} - continue - sys.stderr.write('ignoring malformed line\n%r\n' % line) - continue - option = option.strip().replace(' ', '_') - value = value.strip() - current[option] = value or None - except IOError as ex: - if raise_if_unreadable: - raise ExecutionError('%s. Are you logged with the correct user ' - 'to use this instance?' 
% ex) - else: - warning('missing or non readable configuration file %s (%s)', - config_file, ex) - return config - - -_HDLRS = {} - -class metacmdhandler(type): - def __new__(mcs, name, bases, classdict): - cls = super(metacmdhandler, mcs).__new__(mcs, name, bases, classdict) - if getattr(cls, 'cfgname', None) and getattr(cls, 'cmdname', None): - _HDLRS.setdefault(cls.cmdname, []).append(cls) - return cls - - -@add_metaclass(metacmdhandler) -class CommandHandler(object): - """configuration specific helper for cubicweb-ctl commands""" - def __init__(self, config): - self.config = config - - -class Command(BaseCommand): - """base class for cubicweb-ctl commands""" - - def config_helper(self, config, required=True, cmdname=None): - if cmdname is None: - cmdname = self.name - for helpercls in _HDLRS.get(cmdname, ()): - if helpercls.cfgname == config.name: - return helpercls(config) - if config.name == 'all-in-one': - for helpercls in _HDLRS.get(cmdname, ()): - if helpercls.cfgname == 'repository': - return helpercls(config) - if required: - msg = 'No helper for command %s using %s configuration' % ( - cmdname, config.name) - raise ConfigurationError(msg) - - def fail(self, reason): - print("command failed:", reason) - sys.exit(1) - - -CONNECT_OPTIONS = ( - ("user", - {'short': 'u', 'type' : 'string', 'metavar': '', - 'help': 'connect as instead of being prompted to give it.', - } - ), - ("password", - {'short': 'p', 'type' : 'password', 'metavar': '', - 'help': 'automatically give for authentication instead of \ -being prompted to give it.', - }), - ("host", - {'short': 'H', 'type' : 'string', 'metavar': '', - 'default': None, - 'help': 'specify the name server\'s host name. Will be detected by \ -broadcast if not provided.', - }), - ) - -## cwshell helpers ############################################################# - -class AbstractMatcher(object): - """Abstract class for CWShellCompleter's matchers. - - A matcher should implement a ``possible_matches`` method. This - method has to return the list of possible completions for user's input. - Because of the python / readline interaction, each completion should - be a superset of the user's input. - - NOTE: readline tokenizes user's input and only passes last token to - completers. - """ - - def possible_matches(self, text): - """return possible completions for user's input. - - Parameters: - text: the user's input - - Return: - a list of completions. Each completion includes the original input. - """ - raise NotImplementedError() - - -class RQLExecuteMatcher(AbstractMatcher): - """Custom matcher for rql queries. - - If user's input starts with ``rql(`` or ``session.execute(`` and - the corresponding rql query is incomplete, suggest some valid completions. - """ - query_match_rgx = re.compile( - r'(?P\s*(?:rql)' # match rql, possibly indented - r'|' # or - r'\s*(?:\w+\.execute))' # match .execute, possibly indented - # end of - r'\(' # followed by a parenthesis - r'(?P["\'])' # a quote or double quote - r'(?P.*)') # and some content - - def __init__(self, local_ctx, req): - self.local_ctx = local_ctx - self.req = req - self.schema = req.vreg.schema - self.rsb = req.vreg['components'].select('rql.suggestions', req) - - @staticmethod - def match(text): - """check if ``text`` looks like a call to ``rql`` or ``session.execute`` - - Parameters: - text: the user's input - - Returns: - None if it doesn't match, the query structure otherwise. 
- """ - query_match = RQLExecuteMatcher.query_match_rgx.match(text) - if query_match is None: - return None - parameters_text = query_match.group('parameters') - quote_delim = query_match.group('quote_delim') - # first parameter is fully specified, no completion needed - if re.match(r"(.*?)%s" % quote_delim, parameters_text) is not None: - return None - func_prefix = query_match.group('func_prefix') - return { - # user's input - 'text': text, - # rql( or session.execute( - 'func_prefix': func_prefix, - # offset of rql query - 'rql_offset': len(func_prefix) + 2, - # incomplete rql query - 'rql_query': parameters_text, - } - - def possible_matches(self, text): - """call ``rql.suggestions`` component to complete user's input. - """ - # readline will only send last token, but we need the entire user's input - user_input = readline.get_line_buffer() - query_struct = self.match(user_input) - if query_struct is None: - return [] - else: - # we must only send completions of the last token => compute where it - # starts relatively to the rql query itself. - completion_offset = readline.get_begidx() - query_struct['rql_offset'] - rql_query = query_struct['rql_query'] - return [suggestion[completion_offset:] - for suggestion in self.rsb.build_suggestions(rql_query)] - - -class DefaultMatcher(AbstractMatcher): - """Default matcher: delegate to standard's `rlcompleter.Completer`` class - """ - def __init__(self, local_ctx): - self.completer = Completer(local_ctx) - - def possible_matches(self, text): - if "." in text: - return self.completer.attr_matches(text) - else: - return self.completer.global_matches(text) - - -class CWShellCompleter(object): - """Custom auto-completion helper for cubicweb-ctl shell. - - ``CWShellCompleter`` provides a ``complete`` method suitable for - ``readline.set_completer``. - - Attributes: - matchers: the list of ``AbstractMatcher`` instances that will suggest - possible completions - - The completion process is the following: - - - readline calls the ``complete`` method with user's input, - - the ``complete`` method asks for each known matchers if - it can suggest completions for user's input. - """ - - def __init__(self, local_ctx): - # list of matchers to ask for possible matches on completion - self.matchers = [DefaultMatcher(local_ctx)] - self.matchers.insert(0, RQLExecuteMatcher(local_ctx, local_ctx['session'])) - - def complete(self, text, state): - """readline's completer method - - cf http://docs.python.org/2/library/readline.html#readline.set_completer - for more details. 
- - Implementation inspired by `rlcompleter.Completer` - """ - if state == 0: - # reset self.matches - self.matches = [] - for matcher in self.matchers: - matches = matcher.possible_matches(text) - if matches: - self.matches = matches - break - else: - return None # no matcher able to handle `text` - try: - return self.matches[state] - except IndexError: - return None diff -r 058bb3dc685f -r 0b59724cb3f2 tox.ini --- a/tox.ini Mon Jan 04 18:40:30 2016 +0100 +++ b/tox.ini Sat Jan 16 13:48:51 2016 +0100 @@ -4,23 +4,23 @@ [testenv] sitepackages = True deps = - cubicweb: -r{toxinidir}/test/requirements.txt - devtools: -r{toxinidir}/devtools/test/requirements.txt - entities: -r{toxinidir}/entities/test/requirements.txt - etwist: -r{toxinidir}/etwist/test/requirements.txt - ext: -r{toxinidir}/ext/test/requirements.txt - hooks: -r{toxinidir}/hooks/test/requirements.txt - server: -r{toxinidir}/server/test/requirements.txt - sobjects: -r{toxinidir}/sobjects/test/requirements.txt - web: -r{toxinidir}/web/test/requirements.txt - wsgi: -r{toxinidir}/wsgi/test/requirements.txt + cubicweb: -r{toxinidir}/cubicweb/test/requirements.txt + devtools: -r{toxinidir}/cubicweb/devtools/test/requirements.txt + entities: -r{toxinidir}/cubicweb/entities/test/requirements.txt + etwist: -r{toxinidir}/cubicweb/etwist/test/requirements.txt + ext: -r{toxinidir}/cubicweb/ext/test/requirements.txt + hooks: -r{toxinidir}/cubicweb/hooks/test/requirements.txt + server: -r{toxinidir}/cubicweb/server/test/requirements.txt + sobjects: -r{toxinidir}/cubicweb/sobjects/test/requirements.txt + web: -r{toxinidir}/cubicweb/web/test/requirements.txt + wsgi: -r{toxinidir}/cubicweb/wsgi/test/requirements.txt commands = - {envpython} -c 'from logilab.common import pytest; pytest.run()' -t {toxinidir}/{envname}/test {posargs} + {envpython} -c 'from logilab.common import pytest; pytest.run()' -t {toxinidir}/cubicweb/{envname}/test {posargs} [testenv:cubicweb] commands = {envpython} -m pip install --upgrade --no-deps --quiet git+git://github.com/logilab/yapps@master#egg=yapps - {envpython} -c 'from logilab.common import pytest; pytest.run()' -t {toxinidir}/test {posargs} + {envpython} -c 'from logilab.common import pytest; pytest.run()' -t {toxinidir}/cubicweb/test {posargs} [testenv:doc] changedir = doc diff -r 058bb3dc685f -r 0b59724cb3f2 transaction.py --- a/transaction.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,127 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" undoable transaction objects. 
""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from cubicweb import RepositoryError - - -ACTION_LABELS = { - 'C': _('entity creation'), - 'U': _('entity update'), - 'D': _('entity deletion'), - 'A': _('relation add'), - 'R': _('relation removal'), - } - - -class NoSuchTransaction(RepositoryError): - # Used by CubicWebException - msg = _("there is no transaction #%s") - - def __init__(self, txuuid): - super(NoSuchTransaction, self).__init__(txuuid) - self.txuuid = txuuid - -class Transaction(object): - """an undoable transaction""" - - def __init__(self, cnx, uuid, time, ueid): - self.cnx = cnx - self.uuid = uuid - self.datetime = time - self.user_eid = ueid - - def _execute(self, *args, **kwargs): - """execute a query using either the req or the cnx""" - return self.cnx.execute(*args, **kwargs) - - - def __repr__(self): - return '' % ( - self.uuid, self.user_eid, self.datetime) - - def user(self): - """return the user entity which has done the transaction, - none if not found. - """ - return self.cnx.find('CWUser', eid=self.user_eid).one() - - def actions_list(self, public=True): - """return an ordered list of action effectued during that transaction - - if public is true, return only 'public' action, eg not ones triggered - under the cover by hooks. - """ - return self.cnx.transaction_actions(self.uuid, public) - - -class AbstractAction(object): - - def __init__(self, action, public, order): - self.action = action - self.public = public - self.order = order - - @property - def label(self): - return ACTION_LABELS[self.action] - - @property - def ertype(self): - """ Return the entity or relation type this action is related to""" - raise NotImplementedError(self) - - -class EntityAction(AbstractAction): - - def __init__(self, action, public, order, etype, eid, changes): - super(EntityAction, self).__init__(action, public, order) - self.etype = etype - self.eid = eid - self.changes = changes - - def __repr__(self): - return '<%s: %s %s (%s)>' % ( - self.label, self.eid, self.changes, - self.public and 'dbapi' or 'hook') - - @property - def ertype(self): - """ Return the entity or relation type this action is related to""" - return self.etype - - -class RelationAction(AbstractAction): - - def __init__(self, action, public, order, rtype, eidfrom, eidto): - super(RelationAction, self).__init__(action, public, order) - self.rtype = rtype - self.eid_from = eidfrom - self.eid_to = eidto - - def __repr__(self): - return '<%s: %s %s %s (%s)>' % ( - self.label, self.eid_from, self.rtype, self.eid_to, - self.public and 'dbapi' or 'hook') - - @property - def ertype(self): - """ Return the entity or relation type this action is related to""" - return self.rtype diff -r 058bb3dc685f -r 0b59724cb3f2 uilib.py --- a/uilib.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,589 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""user interface libraries - -contains some functions designed to help implementation of cubicweb user -interface. -""" - -__docformat__ = "restructuredtext en" - -import csv -import re -from io import StringIO - -from six import PY2, PY3, text_type, binary_type, string_types, integer_types - -from logilab.mtconverter import xml_escape, html_unescape -from logilab.common.date import ustrftime -from logilab.common.deprecation import deprecated - -from cubicweb import _ -from cubicweb.utils import js_dumps - - -def rql_for_eid(eid): - """return the rql query necessary to fetch entity with the given eid. This - function should only be used to generate link with rql inside, not to give - to cursor.execute (in which case you won't benefit from rql cache). - - :Parameters: - - `eid`: the eid of the entity we should search - :rtype: str - :return: the rql query - """ - return 'Any X WHERE X eid %s' % eid - -def eid_param(name, eid): - assert name is not None - assert eid is not None - return '%s:%s' % (name, eid) - -def print_bytes(value, req, props, displaytime=True): - return u'' - -def print_string(value, req, props, displaytime=True): - # don't translate empty value if you don't want strange results - if props is not None and value and props.get('internationalizable'): - return req._(value) - return value - -def print_int(value, req, props, displaytime=True): - return text_type(value) - -def print_date(value, req, props, displaytime=True): - return ustrftime(value, req.property_value('ui.date-format')) - -def print_time(value, req, props, displaytime=True): - return ustrftime(value, req.property_value('ui.time-format')) - -def print_tztime(value, req, props, displaytime=True): - return ustrftime(value, req.property_value('ui.time-format')) + u' UTC' - -def print_datetime(value, req, props, displaytime=True): - if displaytime: - return ustrftime(value, req.property_value('ui.datetime-format')) - return ustrftime(value, req.property_value('ui.date-format')) - -def print_tzdatetime(value, req, props, displaytime=True): - if displaytime: - return ustrftime(value, req.property_value('ui.datetime-format')) + u' UTC' - return ustrftime(value, req.property_value('ui.date-format')) - -_('%d years') -_('%d months') -_('%d weeks') -_('%d days') -_('%d hours') -_('%d minutes') -_('%d seconds') - -def print_timedelta(value, req, props, displaytime=True): - if isinstance(value, integer_types): - # `date - date`, unlike `datetime - datetime` gives an int - # (number of days), not a timedelta - # XXX should rql be fixed to return Int instead of Interval in - # that case? that would be probably the proper fix but we - # loose information on the way... 
- value = timedelta(days=value) - if value.days > 730 or value.days < -730: # 2 years - return req._('%d years') % (value.days // 365) - elif value.days > 60 or value.days < -60: # 2 months - return req._('%d months') % (value.days // 30) - elif value.days > 14 or value.days < -14: # 2 weeks - return req._('%d weeks') % (value.days // 7) - elif value.days > 2 or value.days < -2: - return req._('%d days') % int(value.days) - else: - minus = 1 if value.days >= 0 else -1 - if value.seconds > 3600: - return req._('%d hours') % (int(value.seconds // 3600) * minus) - elif value.seconds >= 120: - return req._('%d minutes') % (int(value.seconds // 60) * minus) - else: - return req._('%d seconds') % (int(value.seconds) * minus) - -def print_boolean(value, req, props, displaytime=True): - if value: - return req._('yes') - return req._('no') - -def print_float(value, req, props, displaytime=True): - return text_type(req.property_value('ui.float-format') % value) # XXX cast needed ? - -PRINTERS = { - 'Bytes': print_bytes, - 'String': print_string, - 'Int': print_int, - 'BigInt': print_int, - 'Date': print_date, - 'Time': print_time, - 'TZTime': print_tztime, - 'Datetime': print_datetime, - 'TZDatetime': print_tzdatetime, - 'Boolean': print_boolean, - 'Float': print_float, - 'Decimal': print_float, - 'Interval': print_timedelta, - } - -@deprecated('[3.14] use req.printable_value(attrtype, value, ...)') -def printable_value(req, attrtype, value, props=None, displaytime=True): - return req.printable_value(attrtype, value, props, displaytime) - -def css_em_num_value(vreg, propname, default): - """ we try to read an 'em' css property - if we get another unit we're out of luck and resort to the given default - (hence, it is strongly advised not to specify but ems for this css prop) - """ - propvalue = vreg.config.uiprops[propname].lower().strip() - if propvalue.endswith('em'): - try: - return float(propvalue[:-2]) - except Exception: - vreg.warning('css property %s looks malformed (%r)', - propname, propvalue) - else: - vreg.warning('css property %s should use em (currently is %r)', - propname, propvalue) - return default - -# text publishing ############################################################# - -from cubicweb.ext.markdown import markdown_publish # pylint: disable=W0611 - -try: - from cubicweb.ext.rest import rest_publish # pylint: disable=W0611 -except ImportError: - def rest_publish(entity, data): - """default behaviour if docutils was not found""" - return xml_escape(data) - - -TAG_PROG = re.compile(r'', re.U) -def remove_html_tags(text): - """Removes HTML tags from text - - >>> remove_html_tags('hi world') - 'hi world' - >>> - """ - return TAG_PROG.sub('', text) - - -REF_PROG = re.compile(r"([^<]*)", re.U) -def _subst_rql(view, obj): - delim, rql, descr = obj.groups() - return u'%s' % (view._cw.build_url(rql=rql), descr) - -def html_publish(view, text): - """replace links by """ - if not text: - return u'' - return REF_PROG.sub(lambda obj, view=view:_subst_rql(view, obj), text) - -# fallback implementation, nicer one defined below if lxml> 2.0 is available -def safe_cut(text, length): - """returns a string of length based on , removing any html - tags from given text if cut is necessary.""" - if text is None: - return u'' - noenttext = html_unescape(text) - text_nohtml = remove_html_tags(noenttext) - # try to keep html tags if text is short enough - if len(text_nohtml) <= length: - return text - # else if un-tagged text is too long, cut it - return xml_escape(text_nohtml[:length] + u'...') - 
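# --- illustrative aside (sketch, not one of the patch hunks) ----------------
# A minimal approximation of the fallback safe_cut() defined just above:
# strip markup, keep the original text when it is already short enough,
# otherwise cut it and append an ellipsis.  remove_tags() is a crude,
# hypothetical stand-in for the remove_html_tags()/html_unescape() pair used
# by the real function.
import re
from xml.sax.saxutils import escape

def remove_tags(text):
    # naive tag stripper, good enough for this demo
    return re.sub(r'<[^>]*>', '', text)

def demo_safe_cut(text, length):
    if text is None:
        return u''
    text_nohtml = remove_tags(text)
    if len(text_nohtml) <= length:
        return text
    return escape(text_nohtml[:length] + u'...')

assert demo_safe_cut(None, 8) == u''
assert demo_safe_cut(u'hello', 8) == u'hello'
assert demo_safe_cut(u'hello world', 8) == u'hello wo...'
# --- end of aside ------------------------------------------------------------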
-fallback_safe_cut = safe_cut - -REM_ROOT_HTML_TAGS = re.compile('', re.U) - -from lxml import etree, html -from lxml.html import clean, defs - -ALLOWED_TAGS = (defs.general_block_tags | defs.list_tags | defs.table_tags | - defs.phrase_tags | defs.font_style_tags | - set(('span', 'a', 'br', 'img', 'map', 'area', 'sub', 'sup', 'canvas')) - ) - -CLEANER = clean.Cleaner(allow_tags=ALLOWED_TAGS, remove_unknown_tags=False, - style=True, safe_attrs_only=True, - add_nofollow=False, - ) - -def soup2xhtml(data, encoding): - """tidy html soup by allowing some element tags and return the result - """ - # remove spurious and tags, then normalize line break - # (see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1) - data = REM_ROOT_HTML_TAGS.sub('', u'\n'.join(data.splitlines())) - xmltree = etree.HTML(CLEANER.clean_html('
      %s
      ' % data)) - # NOTE: lxml 2.0 does support encoding='unicode', but last time I (syt) - # tried I got weird results (lxml 2.2.8) - body = etree.tostring(xmltree[0], encoding=encoding) - # remove and and decode to unicode - snippet = body[6:-7].decode(encoding) - # take care to bad xhtml (for instance starting with
      ) which - # may mess with the
      we added below. Only remove it if it's - # still there... - if snippet.startswith('
      ') and snippet.endswith('
      '): - snippet = snippet[5:-6] - return snippet - - # lxml.Cleaner envelops text elements by internal logic (not accessible) - # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 - # TODO drop attributes in elements - # TODO add policy configuration (content only, embedded content, ...) - # XXX this is buggy for "

      text1

      text2

      "... - # XXX drop these two snippets action and follow the lxml behaviour - # XXX (tests need to be updated) - # if snippet.startswith('
      ') and snippet.endswith('
      '): - # snippet = snippet[5:-6] - # if snippet.startswith('

      ') and snippet.endswith('

      '): - # snippet = snippet[3:-4] - return snippet.decode(encoding) - -if hasattr(etree.HTML('
      test
      '), 'iter'): # XXX still necessary? - # pylint: disable=E0102 - def safe_cut(text, length): - """returns an html document of length based on , - and cut is necessary. - """ - if text is None: - return u'' - dom = etree.HTML(text) - curlength = 0 - add_ellipsis = False - for element in dom.iter(): - if curlength >= length: - parent = element.getparent() - parent.remove(element) - if curlength == length and (element.text or element.tail): - add_ellipsis = True - else: - if element.text is not None: - element.text = cut(element.text, length - curlength) - curlength += len(element.text) - if element.tail is not None: - if curlength < length: - element.tail = cut(element.tail, length - curlength) - curlength += len(element.tail) - elif curlength == length: - element.tail = '...' - else: - element.tail = '' - text = etree.tounicode(dom[0])[6:-7] # remove wrapping - if add_ellipsis: - return text + u'...' - return text - -def text_cut(text, nbwords=30, gotoperiod=True): - """from the given plain text, return a text with at least words, - trying to go to the end of the current sentence. - - :param nbwords: the minimum number of words required - :param gotoperiod: specifies if the function should try to go to - the first period after the cut (i.e. finish - the sentence if possible) - - Note that spaces are normalized. - """ - if text is None: - return u'' - words = text.split() - text = u' '.join(words) # normalize spaces - textlength = minlength = len(' '.join(words[:nbwords])) - if gotoperiod: - textlength = text.find('.', minlength) + 1 - if textlength == 0: # no period found - textlength = minlength - return text[:textlength] - -def cut(text, length): - """returns a string of a maximum length based on - (approximatively, since if text has been cut, '...' is added to the end of the string, - resulting in a string of len + 3) - """ - if text is None: - return u'' - if len(text) <= length: - return text - # else if un-tagged text is too long, cut it - return text[:length] + u'...' - - - -# HTML generation helper functions ############################################ - -class _JSId(object): - def __init__(self, id, parent=None): - self.id = id - self.parent = parent - def __unicode__(self): - if self.parent: - return u'%s.%s' % (self.parent, self.id) - return text_type(self.id) - __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') - def __getattr__(self, attr): - return _JSId(attr, self) - def __call__(self, *args): - return _JSCallArgs(args, self) - -class _JSCallArgs(_JSId): - def __init__(self, args, parent=None): - assert isinstance(args, tuple) - self.args = args - self.parent = parent - def __unicode__(self): - args = [] - for arg in self.args: - args.append(js_dumps(arg)) - if self.parent: - return u'%s(%s)' % (self.parent, ','.join(args)) - return ','.join(args) - __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') - -class _JS(object): - def __getattr__(self, attr): - return _JSId(attr) - -js = _JS() -js.__doc__ = """\ -magic object to return strings suitable to call some javascript function with -the given arguments (which should be correctly typed). 
- ->>> str(js.pouet(1, "2")) -'pouet(1,"2")' ->>> str(js.cw.pouet(1, "2")) -'cw.pouet(1,"2")' ->>> str(js.cw.pouet(1, "2").pouet(None)) -'cw.pouet(1,"2").pouet(null)' ->>> str(js.cw.pouet(1, JSString("$")).pouet(None)) -'cw.pouet(1,$).pouet(null)' ->>> str(js.cw.pouet(1, {'callback': JSString("cw.cb")}).pouet(None)) -'cw.pouet(1,{callback: cw.cb}).pouet(null)' -""" - -def domid(string): - """return a valid DOM id from a string (should also be usable in jQuery - search expression...) - """ - return string.replace('.', '_').replace('-', '_') - -HTML4_EMPTY_TAGS = frozenset(('base', 'meta', 'link', 'hr', 'br', 'param', - 'img', 'area', 'input', 'col')) - -def sgml_attributes(attrs): - return u' '.join(u'%s="%s"' % (attr, xml_escape(text_type(value))) - for attr, value in sorted(attrs.items()) - if value is not None) - -def simple_sgml_tag(tag, content=None, escapecontent=True, **attrs): - """generation of a simple sgml tag (eg without children tags) easier - - content and attri butes will be escaped - """ - value = u'<%s' % tag - if attrs: - try: - attrs['class'] = attrs.pop('klass') - except KeyError: - pass - value += u' ' + sgml_attributes(attrs) - if content: - if escapecontent: - content = xml_escape(text_type(content)) - value += u'>%s' % (content, tag) - else: - if tag in HTML4_EMPTY_TAGS: - value += u' />' - else: - value += u'>' % tag - return value - -def tooltipize(text, tooltip, url=None): - """make an HTML tooltip""" - url = url or '#' - return u'
      %s' % (url, tooltip, text) - -def toggle_action(nodeid): - """builds a HTML link that uses the js toggleVisibility function""" - return u"javascript: toggleVisibility('%s')" % nodeid - -def toggle_link(nodeid, label): - """builds a HTML link that uses the js toggleVisibility function""" - return u'%s' % (toggle_action(nodeid), label) - - -def ureport_as_html(layout): - from logilab.common.ureports import HTMLWriter - formater = HTMLWriter(True) - stream = StringIO() #UStringIO() don't want unicode assertion - formater.format(layout, stream) - res = stream.getvalue() - if isinstance(res, binary_type): - res = res.decode('UTF8') - return res - -# traceback formatting ######################################################## - -import traceback - -def exc_message(ex, encoding): - if PY3: - excmsg = str(ex) - else: - try: - excmsg = unicode(ex) - except Exception: - try: - excmsg = unicode(str(ex), encoding, 'replace') - except Exception: - excmsg = unicode(repr(ex), encoding, 'replace') - exctype = ex.__class__.__name__ - return u'%s: %s' % (exctype, excmsg) - - -def rest_traceback(info, exception): - """return a unicode ReST formated traceback""" - res = [u'Traceback\n---------\n::\n'] - for stackentry in traceback.extract_tb(info[2]): - res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3])) - if stackentry[3]: - data = xml_escape(stackentry[3]) - if PY2: - data = data.decode('utf-8', 'replace') - res.append(u'\t %s' % data) - res.append(u'\n') - try: - res.append(u'\t Error: %s\n' % exception) - except Exception: - pass - return u'\n'.join(res) - - -def html_traceback(info, exception, title='', - encoding='ISO-8859-1', body=''): - """ return an html formatted traceback from python exception infos. - """ - tcbk = info[2] - stacktb = traceback.extract_tb(tcbk) - strings = [] - if body: - strings.append(u'
      ') - # FIXME - strings.append(body) - strings.append(u'
      ') - if title: - strings.append(u'

      %s

      '% xml_escape(title)) - try: - strings.append(u'

      %s

      ' % xml_escape(str(exception)).replace("\n","
      ")) - except UnicodeError: - pass - strings.append(u'
      ') - for index, stackentry in enumerate(stacktb): - strings.append(u'File %s, line ' - u'%s, function ' - u'%s:
      '%( - xml_escape(stackentry[0]), stackentry[1], xml_escape(stackentry[2]))) - if stackentry[3]: - string = xml_escape(stackentry[3]) - if PY2: - string = string.decode('utf-8', 'replace') - strings.append(u'  %s
      \n' % (string)) - # add locals info for each entry - try: - local_context = tcbk.tb_frame.f_locals - html_info = [] - chars = 0 - for name, value in local_context.items(): - value = xml_escape(repr(value)) - info = u'%s=%s, ' % (name, value) - line_length = len(name) + len(value) - chars += line_length - # 150 is the result of *years* of research ;-) (CSS might be helpful here) - if chars > 150: - info = u'
      ' + info - chars = line_length - html_info.append(info) - boxid = 'ctxlevel%d' % index - strings.append(u'[%s]' % toggle_link(boxid, '+')) - strings.append(u'' % - (boxid, ''.join(html_info))) - tcbk = tcbk.tb_next - except Exception: - pass # doesn't really matter if we have no context info - strings.append(u'
      ') - return '\n'.join(strings) - -# csv files / unicode support ################################################# - -class UnicodeCSVWriter: - """proxies calls to csv.writer.writerow to be able to deal with unicode - - Under Python 3, this code no longer encodes anything.""" - - def __init__(self, wfunc, encoding, **kwargs): - self.writer = csv.writer(self, **kwargs) - self.wfunc = wfunc - self.encoding = encoding - - def write(self, data): - self.wfunc(data) - - def writerow(self, row): - if PY3: - self.writer.writerow(row) - return - csvrow = [] - for elt in row: - if isinstance(elt, text_type): - csvrow.append(elt.encode(self.encoding)) - else: - csvrow.append(str(elt)) - self.writer.writerow(csvrow) - - def writerows(self, rows): - for row in rows: - self.writerow(row) - - -# some decorators ############################################################# - -class limitsize(object): - def __init__(self, maxsize): - self.maxsize = maxsize - - def __call__(self, function): - def newfunc(*args, **kwargs): - ret = function(*args, **kwargs) - if isinstance(ret, string_types): - return ret[:self.maxsize] - return ret - return newfunc - - -def htmlescape(function): - def newfunc(*args, **kwargs): - ret = function(*args, **kwargs) - assert isinstance(ret, string_types) - return xml_escape(ret) - return newfunc diff -r 058bb3dc685f -r 0b59724cb3f2 utils.py --- a/utils.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,716 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Some utilities for CubicWeb server/clients.""" - -from __future__ import division - -__docformat__ = "restructuredtext en" - -import decimal -import datetime -import random -import re -import json - -from operator import itemgetter -from inspect import getargspec -from itertools import repeat -from uuid import uuid4 -from warnings import warn -from threading import Lock -from logging import getLogger - -from six import text_type -from six.moves.urllib.parse import urlparse - -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import deprecated -from logilab.common.date import ustrftime - -_MARKER = object() - -# initialize random seed from current time -random.seed() - -def admincnx(appid): - from cubicweb.cwconfig import CubicWebConfiguration - from cubicweb.server.repository import Repository - from cubicweb.server.utils import TasksManager - config = CubicWebConfiguration.config_for(appid) - - login = config.default_admin_config['login'] - password = config.default_admin_config['password'] - - repo = Repository(config, TasksManager()) - session = repo.new_session(login, password=password) - return session.new_cnx() - - -def make_uid(key=None): - """Return a unique identifier string. 
- - if specified, `key` is used to prefix the generated uid so it can be used - for instance as a DOM id or as sql table name. - - See uuid.uuid4 documentation for the shape of the generated identifier, but - this is basically a 32 bits hexadecimal string. - """ - if key is None: - return uuid4().hex - return str(key) + uuid4().hex - - -def support_args(callable, *argnames): - """return true if the callable support given argument names""" - if isinstance(callable, type): - callable = callable.__init__ - argspec = getargspec(callable) - if argspec[2]: - return True - for argname in argnames: - if argname not in argspec[0]: - return False - return True - - -class wrap_on_write(object): - """ Sometimes it is convenient to NOT write some container element - if it happens that there is nothing to be written within, - but this cannot be known beforehand. - Hence one can do this: - - .. sourcecode:: python - - with wrap_on_write(w, '
      ', '
      ') as wow: - component.render_stuff(wow) - """ - def __init__(self, w, tag, closetag=None): - self.written = False - self.tag = text_type(tag) - self.closetag = closetag - self.w = w - - def __enter__(self): - return self - - def __call__(self, data): - if self.written is False: - self.w(self.tag) - self.written = True - self.w(data) - - def __exit__(self, exctype, value, traceback): - if self.written is True: - if self.closetag: - self.w(text_type(self.closetag)) - else: - self.w(self.tag.replace('<', '' % ( - id(self), self._item, self._size) - def __len__(self): - return self._size - def __iter__(self): - return repeat(self._item, self._size) - def __getitem__(self, index): - if isinstance(index, slice): - # XXX could be more efficient, but do we bother? - return ([self._item] * self._size)[index] - return self._item - def __delitem__(self, idc): - assert self._size > 0 - self._size -= 1 - def __add__(self, other): - if isinstance(other, RepeatList): - if other._item == self._item: - return RepeatList(self._size + other._size, self._item) - return ([self._item] * self._size) + other[:] - return ([self._item] * self._size) + other - def __radd__(self, other): - if isinstance(other, RepeatList): - if other._item == self._item: - return RepeatList(self._size + other._size, self._item) - return other[:] + ([self._item] * self._size) - return other[:] + ([self._item] * self._size) - def __eq__(self, other): - if isinstance(other, RepeatList): - return other._size == self._size and other._item == self._item - return self[:] == other - def __ne__(self, other): - return not (self == other) - def __hash__(self): - raise NotImplementedError - def pop(self, i): - self._size -= 1 - - -class UStringIO(list): - """a file wrapper which automatically encode unicode string to an encoding - specifed in the constructor - """ - - def __init__(self, tracewrites=False, *args, **kwargs): - self.tracewrites = tracewrites - super(UStringIO, self).__init__(*args, **kwargs) - - def __bool__(self): - return True - - __nonzero__ = __bool__ - - def write(self, value): - assert isinstance(value, text_type), u"unicode required not %s : %s"\ - % (type(value).__name__, repr(value)) - if self.tracewrites: - from traceback import format_stack - stack = format_stack(None)[:-1] - escaped_stack = xml_escape(json_dumps(u'\n'.join(stack))) - escaped_html = xml_escape(value).replace('\n', '
      \n') - tpl = u'%s' - value = tpl % (escaped_stack, escaped_html) - self.append(value) - - def getvalue(self): - return u''.join(self) - - def __repr__(self): - return '<%s at %#x>' % (self.__class__.__name__, id(self)) - - -class HTMLHead(UStringIO): - """wraps HTML header's stream - - Request objects use a HTMLHead instance to ease adding of - javascripts and stylesheets - """ - js_unload_code = u'''if (typeof(pageDataUnloaded) == 'undefined') { - jQuery(window).unload(unloadPageData); - pageDataUnloaded = true; -}''' - script_opening = u'' - - def __init__(self, req, *args, **kwargs): - super(HTMLHead, self).__init__(*args, **kwargs) - self.jsvars = [] - self.jsfiles = [] - self.cssfiles = [] - self.ie_cssfiles = [] - self.post_inlined_scripts = [] - self.pagedata_unload = False - self._cw = req - self.datadir_url = req.datadir_url - - def add_raw(self, rawheader): - self.write(rawheader) - - def define_var(self, var, value, override=True): - """adds a javascript var declaration / assginment in the header - - :param var: the variable name - :param value: the variable value (as a raw python value, - it will be jsonized later) - :param override: if False, don't set the variable value if the variable - is already defined. Default is True. - """ - self.jsvars.append( (var, value, override) ) - - def add_post_inline_script(self, content): - self.post_inlined_scripts.append(content) - - def add_onload(self, jscode): - self.add_post_inline_script(u"""$(cw).one('server-response', function(event) { -%s});""" % jscode) - - - def add_js(self, jsfile): - """adds `jsfile` to the list of javascripts used in the webpage - - This function checks if the file has already been added - :param jsfile: the script's URL - """ - if jsfile not in self.jsfiles: - self.jsfiles.append(jsfile) - - def add_css(self, cssfile, media='all'): - """adds `cssfile` to the list of javascripts used in the webpage - - This function checks if the file has already been added - :param cssfile: the stylesheet's URL - """ - if (cssfile, media) not in self.cssfiles: - self.cssfiles.append( (cssfile, media) ) - - def add_ie_css(self, cssfile, media='all', iespec=u'[if lt IE 8]'): - """registers some IE specific CSS""" - if (cssfile, media, iespec) not in self.ie_cssfiles: - self.ie_cssfiles.append( (cssfile, media, iespec) ) - - def add_unload_pagedata(self): - """registers onunload callback to clean page data on server""" - if not self.pagedata_unload: - self.post_inlined_scripts.append(self.js_unload_code) - self.pagedata_unload = True - - def concat_urls(self, urls): - """concatenates urls into one url usable by Apache mod_concat - - This method returns the url without modifying it if there is only - one element in the list - :param urls: list of local urls/filenames to concatenate - """ - if len(urls) == 1: - return urls[0] - len_prefix = len(self.datadir_url) - concated = u','.join(url[len_prefix:] for url in urls) - return (u'%s??%s' % (self.datadir_url, concated)) - - def group_urls(self, urls_spec): - """parses urls_spec in order to generate concatenated urls - for js and css includes - - This method checks if the file is local and if it shares options - with direct neighbors - :param urls_spec: entire list of urls/filenames to inspect - """ - concatable = [] - prev_islocal = False - prev_key = None - for url, key in urls_spec: - islocal = url.startswith(self.datadir_url) - if concatable and (islocal != prev_islocal or key != prev_key): - yield (self.concat_urls(concatable), prev_key) - del concatable[:] - if not 
islocal: - yield (url, key) - else: - concatable.append(url) - prev_islocal = islocal - prev_key = key - if concatable: - yield (self.concat_urls(concatable), prev_key) - - - def getvalue(self, skiphead=False): - """reimplement getvalue to provide a consistent (and somewhat browser - optimzed cf. http://stevesouders.com/cuzillion) order in external - resources declaration - """ - w = self.write - # 1/ variable declaration if any - if self.jsvars: - if skiphead: - w(u'') - else: - w(self.script_opening) - for var, value, override in self.jsvars: - vardecl = u'%s = %s;' % (var, json.dumps(value)) - if not override: - vardecl = (u'if (typeof %s == "undefined") {%s}' % - (var, vardecl)) - w(vardecl + u'\n') - if skiphead: - w(u'') - else: - w(self.script_closing) - # 2/ css files - ie_cssfiles = ((x, (y, z)) for x, y, z in self.ie_cssfiles) - if self.datadir_url and self._cw.vreg.config['concat-resources']: - cssfiles = self.group_urls(self.cssfiles) - ie_cssfiles = self.group_urls(ie_cssfiles) - jsfiles = (x for x, _ in self.group_urls((x, None) for x in self.jsfiles)) - else: - cssfiles = self.cssfiles - jsfiles = self.jsfiles - for cssfile, media in cssfiles: - w(u'\n' % - (media, xml_escape(cssfile))) - # 3/ ie css if necessary - if self.ie_cssfiles: # use self.ie_cssfiles because `ie_cssfiles` is a genexp - for cssfile, (media, iespec) in ie_cssfiles: - w(u' \n') - # 4/ js files - for jsfile in jsfiles: - if skiphead: - # Don't insert \n' % - xml_escape(jsfile)) - # 5/ post inlined scripts (i.e. scripts depending on other JS files) - if self.post_inlined_scripts: - if skiphead: - for script in self.post_inlined_scripts: - w(u'') - w(xml_escape(script)) - w(u'') - else: - w(self.script_opening) - w(u'\n\n'.join(self.post_inlined_scripts)) - w(self.script_closing) - # at the start of this function, the parent UStringIO may already have - # data in it, so we can't w(u'\n') at the top. Instead, we create - # a temporary UStringIO to get the same debugging output formatting - # if debugging is enabled. - headtag = UStringIO(tracewrites=self.tracewrites) - if not skiphead: - headtag.write(u'\n') - w(u'\n') - return headtag.getvalue() + super(HTMLHead, self).getvalue() - - -class HTMLStream(object): - """represents a HTML page. - - This is used my main templates so that HTML headers can be added - at any time during the page generation. - - HTMLStream uses the (U)StringIO interface to be compliant with - existing code. 
- """ - - def __init__(self, req): - self.tracehtml = req.tracehtml - # stream for - self.head = req.html_headers - # main stream - self.body = UStringIO(tracewrites=req.tracehtml) - # this method will be assigned to self.w in views - self.write = self.body.write - self.doctype = u'' - self._htmlattrs = [('lang', req.lang)] - # keep main_stream's reference on req for easier text/html demoting - req.main_stream = self - - @deprecated('[3.17] there are no namespaces in html, xhtml is not served any longer') - def add_namespace(self, prefix, uri): - pass - - @deprecated('[3.17] there are no namespaces in html, xhtml is not served any longer') - def set_namespaces(self, namespaces): - pass - - def add_htmlattr(self, attrname, attrvalue): - self._htmlattrs.append( (attrname, attrvalue) ) - - def set_htmlattrs(self, attrs): - self._htmlattrs = attrs - - def set_doctype(self, doctype, reset_xmldecl=None): - self.doctype = doctype - if reset_xmldecl is not None: - warn('[3.17] xhtml is no more supported', - DeprecationWarning, stacklevel=2) - - @property - def htmltag(self): - attrs = ' '.join('%s="%s"' % (attr, xml_escape(value)) - for attr, value in self._htmlattrs) - if attrs: - return '' % attrs - return '' - - def getvalue(self): - """writes HTML headers, closes tag and writes HTML body""" - if self.tracehtml: - css = u'\n'.join((u'span {', - u' font-family: monospace;', - u' word-break: break-all;', - u' word-wrap: break-word;', - u'}', - u'span:hover {', - u' color: red;', - u' text-decoration: underline;', - u'}')) - style = u'\n' % css - return (u'\n' - + u'\n\n%s\n\n' % style - + u'\n' - + u'' + xml_escape(self.doctype) + u'
      ' - + u'' + xml_escape(self.htmltag) + u'
      ' - + self.head.getvalue() - + self.body.getvalue() - + u'' + xml_escape(u'') + u'' - + u'\n') - return u'%s\n%s\n%s\n%s\n' % (self.doctype, - self.htmltag, - self.head.getvalue(), - self.body.getvalue()) - - -class CubicWebJsonEncoder(json.JSONEncoder): - """define a json encoder to be able to encode yams std types""" - - def default(self, obj): - if hasattr(obj, '__json_encode__'): - return obj.__json_encode__() - if isinstance(obj, datetime.datetime): - return ustrftime(obj, '%Y/%m/%d %H:%M:%S') - elif isinstance(obj, datetime.date): - return ustrftime(obj, '%Y/%m/%d') - elif isinstance(obj, datetime.time): - return obj.strftime('%H:%M:%S') - elif isinstance(obj, datetime.timedelta): - return (obj.days * 24 * 60 * 60) + obj.seconds - elif isinstance(obj, decimal.Decimal): - return float(obj) - try: - return json.JSONEncoder.default(self, obj) - except TypeError: - # we never ever want to fail because of an unknown type, - # just return None in those cases. - return None - -def json_dumps(value, **kwargs): - return json.dumps(value, cls=CubicWebJsonEncoder, **kwargs) - - -class JSString(str): - """use this string sub class in values given to :func:`js_dumps` to - insert raw javascript chain in some JSON string - """ - -def _dict2js(d, predictable=False): - if predictable: - it = sorted(d.items()) - else: - it = d.items() - res = [key + ': ' + js_dumps(val, predictable) - for key, val in it] - return '{%s}' % ', '.join(res) - -def _list2js(l, predictable=False): - return '[%s]' % ', '.join([js_dumps(val, predictable) for val in l]) - -def js_dumps(something, predictable=False): - """similar as :func:`json_dumps`, except values which are instances of - :class:`JSString` are expected to be valid javascript and will be output - as is - - >>> js_dumps({'hop': JSString('$.hop'), 'bar': None}, predictable=True) - '{bar: null, hop: $.hop}' - >>> js_dumps({'hop': '$.hop'}) - '{hop: "$.hop"}' - >>> js_dumps({'hip': {'hop': JSString('momo')}}) - '{hip: {hop: momo}}' - """ - if isinstance(something, dict): - return _dict2js(something, predictable) - if isinstance(something, list): - return _list2js(something, predictable) - if isinstance(something, JSString): - return something - return json_dumps(something, sort_keys=predictable) - -PERCENT_IN_URLQUOTE_RE = re.compile(r'%(?=[0-9a-fA-F]{2})') -def js_href(javascript_code): - """Generate a "javascript: ..." string for an href attribute. - - Some % which may be interpreted in a href context will be escaped. - - In an href attribute, url-quotes-looking fragments are interpreted before - being given to the javascript engine. Valid url quotes are in the form - ``%xx`` with xx being a byte in hexadecimal form. This means that ``%toto`` - will be unaltered but ``%babar`` will be mangled because ``ba`` is the - hexadecimal representation of 186. 
- - >>> js_href('alert("babar");') - 'javascript: alert("babar");' - >>> js_href('alert("%babar");') - 'javascript: alert("%25babar");' - >>> js_href('alert("%toto %babar");') - 'javascript: alert("%toto %25babar");' - >>> js_href('alert("%1337%");') - 'javascript: alert("%251337%");' - """ - return 'javascript: ' + PERCENT_IN_URLQUOTE_RE.sub(r'%25', javascript_code) - - -def parse_repo_uri(uri): - """ transform a command line uri into a (protocol, hostport, appid), e.g: - -> 'inmemory', None, '' - inmemory:// -> 'inmemory', None, '' - """ - parseduri = urlparse(uri) - scheme = parseduri.scheme - if scheme == '': - return ('inmemory', None, parseduri.path) - if scheme == 'inmemory': - return (scheme, None, parseduri.netloc) - raise NotImplementedError('URI protocol not implemented for `%s`' % uri) - - - -logger = getLogger('cubicweb.utils') - -class QueryCache(object): - """ a minimalist dict-like object to be used by the querier - and native source (replaces lgc.cache for this very usage) - - To be efficient it must be properly used. The usage patterns are - quite specific to its current clients. - - The ceiling value should be sufficiently high, else it will be - ruthlessly inefficient (there will be warnings when this happens). - A good (high enough) value can only be set on a per-application - value. A default, reasonnably high value is provided but tuning - e.g `rql-cache-size` can certainly help. - - There are two kinds of elements to put in this cache: - * frequently used elements - * occasional elements - - The former should finish in the _permanent structure after some - warmup. - - Occasional elements can be buggy requests (server-side) or - end-user (web-ui provided) requests. These have to be cleaned up - when they fill the cache, without evicting the useful, frequently - used entries. 
- """ - # quite arbitrary, but we want to never - # immortalize some use-a-little query - _maxlevel = 15 - - def __init__(self, ceiling=3000): - self._max = ceiling - # keys belonging forever to this cache - self._permanent = set() - # mapping of key (that can get wiped) to getitem count - self._transient = {} - self._data = {} - self._lock = Lock() - - def __len__(self): - with self._lock: - return len(self._data) - - def __getitem__(self, k): - with self._lock: - if k in self._permanent: - return self._data[k] - v = self._transient.get(k, _MARKER) - if v is _MARKER: - self._transient[k] = 1 - return self._data[k] - if v > self._maxlevel: - self._permanent.add(k) - self._transient.pop(k, None) - else: - self._transient[k] += 1 - return self._data[k] - - def __setitem__(self, k, v): - with self._lock: - if len(self._data) >= self._max: - self._try_to_make_room() - self._data[k] = v - - def pop(self, key, default=_MARKER): - with self._lock: - try: - if default is _MARKER: - return self._data.pop(key) - return self._data.pop(key, default) - finally: - if key in self._permanent: - self._permanent.remove(key) - else: - self._transient.pop(key, None) - - def clear(self): - with self._lock: - self._clear() - - def _clear(self): - self._permanent = set() - self._transient = {} - self._data = {} - - def _try_to_make_room(self): - current_size = len(self._data) - items = sorted(self._transient.items(), key=itemgetter(1)) - level = 0 - for k, v in items: - self._data.pop(k, None) - self._transient.pop(k, None) - if v > level: - datalen = len(self._data) - if datalen == 0: - return - if (current_size - datalen) / datalen > .1: - break - level = v - else: - # we removed cruft but everything is permanent - if len(self._data) >= self._max: - logger.warning('Cache %s is full.' % id(self)) - self._clear() - - def _usage_report(self): - with self._lock: - return {'itemcount': len(self._data), - 'transientcount': len(self._transient), - 'permanentcount': len(self._permanent)} - - def popitem(self): - raise NotImplementedError() - - def setdefault(self, key, default=None): - raise NotImplementedError() - - def update(self, other): - raise NotImplementedError() diff -r 058bb3dc685f -r 0b59724cb3f2 view.py --- a/view.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,534 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""abstract views and templates classes for CubicWeb web client""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from io import BytesIO -from warnings import warn -from functools import partial - -from six.moves import range - -from logilab.common.deprecation import deprecated -from logilab.common.registry import yes -from logilab.mtconverter import xml_escape - -from rql import nodes - -from cubicweb import NotAnEntity -from cubicweb.predicates import non_final_entity, nonempty_rset, none_rset -from cubicweb.appobject import AppObject -from cubicweb.utils import UStringIO, HTMLStream -from cubicweb.uilib import domid, js -from cubicweb.schema import display_name - -# robots control -NOINDEX = u'' -NOFOLLOW = u'' - -TRANSITIONAL_DOCTYPE_NOEXT = u'\n' -TRANSITIONAL_DOCTYPE = TRANSITIONAL_DOCTYPE_NOEXT # bw compat - -STRICT_DOCTYPE_NOEXT = u'\n' -STRICT_DOCTYPE = STRICT_DOCTYPE_NOEXT # bw compat - -# base view object ############################################################ - -class View(AppObject): - """This class is an abstraction of a view class, used as a base class for - every renderable object such as views, templates and other user interface - components. - - A `View` is instantiated to render a result set or part of a result - set. `View` subclasses may be parametrized using the following class - attributes: - - :py:attr:`templatable` indicates if the view may be embedded in a main - template or if it has to be rendered standalone (i.e. pure XML views must - not be embedded in the main template of HTML pages) - :py:attr:`content_type` if the view is not templatable, it should set the - `content_type` class attribute to the correct MIME type (text/xhtml being - the default) - :py:attr:`category` this attribute may be used in the interface to regroup - related objects (view kinds) together - - :py:attr:`paginable` - - :py:attr:`binary` - - - A view writes to its output stream thanks to its attribute `w` (the - append method of an `UStreamIO`, except for binary views). - - At instantiation time, the standard `_cw`, and `cw_rset` attributes are - added and the `w` attribute will be set at rendering time to a write - function to use. - """ - __registry__ = 'views' - - templatable = True - # content_type = 'application/xhtml+xml' # text/xhtml' - binary = False - add_to_breadcrumbs = True - category = 'view' - paginable = True - - def __init__(self, req=None, rset=None, **kwargs): - super(View, self).__init__(req, rset=rset, **kwargs) - self.w = None - - @property - def content_type(self): - return self._cw.html_content_type() - - def set_stream(self, w=None): - if self.w is not None: - return - if w is None: - if self.binary: - self._stream = stream = BytesIO() - else: - self._stream = stream = UStringIO() - w = stream.write - else: - stream = None - self.w = w - return stream - - # main view interface ##################################################### - - def render(self, w=None, **context): - """called to render a view object for a result set. 
- - This method is a dispatched to an actual method selected - according to optional row and col parameters, which are locating - a particular row or cell in the result set: - - * if row is specified, `cell_call` is called - * if none of them is supplied, the view is considered to apply on - the whole result set (which may be None in this case), `call` is - called - """ - # XXX use .cw_row/.cw_col - row = context.get('row') - if row is not None: - context.setdefault('col', 0) - view_func = self.cell_call - else: - view_func = self.call - stream = self.set_stream(w) - try: - view_func(**context) - except Exception: - self.debug('view call %s failed (context=%s)', view_func, context) - raise - # return stream content if we have created it - if stream is not None: - return self._stream.getvalue() - - def tal_render(self, template, variables): - """render a precompiled page template with variables in the given - dictionary as context - """ - from cubicweb.ext.tal import CubicWebContext - context = CubicWebContext() - context.update({'self': self, 'rset': self.cw_rset, '_' : self._cw._, - 'req': self._cw, 'user': self._cw.user}) - context.update(variables) - output = UStringIO() - template.expand(context, output) - return output.getvalue() - - # should default .call() method add a
      around each - # rset item - add_div_section = True - - def call(self, **kwargs): - """the view is called for an entire result set, by default loop - other rows of the result set and call the same view on the - particular row - - Views applicable on None result sets have to override this method - """ - rset = self.cw_rset - if rset is None: - raise NotImplementedError("%r an rset is required" % self) - wrap = self.templatable and len(rset) > 1 and self.add_div_section - # avoid re-selection if rset of size 1, we already have the most - # specific view - if rset.rowcount != 1: - kwargs.setdefault('initargs', self.cw_extra_kwargs) - for i in range(len(rset)): - if wrap: - self.w(u'
      ') - self.wview(self.__regid__, rset, row=i, **kwargs) - if wrap: - self.w(u"
      ") - else: - if wrap: - self.w(u'
      ') - kwargs.setdefault('col', 0) - self.cell_call(row=0, **kwargs) - if wrap: - self.w(u"
      ") - - def cell_call(self, row, col, **kwargs): - """the view is called for a particular result set cell""" - raise NotImplementedError(repr(self)) - - def linkable(self): - """return True if the view may be linked in a menu - - by default views without title are not meant to be displayed - """ - if not getattr(self, 'title', None): - return False - return True - - def is_primary(self): - return self.cw_extra_kwargs.get('is_primary', self.__regid__ == 'primary') - - def url(self): - """return the url associated with this view. Should not be - necessary for non linkable views, but a default implementation - is provided anyway. - """ - rset = self.cw_rset - if rset is None: - return self._cw.build_url('view', vid=self.__regid__) - coltypes = rset.column_types(0) - if len(coltypes) == 1: - etype = next(iter(coltypes)) - if not self._cw.vreg.schema.eschema(etype).final: - if len(rset) == 1: - entity = rset.get_entity(0, 0) - return entity.absolute_url(vid=self.__regid__) - # don't want to generate / url if there is some restriction - # on something else than the entity type - restr = rset.syntax_tree().children[0].where - # XXX norestriction is not correct here. For instance, in cases like - # "Any P,N WHERE P is Project, P name N" norestriction should equal - # True - norestriction = (isinstance(restr, nodes.Relation) and - restr.is_types_restriction()) - if norestriction: - return self._cw.build_url(etype.lower(), vid=self.__regid__) - return self._cw.build_url('view', rql=rset.printable_rql(), vid=self.__regid__) - - def set_request_content_type(self): - """set the content type returned by this view""" - self._cw.set_content_type(self.content_type) - - # view utilities ########################################################## - - def wview(self, __vid, rset=None, __fallback_vid=None, **kwargs): - """shortcut to self.view method automatically passing self.w as argument - """ - self._cw.view(__vid, rset, __fallback_vid, w=self.w, **kwargs) - - def whead(self, data): - self._cw.html_headers.write(data) - - def wdata(self, data): - """simple helper that escapes `data` and writes into `self.w`""" - self.w(xml_escape(data)) - - def html_headers(self): - """return a list of html headers (eg something to be inserted between - and of the returned page - - by default return a meta tag to disable robot indexation of the page - """ - return [NOINDEX] - - def page_title(self): - """returns a title according to the result set - used for the - title in the HTML header - """ - vtitle = self._cw.form.get('vtitle') - if vtitle: - return self._cw._(vtitle) - # class defined title will only be used if the resulting title doesn't - # seem clear enough - vtitle = getattr(self, 'title', None) or u'' - if vtitle: - vtitle = self._cw._(vtitle) - rset = self.cw_rset - if rset and rset.rowcount: - if rset.rowcount == 1: - try: - entity = rset.complete_entity(0, 0) - # use long_title to get context information if any - clabel = entity.dc_long_title() - except NotAnEntity: - clabel = display_name(self._cw, rset.description[0][0]) - clabel = u'%s (%s)' % (clabel, vtitle) - else : - etypes = rset.column_types(0) - if len(etypes) == 1: - etype = next(iter(etypes)) - clabel = display_name(self._cw, etype, 'plural') - else : - clabel = u'#[*] (%s)' % vtitle - else: - clabel = vtitle - return u'%s (%s)' % (clabel, self._cw.property_value('ui.site-title')) - - def field(self, label, value, row=True, show_label=True, w=None, tr=True, - table=False): - """read-only field""" - if w is None: - w = self.w - if table: - 
w(u'') - else: - w(u'
      ') - if show_label and label: - if tr: - label = display_name(self._cw, label) - if table: - w(u'%s' % label) - else: - w(u'%s ' % label) - if table: - if not (show_label and label): - w(u'%s' % value) - else: - w(u'%s' % value) - else: - w(u'%s
      ' % value) - - - -# concrete views base classes ################################################# - -class EntityView(View): - """base class for views applying on an entity (i.e. uniform result set)""" - __select__ = non_final_entity() - category = _('entityview') - - def call(self, **kwargs): - if self.cw_rset is None: - # * cw_extra_kwargs is the place where extra selection arguments are - # stored - # * when calling req.view('somevid', entity=entity), 'entity' ends - # up in cw_extra_kwargs and kwargs - # - # handle that to avoid a TypeError with a sanity check - # - # Notice that could probably be avoided by handling entity_call in - # .render - entity = self.cw_extra_kwargs.pop('entity') - if 'entity' in kwargs: - assert kwargs.pop('entity') is entity - self.entity_call(entity, **kwargs) - else: - super(EntityView, self).call(**kwargs) - - def cell_call(self, row, col, **kwargs): - self.entity_call(self.cw_rset.get_entity(row, col), **kwargs) - - def entity_call(self, entity, **kwargs): - raise NotImplementedError('%r %r' % (self.__regid__, self.__class__)) - - -class StartupView(View): - """base class for views which doesn't need a particular result set to be - displayed (so they can always be displayed!) - """ - __select__ = none_rset() - - category = _('startupview') - - def html_headers(self): - """return a list of html headers (eg something to be inserted between - and of the returned page - - by default startup views are indexed - """ - return [] - - -class EntityStartupView(EntityView): - """base class for entity views which may also be applied to None - result set (usually a default rql is provided by the view class) - """ - __select__ = none_rset() | non_final_entity() - - default_rql = None - - def __init__(self, req, rset=None, **kwargs): - super(EntityStartupView, self).__init__(req, rset=rset, **kwargs) - if rset is None: - # this instance is not in the "entityview" category - self.category = 'startupview' - - def startup_rql(self): - """return some rql to be executed if the result set is None""" - return self.default_rql - - def no_entities(self, **kwargs): - """override to display something when no entities were found""" - pass - - def call(self, **kwargs): - """override call to execute rql returned by the .startup_rql method if - necessary - """ - rset = self.cw_rset - if rset is None: - rset = self.cw_rset = self._cw.execute(self.startup_rql()) - if rset: - for i in range(len(rset)): - self.wview(self.__regid__, rset, row=i, **kwargs) - else: - self.no_entities(**kwargs) - - -class AnyRsetView(View): - """base class for views applying on any non empty result sets""" - __select__ = nonempty_rset() - - category = _('anyrsetview') - - def columns_labels(self, mainindex=0, tr=True): - """compute the label of the rset colums - - The logic is based on :meth:`~rql.stmts.Union.get_description`. - - :param mainindex: The index of the main variable. This is an hint to get - more accurate label for various situation - :type mainindex: int - - :param tr: Should the label be translated ? 
- :type tr: boolean - """ - if tr: - translate = partial(display_name, self._cw) - else: - translate = lambda val, *args,**kwargs: val - # XXX [0] because of missing Union support - rql_syntax_tree = self.cw_rset.syntax_tree() - rqlstdescr = rql_syntax_tree.get_description(mainindex, translate)[0] - labels = [] - for colidx, label in enumerate(rqlstdescr): - labels.append(self.column_label(colidx, label, translate)) - return labels - - def column_label(self, colidx, default, translate_func=None): - """return the label of a specified columns index - - Overwrite me if you need to compute specific label. - - :param colidx: The index of the column the call computes a label for. - :type colidx: int - - :param default: Default value. If ``"Any"`` the default value will be - recomputed as coma separated list for all possible - etypes name. - :type colidx: string - - :param translate_func: A function used to translate name. - :type colidx: function - """ - label = default - if label == 'Any': - etypes = self.cw_rset.column_types(colidx) - if translate_func is not None: - etypes = map(translate_func, etypes) - label = u','.join(etypes) - return label - - - -# concrete template base classes ############################################## - -class MainTemplate(View): - """main template are primary access point to render a full HTML page. - There is usually at least a regular main template and a simple fallback - one to display error if the first one failed - """ - - doctype = '' - - def set_stream(self, w=None): - if self.w is not None: - return - if w is None: - if self.binary: - self._stream = stream = BytesIO() - else: - self._stream = stream = HTMLStream(self._cw) - w = stream.write - else: - stream = None - self.w = w - return stream - - def write_doctype(self, xmldecl=True): - assert isinstance(self._stream, HTMLStream) - self._stream.doctype = self.doctype - if not xmldecl: - self._stream.xmldecl = u'' - - def linkable(self): - return False - -# concrete component base classes ############################################# - -class ReloadableMixIn(object): - """simple mixin for reloadable parts of UI""" - - @property - def domid(self): - return domid(self.__regid__) - - -class Component(ReloadableMixIn, View): - """base class for components""" - __registry__ = 'components' - __select__ = yes() - - # XXX huummm, much probably useless (should be...) - htmlclass = 'mainRelated' - @property - def cssclass(self): - return '%s %s' % (self.htmlclass, domid(self.__regid__)) - - # XXX should rely on ReloadableMixIn.domid - @property - def domid(self): - return '%sComponent' % domid(self.__regid__) - - -class Adapter(AppObject): - """base class for adapters""" - __registry__ = 'adapters' - - -class EntityAdapter(Adapter): - """base class for entity adapters (eg adapt an entity to an interface)""" - def __init__(self, _cw, **kwargs): - try: - self.entity = kwargs.pop('entity') - except KeyError: - self.entity = kwargs['rset'].get_entity(kwargs.get('row') or 0, - kwargs.get('col') or 0) - Adapter.__init__(self, _cw, **kwargs) diff -r 058bb3dc685f -r 0b59724cb3f2 vregistry.py --- a/vregistry.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -from warnings import warn -from logilab.common.deprecation import class_moved -warn('[3.15] moved to logilab.common.registry', DeprecationWarning, stacklevel=2) -from logilab.common.registry import * - -VRegistry = class_moved(RegistryStore, old_name='VRegistry', message='[3.15] VRegistry moved to logilab.common.registry as RegistryStore') diff -r 058bb3dc685f -r 0b59724cb3f2 web/__init__.py --- a/web/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""CubicWeb web client core. You'll need a apache-modpython or twisted -publisher to get a full CubicWeb web application -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves.urllib.parse import quote as urlquote -from logilab.common.deprecation import deprecated - -from cubicweb.web._exceptions import * -from cubicweb.utils import json_dumps -from cubicweb.uilib import eid_param - -assert json_dumps is not None, 'no json module installed' - -INTERNAL_FIELD_VALUE = '__cubicweb_internal_field__' - - -class stdmsgs(object): - """standard ui message (in a class for bw compat)""" - BUTTON_OK = (_('button_ok'), 'OK_ICON') - BUTTON_APPLY = (_('button_apply'), 'APPLY_ICON') - BUTTON_CANCEL = (_('button_cancel'), 'CANCEL_ICON') - BUTTON_DELETE = (_('button_delete'), 'TRASH_ICON') - YES = (_('yes'), None) - NO = (_('no'), None) - - -from logging import getLogger -LOGGER = getLogger('cubicweb.web') - -# XXX deprecated -FACETTES = set() - - -def jsonize(function): - def newfunc(*args, **kwargs): - value = function(*args, **kwargs) - try: - return json_dumps(value) - except TypeError: - return json_dumps(repr(value)) - return newfunc diff -r 058bb3dc685f -r 0b59724cb3f2 web/_exceptions.py --- a/web/_exceptions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,108 +0,0 @@ -# pylint: disable=W0401,W0614 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""exceptions used in the core of the CubicWeb web application""" - -__docformat__ = "restructuredtext en" - -from six.moves import http_client - -from cubicweb._exceptions import * -from cubicweb.utils import json_dumps - - -class DirectResponse(Exception): - """Used to supply a twitted HTTP Response directly""" - def __init__(self, response): - self.response = response - -class InvalidSession(CubicWebException): - """raised when a session id is found but associated session is not found or - invalid""" - -# Publish related exception - -class PublishException(CubicWebException): - """base class for publishing related exception""" - - def __init__(self, *args, **kwargs): - self.status = kwargs.pop('status', http_client.OK) - super(PublishException, self).__init__(*args, **kwargs) - -class LogOut(PublishException): - """raised to ask for deauthentication of a logged in user""" - def __init__(self, url=None): - super(LogOut, self).__init__() - self.url = url - -class Redirect(PublishException): - """raised to redirect the http request""" - def __init__(self, location, status=http_client.SEE_OTHER): - super(Redirect, self).__init__(status=status) - self.location = location - -class StatusResponse(PublishException): - - def __init__(self, status, content=''): - super(StatusResponse, self).__init__(status=status) - self.content = content - - def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self.status, self.content) - -# Publish related error - -class RequestError(PublishException): - """raised when a request can't be served because of a bad input""" - - def __init__(self, *args, **kwargs): - kwargs.setdefault('status', http_client.BAD_REQUEST) - super(RequestError, self).__init__(*args, **kwargs) - - -class NothingToEdit(RequestError): - """raised when an edit request doesn't specify any eid to edit""" - - def __init__(self, *args, **kwargs): - kwargs.setdefault('status', http_client.BAD_REQUEST) - super(NothingToEdit, self).__init__(*args, **kwargs) - -class ProcessFormError(RequestError): - """raised when posted data can't be processed by the corresponding field - """ - def __init__(self, *args, **kwargs): - kwargs.setdefault('status', http_client.BAD_REQUEST) - super(ProcessFormError, self).__init__(*args, **kwargs) - -class NotFound(RequestError): - """raised when something was not found. 
In most case, - a 404 error should be returned""" - - def __init__(self, *args, **kwargs): - kwargs.setdefault('status', http_client.NOT_FOUND) - super(NotFound, self).__init__(*args, **kwargs) - -class RemoteCallFailed(RequestError): - """raised when a json remote call fails - """ - def __init__(self, reason='', status=http_client.INTERNAL_SERVER_ERROR): - super(RemoteCallFailed, self).__init__(reason, status=status) - self.reason = reason - - def dumps(self): - return json_dumps({'reason': self.reason}) diff -r 058bb3dc685f -r 0b59724cb3f2 web/action.py --- a/web/action.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,113 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""abstract action classes for CubicWeb web client - -Actions are typically displayed in an action box, but can also be used -in other parts of the interface (the user menu, the footer, etc.). The -'order', 'category' and 'title' class attributes control how the action will -be displayed. The 'submenu' attribute is only used for actions in the -action box. - -The most important method from a developper point of view in the -:meth:'Action.url' method, which returns a URL on which the navigation -should be directed to perform the action. The common way of -writing that method is to simply return a URL to the current rset with a -special view (with `self._cw.build_url(...)` for instance) - -Many examples are available in :mod:`cubicweb.web.views.actions`. -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from cubicweb import target -from cubicweb.predicates import (partial_relation_possible, match_search_state, - one_line_rset) -from cubicweb.appobject import AppObject - - -class Action(AppObject): - """abstract action. Handle the .search_states attribute to match - request search state. 
- """ - __registry__ = 'actions' - __select__ = match_search_state('normal') - order = 99 - category = 'moreactions' - # actions in category 'moreactions' can specify a sub-menu in which they should be filed - submenu = None - - def actual_actions(self): - yield self - - def fill_menu(self, box, menu): - """add action(s) to the given submenu of the given box""" - for action in self.actual_actions(): - menu.append(box.action_link(action)) - - def html_class(self): - if self._cw.selected(self.url()): - return 'selected' - - def build_action(self, title, url, **kwargs): - return UnregisteredAction(self._cw, title, url, **kwargs) - - def url(self): - """return the url associated with this action""" - raise NotImplementedError - - -class UnregisteredAction(Action): - """non registered action, used to build boxes""" - category = None - id = None - - def __init__(self, req, title, url, **kwargs): - Action.__init__(self, req) - self.title = req._(title) - self._url = url - self.__dict__.update(kwargs) - - def url(self): - return self._url - - -class LinkToEntityAction(Action): - """base class for actions consisting to create a new object with an initial - relation set to an entity. - - Additionally to EntityAction behaviour, this class is parametrized using - .rtype, .role and .target_etype attributes to check if the action apply and - if the logged user has access to it (see - :class:`~cubicweb.selectors.partial_relation_possible` selector - documentation for more information). - """ - __select__ = (match_search_state('normal') & one_line_rset() - & partial_relation_possible(action='add', strict=True)) - - submenu = 'addrelated' - # to be defined in concrete classes - target_etype = rtype = None - - def url(self): - ttype = self.target_etype - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - linkto = '%s:%s:%s' % (self.rtype, entity.eid, target(self)) - return self._cw.vreg["etypes"].etype_class(ttype).cw_create_url(self._cw, - __redirectpath=entity.rest_path(), __linkto=linkto, - __redirectvid=self._cw.form.get('__redirectvid', '')) diff -r 058bb3dc685f -r 0b59724cb3f2 web/application.py --- a/web/application.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,532 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""CubicWeb web client application object""" - -__docformat__ = "restructuredtext en" - -import sys -from time import clock, time -from contextlib import contextmanager -from warnings import warn -import json - -from six import text_type, binary_type -from six.moves import http_client - -from logilab.common.deprecation import deprecated - -from rql import BadRQLQuery - -from cubicweb import set_log_methods, cwvreg -from cubicweb import ( - ValidationError, Unauthorized, Forbidden, - AuthenticationError, NoSelectableObject, - CW_EVENT_MANAGER) -from cubicweb.repoapi import anonymous_cnx -from cubicweb.web import LOGGER, component, cors -from cubicweb.web import ( - StatusResponse, DirectResponse, Redirect, NotFound, LogOut, - RemoteCallFailed, InvalidSession, RequestError, PublishException) - -from cubicweb.web.request import CubicWebRequestBase - -# make session manager available through a global variable so the debug view can -# print information about web session -SESSION_MANAGER = None - - -@contextmanager -def anonymized_request(req): - orig_cnx = req.cnx - anon_cnx = anonymous_cnx(orig_cnx.session.repo) - req.set_cnx(anon_cnx) - try: - with anon_cnx: - yield req - finally: - req.set_cnx(orig_cnx) - - - -class CookieSessionHandler(object): - """a session handler using a cookie to store the session identifier""" - - def __init__(self, appli): - self.repo = appli.repo - self.vreg = appli.vreg - self.session_manager = self.vreg['sessions'].select('sessionmanager', - repo=self.repo) - global SESSION_MANAGER - SESSION_MANAGER = self.session_manager - if self.vreg.config.mode != 'test': - # don't try to reset session manager during test, this leads to - # weird failures when running multiple tests - CW_EVENT_MANAGER.bind('after-registry-reload', - self.reset_session_manager) - - def reset_session_manager(self): - data = self.session_manager.dump_data() - self.session_manager = self.vreg['sessions'].select('sessionmanager', - repo=self.repo) - self.session_manager.restore_data(data) - global SESSION_MANAGER - SESSION_MANAGER = self.session_manager - - @property - def clean_sessions_interval(self): - return self.session_manager.clean_sessions_interval - - def clean_sessions(self): - """cleanup sessions which has not been unused since a given amount of - time - """ - self.session_manager.clean_sessions() - - def session_cookie(self, req): - """return a string giving the name of the cookie used to store the - session identifier. - """ - if req.https: - return '__%s_https_session' % self.vreg.config.appid - return '__%s_session' % self.vreg.config.appid - - def get_session(self, req): - """Return a session object corresponding to credentials held by the req - - Session id is searched from : - - # form variable - - cookie - - If no session id is found, try opening a new session with credentials - found in the request. - - Raises AuthenticationError if no session can be found or created. 
- """ - cookie = req.get_cookie() - sessioncookie = self.session_cookie(req) - try: - sessionid = str(cookie[sessioncookie].value) - session = self.get_session_by_id(req, sessionid) - except (KeyError, InvalidSession): # no valid session cookie - session = self.open_session(req) - return session - - def get_session_by_id(self, req, sessionid): - session = self.session_manager.get_session(req, sessionid) - session.mtime = time() - return session - - def open_session(self, req): - session = self.session_manager.open_session(req) - sessioncookie = self.session_cookie(req) - secure = req.https and req.base_url().startswith('https://') - req.set_cookie(sessioncookie, session.sessionid, - maxage=None, secure=secure, httponly=True) - if not session.anonymous_session: - self.session_manager.postlogin(req, session) - return session - - def logout(self, req, goto_url): - """logout from the instance by cleaning the session and raising - `AuthenticationError` - """ - self.session_manager.close_session(req.session) - req.remove_cookie(self.session_cookie(req)) - raise LogOut(url=goto_url) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -class CubicWebPublisher(object): - """the publisher is a singleton hold by the web frontend, and is responsible - to publish HTTP request. - - The http server will call its main entry point ``application.handle_request``. - - .. automethod:: cubicweb.web.application.CubicWebPublisher.main_handle_request - - You have to provide both a repository and web-server config at - initialization. In all in one instance both config will be the same. - """ - - def __init__(self, repo, config, session_handler_fact=CookieSessionHandler): - self.info('starting web instance from %s', config.apphome) - self.repo = repo - self.vreg = repo.vreg - # get instance's schema - if not self.vreg.initialized: - config.init_cubes(self.repo.get_cubes()) - self.vreg.init_properties(self.repo.properties()) - self.vreg.set_schema(self.repo.get_schema()) - # set the correct publish method - if config['query-log-file']: - from threading import Lock - self._query_log = open(config['query-log-file'], 'a') - self.handle_request = self.log_handle_request - self._logfile_lock = Lock() - else: - self._query_log = None - self.handle_request = self.main_handle_request - # instantiate session and url resolving helpers - self.session_handler = session_handler_fact(self) - self.set_urlresolver() - CW_EVENT_MANAGER.bind('after-registry-reload', self.set_urlresolver) - - def set_urlresolver(self): - self.url_resolver = self.vreg['components'].select('urlpublisher', - vreg=self.vreg) - - def get_session(self, req): - """Return a session object corresponding to credentials held by the req - - May raise AuthenticationError. 
- """ - return self.session_handler.get_session(req) - - # publish methods ######################################################### - - def log_handle_request(self, req, path): - """wrapper around _publish to log all queries executed for a given - accessed path - """ - def wrap_set_cnx(func): - def wrap_execute(cnx): - orig_execute = cnx.execute - def execute(rql, kwargs=None, build_descr=True): - tstart, cstart = time(), clock() - rset = orig_execute(rql, kwargs, build_descr=build_descr) - cnx.executed_queries.append((rql, kwargs, time() - tstart, clock() - cstart)) - return rset - return execute - def set_cnx(cnx): - func(cnx) - cnx.execute = wrap_execute(cnx) - cnx.executed_queries = [] - return set_cnx - req.set_cnx = wrap_set_cnx(req.set_cnx) - try: - return self.main_handle_request(req, path) - finally: - cnx = req.cnx - if cnx: - with self._logfile_lock: - try: - result = ['\n'+'*'*80] - result.append(req.url()) - result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q - for q in cnx.executed_queries] - cnx.executed_queries = [] - self._query_log.write('\n'.join(result).encode(req.encoding)) - self._query_log.flush() - except Exception: - self.exception('error while logging queries') - - - - def main_handle_request(self, req, path): - """Process an http request - - Arguments are: - - a Request object - - path of the request object - - It returns the content of the http response. HTTP header and status are - set on the Request object. - """ - if not isinstance(req, CubicWebRequestBase): - warn('[3.15] Application entry point arguments are now (req, path) ' - 'not (path, req)', DeprecationWarning, 2) - req, path = path, req - if req.authmode == 'http': - # activate realm-based auth - realm = self.vreg.config['realm'] - req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False) - content = b'' - try: - try: - session = self.get_session(req) - from cubicweb import repoapi - cnx = repoapi.Connection(session) - req.set_cnx(cnx) - except AuthenticationError: - # Keep the dummy session set at initialisation. - # such session with work to an some extend but raise an - # AuthenticationError on any database access. - import contextlib - @contextlib.contextmanager - def dummy(): - yield - cnx = dummy() - # XXX We want to clean up this approach in the future. But - # several cubes like registration or forgotten password rely on - # this principle. - - # nested try to allow LogOut to delegate logic to AuthenticationError - # handler - try: - ### Try to generate the actual request content - with cnx: - content = self.core_handle(req, path) - # Handle user log-out - except LogOut as ex: - # When authentification is handled by cookie the code that - # raised LogOut must has invalidated the cookie. We can just - # reload the original url without authentification - if self.vreg.config['auth-mode'] == 'cookie' and ex.url: - req.headers_out.setHeader('location', str(ex.url)) - if ex.status is not None: - req.status_out = http_client.SEE_OTHER - # When the authentification is handled by http we must - # explicitly ask for authentification to flush current http - # authentification information - else: - # Render "logged out" content. - # assignement to ``content`` prevent standard - # AuthenticationError code to overwrite it. 
- content = self.loggedout_content(req) - # let the explicitly reset http credential - raise AuthenticationError() - except Redirect as ex: - # authentication needs redirection (eg openid) - content = self.redirect_handler(req, ex) - # Wrong, absent or Reseted credential - except AuthenticationError: - # If there is an https url configured and - # the request does not use https, redirect to login form - https_url = self.vreg.config['https-url'] - if https_url and req.base_url() != https_url: - req.status_out = http_client.SEE_OTHER - req.headers_out.setHeader('location', https_url + 'login') - else: - # We assume here that in http auth mode the user *May* provide - # Authentification Credential if asked kindly. - if self.vreg.config['auth-mode'] == 'http': - req.status_out = http_client.UNAUTHORIZED - # In the other case (coky auth) we assume that there is no way - # for the user to provide them... - # XXX But WHY ? - else: - req.status_out = http_client.FORBIDDEN - # If previous error handling already generated a custom content - # do not overwrite it. This is used by LogOut Except - # XXX ensure we don't actually serve content - if not content: - content = self.need_login_content(req) - assert isinstance(content, binary_type) - return content - - - def core_handle(self, req, path): - """method called by the main publisher to process - - should return a string containing the resulting page or raise a - `NotFound` exception - - :type path: str - :param path: the path part of the url to publish - - :type req: `web.Request` - :param req: the request object - - :rtype: str - :return: the result of the pusblished url - """ - # don't log form values they may contains sensitive information - self.debug('publish "%s" (%s, form params: %s)', - path, req.session.sessionid, list(req.form)) - # remove user callbacks on a new request (except for json controllers - # to avoid callbacks being unregistered before they could be called) - tstart = clock() - commited = False - try: - ### standard processing of the request - try: - # apply CORS sanity checks - cors.process_request(req, self.vreg.config) - ctrlid, rset = self.url_resolver.process(req, path) - try: - controller = self.vreg['controllers'].select(ctrlid, req, - appli=self) - except NoSelectableObject: - raise Unauthorized(req._('not authorized')) - req.update_search_state() - result = controller.publish(rset=rset) - except cors.CORSPreflight: - # Return directly an empty 200 - req.status_out = 200 - result = b'' - except StatusResponse as ex: - warn('[3.16] StatusResponse is deprecated use req.status_out', - DeprecationWarning, stacklevel=2) - result = ex.content - req.status_out = ex.status - except Redirect as ex: - # Redirect may be raised by edit controller when everything went - # fine, so attempt to commit - result = self.redirect_handler(req, ex) - if req.cnx: - txuuid = req.cnx.commit() - commited = True - if txuuid is not None: - req.data['last_undoable_transaction'] = txuuid - ### error case - except NotFound as ex: - result = self.notfound_content(req) - req.status_out = ex.status - except ValidationError as ex: - result = self.validation_error_handler(req, ex) - except RemoteCallFailed as ex: - result = self.ajax_error_handler(req, ex) - except Unauthorized as ex: - req.data['errmsg'] = req._('You\'re not authorized to access this page. 
' - 'If you think you should, please contact the site administrator.') - req.status_out = http_client.FORBIDDEN - result = self.error_handler(req, ex, tb=False) - except Forbidden as ex: - req.data['errmsg'] = req._('This action is forbidden. ' - 'If you think it should be allowed, please contact the site administrator.') - req.status_out = http_client.FORBIDDEN - result = self.error_handler(req, ex, tb=False) - except (BadRQLQuery, RequestError) as ex: - result = self.error_handler(req, ex, tb=False) - ### pass through exception - except DirectResponse: - if req.cnx: - req.cnx.commit() - raise - except (AuthenticationError, LogOut): - # the rollback is handled in the finally - raise - ### Last defense line - except BaseException as ex: - req.status_out = http_client.INTERNAL_SERVER_ERROR - result = self.error_handler(req, ex, tb=True) - finally: - if req.cnx and not commited: - try: - req.cnx.rollback() - except Exception: - pass # ignore rollback error at this point - self.add_undo_link_to_msg(req) - self.debug('query %s executed in %s sec', req.relative_path(), clock() - tstart) - return result - - # Error handlers - - def redirect_handler(self, req, ex): - """handle redirect - - comply to ex status - - set header field - - return empty content - """ - self.debug('redirecting to %s', str(ex.location)) - req.headers_out.setHeader('location', str(ex.location)) - assert 300 <= ex.status < 400 - req.status_out = ex.status - return b'' - - def validation_error_handler(self, req, ex): - ex.translate(req._) # translate messages using ui language - if '__errorurl' in req.form: - forminfo = {'error': ex, - 'values': req.form, - 'eidmap': req.data.get('eidmap', {}) - } - req.session.data[req.form['__errorurl']] = forminfo - # XXX form session key / __error_url should be differentiated: - # session key is 'url + #[%s]' %( - req.build_url('undo', txuuid=txuuid), req._('undo')) - req.append_to_redirect_message(msg) - - def ajax_error_handler(self, req, ex): - req.set_header('content-type', 'application/json') - status = http_client.INTERNAL_SERVER_ERROR - if isinstance(ex, PublishException) and ex.status is not None: - status = ex.status - if req.status_out < 400: - # don't overwrite it if it's already set - req.status_out = status - json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': text_type(ex)})) - return json_dumper().encode('utf-8') - - # special case handling - - def need_login_content(self, req): - return self.vreg['views'].main_template(req, 'login') - - def loggedout_content(self, req): - return self.vreg['views'].main_template(req, 'loggedout') - - def notfound_content(self, req): - req.form['vid'] = '404' - view = self.vreg['views'].select('404', req) - template = self.main_template_id(req) - return self.vreg['views'].main_template(req, template, view=view) - - # template stuff - - def main_template_id(self, req): - template = req.form.get('__template', req.property_value('ui.main-template')) - if template not in self.vreg['views']: - template = 'main-template' - return template - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -set_log_methods(CubicWebPublisher, LOGGER) -set_log_methods(CookieSessionHandler, LOGGER) diff -r 058bb3dc685f -r 0b59724cb3f2 web/box.py --- a/web/box.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,197 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""abstract box classes for CubicWeb web client""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six import add_metaclass - -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import class_deprecated, class_renamed - -from cubicweb import Unauthorized, role as get_role -from cubicweb.schema import display_name -from cubicweb.predicates import no_cnx, one_line_rset -from cubicweb.view import View -from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs -from cubicweb.web.htmlwidgets import (BoxLink, BoxWidget, SideBoxWidget, - RawBoxItem, BoxSeparator) -from cubicweb.web.action import UnregisteredAction - - -def sort_by_category(actions, categories_in_order=None): - """return a list of (category, actions_sorted_by_title)""" - result = [] - actions_by_cat = {} - for action in actions: - actions_by_cat.setdefault(action.category, []).append( - (action.title, action) ) - for key, values in actions_by_cat.items(): - actions_by_cat[key] = [act for title, act in sorted(values, key=lambda x: x[0])] - if categories_in_order: - for cat in categories_in_order: - if cat in actions_by_cat: - result.append( (cat, actions_by_cat[cat]) ) - for item in sorted(actions_by_cat.items()): - result.append(item) - return result - - -# old box system, deprecated ################################################### - -@add_metaclass(class_deprecated) -class BoxTemplate(View): - """base template for boxes, usually a (contextual) list of possible - actions. Various classes attributes may be used to control the box - rendering. - - You may override one of the formatting callbacks if this is not necessary - for your custom box. 
- - Classes inheriting from this class usually only have to override call - to fetch desired actions, and then to do something like :: - - box.render(self.w) - """ - __deprecation_warning__ = '[3.10] *BoxTemplate classes are deprecated, use *CtxComponent instead (%(cls)s)' - - __registry__ = 'ctxcomponents' - __select__ = ~no_cnx() - - categories_in_order = () - cw_property_defs = { - _('visible'): dict(type='Boolean', default=True, - help=_('display the box or not')), - _('order'): dict(type='Int', default=99, - help=_('display order of the box')), - # XXX 'incontext' boxes are handled by the default primary view - _('context'): dict(type='String', default='left', - vocabulary=(_('left'), _('incontext'), _('right')), - help=_('context where this box should be displayed')), - } - context = 'left' - - def sort_actions(self, actions): - """return a list of (category, actions_sorted_by_title)""" - return sort_by_category(actions, self.categories_in_order) - - def mk_action(self, title, url, escape=True, **kwargs): - """factory function to create dummy actions compatible with the - .format_actions method - """ - if escape: - title = xml_escape(title) - return self.box_action(self._action(title, url, **kwargs)) - - def _action(self, title, url, **kwargs): - return UnregisteredAction(self._cw, title, url, **kwargs) - - # formating callbacks - - def boxitem_link_tooltip(self, action): - if action.__regid__: - return u'keyword: %s' % action.__regid__ - return u'' - - def box_action(self, action): - klass = getattr(action, 'html_class', lambda: None)() - return BoxLink(action.url(), self._cw._(action.title), - klass, self.boxitem_link_tooltip(action)) - - -class RQLBoxTemplate(BoxTemplate): - """abstract box for boxes displaying the content of a rql query not - related to the current result set. - """ - - # to be defined in concrete classes - rql = title = None - - def to_display_rql(self): - assert self.rql is not None, self.__regid__ - return (self.rql,) - - def call(self, **kwargs): - try: - rset = self._cw.execute(*self.to_display_rql()) - except Unauthorized: - # can't access to something in the query, forget this box - return - if len(rset) == 0: - return - box = BoxWidget(self._cw._(self.title), self.__regid__) - for i, (teid, tname) in enumerate(rset): - entity = rset.get_entity(i, 0) - box.append(self.mk_action(tname, entity.absolute_url())) - box.render(w=self.w) - - -class UserRQLBoxTemplate(RQLBoxTemplate): - """same as rql box template but the rql is build using the eid of the - request's user - """ - - def to_display_rql(self): - assert self.rql is not None, self.__regid__ - return (self.rql, {'x': self._cw.user.eid}) - - -class EntityBoxTemplate(BoxTemplate): - """base class for boxes related to a single entity""" - __select__ = BoxTemplate.__select__ & one_line_rset() - context = 'incontext' - - def call(self, row=0, col=0, **kwargs): - """classes inheriting from EntityBoxTemplate should define cell_call""" - self.cell_call(row, col, **kwargs) - -from cubicweb.web.component import AjaxEditRelationCtxComponent, EditRelationMixIn - - -class EditRelationBoxTemplate(EditRelationMixIn, EntityBoxTemplate): - """base class for boxes which let add or remove entities linked - by a given relation - - subclasses should define at least id, rtype and target - class attributes. 
- """ - rtype = None - def cell_call(self, row, col, view=None, **kwargs): - self._cw.add_js('cubicweb.ajax.js') - entity = self.cw_rset.get_entity(row, col) - title = display_name(self._cw, self.rtype, get_role(self), - context=entity.cw_etype) - box = SideBoxWidget(title, self.__regid__) - related = self.related_boxitems(entity) - unrelated = self.unrelated_boxitems(entity) - box.extend(related) - if related and unrelated: - box.append(BoxSeparator()) - box.extend(unrelated) - box.render(self.w) - - def box_item(self, entity, etarget, rql, label): - label = super(EditRelationBoxTemplate, self).box_item( - entity, etarget, rql, label) - return RawBoxItem(label, liclass=u'invisible') - - -AjaxEditRelationBoxTemplate = class_renamed( - 'AjaxEditRelationBoxTemplate', AjaxEditRelationCtxComponent, - '[3.10] AjaxEditRelationBoxTemplate has been renamed to AjaxEditRelationCtxComponent (%(cls)s)') diff -r 058bb3dc685f -r 0b59724cb3f2 web/captcha.py --- a/web/captcha.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,98 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Simple captcha library, based on PIL. Monkey patch functions in this module -if you want something better... -""" - -__docformat__ = "restructuredtext en" - -from random import randint, choice -from io import BytesIO - -from six.moves import range - -from PIL import Image, ImageFont, ImageDraw, ImageFilter - - -from time import time - -from cubicweb import tags -from cubicweb.web import ProcessFormError, formwidgets as fw - - -def pil_captcha(text, fontfile, fontsize): - """Generate a captcha image. Return a PIL image object. 
- - adapted from http://code.activestate.com/recipes/440588/ - """ - # randomly select the foreground color - fgcolor = randint(0, 0xffff00) - # make the background color the opposite of fgcolor - bgcolor = fgcolor ^ 0xffffff - # create a font object - font = ImageFont.truetype(fontfile, fontsize) - # determine dimensions of the text - dim = font.getsize(text) - # create a new image slightly larger that the text - img = Image.new('RGB', (dim[0]+5, dim[1]+5), bgcolor) - draw = ImageDraw.Draw(img) - # draw 100 random colored boxes on the background - x, y = img.size - for num in range(100): - draw.rectangle((randint(0, x), randint(0, y), - randint(0, x), randint(0, y)), - fill=randint(0, 0xffffff)) - # add the text to the image - draw.text((3, 3), text, font=font, fill=fgcolor) - img = img.filter(ImageFilter.EDGE_ENHANCE_MORE) - return img - - -def captcha(fontfile, fontsize, size=5, format='JPEG'): - """Generate an arbitrary text, return it together with a buffer containing - the captcha image for the text - """ - text = u''.join(choice('QWERTYUOPASDFGHJKLZXCVBNM') for i in range(size)) - img = pil_captcha(text, fontfile, fontsize) - out = BytesIO() - img.save(out, format) - out.seek(0) - return text, out - - -class CaptchaWidget(fw.TextInput): - def render(self, form, field, renderer=None): - # t=int(time()*100) to make sure img is not cached - src = form._cw.build_url('view', vid='captcha', t=int(time()*100), - captchakey=field.input_name(form)) - img = tags.img(src=src, alt=u'captcha') - img = u'
<div class="captcha">%s</div>
      ' % img - return img + super(CaptchaWidget, self).render(form, field, renderer) - - def process_field_data(self, form, field): - captcha = form._cw.session.data.pop(field.input_name(form), None) - val = super(CaptchaWidget, self).process_field_data(form, field) - if val is None: - return val # required will be checked by field - if captcha is None: - msg = form._cw._('unable to check captcha, please try again') - raise ProcessFormError(msg) - elif val.lower() != captcha.lower(): - msg = form._cw._('incorrect captcha value') - raise ProcessFormError(msg) - return val diff -r 058bb3dc685f -r 0b59724cb3f2 web/component.py --- a/web/component.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,752 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""abstract component class and base components definition for CubicWeb web -client -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from six import PY3, add_metaclass, text_type - -from logilab.common.deprecation import class_deprecated, class_renamed, deprecated -from logilab.mtconverter import xml_escape - -from cubicweb import Unauthorized, role, target, tags -from cubicweb.schema import display_name -from cubicweb.uilib import js, domid -from cubicweb.utils import json_dumps, js_href -from cubicweb.view import ReloadableMixIn, Component -from cubicweb.predicates import (no_cnx, paginated_rset, one_line_rset, - non_final_entity, partial_relation_possible, - partial_has_related_entities) -from cubicweb.appobject import AppObject -from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs - - -# abstract base class for navigation components ################################ - -class NavigationComponent(Component): - """abstract base class for navigation components""" - __regid__ = 'navigation' - __select__ = paginated_rset() - - cw_property_defs = { - _('visible'): dict(type='Boolean', default=True, - help=_('display the component or not')), - } - - page_size_property = 'navigation.page-size' - start_param = '__start' - stop_param = '__stop' - page_link_templ = u'%s' - selected_page_link_templ = u'%s' - previous_page_link_templ = next_page_link_templ = page_link_templ - - def __init__(self, req, rset, **kwargs): - super(NavigationComponent, self).__init__(req, rset=rset, **kwargs) - self.starting_from = 0 - self.total = rset.rowcount - - def get_page_size(self): - try: - return self._page_size - except AttributeError: - page_size = self.cw_extra_kwargs.get('page_size') - if page_size is None: - if 'page_size' in self._cw.form: - page_size = int(self._cw.form['page_size']) - else: - page_size = self._cw.property_value(self.page_size_property) - self._page_size = page_size - return page_size - - def set_page_size(self, 
page_size): - self._page_size = page_size - - page_size = property(get_page_size, set_page_size) - - def page_boundaries(self): - try: - stop = int(self._cw.form[self.stop_param]) + 1 - start = int(self._cw.form[self.start_param]) - except KeyError: - start, stop = 0, self.page_size - if start >= len(self.cw_rset): - start, stop = 0, self.page_size - self.starting_from = start - return start, stop - - def clean_params(self, params): - if self.start_param in params: - del params[self.start_param] - if self.stop_param in params: - del params[self.stop_param] - - def page_url(self, path, params, start=None, stop=None): - params = dict(params) - params['__fromnavigation'] = 1 - if start is not None: - params[self.start_param] = start - if stop is not None: - params[self.stop_param] = stop - view = self.cw_extra_kwargs.get('view') - if view is not None and hasattr(view, 'page_navigation_url'): - url = view.page_navigation_url(self, path, params) - elif path in ('json', 'ajax'): - # 'ajax' is the new correct controller, but the old 'json' - # controller should still be supported - url = self.ajax_page_url(**params) - else: - url = self._cw.build_url(path, **params) - # XXX hack to avoid opening a new page containing the evaluation of the - # js expression on ajax call - if url.startswith('javascript:'): - url += '; $.noop();' - return url - - def ajax_page_url(self, **params): - divid = params.setdefault('divid', 'pageContent') - params['rql'] = self.cw_rset.printable_rql() - return js_href("$(%s).loadxhtml(AJAX_PREFIX_URL, %s, 'get', 'swap')" % ( - json_dumps('#'+divid), js.ajaxFuncArgs('view', params))) - - def page_link(self, path, params, start, stop, content): - url = xml_escape(self.page_url(path, params, start, stop)) - if start == self.starting_from: - return self.selected_page_link_templ % (url, content, content) - return self.page_link_templ % (url, content, content) - - @property - def prev_icon_url(self): - return xml_escape(self._cw.data_url('go_prev.png')) - - @property - def next_icon_url(self): - return xml_escape(self._cw.data_url('go_next.png')) - - @property - def no_previous_page_link(self): - return (u'%s' % - (self.prev_icon_url, self._cw._('there is no previous page'))) - - @property - def no_next_page_link(self): - return (u'%s' % - (self.next_icon_url, self._cw._('there is no next page'))) - - @property - def no_content_prev_link(self): - return (u'%s' % ( - (self.prev_icon_url, self._cw._('no content prev link')))) - - @property - def no_content_next_link(self): - return (u'%s' % - (self.next_icon_url, self._cw._('no content next link'))) - - def previous_link(self, path, params, content=None, title=_('previous_results')): - if not content: - content = self.no_content_prev_link - start = self.starting_from - if not start : - return self.no_previous_page_link - start = max(0, start - self.page_size) - stop = start + self.page_size - 1 - url = xml_escape(self.page_url(path, params, start, stop)) - return self.previous_page_link_templ % (url, self._cw._(title), content) - - def next_link(self, path, params, content=None, title=_('next_results')): - if not content: - content = self.no_content_next_link - start = self.starting_from + self.page_size - if start >= self.total: - return self.no_next_page_link - stop = start + self.page_size - 1 - url = xml_escape(self.page_url(path, params, start, stop)) - return self.next_page_link_templ % (url, self._cw._(title), content) - - -# new contextual components system ############################################# - -def 
override_ctx(cls, **kwargs): - cwpdefs = cls.cw_property_defs.copy() - cwpdefs['context'] = cwpdefs['context'].copy() - cwpdefs['context'].update(kwargs) - return cwpdefs - - -class EmptyComponent(Exception): - """some selectable component has actually no content and should not be - rendered - """ - - -class Link(object): - """a link to a view or action in the ui. - - Use this rather than `cw.web.htmlwidgets.BoxLink`. - - Note this class could probably be avoided with a proper DOM on the server - side. - """ - newstyle = True - - def __init__(self, href, label, **attrs): - self.href = href - self.label = label - self.attrs = attrs - - def __unicode__(self): - return tags.a(self.label, href=self.href, **self.attrs) - - if PY3: - __str__ = __unicode__ - - def render(self, w): - w(tags.a(self.label, href=self.href, **self.attrs)) - - def __repr__(self): - return '<%s: href=%r label=%r %r>' % (self.__class__.__name__, - self.href, self.label, self.attrs) - - -class Separator(object): - """a menu separator. - - Use this rather than `cw.web.htmlwidgets.BoxSeparator`. - """ - newstyle = True - - def render(self, w): - w(u'
      ') - - -def _bwcompatible_render_item(w, item): - if hasattr(item, 'render'): - if getattr(item, 'newstyle', False): - if isinstance(item, Separator): - w(u'') - item.render(w) - w(u'
<ul>') - else: - w(u'<li>') - item.render(w) - w(u'</li>') - else: - item.render(w) # XXX displays <li> by itself - else: - w(u'<li>%s</li>
      • ' % item) - - -class Layout(Component): - __regid__ = 'component_layout' - __abstract__ = True - - def init_rendering(self): - """init view for rendering. Return true if we should go on, false - if we should stop now. - """ - view = self.cw_extra_kwargs['view'] - try: - view.init_rendering() - except Unauthorized as ex: - self.warning("can't render %s: %s", view, ex) - return False - except EmptyComponent: - return False - return True - - -class LayoutableMixIn(object): - layout_id = None # to be defined in concret class - layout_args = {} - - def layout_render(self, w, **kwargs): - getlayout = self._cw.vreg['components'].select - layout = getlayout(self.layout_id, self._cw, **self.layout_select_args()) - layout.render(w) - - def layout_select_args(self): - args = dict(rset=self.cw_rset, row=self.cw_row, col=self.cw_col, - view=self) - args.update(self.layout_args) - return args - - -class CtxComponent(LayoutableMixIn, AppObject): - """base class for contextual components. The following contexts are - predefined: - - * boxes: 'left', 'incontext', 'right' - * section: 'navcontenttop', 'navcontentbottom', 'navtop', 'navbottom' - * other: 'ctxtoolbar' - - The 'incontext', 'navcontenttop', 'navcontentbottom' and 'ctxtoolbar' - contexts are handled by the default primary view, others by the default main - template. - - All subclasses may not support all those contexts (for instance if it can't - be displayed as box, or as a toolbar icon). You may restrict allowed context - as follows: - - .. sourcecode:: python - - class MyComponent(CtxComponent): - cw_property_defs = override_ctx(CtxComponent, - vocabulary=[list of contexts]) - context = 'my default context' - - You can configure a component's default context by simply giving an - appropriate value to the `context` class attribute, as seen above. - """ - __registry__ = 'ctxcomponents' - __select__ = ~no_cnx() - - categories_in_order = () - cw_property_defs = { - _('visible'): dict(type='Boolean', default=True, - help=_('display the box or not')), - _('order'): dict(type='Int', default=99, - help=_('display order of the box')), - _('context'): dict(type='String', default='left', - vocabulary=(_('left'), _('incontext'), _('right'), - _('navtop'), _('navbottom'), - _('navcontenttop'), _('navcontentbottom'), - _('ctxtoolbar')), - help=_('context where this component should be displayed')), - } - visible = True - order = 0 - context = 'left' - contextual = False - title = None - layout_id = 'component_layout' - - def render(self, w, **kwargs): - self.layout_render(w, **kwargs) - - def layout_select_args(self): - args = super(CtxComponent, self).layout_select_args() - try: - # XXX ensure context is given when the component is reloaded through - # ajax - args['context'] = self.cw_extra_kwargs['context'] - except KeyError: - args['context'] = self.cw_propval('context') - return args - - def init_rendering(self): - """init rendering callback: that's the good time to check your component - has some content to display. If not, you can still raise - :exc:`EmptyComponent` to inform it should be skipped. - - Also, :exc:`Unauthorized` will be caught, logged, then the component - will be skipped. 
- """ - self.items = [] - - @property - def domid(self): - """return the HTML DOM identifier for this component""" - return domid(self.__regid__) - - @property - def cssclass(self): - """return the CSS class name for this component""" - return domid(self.__regid__) - - def render_title(self, w): - """return the title for this component""" - if self.title: - w(self._cw._(self.title)) - - def render_body(self, w): - """return the body (content) for this component""" - raise NotImplementedError() - - def render_items(self, w, items=None, klass=u'boxListing'): - if items is None: - items = self.items - assert items - w(u'
          ' % klass) - for item in items: - _bwcompatible_render_item(w, item) - w(u'
        ') - - def append(self, item): - self.items.append(item) - - def action_link(self, action): - return self.link(self._cw._(action.title), action.url()) - - def link(self, title, url, **kwargs): - if self._cw.selected(url): - try: - kwargs['klass'] += ' selected' - except KeyError: - kwargs['klass'] = 'selected' - return Link(url, title, **kwargs) - - def separator(self): - return Separator() - - -class EntityCtxComponent(CtxComponent): - """base class for boxes related to a single entity""" - __select__ = CtxComponent.__select__ & non_final_entity() & one_line_rset() - context = 'incontext' - contextual = True - - def __init__(self, *args, **kwargs): - super(EntityCtxComponent, self).__init__(*args, **kwargs) - try: - entity = kwargs['entity'] - except KeyError: - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - self.entity = entity - - def layout_select_args(self): - args = super(EntityCtxComponent, self).layout_select_args() - args['entity'] = self.entity - return args - - @property - def domid(self): - return domid(self.__regid__) + text_type(self.entity.eid) - - def lazy_view_holder(self, w, entity, oid, registry='views'): - """add a holder and return a URL that may be used to replace this - holder by the html generate by the view specified by registry and - identifier. Registry defaults to 'views'. - """ - holderid = '%sHolder' % self.domid - w(u'
        ' % holderid) - params = self.cw_extra_kwargs.copy() - params.pop('view', None) - params.pop('entity', None) - form = params.pop('formparams', {}) - if entity.has_eid(): - eid = entity.eid - else: - eid = None - form['etype'] = entity.cw_etype - form['tempEid'] = entity.eid - args = [json_dumps(x) for x in (registry, oid, eid, params)] - return self._cw.ajax_replace_url( - holderid, fname='render', arg=args, **form) - - -# high level abstract classes ################################################## - -class RQLCtxComponent(CtxComponent): - """abstract box for boxes displaying the content of a rql query not related - to the current result set. - - Notice that this class's init_rendering implemention is overwriting context - result set (eg `cw_rset`) with the result set returned by execution of - `to_display_rql()`. - """ - rql = None - - def to_display_rql(self): - """return arguments to give to self._cw.execute, as a tuple, to build - the result set to be displayed by this box. - """ - assert self.rql is not None, self.__regid__ - return (self.rql,) - - def init_rendering(self): - super(RQLCtxComponent, self).init_rendering() - self.cw_rset = self._cw.execute(*self.to_display_rql()) - if not self.cw_rset: - raise EmptyComponent() - - def render_body(self, w): - rset = self.cw_rset - if len(rset[0]) == 2: - items = [] - for i, (eid, label) in enumerate(rset): - entity = rset.get_entity(i, 0) - items.append(self.link(label, entity.absolute_url())) - else: - items = [self.link(e.dc_title(), e.absolute_url()) - for e in rset.entities()] - self.render_items(w, items) - - -class EditRelationMixIn(ReloadableMixIn): - - def box_item(self, entity, etarget, fname, label): - """builds HTML link to edit relation between `entity` and `etarget`""" - args = {role(self) : entity.eid, target(self): etarget.eid} - # for each target, provide a link to edit the relation - jscall = js.cw.utils.callAjaxFuncThenReload(fname, - self.rtype, - args['subject'], - args['object']) - return u'[%s] %s' % ( - xml_escape(text_type(jscall)), label, etarget.view('incontext')) - - def related_boxitems(self, entity): - return [self.box_item(entity, etarget, 'delete_relation', u'-') - for etarget in self.related_entities(entity)] - - def related_entities(self, entity): - return entity.related(self.rtype, role(self), entities=True) - - def unrelated_boxitems(self, entity): - return [self.box_item(entity, etarget, 'add_relation', u'+') - for etarget in self.unrelated_entities(entity)] - - def unrelated_entities(self, entity): - """returns the list of unrelated entities, using the entity's - appropriate vocabulary function - """ - skip = set(text_type(e.eid) for e in entity.related(self.rtype, role(self), - entities=True)) - skip.add(None) - skip.add(INTERNAL_FIELD_VALUE) - filteretype = getattr(self, 'etype', None) - entities = [] - form = self._cw.vreg['forms'].select('edition', self._cw, - rset=self.cw_rset, - row=self.cw_row or 0) - field = form.field_by_name(self.rtype, role(self), entity.e_schema) - for _, eid in field.vocabulary(form): - if eid not in skip: - entity = self._cw.entity_from_eid(eid) - if filteretype is None or entity.cw_etype == filteretype: - entities.append(entity) - return entities - -# XXX should be a view usable using uicfg -class EditRelationCtxComponent(EditRelationMixIn, EntityCtxComponent): - """base class for boxes which let add or remove entities linked by a given - relation - - subclasses should define at least id, rtype and target class attributes. 
- """ - # to be defined in concrete classes - rtype = None - - def render_title(self, w): - w(display_name(self._cw, self.rtype, role(self), - context=self.entity.cw_etype)) - - def render_body(self, w): - self._cw.add_js('cubicweb.ajax.js') - related = self.related_boxitems(self.entity) - unrelated = self.unrelated_boxitems(self.entity) - self.items.extend(related) - if related and unrelated: - self.items.append(u'
        ') - self.items.extend(unrelated) - self.render_items(w) - - -class AjaxEditRelationCtxComponent(EntityCtxComponent): - __select__ = EntityCtxComponent.__select__ & ( - partial_relation_possible(action='add') | partial_has_related_entities()) - - # view used to display related entties - item_vid = 'incontext' - # values separator when multiple values are allowed - separator = ',' - # msgid of the message to display when some new relation has been added/removed - added_msg = None - removed_msg = None - - # to be defined in concrete classes - rtype = role = target_etype = None - # class attributes below *must* be set in concrete classes (additionally to - # rtype / role [/ target_etype]. They should correspond to js_* methods on - # the json controller - - # function(eid) - # -> expected to return a list of values to display as input selector - # vocabulary - fname_vocabulary = None - - # function(eid, value) - # -> handle the selector's input (eg create necessary entities and/or - # relations). If the relation is multiple, you'll get a list of value, else - # a single string value. - fname_validate = None - - # function(eid, linked entity eid) - # -> remove the relation - fname_remove = None - - def __init__(self, *args, **kwargs): - super(AjaxEditRelationCtxComponent, self).__init__(*args, **kwargs) - self.rdef = self.entity.e_schema.rdef(self.rtype, self.role, self.target_etype) - - def render_title(self, w): - w(self.rdef.rtype.display_name(self._cw, self.role, - context=self.entity.cw_etype)) - - def add_js_css(self): - self._cw.add_js(('jquery.ui.js', 'cubicweb.widgets.js')) - self._cw.add_js(('cubicweb.ajax.js', 'cubicweb.ajax.box.js')) - self._cw.add_css('jquery.ui.css') - return True - - def render_body(self, w): - req = self._cw - entity = self.entity - related = entity.related(self.rtype, self.role) - if self.role == 'subject': - mayadd = self.rdef.has_perm(req, 'add', fromeid=entity.eid) - else: - mayadd = self.rdef.has_perm(req, 'add', toeid=entity.eid) - js_css_added = False - if mayadd: - js_css_added = self.add_js_css() - _ = req._ - if related: - maydel = None - w(u'') - for rentity in related.entities(): - if maydel is None: - # Only check permission for the first related. - if self.role == 'subject': - fromeid, toeid = entity.eid, rentity.eid - else: - fromeid, toeid = rentity.eid, entity.eid - maydel = self.rdef.has_perm( - req, 'delete', fromeid=fromeid, toeid=toeid) - # for each related entity, provide a link to remove the relation - subview = rentity.view(self.item_vid) - if maydel: - if not js_css_added: - js_css_added = self.add_js_css() - jscall = text_type(js.ajaxBoxRemoveLinkedEntity( - self.__regid__, entity.eid, rentity.eid, - self.fname_remove, - self.removed_msg and _(self.removed_msg))) - w(u'' - '' % (xml_escape(jscall), - subview)) - else: - w(u'' % (subview)) - w(u'
        %s
        %s
        ') - else: - w(_('no related entity')) - if mayadd: - multiple = self.rdef.role_cardinality(self.role) in '*+' - w(u'
        ') - jscall = text_type(js.ajaxBoxShowSelector( - self.__regid__, entity.eid, self.fname_vocabulary, - self.fname_validate, self.added_msg and _(self.added_msg), - _(stdmsgs.BUTTON_OK[0]), _(stdmsgs.BUTTON_CANCEL[0]), - multiple and self.separator)) - w('%s' % ( - xml_escape(jscall), - multiple and _('add_relation') or _('update_relation'))) - w(u'') - w(u'
        ' % self.domid) - w(u'
        ') - - -class RelatedObjectsCtxComponent(EntityCtxComponent): - """a contextual component to display entities related to another""" - __select__ = EntityCtxComponent.__select__ & partial_has_related_entities() - context = 'navcontentbottom' - rtype = None - role = 'subject' - - vid = 'list' - - def render_body(self, w): - rset = self.entity.related(self.rtype, role(self)) - self._cw.view(self.vid, rset, w=w) - - -# old contextual components, deprecated ######################################## - -@add_metaclass(class_deprecated) -class EntityVComponent(Component): - """abstract base class for additinal components displayed in content - headers and footer according to: - - * the displayed entity's type - * a context (currently 'header' or 'footer') - - it should be configured using .accepts, .etype, .rtype, .target and - .context class attributes - """ - __deprecation_warning__ = '[3.10] *VComponent classes are deprecated, use *CtxComponent instead (%(cls)s)' - - __registry__ = 'ctxcomponents' - __select__ = one_line_rset() - - cw_property_defs = { - _('visible'): dict(type='Boolean', default=True, - help=_('display the component or not')), - _('order'): dict(type='Int', default=99, - help=_('display order of the component')), - _('context'): dict(type='String', default='navtop', - vocabulary=(_('navtop'), _('navbottom'), - _('navcontenttop'), _('navcontentbottom'), - _('ctxtoolbar')), - help=_('context where this component should be displayed')), - } - - context = 'navcontentbottom' - - def call(self, view=None): - if self.cw_rset is None: - self.entity_call(self.cw_extra_kwargs.pop('entity')) - else: - self.cell_call(0, 0, view=view) - - def cell_call(self, row, col, view=None): - self.entity_call(self.cw_rset.get_entity(row, col), view=view) - - def entity_call(self, entity, view=None): - raise NotImplementedError() - -class RelatedObjectsVComponent(EntityVComponent): - """a section to display some related entities""" - __select__ = EntityVComponent.__select__ & partial_has_related_entities() - - vid = 'list' - # to be defined in concrete classes - rtype = title = None - - def rql(self): - """override this method if you want to use a custom rql query""" - return None - - def cell_call(self, row, col, view=None): - rql = self.rql() - if rql is None: - entity = self.cw_rset.get_entity(row, col) - rset = entity.related(self.rtype, role(self)) - else: - eid = self.cw_rset[row][col] - rset = self._cw.execute(self.rql(), {'x': eid}) - if not rset.rowcount: - return - self.w(u'
<div class="%s">' % self.cssclass) - self.w(u'<h4>%s</h4>\n' % self._cw._(self.title).capitalize()) - self.wview(self.vid, rset) - self.w(u'</div>
        ') diff -r 058bb3dc685f -r 0b59724cb3f2 web/controller.py --- a/web/controller.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,221 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""abstract controller classe for CubicWeb web client""" - -__docformat__ = "restructuredtext en" - -from six import PY2 - -from logilab.mtconverter import xml_escape -from logilab.common.registry import yes -from logilab.common.deprecation import deprecated - -from cubicweb.appobject import AppObject -from cubicweb.mail import format_mail -from cubicweb.web import LOGGER, Redirect, RequestError - - -NAVIGATION_PARAMETERS = (('vid', '__redirectvid'), - ('rql', '__redirectrql'), - ('__redirectpath', '__redirectpath'), - ('__redirectparams', '__redirectparams'), - ) -NAV_FORM_PARAMETERS = tuple(fp for ap, fp in NAVIGATION_PARAMETERS) - -def redirect_params(form): - """transform redirection parameters into navigation parameters - """ - params = {} - # extract navigation parameters from redirection parameters - for navparam, redirectparam in NAVIGATION_PARAMETERS: - if navparam == redirectparam: - continue - if redirectparam in form: - params[navparam] = form[redirectparam] - return params - -def append_url_params(url, params): - """append raw parameters to the url. Given parameters, if any, are expected - to be already url-quoted. - """ - if params: - if not '?' in url: - url += '?' - else: - url += '&' - url += params - return url - - -class Controller(AppObject): - """a controller is responsible to make necessary stuff to publish - a request. There is usually at least one standard "view" controller - and another linked by forms to edit objects ("edit"). 
- """ - __registry__ = 'controllers' - __select__ = yes() - - def __init__(self, *args, **kwargs): - self.appli = kwargs.pop('appli', None) - super(Controller, self).__init__(*args, **kwargs) - # attributes use to control after edition redirection - self._after_deletion_path = None - self._edited_entity = None - - def publish(self, rset=None): - """publish the current request, with an optional input rset""" - raise NotImplementedError - - # generic methods useful for concrete implementations ###################### - - def process_rql(self): - """execute rql if specified""" - req = self._cw - rql = req.form.get('rql') - if rql: - req.ensure_ro_rql(rql) - if PY2 and not isinstance(rql, unicode): - rql = unicode(rql, req.encoding) - pp = req.vreg['components'].select_or_none('magicsearch', req) - if pp is not None: - return pp.process_query(rql) - if 'eid' in req.form and not isinstance(req.form['eid'], list): - return req.eid_rset(req.form['eid']) - return None - - def notify_edited(self, entity): - """called by edit_entity() to notify which entity is edited""" - # NOTE: we can't use entity.rest_path() at this point because - # rest_path() could rely on schema constraints (such as a required - # relation) that might not be satisfied yet (in case of creations) - if not self._edited_entity: - self._edited_entity = entity - - @deprecated('[3.18] call view.set_http_cache_headers then ' - '.is_client_cache_valid() method and return instead') - def validate_cache(self, view): - view.set_http_cache_headers() - self._cw.validate_cache() - - def sendmail(self, recipient, subject, body): - senderemail = self._cw.user.cw_adapt_to('IEmailable').get_email() - msg = format_mail({'email' : senderemail, - 'name' : self._cw.user.dc_title(),}, - [recipient], body, subject) - if not self._cw.vreg.config.sendmails([(msg, [recipient])]): - msg = self._cw._('could not connect to the SMTP server') - url = self._cw.build_url(__message=msg) - raise Redirect(url) - - def reset(self): - """reset form parameters and redirect to a view determinated by given - parameters - """ - newparams = {} - # sets message if needed - # XXX - don't call .message twice since it pops the id - msg = self._cw.message - if msg: - newparams['_cwmsgid'] = self._cw.set_redirect_message(msg) - if '__action_apply' in self._cw.form: - self._return_to_edition_view(newparams) - else: - self._return_to_original_view(newparams) - - def _return_to_original_view(self, newparams): - """validate-button case""" - # transforms __redirect[*] parameters into regular form parameters - newparams.update(redirect_params(self._cw.form)) - # find out if we have some explicit `rql` needs - rql = newparams.pop('rql', None) - # if rql is needed (explicit __redirectrql or multiple deletions for - # instance), we have to use the old `view?rql=...` form - if rql: - path = 'view' - newparams['rql'] = rql - elif '__redirectpath' in self._cw.form: - # if redirect path was explicitly specified in the form, use it - path = self._cw.form['__redirectpath'] - if (self._edited_entity and path != self._edited_entity.rest_path() - and '_cwmsgid' in newparams): - # are we here on creation or modification? 
- if any(eid == self._edited_entity.eid - for eid in self._cw.data.get('eidmap', {}).values()): - msg = self._cw._('click here to see created entity') - else: - msg = self._cw._('click here to see edited entity') - msg = u'(%s)' % (xml_escape(self._edited_entity.absolute_url()), msg) - self._cw.append_to_redirect_message(msg) - elif self._after_deletion_path: - # else it should have been set during form processing - path, params = self._after_deletion_path - params = dict(params) # params given as tuple - params.update(newparams) - newparams = params - elif self._edited_entity: - # clear caches in case some attribute participating to the rest path - # has been modified - self._edited_entity.cw_clear_all_caches() - path = self._edited_entity.rest_path() - else: - path = 'view' - url = self._cw.build_url(path, **newparams) - url = append_url_params(url, self._cw.form.get('__redirectparams')) - raise Redirect(url) - - def _return_to_edition_view(self, newparams): - """apply-button case""" - form = self._cw.form - if self._edited_entity: - path = self._edited_entity.rest_path() - newparams.pop('rql', None) - # else, fallback on the old `view?rql=...` url form - elif 'rql' in self._cw.form: - path = 'view' - newparams['rql'] = form['rql'] - else: - self.warning('the edited data seems inconsistent') - path = 'view' - # pick up the correction edition view - if form.get('__form_id'): - newparams['vid'] = form['__form_id'] - # re-insert copy redirection parameters - for redirectparam in NAV_FORM_PARAMETERS: - if redirectparam in form: - newparams[redirectparam] = form[redirectparam] - raise Redirect(self._cw.build_url(path, **newparams)) - - - def _redirect(self, newparams): - """Raise a redirect. We use __redirectpath if it specified, else we - return to the home page. - """ - if '__redirectpath' in self._cw.form: - # if redirect path was explicitly specified in the form, use it - path = self._cw.form['__redirectpath'] - url = self._cw.build_url(path) - url = append_url_params(url, self._cw.form.get('__redirectparams')) - else: - url = self._cw.base_url() - # The newparams must update the params in all cases - url = self._cw.rebuild_url(url, **newparams) - raise Redirect(url) - - -from cubicweb import set_log_methods -set_log_methods(Controller, LOGGER) diff -r 058bb3dc685f -r 0b59724cb3f2 web/cors.py --- a/web/cors.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2014 Logilab, PARIS - -"""A set of utility functions to handle CORS requests - -Unless specified, all references in this file are related to: - http://www.w3.org/TR/cors - -The provided implementation roughly follows: - http://www.html5rocks.com/static/images/cors_server_flowchart.png - -See also: - https://developer.mozilla.org/en-US/docs/HTTP/Access_control_CORS - -""" - -from six.moves.urllib.parse import urlsplit - -from cubicweb.web import LOGGER -info = LOGGER.info - -class CORSFailed(Exception): - """Raised when cross origin resource sharing checks failed""" - - -class CORSPreflight(Exception): - """Raised when cross origin resource sharing checks detects the - request as a valid preflight request""" - - -def process_request(req, config): - """ - Process a request to apply CORS specification algorithms - - Check whether the CORS specification is respected and set corresponding - headers to ensure response complies with the specification. - - In case of non-compliance, no CORS-related header is set. 
- """ - base_url = urlsplit(req.base_url()) - expected_host = '://'.join((base_url.scheme, base_url.netloc)) - if not req.get_header('Origin') or req.get_header('Origin') == expected_host: - # not a CORS request, nothing to do - return - try: - # handle cross origin resource sharing (CORS) - if req.http_method() == 'OPTIONS': - if req.get_header('Access-Control-Request-Method'): - # preflight CORS request - process_preflight(req, config) - else: # Simple CORS or actual request - process_simple(req, config) - except CORSFailed as exc: - info('Cross origin resource sharing failed: %s' % exc) - except CORSPreflight: - info('Cross origin resource sharing: valid Preflight request %s') - raise - -def process_preflight(req, config): - """cross origin resource sharing (preflight) - Cf http://www.w3.org/TR/cors/#resource-preflight-requests - """ - origin = check_origin(req, config) - allowed_methods = set(config['access-control-allow-methods']) - allowed_headers = set(config['access-control-allow-headers']) - try: - method = req.get_header('Access-Control-Request-Method') - except ValueError: - raise CORSFailed('Access-Control-Request-Method is incorrect') - if method not in allowed_methods: - raise CORSFailed('Method is not allowed') - try: - req.get_header('Access-Control-Request-Headers', ()) - except ValueError: - raise CORSFailed('Access-Control-Request-Headers is incorrect') - req.set_header('Access-Control-Allow-Methods', allowed_methods, raw=False) - req.set_header('Access-Control-Allow-Headers', allowed_headers, raw=False) - - process_common(req, config, origin) - raise CORSPreflight() - -def process_simple(req, config): - """Handle the Simple Cross-Origin Request case - """ - origin = check_origin(req, config) - exposed_headers = config['access-control-expose-headers'] - if exposed_headers: - req.set_header('Access-Control-Expose-Headers', exposed_headers, raw=False) - process_common(req, config, origin) - -def process_common(req, config, origin): - req.set_header('Access-Control-Allow-Origin', origin) - # in CW, we always support credential/authentication - req.set_header('Access-Control-Allow-Credentials', 'true') - -def check_origin(req, config): - origin = req.get_header('Origin').lower() - allowed_origins = config.get('access-control-allow-origin') - if not allowed_origins: - raise CORSFailed('access-control-allow-origin is not configured') - if '*' not in allowed_origins and origin not in allowed_origins: - raise CORSFailed('Origin is not allowed') - # bit of sanity check; see "6.3 Security" - myhost = urlsplit(req.base_url()).netloc - host = req.get_header('Host') - if host != myhost: - info('cross origin resource sharing detected possible ' - 'DNS rebinding attack Host header != host of base_url: ' - '%s != %s' % (host, myhost)) - raise CORSFailed('Host header and hostname do not match') - # include "Vary: Origin" header (see 6.4) - req.headers_out.addHeader('Vary', 'Origin') - return origin diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/accessories-text-editor.png Binary file web/data/accessories-text-editor.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/add_button.png Binary file web/data/add_button.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/asc.gif Binary file web/data/asc.gif has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/banner.png Binary file web/data/banner.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/bg.gif Binary file web/data/bg.gif has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/bg_trame_grise.png 
Binary file web/data/bg_trame_grise.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/black-check.png Binary file web/data/black-check.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/black-uncheck.png Binary file web/data/black-uncheck.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/bullet.png Binary file web/data/bullet.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/bullet_orange.png Binary file web/data/bullet_orange.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/button.png Binary file web/data/button.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/calendar.gif Binary file web/data/calendar.gif has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/cancel.png Binary file web/data/cancel.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/contextFreeBoxHeader.png Binary file web/data/contextFreeBoxHeader.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/contextualBoxHeader.png Binary file web/data/contextualBoxHeader.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/critical.png Binary file web/data/critical.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/cubicweb.acl.css --- a/web/data/cubicweb.acl.css Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,51 +0,0 @@ -/* styles for access control forms) - * - * :organization: Logilab - * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. - * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr - */ - -/******************************************************************************/ -/* security edition form (views/management.py) web/views/schema.py */ -/******************************************************************************/ - -h2.schema{ - color: %(aColor)s; -} - -table.schemaInfo td a.users{ - color : #00CC33; - font-weight: bold; -} - -table.schemaInfo td a.guests{ - color: #ff7700; - font-weight: bold; -} - -table.schemaInfo td a.owners{ - color: #8b0000; - font-weight: bold; -} - -table.schemaInfo td a.managers{ - color: #000000; - font-weight: bold; -} - -.discret, -table.schemaInfo td a.grey{ - color:#666; -} - -a.grey:hover{ - color:#000; -} - -.red{ - color: #ff7700; - } - -div#schema_security{ - width:100%; - } \ No newline at end of file diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/cubicweb.ajax.box.js --- a/web/data/cubicweb.ajax.box.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,96 +0,0 @@ -/** - * Functions for ajax boxes. - * - * :organization: Logilab - * :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
- * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr - * - */ - -function ajaxBoxValidateSelectorInput(boxid, eid, separator, fname, msg) { - var holderid = cw.utils.domid(boxid) + eid + 'Holder'; - var value = $('#' + holderid + 'Input').val(); - if (separator) { - value = $.map(value.split(separator), jQuery.trim); - } - var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs(fname, null, eid, value)); - d.addCallback(function() { - $('#' + holderid).empty(); - var formparams = ajaxFuncArgs('render', null, 'ctxcomponents', boxid, eid); - $('#' + cw.utils.domid(boxid) + eid).loadxhtml(AJAX_BASE_URL, formparams, null, 'swap'); - if (msg) { - document.location.hash = '#header'; - updateMessage(msg); - } - }); -} - -function ajaxBoxRemoveLinkedEntity(boxid, eid, relatedeid, delfname, msg) { - var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs(delfname, null, eid, relatedeid)); - d.addCallback(function() { - var formparams = ajaxFuncArgs('render', null, 'ctxcomponents', boxid, eid); - $('#' + cw.utils.domid(boxid) + eid).loadxhtml(AJAX_BASE_URL, formparams, null, 'swap'); - if (msg) { - document.location.hash = '#header'; - updateMessage(msg); - } - }); -} - -/** - * .. function:: ajaxBoxShowSelector(boxid, eid, unrelfname, - * addfname, msg, - * oklabel, cancellabel, - * separator=None) - * - * Display an ajax selector within a box of regid `boxid`, for entity with eid - * `eid`. - * - * Other parameters are: - * - * * `addfname`, name of the json controller method to call to add a relation - * - * * `msg`, message to display to the user when a relation has been added - * - * * `oklabel`/`cancellabel`, OK/cancel buttons label - * - * * `separator`, items separator if the field is multi-valued (will be - * considered mono-valued when not specified) - */ -function ajaxBoxShowSelector(boxid, eid, - unrelfname, - addfname, msg, - oklabel, cancellabel, - separator) { - var holderid = cw.utils.domid(boxid) + eid + 'Holder'; - var holder = $('#' + holderid); - if (holder.children().length) { - holder.empty(); - } - else { - var inputid = holderid + 'Input'; - var deferred = loadRemote(AJAX_BASE_URL, ajaxFuncArgs(unrelfname, null, eid)); - deferred.addCallback(function (unrelated) { - var input = INPUT({'type': 'text', 'id': inputid, 'size': 20}); - holder.append(input).show(); - var $input = $(input); - $input.keypress(function (evt) { - if (evt.keyCode == $.ui.keyCode.ENTER) { - ajaxBoxValidateSelectorInput(boxid, eid, separator, addfname, msg); - } - }); - $input.cwautocomplete(unrelated, {multiple: Boolean(separator)}); - var buttons = DIV({'class' : "sgformbuttons"}, - A({href : "javascript: $.noop();", - onclick : cw.utils.strFuncCall('ajaxBoxValidateSelectorInput', - boxid, eid, separator, addfname, msg)}, - oklabel), - ' / ', - A({'href' : "javascript: $.noop();", - 'onclick' : '$("#' + holderid + '").empty()'}, - cancellabel)); - holder.append(buttons); - $input.focus(); - }); - } -} diff -r 058bb3dc685f -r 0b59724cb3f2 web/data/cubicweb.ajax.js --- a/web/data/cubicweb.ajax.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,743 +0,0 @@ -/* copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. - * contact http://www.logilab.fr/ -- mailto:contact@logilab.fr - * - * This file is part of CubicWeb. 
- * - * CubicWeb is free software: you can redistribute it and/or modify it under the - * terms of the GNU Lesser General Public License as published by the Free - * Software Foundation, either version 2.1 of the License, or (at your option) - * any later version. - * - * CubicWeb is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS - * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more - * details. - * - * You should have received a copy of the GNU Lesser General Public License along - * with CubicWeb. If not, see . - */ - -/** - * .. function:: Deferred - * - * dummy ultra minimalist implementation of deferred for jQuery - */ - -cw.ajax = new Namespace('cw.ajax'); - -function Deferred() { - this.__init__(this); -} - -jQuery.extend(Deferred.prototype, { - __init__: function() { - this._onSuccess = []; - this._onFailure = []; - this._req = null; - this._result = null; - this._error = null; - }, - - addCallback: function(callback) { - if (this._req && (this._req.readyState == 4) && this._result) { - var args = [this._result, this._req]; - jQuery.merge(args, cw.utils.sliceList(arguments, 1)); - callback.apply(null, args); - } - else { - this._onSuccess.push([callback, cw.utils.sliceList(arguments, 1)]); - } - return this; - }, - - addErrback: function(callback) { - if (this._req && this._req.readyState == 4 && this._error) { - callback.apply(null, [this._error, this._req]); - } - else { - this._onFailure.push([callback, cw.utils.sliceList(arguments, 1)]); - } - return this; - }, - - success: function(result) { - this._result = result; - for (var i = 0; i < this._onSuccess.length; i++) { - var callback = this._onSuccess[i][0]; - var args = [result, this._req]; - jQuery.merge(args, this._onSuccess[i][1]); - callback.apply(null, args); - } - }, - - error: function(xhr, status, error) { - this._error = error; - for (var i = 0; i < this._onFailure.length; i++) { - var callback = this._onFailure[i][0]; - var args = [error, this._req]; - jQuery.merge(args, this._onFailure[i][1]); - if (callback !== undefined) - callback.apply(null, args); - } - } - -}); - -var AJAX_PREFIX_URL = 'ajax'; -var JSON_BASE_URL = BASE_URL + 'json?'; -var AJAX_BASE_URL = BASE_URL + AJAX_PREFIX_URL + '?'; - - -jQuery.extend(cw.ajax, { - /* variant of jquery evalScript with cache: true in ajax call */ - _evalscript: function ( i, elem ) { - var src = elem.getAttribute('src'); - if (src) { - jQuery.ajax({ - url: src, - async: false, - cache: true, - dataType: "script" - }); - } else { - jQuery.globalEval( elem.text || elem.textContent || elem.innerHTML || "" ); - } - if ( elem.parentNode ) { - elem.parentNode.removeChild( elem ); - } - }, - - evalscripts: function ( scripts ) { - if ( scripts.length ) { - jQuery.each(scripts, cw.ajax._evalscript); - } - }, - - /** - * returns true if `url` is a mod_concat-like url - * (e.g. 
http://..../data??resource1.js,resource2.js) - */ - _modconcatLikeUrl: function(url) { - var modconcat_rgx = new RegExp('(' + BASE_URL + 'data/([a-z0-9]+/)?)\\?\\?(.+)'); - return modconcat_rgx.exec(url); - }, - - /** - * decomposes a mod_concat-like url into its corresponding list of - * resources' urls - * >>> _listResources('http://foo.com/data/??a.js,b.js,c.js') - * ['http://foo.com/data/a.js', 'http://foo.com/data/b.js', 'http://foo.com/data/c.js'] - */ - _listResources: function(src) { - var resources = []; - var groups = cw.ajax._modconcatLikeUrl(src); - if (groups == null) { - resources.push(src); - } else { - var dataurl = groups[1]; - $.each(cw.utils.lastOf(groups).split(','), - function() { - resources.push(dataurl + this); - } - ); - } - return resources; - }, - - _buildMissingResourcesUrl: function(url, loadedResources) { - var resources = cw.ajax._listResources(url); - var missingResources = $.grep(resources, function(resource) { - return $.inArray(resource, loadedResources) == -1; - }); - cw.utils.extend(loadedResources, missingResources); - var missingResourceUrl = null; - if (missingResources.length == 1) { - // only one resource missing: build a node with a single resource url - // (maybe the browser has it in cache already) - missingResourceUrl = missingResources[0]; - } else if (missingResources.length > 1) { - // several resources missing: build a node with a concatenated - // resources url - var dataurl = cw.ajax._modconcatLikeUrl(url)[1]; - var missing_path = $.map(missingResources, function(resource) { - return resource.substring(dataurl.length); - }); - missingResourceUrl = dataurl + '??' + missing_path.join(','); - } - return missingResourceUrl; - }, - - _loadAjaxStylesheets: function($responseHead, $head) { - $responseHead.find('link[href]').each(function(i) { - var $srcnode = $(this); - var url = $srcnode.attr('href'); - if (url) { - var missingStylesheetsUrl = cw.ajax._buildMissingResourcesUrl(url, cw.loaded_links); - // compute concat-like url for missing resources and append - // element to $head - if (missingStylesheetsUrl) { - // IE has problems with dynamic CSS insertions. One symptom (among others) - // is a "1 item remaining" message in the status bar. (cf. #2356261) - // document.createStyleSheet needs to be used for this, although it seems - // that IE can't create more than 31 additional stylesheets with - // document.createStyleSheet. - if ($.browser.msie) { - document.createStyleSheet(missingStylesheetsUrl); - } else { - $srcnode.attr('href', missingStylesheetsUrl); - $srcnode.appendTo($head); - } - } - } - }); - $responseHead.find('link[href]').remove(); - }, - - _loadAjaxScripts: function($responseHead, $head) { - $responseHead.find('cubicweb\\:script').each(function(i) { - var $srcnode = $(this); - var url = $srcnode.attr('src'); - if (url) { - var missingScriptsUrl = cw.ajax._buildMissingResourcesUrl(url, cw.loaded_scripts); - if (missingScriptsUrl) { - $srcnode.attr('src', missingScriptsUrl); - /* special handling of - - - - - - - - - - - - - - -
[stripped markup of the QUnit test page "cubicweb.ajax.js functions tests"]
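The resource loading described in the deleted cubicweb.ajax.js (_modconcatLikeUrl, _listResources, _buildMissingResourcesUrl) splits a mod_concat-like url such as http://foo.com/data/??a.js,b.js,c.js into individual resource urls, then rebuilds a url covering only the resources that have not been loaded yet. A minimal Python sketch of that logic follows; the helper names list_resources and build_missing_resources_url are hypothetical (the removed code is JavaScript), so this is only an illustration of the algorithm, not part of the patch:

    # Sketch of the mod_concat url handling from the removed cubicweb.ajax.js;
    # helper names are illustrative only.
    def list_resources(url):
        """Split a mod_concat-like url (prefix??a.js,b.js) into resource urls."""
        if '??' not in url:
            return [url]
        prefix, names = url.split('??', 1)
        return [prefix + name for name in names.split(',')]

    def build_missing_resources_url(url, loaded):
        """Return a url covering only the resources of `url` not already in
        `loaded`, or None when everything is already loaded."""
        missing = [res for res in list_resources(url) if res not in loaded]
        loaded.extend(missing)
        if not missing:
            return None
        if len(missing) == 1:
            return missing[0]  # single resource: the browser may have it cached
        prefix = url.split('??', 1)[0]
        return prefix + '??' + ','.join(res[len(prefix):] for res in missing)

    loaded = ['http://foo.com/data/a.js']
    print(list_resources('http://foo.com/data/??a.js,b.js,c.js'))
    # ['http://foo.com/data/a.js', 'http://foo.com/data/b.js', 'http://foo.com/data/c.js']
    print(build_missing_resources_url('http://foo.com/data/??a.js,b.js,c.js', loaded))
    # 'http://foo.com/data/??b.js,c.js'

Requesting only the missing resources through one concatenated url keeps a single HTTP round-trip per ajax response while skipping anything already tracked in cw.loaded_scripts / cw.loaded_links.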
          - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/data/static/jstests/test_ajax.js --- a/web/test/data/static/jstests/test_ajax.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,274 +0,0 @@ -$(document).ready(function() { - - QUnit.module("ajax", { - setup: function() { - this.scriptsLength = $('head script[src]').length-1; - this.cssLength = $('head link[rel=stylesheet]').length-1; - // re-initialize cw loaded cache so that each tests run in a - // clean environment, have a lookt at _loadAjaxHtmlHead implementation - // in cubicweb.ajax.js for more information. - cw.loaded_scripts = []; - cw.loaded_links = []; - }, - teardown: function() { - $('head script[src]:lt(' + ($('head script[src]').length - 1 - this.scriptsLength) + ')').remove(); - $('head link[rel=stylesheet]:gt(' + this.cssLength + ')').remove(); - } - }); - - function jsSources() { - return $.map($('head script[src]'), function(script) { - return script.getAttribute('src'); - }); - } - - QUnit.test('test simple h1 inclusion (ajax_url0.html)', function (assert) { - assert.expect(3); - assert.equal($('#qunit-fixture').children().length, 0); - var done = assert.async(); - $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') - .addCallback(function() { - try { - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - } finally { - done(); - }; - } - ); - }); - - QUnit.test('test simple html head inclusion (ajax_url1.html)', function (assert) { - assert.expect(6); - var scriptsIncluded = jsSources(); - assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), - 1); - var done = assert.async(); - $('#qunit-fixture').loadxhtml('static/jstests/ajax_url1.html', null, 'GET') - .addCallback(function() { - try { - var origLength = scriptsIncluded.length; - scriptsIncluded = jsSources(); - // check that foo.js has been prepended to - assert.equal(scriptsIncluded.length, origLength + 1); - assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); - // check that
          has been removed - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('div.ajaxHtmlHead').length, 0); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - } finally { - done(); - }; - } - ); - }); - - QUnit.test('test addCallback', function (assert) { - assert.expect(3); - assert.equal($('#qunit-fixture').children().length, 0); - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); - d.addCallback(function() { - try { - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - } finally { - done(); - }; - }); - }); - - QUnit.test('test callback after synchronous request', function (assert) { - assert.expect(1); - var deferred = new Deferred(); - var result = jQuery.ajax({ - url: 'static/jstests/ajax_url0.html', - async: false, - beforeSend: function(xhr) { - deferred._req = xhr; - }, - success: function(data, status) { - deferred.success(data); - } - }); - var done = assert.async(); - deferred.addCallback(function() { - try { - // add an assertion to ensure the callback is executed - assert.ok(true, "callback is executed"); - } finally { - done(); - }; - }); - }); - - QUnit.test('test addCallback with parameters', function (assert) { - assert.expect(3); - assert.equal($('#qunit-fixture').children().length, 0); - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); - d.addCallback(function(data, req, arg1, arg2) { - try { - assert.equal(arg1, 'Hello'); - assert.equal(arg2, 'world'); - } finally { - done(); - }; - }, - 'Hello', 'world'); - }); - - QUnit.test('test callback after synchronous request with parameters', function (assert) { - assert.expect(3); - var deferred = new Deferred(); - deferred.addCallback(function(data, req, arg1, arg2) { - // add an assertion to ensure the callback is executed - try { - assert.ok(true, "callback is executed"); - assert.equal(arg1, 'Hello'); - assert.equal(arg2, 'world'); - } finally { - done(); - }; - }, - 'Hello', 'world'); - deferred.addErrback(function() { - // throw an exception to start errback chain - try { - throw this._error; - } finally { - done(); - }; - }); - var done = assert.async(); - var result = jQuery.ajax({ - url: 'static/jstests/ajax_url0.html', - async: false, - beforeSend: function(xhr) { - deferred._req = xhr; - }, - success: function(data, status) { - deferred.success(data); - } - }); - }); - - QUnit.test('test addErrback', function (assert) { - assert.expect(1); - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml('static/jstests/nonexistent.html', null, 'GET'); - d.addCallback(function() { - // should not be executed - assert.ok(false, "callback is executed"); - }); - d.addErrback(function() { - try { - assert.ok(true, "errback is executed"); - } finally { - done(); - }; - }); - }); - - QUnit.test('test callback execution order', function (assert) { - assert.expect(3); - var counter = 0; - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); - d.addCallback(function() { - assert.equal(++counter, 1); // should be executed first - }); - d.addCallback(function() { - assert.equal(++counter, 2); - }); - d.addCallback(function() { - try { - assert.equal(++counter, 3); - } finally { - done(); - } - }); - }); - - QUnit.test('test already included resources are ignored (ajax_url1.html)', function (assert) { - assert.expect(10); - var scriptsIncluded 
= jsSources(); - // NOTE: - assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), -1); - assert.equal($('head link').length, 1); - /* use endswith because in pytest context we have an absolute path */ - assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); - var done = assert.async(); - $('#qunit-fixture').loadxhtml('static/jstests/ajax_url1.html', null, 'GET') - .addCallback(function() { - var origLength = scriptsIncluded.length; - scriptsIncluded = jsSources(); - try { - // check that foo.js has been inserted in - assert.equal(scriptsIncluded.length, origLength + 1); - assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); - // check that
          has been removed - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('div.ajaxHtmlHead').length, 0); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - // qunit.css is not added twice - assert.equal($('head link').length, 1); - /* use endswith because in pytest context we have an absolute path */ - assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); - } finally { - done(); - } - } - ); - }); - - QUnit.test('test synchronous request loadRemote', function (assert) { - var res = loadRemote('static/jstests/ajaxresult.json', {}, - 'GET', true); - assert.deepEqual(res, ['foo', 'bar']); - }); - - QUnit.test('test event on CubicWeb', function (assert) { - assert.expect(1); - var done = assert.async(); - var events = null; - $(CubicWeb).bind('server-response', function() { - // check that server-response event on CubicWeb is triggered - events = 'CubicWeb'; - }); - $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') - .addCallback(function() { - try { - assert.equal(events, 'CubicWeb'); - } finally { - done(); - }; - } - ); - }); - - QUnit.test('test event on node', function (assert) { - assert.expect(3); - var done = assert.async(); - var nodes = []; - $('#qunit-fixture').bind('server-response', function() { - nodes.push('node'); - }); - $(CubicWeb).bind('server-response', function() { - nodes.push('CubicWeb'); - }); - $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') - .addCallback(function() { - try { - assert.equal(nodes.length, 2); - // check that server-response event on CubicWeb is triggered - // only once and event server-response on node is triggered - assert.equal(nodes[0], 'CubicWeb'); - assert.equal(nodes[1], 'node'); - } finally { - done(); - }; - } - ); - }); -}); - diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/data/static/jstests/test_htmlhelpers.html --- a/web/test/data/static/jstests/test_htmlhelpers.html Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - -
          cubicweb.htmlhelpers.js functions tests
            - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/data/static/jstests/test_htmlhelpers.js --- a/web/test/data/static/jstests/test_htmlhelpers.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,36 +0,0 @@ -$(document).ready(function() { - - QUnit.module("module2", { - setup: function() { - $('#qunit-fixture').append(''); - } - }); - - QUnit.test("test first selected", function (assert) { - $('#theselect').append('' + - '' + - '' + - ''); - var selected = firstSelected(document.getElementById("theselect")); - assert.equal(selected.value, 'bar'); - }); - - QUnit.test("test first selected 2", function (assert) { - $('#theselect').append('' + - '' + - '' + - ''); - var selected = firstSelected(document.getElementById("theselect")); - assert.equal(selected, null); - }); - - QUnit.module("visibilty"); - QUnit.test('toggleVisibility', function (assert) { - $('#qunit-fixture').append('
            '); - toggleVisibility('foo'); - assert.ok($('#foo').hasClass('hidden'), 'check hidden class is set'); - }); - -}); - diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/data/static/jstests/test_utils.html --- a/web/test/data/static/jstests/test_utils.html Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - -
            cw.utils functions tests
              - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/data/static/jstests/test_utils.js --- a/web/test/data/static/jstests/test_utils.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ -$(document).ready(function() { - - QUnit.module("datetime"); - - QUnit.test("test full datetime", function (assert) { - assert.equal(cw.utils.toISOTimestamp(new Date(1986, 3, 18, 10, 30, 0, 0)), - '1986-04-18 10:30:00'); - }); - - QUnit.test("test only date", function (assert) { - assert.equal(cw.utils.toISOTimestamp(new Date(1986, 3, 18)), '1986-04-18 00:00:00'); - }); - - QUnit.test("test null", function (assert) { - assert.equal(cw.utils.toISOTimestamp(null), null); - }); - - QUnit.module("parsing"); - QUnit.test("test basic number parsing", function (assert) { - var d = strptime('2008/08/08', '%Y/%m/%d'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); - d = strptime('2008/8/8', '%Y/%m/%d'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); - d = strptime('8/8/8', '%Y/%m/%d'); - assert.deepEqual(datetuple(d), [8, 8, 8, 0, 0]); - d = strptime('0/8/8', '%Y/%m/%d'); - assert.deepEqual(datetuple(d), [0, 8, 8, 0, 0]); - d = strptime('-10/8/8', '%Y/%m/%d'); - assert.deepEqual(datetuple(d), [-10, 8, 8, 0, 0]); - d = strptime('-35000', '%Y'); - assert.deepEqual(datetuple(d), [-35000, 1, 1, 0, 0]); - }); - - QUnit.test("test custom format parsing", function (assert) { - var d = strptime('2008-08-08', '%Y-%m-%d'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); - d = strptime('2008 - ! 08: 08', '%Y - ! %m: %d'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); - d = strptime('2008-08-08 12:14', '%Y-%m-%d %H:%M'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 12, 14]); - d = strptime('2008-08-08 1:14', '%Y-%m-%d %H:%M'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 1, 14]); - d = strptime('2008-08-08 01:14', '%Y-%m-%d %H:%M'); - assert.deepEqual(datetuple(d), [2008, 8, 8, 1, 14]); - }); - - QUnit.module("sliceList"); - QUnit.test("test slicelist", function (assert) { - var list = ['a', 'b', 'c', 'd', 'e', 'f']; - assert.deepEqual(cw.utils.sliceList(list, 2), ['c', 'd', 'e', 'f']); - assert.deepEqual(cw.utils.sliceList(list, 2, -2), ['c', 'd']); - assert.deepEqual(cw.utils.sliceList(list, -3), ['d', 'e', 'f']); - assert.deepEqual(cw.utils.sliceList(list, 0, -2), ['a', 'b', 'c', 'd']); - assert.deepEqual(cw.utils.sliceList(list), list); - }); - - QUnit.module("formContents", { - setup: function() { - $('#qunit-fixture').append(''); - } - }); - // XXX test fckeditor - QUnit.test("test formContents", function (assert) { - $('#test-form').append(''); - $('#test-form').append(' '); - $('#test-form').append(''); - $('#test-form').append(''); - $('#test-form').append(''); - $('#test-form').append(''); - $('#test-form').append(''); - $('#theselect').append('' + - ''); - //Append an unchecked radio input : should not be in formContents list - $('#test-form').append(''); - $('#test-form').append(''); - assert.deepEqual(cw.utils.formContents($('#test-form')[0]), [ - ['input-text', 'mytextarea', 'choice', 'check', 'theselect'], - ['toto', 'Hello World!', 'no', 'no', 'foo'] - ]); - }); -}); - diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/data/static/jstests/utils.js --- a/web/test/data/static/jstests/utils.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -function datetuple(d) { - return [d.getFullYear(), d.getMonth()+1, d.getDate(), - d.getHours(), d.getMinutes()]; -} - -function pprint(obj) { - 
print('{'); - for(k in obj) { - print(' ' + k + ' = ' + obj[k]); - } - print('}'); -} - -function arrayrepr(array) { - return '[' + array.join(', ') + ']'; -} - -function assertArrayEquals(array1, array2) { - if (array1.length != array2.length) { - throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); - } - for (var i=0; i. - -from cubicweb.predicates import has_related_entities -from cubicweb.web.views.ajaxcontroller import ajaxfunc -from cubicweb.web.views.ibreadcrumbs import IBreadCrumbsAdapter - -def _recursive_replace_stream_by_content(tree): - """ Search for streams (i.e. object that have a 'read' method) in a tree - (which branches are lists or tuples), and substitute them by their content, - leaving other leafs identical. A copy of the tree with only lists as - branches is returned. - """ - if not isinstance(tree, (list, tuple)): - if hasattr(tree, 'read'): - return tree.read() - return tree - else: - return [_recursive_replace_stream_by_content(value) - for value in tree] - - -@ajaxfunc(output_type='json') -def fileupload(self): - """ Return a json copy of the web request formin which uploaded files - are read and their content substitute the received streams. - """ - try: - result_dict = {} - for key, value in self._cw.form.items(): - result_dict[key] = _recursive_replace_stream_by_content(value) - return result_dict - except Exception as ex: - import traceback as tb - tb.print_exc(ex) - - -class FolderIBreadCrumbsAdapter(IBreadCrumbsAdapter): - __select__ = IBreadCrumbsAdapter.__select__ & has_related_entities('filed_under') - - def parent_entity(self): - return self.entity.filed_under[0] diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/requirements.txt --- a/web/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -requests -webtest -Twisted -cubicweb-blog -cubicweb-file -cubicweb-tag diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/test_jscript.py --- a/web/test/test_jscript.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -from cubicweb.devtools import qunit - -from os import path as osp - - -class JScript(qunit.QUnitTestCase): - - all_js_tests = ( - ("/static/jstests/test_utils.js", ( - "/data/cubicweb.js", - "/data/cubicweb.compat.js", - "/data/cubicweb.python.js", - "/static/jstests/utils.js", - ), - ), - - ("/static/jstests/test_htmlhelpers.js", ( - "/data/cubicweb.js", - "/data/cubicweb.compat.js", - "/data/cubicweb.python.js", - "/data/cubicweb.htmlhelpers.js", - ), - ), - - ("/static/jstests/test_ajax.js", ( - "/data/cubicweb.python.js", - "/data/cubicweb.js", - "/data/cubicweb.compat.js", - "/data/cubicweb.htmlhelpers.js", - "/data/cubicweb.ajax.js", - ), - ), - ) - - -if __name__ == '__main__': - from unittest import main - main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/test_views.py --- a/web/test/test_views.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,86 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""automatic tests""" -from cubicweb.devtools.testlib import AutoPopulateTest, AutomaticWebTest -from cubicweb.view import AnyRsetView - -class AutomaticWebTest(AutomaticWebTest): - application_rql = [ - 'Any L,F WHERE E is CWUser, E login L, E firstname F', - 'Any L,F,E WHERE E is CWUser, E login L, E firstname F', - 'Any COUNT(X) WHERE X is CWUser', - ] - - def to_test_etypes(self): - # We do not really want to test cube views here. So we can drop testing - # some EntityType. The two Blog types below require the sioc cube that - # we do not want to add as a dependency. - etypes = super(AutomaticWebTest, self).to_test_etypes() - etypes -= set(('Blog', 'BlogEntry')) - return etypes - - -class SomeView(AnyRsetView): - __regid__ = 'someview' - - def call(self): - self._cw.add_js('spam.js') - self._cw.add_js('spam.js') - - -class ManualCubicWebTCs(AutoPopulateTest): - - def test_regr_copy_view(self): - """regression test: make sure we can ask a copy of a - composite entity - """ - with self.admin_access.web_request() as req: - rset = req.execute(u'CWUser X WHERE X login "admin"') - self.view('copy', rset, req=req) - - def test_sortable_js_added(self): - with self.admin_access.web_request() as req: - # sortable.js should not be included by default - rset = req.execute('CWUser X') - self.assertNotIn(b'jquery.tablesorter.js', self.view('oneline', rset, req=req).source) - - with self.admin_access.web_request() as req: - # but should be included by the tableview - rset = req.execute('Any P,F,S LIMIT 1 WHERE P is CWUser, P firstname F, P surname S') - self.assertIn(b'jquery.tablesorter.js', self.view('table', rset, req=req).source) - - def test_js_added_only_once(self): - with self.admin_access.web_request() as req: - self.vreg._loadedmods[__name__] = {} - self.vreg.register(SomeView) - rset = req.execute('CWUser X') - source = self.view('someview', rset, req=req).source - self.assertEqual(source.count(b'spam.js'), 1) - - def test_unrelateddivs(self): - with self.admin_access.client_cnx() as cnx: - group = cnx.create_entity('CWGroup', name=u'R&D') - cnx.commit() - with self.admin_access.web_request(relation='in_group_subject') as req: - rset = req.execute(u'Any X WHERE X is CWUser, X login "admin"') - self.view('unrelateddivs', rset, req=req) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/testutils.js --- a/web/test/testutils.js Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -function datetuple(d) { - return [d.getFullYear(), d.getMonth()+1, d.getDate(), - d.getHours(), d.getMinutes()]; -} - -function pprint(obj) { - print('{'); - for(k in obj) { - print(' ' + k + ' = ' + obj[k]); - } - print('}'); -} - -function arrayrepr(array) { - return '[' + array.join(', ') + ']'; -} - -function assertArrayEquals(array1, array2) { - if (array1.length != array2.length) { - throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); - } - for (var i=0; i. 
-"""unit tests for cubicweb.web.application""" - -import base64 - -from six import text_type -from six.moves import http_client -from six.moves.http_cookies import SimpleCookie - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.decorators import clear_cache, classproperty - -from cubicweb import AuthenticationError -from cubicweb import view -from cubicweb.devtools.testlib import CubicWebTC, real_error_handling -from cubicweb.devtools.fake import FakeRequest -from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE -from cubicweb.web.views.basecontrollers import ViewController -from cubicweb.web.application import anonymized_request -from cubicweb import repoapi - -class FakeMapping: - """emulates a mapping module""" - def __init__(self): - self.ENTITIES_MAP = {} - self.ATTRIBUTES_MAP = {} - self.RELATIONS_MAP = {} - -class MockCursor: - def __init__(self): - self.executed = [] - def execute(self, rql, args=None, build_descr=False): - args = args or {} - self.executed.append(rql % args) - - -class FakeController(ViewController): - - def __init__(self, form=None): - self._cw = FakeRequest() - self._cw.form = form or {} - self._cursor = MockCursor() - self._cw.execute = self._cursor.execute - - def new_cursor(self): - self._cursor = MockCursor() - self._cw.execute = self._cursor.execute - - def set_form(self, form): - self._cw.form = form - - -class RequestBaseTC(TestCase): - def setUp(self): - self._cw = FakeRequest() - - - def test_list_arg(self): - """tests the list_arg() function""" - list_arg = self._cw.list_form_param - self.assertEqual(list_arg('arg3', {}), []) - d = {'arg1' : "value1", - 'arg2' : ('foo', INTERNAL_FIELD_VALUE,), - 'arg3' : ['bar']} - self.assertEqual(list_arg('arg1', d, True), ['value1']) - self.assertEqual(d, {'arg2' : ('foo', INTERNAL_FIELD_VALUE), 'arg3' : ['bar'],}) - self.assertEqual(list_arg('arg2', d, True), ['foo']) - self.assertEqual({'arg3' : ['bar'],}, d) - self.assertEqual(list_arg('arg3', d), ['bar',]) - self.assertEqual({'arg3' : ['bar'],}, d) - - - def test_from_controller(self): - self._cw.vreg['controllers'] = {'view': 1, 'login': 1} - self.assertEqual(self._cw.from_controller(), 'view') - req = FakeRequest(url='project?vid=list') - req.vreg['controllers'] = {'view': 1, 'login': 1} - # this assertion is just to make sure that relative_path can be - # correctly computed as it is used in from_controller() - self.assertEqual(req.relative_path(False), 'project') - self.assertEqual(req.from_controller(), 'view') - # test on a valid non-view controller - req = FakeRequest(url='login?x=1&y=2') - req.vreg['controllers'] = {'view': 1, 'login': 1} - self.assertEqual(req.relative_path(False), 'login') - self.assertEqual(req.from_controller(), 'login') - - -class UtilsTC(TestCase): - """test suite for misc application utilities""" - - def setUp(self): - self.ctrl = FakeController() - - #def test_which_mapping(self): - # """tests which mapping is used (application or core)""" - # init_mapping() - # from cubicweb.common import mapping - # self.assertEqual(mapping.MAPPING_USED, 'core') - # sys.modules['mapping'] = FakeMapping() - # init_mapping() - # self.assertEqual(mapping.MAPPING_USED, 'application') - # del sys.modules['mapping'] - - def test_execute_linkto(self): - """tests the execute_linkto() function""" - self.assertEqual(self.ctrl.execute_linkto(), None) - self.assertEqual(self.ctrl._cursor.executed, - []) - - self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:object', - 'eid': 8}) - self.ctrl.execute_linkto() - 
self.assertEqual(self.ctrl._cursor.executed, - ['SET Y works_for X WHERE X eid 8, Y eid %s' % i - for i in (12, 13, 14)]) - - self.ctrl.new_cursor() - self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:subject', - 'eid': 8}) - self.ctrl.execute_linkto() - self.assertEqual(self.ctrl._cursor.executed, - ['SET X works_for Y WHERE X eid 8, Y eid %s' % i - for i in (12, 13, 14)]) - - - self.ctrl.new_cursor() - self.ctrl._cw.form = {'__linkto' : 'works_for:12_13_14:object'} - self.ctrl.execute_linkto(eid=8) - self.assertEqual(self.ctrl._cursor.executed, - ['SET Y works_for X WHERE X eid 8, Y eid %s' % i - for i in (12, 13, 14)]) - - self.ctrl.new_cursor() - self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:subject'}) - self.ctrl.execute_linkto(eid=8) - self.assertEqual(self.ctrl._cursor.executed, - ['SET X works_for Y WHERE X eid 8, Y eid %s' % i - for i in (12, 13, 14)]) - - -class ApplicationTC(CubicWebTC): - - @classproperty - def config(cls): - try: - return cls.__dict__['_config'] - except KeyError: - config = super(ApplicationTC, cls).config - config.global_set_option('allow-email-login', True) - return config - - def test_cnx_user_groups_sync(self): - with self.admin_access.client_cnx() as cnx: - user = cnx.user - self.assertEqual(user.groups, set(('managers',))) - cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) - user = cnx.user - self.assertEqual(user.groups, set(('managers',))) - cnx.commit() - user = cnx.user - self.assertEqual(user.groups, set(('managers', 'guests'))) - # cleanup - cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) - cnx.commit() - - def test_publish_validation_error(self): - with self.admin_access.web_request() as req: - user = req.user - eid = text_type(user.eid) - req.form = { - 'eid': eid, - '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject', - 'login-subject:'+eid: '', # ERROR: no login specified - # just a sample, missing some necessary information for real life - '__errorurl': 'view?vid=edition...' 
- } - path, params = self.expect_redirect_handle_request(req, 'edit') - forminfo = req.session.data['view?vid=edition...'] - eidmap = forminfo['eidmap'] - self.assertEqual(eidmap, {}) - values = forminfo['values'] - self.assertEqual(values['login-subject:'+eid], '') - self.assertEqual(values['eid'], eid) - error = forminfo['error'] - self.assertEqual(error.entity, user.eid) - self.assertEqual(error.errors['login-subject'], 'required field') - - - def test_validation_error_dont_loose_subentity_data_ctrl(self): - """test creation of two linked entities - - error occurs on the web controller - """ - with self.admin_access.web_request() as req: - # set Y before X to ensure both entities are edited, not only X - req.form = {'eid': ['Y', 'X'], '__maineid': 'X', - '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject', - # missing required field - 'login-subject:X': u'', - # but email address is set - '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject', - 'address-subject:Y': u'bougloup@logilab.fr', - 'use_email-object:Y': 'X', - # necessary to get validation error handling - '__errorurl': 'view?vid=edition...', - } - path, params = self.expect_redirect_handle_request(req, 'edit') - forminfo = req.session.data['view?vid=edition...'] - self.assertEqual(set(forminfo['eidmap']), set('XY')) - self.assertEqual(forminfo['eidmap']['X'], None) - self.assertIsInstance(forminfo['eidmap']['Y'], int) - self.assertEqual(forminfo['error'].entity, 'X') - self.assertEqual(forminfo['error'].errors, - {'login-subject': 'required field'}) - self.assertEqual(forminfo['values'], req.form) - - - def test_validation_error_dont_loose_subentity_data_repo(self): - """test creation of two linked entities - - error occurs on the repository - """ - with self.admin_access.web_request() as req: - # set Y before X to ensure both entities are edited, not only X - req.form = {'eid': ['Y', 'X'], '__maineid': 'X', - '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject', - # already existent user - 'login-subject:X': u'admin', - 'upassword-subject:X': u'admin', 'upassword-subject-confirm:X': u'admin', - '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject', - 'address-subject:Y': u'bougloup@logilab.fr', - 'use_email-object:Y': 'X', - # necessary to get validation error handling - '__errorurl': 'view?vid=edition...', - } - path, params = self.expect_redirect_handle_request(req, 'edit') - forminfo = req.session.data['view?vid=edition...'] - self.assertEqual(set(forminfo['eidmap']), set('XY')) - self.assertIsInstance(forminfo['eidmap']['X'], int) - self.assertIsInstance(forminfo['eidmap']['Y'], int) - self.assertEqual(forminfo['error'].entity, forminfo['eidmap']['X']) - self.assertEqual(forminfo['error'].errors, - {'login-subject': u'the value "admin" is already used, use another one'}) - self.assertEqual(forminfo['values'], req.form) - - def test_ajax_view_raise_arbitrary_error(self): - class ErrorAjaxView(view.View): - __regid__ = 'test.ajax.error' - def call(self): - raise Exception('whatever') - with self.temporary_appobjects(ErrorAjaxView): - with real_error_handling(self.app) as app: - with self.admin_access.web_request(vid='test.ajax.error') as req: - req.ajax_request = True - page = app.handle_request(req, '') - self.assertEqual(http_client.INTERNAL_SERVER_ERROR, - req.status_out) - - def _test_cleaned(self, kwargs, injected, cleaned): - with self.admin_access.web_request(**kwargs) as req: - page = self.app_handle_request(req, 'view') - 
self.assertNotIn(injected.encode('ascii'), page) - self.assertIn(cleaned.encode('ascii'), page) - - def test_nonregr_script_kiddies(self): - """test against current script injection""" - injected = 'toto' - cleaned = 'toto' - for kwargs in ({'vid': injected}, - {'vtitle': injected}, - ): - yield self._test_cleaned, kwargs, injected, cleaned - - def test_site_wide_eproperties_sync(self): - # XXX work in all-in-one configuration but not in twisted for instance - # in which case we need a kindof repo -> http server notification - # protocol - vreg = self.app.vreg - # default value - self.assertEqual(vreg.property_value('ui.language'), 'en') - with self.admin_access.client_cnx() as cnx: - cnx.execute('INSERT CWProperty X: X value "fr", X pkey "ui.language"') - self.assertEqual(vreg.property_value('ui.language'), 'en') - cnx.commit() - self.assertEqual(vreg.property_value('ui.language'), 'fr') - cnx.execute('SET X value "de" WHERE X pkey "ui.language"') - self.assertEqual(vreg.property_value('ui.language'), 'fr') - cnx.commit() - self.assertEqual(vreg.property_value('ui.language'), 'de') - cnx.execute('DELETE CWProperty X WHERE X pkey "ui.language"') - self.assertEqual(vreg.property_value('ui.language'), 'de') - cnx.commit() - self.assertEqual(vreg.property_value('ui.language'), 'en') - - # authentication tests #################################################### - - def test_http_auth_no_anon(self): - req, origsession = self.init_authentication('http') - self.assertAuthFailure(req) - self.app.handle_request(req, 'login') - self.assertEqual(401, req.status_out) - clear_cache(req, 'get_authorization') - authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii')) - req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) - self.assertAuthSuccess(req, origsession) - self.assertRaises(LogOut, self.app_handle_request, req, 'logout') - self.assertEqual(len(self.open_sessions), 0) - - def test_cookie_auth_no_anon(self): - req, origsession = self.init_authentication('cookie') - self.assertAuthFailure(req) - try: - form = self.app.handle_request(req, 'login') - except Redirect as redir: - self.fail('anonymous user should get login form') - clear_cache(req, 'get_authorization') - self.assertIn(b'__login', form) - self.assertIn(b'__password', form) - self.assertFalse(req.cnx) # Mock cnx are False - req.form['__login'] = self.admlogin - req.form['__password'] = self.admpassword - self.assertAuthSuccess(req, origsession) - self.assertRaises(LogOut, self.app_handle_request, req, 'logout') - self.assertEqual(len(self.open_sessions), 0) - - def test_login_by_email(self): - with self.admin_access.client_cnx() as cnx: - login = cnx.user.login - address = login + u'@localhost' - cnx.execute('INSERT EmailAddress X: X address %(address)s, U primary_email X ' - 'WHERE U login %(login)s', {'address': address, 'login': login}) - cnx.commit() - # # option allow-email-login not set - req, origsession = self.init_authentication('cookie') - # req.form['__login'] = address - # req.form['__password'] = self.admpassword - # self.assertAuthFailure(req) - # option allow-email-login set - #origsession.login = address - self.set_option('allow-email-login', True) - req.form['__login'] = address - req.form['__password'] = self.admpassword - self.assertAuthSuccess(req, origsession) - self.assertRaises(LogOut, self.app_handle_request, req, 'logout') - self.assertEqual(len(self.open_sessions), 0) - - def _reset_cookie(self, req): - # preparing the suite of the test - # set session 
id in cookie - cookie = SimpleCookie() - sessioncookie = self.app.session_handler.session_cookie(req) - cookie[sessioncookie] = req.session.sessionid - req.set_request_header('Cookie', cookie[sessioncookie].OutputString(), - raw=True) - clear_cache(req, 'get_authorization') - - def _test_auth_anon(self, req): - asession = self.app.get_session(req) - # important otherwise _reset_cookie will not use the right session - req.set_cnx(repoapi.Connection(asession)) - self.assertEqual(len(self.open_sessions), 1) - self.assertEqual(asession.login, 'anon') - self.assertTrue(asession.anonymous_session) - self._reset_cookie(req) - - def _test_anon_auth_fail(self, req): - self.assertEqual(1, len(self.open_sessions)) - session = self.app.get_session(req) - # important otherwise _reset_cookie will not use the right session - req.set_cnx(repoapi.Connection(session)) - self.assertEqual(req.message, 'authentication failure') - self.assertEqual(req.session.anonymous_session, True) - self.assertEqual(1, len(self.open_sessions)) - self._reset_cookie(req) - - def test_http_auth_anon_allowed(self): - req, origsession = self.init_authentication('http', 'anon') - self._test_auth_anon(req) - authstr = base64.encodestring(b'toto:pouet') - req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) - self._test_anon_auth_fail(req) - authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii')) - req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) - self.assertAuthSuccess(req, origsession) - self.assertRaises(LogOut, self.app_handle_request, req, 'logout') - self.assertEqual(len(self.open_sessions), 0) - - def test_cookie_auth_anon_allowed(self): - req, origsession = self.init_authentication('cookie', 'anon') - self._test_auth_anon(req) - req.form['__login'] = 'toto' - req.form['__password'] = 'pouet' - self._test_anon_auth_fail(req) - req.form['__login'] = self.admlogin - req.form['__password'] = self.admpassword - self.assertAuthSuccess(req, origsession) - self.assertRaises(LogOut, self.app_handle_request, req, 'logout') - self.assertEqual(0, len(self.open_sessions)) - - def test_anonymized_request(self): - with self.admin_access.web_request() as req: - self.assertEqual(self.admlogin, req.session.user.login) - # admin should see anon + admin - self.assertEqual(2, len(list(req.find('CWUser')))) - with anonymized_request(req): - self.assertEqual('anon', req.session.login, 'anon') - # anon should only see anon user - self.assertEqual(1, len(list(req.find('CWUser')))) - self.assertEqual(self.admlogin, req.session.login) - self.assertEqual(2, len(list(req.find('CWUser')))) - - def test_non_regr_optional_first_var(self): - with self.admin_access.web_request() as req: - # expect a rset with None in [0][0] - req.form['rql'] = 'rql:Any OV1, X WHERE X custom_workflow OV1?' - self.app_handle_request(req) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_breadcrumbs.py --- a/web/test/unittest_breadcrumbs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from cubicweb.devtools.testlib import CubicWebTC - - -class BreadCrumbsTC(CubicWebTC): - - def test_base(self): - with self.admin_access.web_request() as req: - f1 = req.create_entity('Folder', name=u'par&ent') - f2 = req.create_entity('Folder', name=u'chi&ld') - req.cnx.execute('SET F2 filed_under F1 WHERE F1 eid %(f1)s, F2 eid %(f2)s', - {'f1' : f1.eid, 'f2' : f2.eid}) - req.cnx.commit() - self.assertEqual(f2.view('breadcrumbs'), - '' - 'chi&ld' % f2.eid) - childrset = f2.as_rset() - ibc = self.vreg['ctxcomponents'].select('breadcrumbs', req, rset=childrset) - l = [] - ibc.render(l.append) - self.assertMultiLineEqual(' > ' - 'Folder_plural' - ' > par&ent > \n' - '' - 'chi&ld' % (f1.eid, f2.eid), - ''.join(l)) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_controller.py --- a/web/test/unittest_controller.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,59 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""cubicweb.web.controller unit tests - -""" - -from datetime import datetime, date, time - -from logilab.common.testlib import unittest_main - -from cubicweb.devtools import testlib - -class BaseControllerTC(testlib.CubicWebTC): - - def test_parse_datetime_ok(self): - with self.admin_access.web_request() as req: - ctrl = self.vreg['controllers'].select('view', req) - pd = ctrl._cw.parse_datetime - self.assertIsInstance(pd('2006/06/24 12:18'), datetime) - self.assertIsInstance(pd('2006/06/24'), date) - self.assertIsInstance(pd('2006/06/24 12:18', 'Datetime'), datetime) - self.assertIsInstance(pd('2006/06/24', 'Datetime'), datetime) - self.assertIsInstance(pd('2006/06/24', 'Date'), date) - self.assertIsInstance(pd('12:18', 'Time'), time) - - def test_parse_datetime_ko(self): - with self.admin_access.web_request() as req: - ctrl = self.vreg['controllers'].select('view', req) - pd = ctrl._cw.parse_datetime - self.assertRaises(ValueError, - pd, '2006/06/24 12:188', 'Datetime') - self.assertRaises(ValueError, - pd, '2006/06/240', 'Datetime') - self.assertRaises(ValueError, - pd, '2006/06/24 12:18', 'Date') - self.assertRaises(ValueError, - pd, '2006/24/06', 'Date') - self.assertRaises(ValueError, - pd, '2006/06/240', 'Date') - self.assertRaises(ValueError, - pd, '12:188', 'Time') - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_facet.py --- a/web/test/unittest_facet.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,421 +0,0 @@ -from logilab.common.date import datetime2ticks -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web import facet - -class BaseFacetTC(CubicWebTC): - - def prepare_rqlst(self, req, rql='CWUser X', mainvar='X', - expected_baserql='Any X WHERE X is CWUser', - expected_preparedrql='DISTINCT Any WHERE X is CWUser'): - rset = req.cnx.execute(rql) - rqlst = rset.syntax_tree().copy() - filtered_variable, baserql = facet.init_facets(rset, rqlst.children[0], - mainvar=mainvar) - self.assertEqual(filtered_variable.name, mainvar) - self.assertEqual(baserql, expected_baserql) - self.assertEqual(rqlst.as_string(), expected_preparedrql) - return rset, rqlst, filtered_variable - - def _in_group_facet(self, req, cls=facet.RelationFacet, no_relation=False): - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - cls.no_relation = no_relation - f = cls(req, rset=rset, select=rqlst.children[0], - filtered_variable=filtered_variable) - f.__regid__ = 'in_group' - f.rtype = 'in_group' - f.role = 'subject' - f.target_attr = 'name' - guests, managers = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN ' - 'WHERE G name GN, G name IN ("guests", "managers")')] - groups = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN ' - 'WHERE G name GN, G name IN ("guests", "managers")')] - return f, groups - - def test_relation_simple(self): - with self.admin_access.web_request() as req: - f, (guests, managers) = self._in_group_facet(req) - self.assertEqual(f.vocabulary(), - [(u'guests', guests), (u'managers', managers)]) - # ensure rqlst is left unmodified - self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') - #rqlst = rset.syntax_tree() - self.assertEqual(f.possible_values(), - [str(guests), str(managers)]) - # ensure rqlst is left unmodified - self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') - f._cw.form[f.__regid__] = str(guests) - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not 
in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X in_group D, D eid %s' % guests) - - def test_relation_multiple_and(self): - with self.admin_access.web_request() as req: - f, (guests, managers) = self._in_group_facet(req) - f._cw.form[f.__regid__] = [str(guests), str(managers)] - f._cw.form[f.__regid__ + '_andor'] = 'AND' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X in_group A, B eid %s, X in_group B, A eid %s' % (guests, managers)) - - def test_relation_multiple_or(self): - with self.admin_access.web_request() as req: - f, (guests, managers) = self._in_group_facet(req) - f._cw.form[f.__regid__] = [str(guests), str(managers)] - f._cw.form[f.__regid__ + '_andor'] = 'OR' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X in_group A, A eid IN(%s, %s)' % (guests, managers)) - - def test_relation_optional_rel(self): - with self.admin_access.web_request() as req: - rset = req.cnx.execute(u'Any X,GROUP_CONCAT(GN) GROUPBY X ' - 'WHERE X in_group G?, G name GN, NOT G name "users"') - rqlst = rset.syntax_tree().copy() - select = rqlst.children[0] - filtered_variable, baserql = facet.init_facets(rset, select) - - f = facet.RelationFacet(req, rset=rset, - select=select, - filtered_variable=filtered_variable) - f.rtype = 'in_group' - f.role = 'subject' - f.target_attr = 'name' - guests, managers = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN ' - 'WHERE G name GN, G name IN ("guests", "managers")')] - self.assertEqual(f.vocabulary(), - [(u'guests', guests), (u'managers', managers)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"') - #rqlst = rset.syntax_tree() - self.assertEqual(sorted(f.possible_values()), - [str(guests), str(managers)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"') - req.form[f.__regid__] = str(guests) - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users", X in_group D, D eid %s' % guests) - - def test_relation_no_relation_1(self): - with self.admin_access.web_request() as req: - f, (guests, managers) = self._in_group_facet(req, no_relation=True) - self.assertEqual(f.vocabulary(), - [(u'guests', guests), (u'managers', managers)]) - self.assertEqual(f.possible_values(), - [str(guests), str(managers)]) - f._cw.create_entity('CWUser', login=u'hop', upassword='toto') - self.assertEqual(f.vocabulary(), - [(u'', ''), (u'guests', guests), (u'managers', managers)]) - self.assertEqual(f.possible_values(), - [str(guests), str(managers), '']) - f._cw.form[f.__regid__] = '' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, NOT X in_group G') - - def test_relation_no_relation_2(self): - with self.admin_access.web_request() as req: - f, (guests, managers) = self._in_group_facet(req, no_relation=True) - f._cw.form[f.__regid__] = ['', guests] - f.select.save_state() - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, (NOT X in_group B) OR (X in_group A, A eid %s)' % guests) - f.select.recover() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser') - - 
def test_relationattribute(self): - with self.admin_access.web_request() as req: - f, (guests, managers) = self._in_group_facet(req, cls=facet.RelationAttributeFacet) - self.assertEqual(f.vocabulary(), - [(u'guests', u'guests'), (u'managers', u'managers')]) - # ensure rqlst is left unmodified - self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') - #rqlst = rset.syntax_tree() - self.assertEqual(f.possible_values(), - ['guests', 'managers']) - # ensure rqlst is left unmodified - self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') - f._cw.form[f.__regid__] = u'guests' - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X in_group E, E name "guests"') - - def test_hasrelation(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - f = facet.HasRelationFacet(req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - f.__regid__ = 'has_group' - f.rtype = 'in_group' - f.role = 'subject' - f._cw.form[f.__regid__] = 'feed me' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, EXISTS(X in_group A)') - - def test_daterange(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - f = facet.DateRangeFacet(req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - f.rtype = 'creation_date' - mind, maxd = req.cnx.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X creation_date CD')[0] - self.assertEqual(f.vocabulary(), - [(str(mind), mind), - (str(maxd), maxd)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - #rqlst = rset.syntax_tree() - self.assertEqual(f.possible_values(), - [str(mind), str(maxd)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind)) - req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind)) - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X creation_date >= "%s", ' - 'X creation_date <= "%s"' - % (mind.strftime('%Y/%m/%d'), - mind.strftime('%Y/%m/%d'))) - - def test_attribute(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - f = facet.AttributeFacet(req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - f.rtype = 'login' - self.assertEqual(f.vocabulary(), - [(u'admin', u'admin'), (u'anon', u'anon')]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - #rqlst = rset.syntax_tree() - self.assertEqual(f.possible_values(), - ['admin', 'anon']) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form[f.__regid__] = u'admin' - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X login "admin"') - - def test_bitfield(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable 
= self.prepare_rqlst(req, - 'CWAttribute X WHERE X ordernum XO', - expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute', - expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute') - f = facet.BitFieldFacet(req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - f.choices = [('un', 1,), ('deux', 2,)] - f.rtype = 'ordernum' - self.assertEqual(f.vocabulary(), - [(u'deux', 2), (u'un', 1)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute') - #rqlst = rset.syntax_tree() - self.assertEqual(f.possible_values(), - ['2', '1']) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute') - req.form[f.__regid__] = '3' - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 3 = (C & 3)") - - def test_bitfield_0_value(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req, - 'CWAttribute X WHERE X ordernum XO', - expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute', - expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute') - f = facet.BitFieldFacet(req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - f.choices = [('zero', 0,), ('un', 1,), ('deux', 2,)] - f.rtype = 'ordernum' - self.assertEqual(f.vocabulary(), - [(u'deux', 2), (u'un', 1), (u'zero', 0)]) - self.assertEqual(f.possible_values(), - ['2', '1', '0']) - req.form[f.__regid__] = '0' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 0 = C") - - def test_rql_path_eid(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - class RPF(facet.RQLPathFacet): - path = [('X created_by U'), ('U owned_by O'), ('O login OL')] - filter_variable = 'O' - label_variable = 'OL' - f = RPF(req, rset=rset, select=rqlst.children[0], - filtered_variable=filtered_variable) - self.assertEqual(f.vocabulary(), [(u'admin', req.user.eid),]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - #rqlst = rset.syntax_tree() - self.assertEqual(f.possible_values(), - [str(req.user.eid),]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form[f.__regid__] = '1' - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X is CWUser, X created_by F, F owned_by G, G eid 1") - - def test_rql_path_eid_no_label(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - class RPF(facet.RQLPathFacet): - path = [('X created_by U'), ('U owned_by O'), ('O login OL')] - filter_variable = 'O' - f = RPF(req, rset=rset, select=rqlst.children[0], - filtered_variable=filtered_variable) - self.assertEqual(f.vocabulary(), [(str(req.user.eid), req.user.eid),]) - - def test_rql_path_attr(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - class RPF(facet.RQLPathFacet): - path = [('X created_by 
U'), ('U owned_by O'), ('O login OL')] - filter_variable = 'OL' - f = RPF(req, rset=rset, select=rqlst.children[0], - filtered_variable=filtered_variable) - - self.assertEqual(f.vocabulary(), [(u'admin', 'admin'),]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - self.assertEqual(f.possible_values(), ['admin',]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form[f.__regid__] = u'admin' - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login "admin"') - - def test_rql_path_check_filter_label_variable(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) - class RPF(facet.RQLPathFacet): - path = [('X created_by U'), ('U owned_by O'), ('O login OL')] - filter_variable = 'OL' - label_variable = 'OL' - self.assertRaises(AssertionError, RPF, req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - - - def test_rqlpath_range(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepare_rqlst(req) - class RRF(facet.DateRangeRQLPathFacet): - path = [('X created_by U'), ('U owned_by O'), ('O creation_date OL')] - filter_variable = 'OL' - f = RRF(req, rset=rset, select=rqlst.children[0], - filtered_variable=filtered_variable) - mind, maxd = req.cnx.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X created_by U, U owned_by O, O creation_date CD')[0] - self.assertEqual(f.vocabulary(), [(str(mind), mind), - (str(maxd), maxd)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - self.assertEqual(f.possible_values(), - [str(mind), str(maxd)]) - # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind)) - req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind)) - f.add_rql_restrictions() - # selection is cluttered because rqlst has been prepared for facet (it - # is not in real life) - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H creation_date >= "%s", ' - 'H creation_date <= "%s"' - % (mind.strftime('%Y/%m/%d'), - mind.strftime('%Y/%m/%d'))) - - def prepareg_aggregat_rqlst(self, req): - return self.prepare_rqlst(req, - u'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, ' - 'X modification_date XM, Y creation_date YD, Y is CWGroup ' - 'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X', - expected_baserql=u'Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, ' - 'X modification_date XM, Y creation_date YD, Y is CWGroup ' - 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)', - expected_preparedrql=u'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' - 'X modification_date XM, Y creation_date YD, Y is CWGroup ' - 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') - - - def test_aggregat_query_cleanup_select(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) - select = rqlst.children[0] - facet.cleanup_select(select, filtered_variable=filtered_variable) - self.assertEqual(select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' - 'X 
modification_date XM, Y creation_date YD, Y is CWGroup ' - 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') - - def test_aggregat_query_rql_path(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) - class RPF(facet.RQLPathFacet): - path = [('X created_by U'), ('U owned_by O'), ('O login OL')] - filter_variable = 'OL' - f = RPF(req, rset=rset, select=rqlst.children[0], - filtered_variable=filtered_variable) - self.assertEqual(f.vocabulary(), [(u'admin', u'admin')]) - self.assertEqual(f.possible_values(), ['admin']) - req.form[f.__regid__] = u'admin' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' - 'X modification_date XM, Y creation_date YD, Y is CWGroup, ' - 'X created_by G, G owned_by H, H login "admin" ' - 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') - - def test_aggregat_query_attribute(self): - with self.admin_access.web_request() as req: - rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req) - f = facet.AttributeFacet(req, rset=rset, - select=rqlst.children[0], - filtered_variable=filtered_variable) - f.rtype = 'login' - self.assertEqual(f.vocabulary(), - [(u'admin', u'admin'), (u'anon', u'anon')]) - self.assertEqual(f.possible_values(), - ['admin', 'anon']) - req.form[f.__regid__] = u'admin' - f.add_rql_restrictions() - self.assertEqual(f.select.as_string(), - 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' - 'X modification_date XM, Y creation_date YD, Y is CWGroup, X login "admin" ' - 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_form.py --- a/web/test/unittest_form.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,286 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
- -import time - -from xml.etree.ElementTree import fromstring -from lxml import html - -from six import text_type - -from logilab.common.testlib import unittest_main - -from cubicweb import Binary, ValidationError -from cubicweb.mttransforms import HAS_TAL -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.formfields import (IntField, StringField, RichTextField, - PasswordField, DateTimeField, - FileField, EditableFileField) -from cubicweb.web.formwidgets import PasswordInput, Input, DateTimePicker -from cubicweb.web.views.forms import EntityFieldsForm, FieldsForm -from cubicweb.web.views.workflow import ChangeStateForm -from cubicweb.web.views.formrenderers import FormRenderer - - -class FieldsFormTC(CubicWebTC): - - def test_form_field_format(self): - with self.admin_access.web_request() as req: - form = FieldsForm(req, None) - self.assertEqual(StringField().format(form), 'text/plain') - req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"') - req.cnx.commit() - self.assertEqual(StringField().format(form), 'text/rest') - - - def test_process_posted(self): - class AForm(FieldsForm): - anint = IntField() - astring = StringField() - with self.admin_access.web_request(anint='1', astring='2', _cw_fields='anint,astring') as req: - form = AForm(req) - self.assertEqual(form.process_posted(), {'anint': 1, 'astring': '2'}) - with self.admin_access.web_request(anint='1a', astring='2b', _cw_fields='anint,astring') as req: - form = AForm(req) - self.assertRaises(ValidationError, form.process_posted) - - -class EntityFieldsFormTC(CubicWebTC): - - def test_form_field_choices(self): - with self.admin_access.web_request() as req: - b = req.create_entity('BlogEntry', title=u'di mascii code', content=u'a best-seller') - t = req.create_entity('Tag', name=u'x') - form1 = self.vreg['forms'].select('edition', req, entity=t) - choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)] - self.assertIn(text_type(b.eid), choices) - form2 = self.vreg['forms'].select('edition', req, entity=b) - choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)] - self.assertIn(text_type(t.eid), choices) - - b.cw_clear_all_caches() - t.cw_clear_all_caches() - req.cnx.execute('SET X tags Y WHERE X is Tag, Y is BlogEntry') - - choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)] - self.assertIn(text_type(b.eid), choices) - choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)] - self.assertIn(text_type(t.eid), choices) - - def test_form_field_choices_new_entity(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - form = self.vreg['forms'].select('edition', req, entity=e) - unrelated = [rview for rview, reid in form.field_by_name('in_group', 'subject').choices(form)] - # should be default groups but owners, i.e. 
managers, users, guests - self.assertEqual(unrelated, [u'guests', u'managers', u'users']) - - def test_consider_req_form_params(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - e.eid = 'A' - with self.admin_access.web_request(login=u'toto') as toto_req: - form = EntityFieldsForm(toto_req, None, entity=e) - field = StringField(name='login', role='subject', eidparam=True) - form.append_field(field) - form.build_context({}) - self.assertEqual(field.widget.values(form, field), (u'toto',)) - - def test_linkto_field_duplication_inout(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - e.eid = 'A' - e._cw = req - geid = req.cnx.execute('CWGroup X WHERE X name "users"')[0][0] - req.form['__linkto'] = 'in_group:%s:subject' % geid - form = self.vreg['forms'].select('edition', req, entity=e) - form.content_type = 'text/html' - data = [] - form.render(w=data.append) - pageinfo = self._check_html(u'\n'.join(data), form, template=None) - inputs = pageinfo.find_tag('select', False) - ok = False - for selectnode in pageinfo.matching_nodes('select', name='from_in_group-subject:A'): - for optionnode in selectnode: - self.assertEqual(optionnode.get('value'), str(geid)) - self.assertEqual(ok, False) - ok = True - inputs = pageinfo.find_tag('input', False) - self.assertFalse(list(pageinfo.matching_nodes('input', name='__linkto'))) - - def test_reledit_composite_field(self): - with self.admin_access.web_request() as req: - rset = req.execute('INSERT BlogEntry X: X title "cubicweb.org", X content "hop"') - form = self.vreg['views'].select('reledit', req, - rset=rset, row=0, rtype='content') - data = form.render(row=0, rtype='content', formid='base', action='edit_rtype') - self.assertIn('content_format', data) - - - def test_form_generation_time(self): - with self.admin_access.web_request() as req: - e = req.create_entity('BlogEntry', title=u'cubicweb.org', content=u"hop") - expected_field_name = '__form_generation_time:%d' % e.eid - - ts_before = time.time() - form = self.vreg['forms'].select('edition', req, entity=e) - ts_after = time.time() - - data = [] - form.render(action='edit', w=data.append) - html_form = html.fromstring(''.join(data)).forms[0] - fields = dict(html_form.form_values()) - self.assertIn(expected_field_name, fields) - ts = float(fields[expected_field_name]) - self.assertTrue(ts_before < ts < ts_after) - - - # form tests ############################################################## - - def test_form_inheritance(self): - with self.admin_access.web_request() as req: - class CustomChangeStateForm(ChangeStateForm): - hello = IntField(name='youlou') - creation_date = DateTimeField(widget=DateTimePicker) - form = CustomChangeStateForm(req, redirect_path='perdu.com', - entity=req.user) - data = [] - form.render(w=data.append, - formvalues=dict(state=123, trcomment=u'', - trcomment_format=u'text/plain')) - - def test_change_state_form(self): - with self.admin_access.web_request() as req: - form = ChangeStateForm(req, redirect_path='perdu.com', - entity=req.user) - data = [] - form.render(w=data.append, - formvalues=dict(state=123, trcomment=u'', - trcomment_format=u'text/plain')) - - # fields tests ############################################################ - - def _render_entity_field(self, req, name, form): - form.build_context({}) - renderer = FormRenderer(req) - return form.field_by_name(name, 'subject').render(form, renderer) - - def _test_richtextfield(self, req, expected): - 
class RTFForm(EntityFieldsForm): - description = RichTextField(eidparam=True, role='subject') - state = self.vreg['etypes'].etype_class('State')(req) - state.eid = 'S' - form = RTFForm(req, redirect_path='perdu.com', entity=state) - # make it think it can use fck editor anyway - form.field_by_name('description', 'subject').format = lambda form, field=None: 'text/html' - self.assertMultiLineEqual(self._render_entity_field(req, 'description', form), - expected % {'eid': state.eid}) - - - def test_richtextfield_1(self): - with self.admin_access.web_request() as req: - req.use_fckeditor = lambda: False - self._test_richtextfield(req, '''''') - - - def test_richtextfield_2(self): - with self.admin_access.web_request() as req: - req.use_fckeditor = lambda: True - self._test_richtextfield(req, '') - - - def test_filefield(self): - class FFForm(EntityFieldsForm): - data = FileField( - format_field=StringField(name='data_format', max_length=50, - eidparam=True, role='subject'), - encoding_field=StringField(name='data_encoding', max_length=20, - eidparam=True, role='subject'), - eidparam=True, role='subject') - with self.admin_access.web_request() as req: - file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', - data=Binary(b'new widgets system')) - form = FFForm(req, redirect_path='perdu.com', entity=file) - self.assertMultiLineEqual(self._render_entity_field(req, 'data', form), - ''' -show advanced fields - -
              - -detach attached file''' % {'eid': file.eid}) - - - def test_editablefilefield(self): - class EFFForm(EntityFieldsForm): - data = EditableFileField( - format_field=StringField('data_format', max_length=50, - eidparam=True, role='subject'), - encoding_field=StringField('data_encoding', max_length=20, - eidparam=True, role='subject'), - eidparam=True, role='subject') - with self.admin_access.web_request() as req: - file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', - data=Binary(b'new widgets system')) - form = EFFForm(req, redirect_path='perdu.com', entity=file) - self.assertMultiLineEqual(self._render_entity_field(req, 'data', form), - ''' -show advanced fields - -
              - -detach attached file -

              You can either submit a new file using the browse button above, or choose to remove already uploaded file by checking the "detach attached file" check-box, or edit file content online with the widget below.

              -''' % {'eid': file.eid}) - - - def test_passwordfield(self): - class PFForm(EntityFieldsForm): - upassword = PasswordField(eidparam=True, role='subject') - with self.admin_access.web_request() as req: - form = PFForm(req, redirect_path='perdu.com', entity=req.user) - self.assertMultiLineEqual(self._render_entity_field(req, 'upassword', form), - ''' -
              - -  -confirm password''' % {'eid': req.user.eid}) - - - # def test_datefield(self): - # class DFForm(EntityFieldsForm): - # creation_date = DateTimeField(widget=Input) - # form = DFForm(self.req, entity=self.entity) - # init, cur = (fromstring(self._render_entity_field(attr, form)).get('value') - # for attr in ('edits-creation_date', 'creation_date')) - # self.assertEqual(init, cur) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_formfields.py --- a/web/test/unittest_formfields.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,199 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""unittests for cw.web.formfields""" - -from logilab.common.testlib import TestCase, unittest_main, mock_object as mock - -from yams.constraints import StaticVocabularyConstraint, SizeConstraint - -import cubicweb -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.formwidgets import PasswordInput, TextArea, Select, Radio -from cubicweb.web.formfields import * -from cubicweb.web.views.forms import EntityFieldsForm, FieldsForm - -from cubes.file.entities import File - -def setUpModule(*args): - global schema - config = TestServerConfiguration('data', apphome=GuessFieldTC.datadir) - config.bootstrap_cubes() - schema = config.load_schema() - -class GuessFieldTC(CubicWebTC): - - def test_state_fields(self): - with self.admin_access.web_request() as req: - title_field = guess_field(schema['State'], schema['name'], req=req) - self.assertIsInstance(title_field, StringField) - self.assertEqual(title_field.required, True) - -# synopsis_field = guess_field(schema['State'], schema['synopsis']) -# self.assertIsInstance(synopsis_field, StringField) -# self.assertIsInstance(synopsis_field.widget, TextArea) -# self.assertEqual(synopsis_field.required, False) -# self.assertEqual(synopsis_field.help, 'an abstract for this state') - - with self.admin_access.web_request() as req: - description_field = guess_field(schema['State'], schema['description'], req=req) - self.assertIsInstance(description_field, RichTextField) - self.assertEqual(description_field.required, False) - self.assertEqual(description_field.format_field, None) - - # description_format_field = guess_field(schema['State'], schema['description_format']) - # self.assertEqual(description_format_field, None) - - with self.admin_access.web_request() as req: - description_format_field = guess_field(schema['State'], schema['description_format'], - req=req) - self.assertEqual(description_format_field.internationalizable, True) - self.assertEqual(description_format_field.sort, True) - -# wikiid_field = guess_field(schema['State'], schema['wikiid']) -# 
self.assertIsInstance(wikiid_field, StringField) -# self.assertEqual(wikiid_field.required, False) - - - def test_cwuser_fields(self): - with self.admin_access.web_request() as req: - upassword_field = guess_field(schema['CWUser'], schema['upassword'], req=req) - self.assertIsInstance(upassword_field, StringField) - self.assertIsInstance(upassword_field.widget, PasswordInput) - self.assertEqual(upassword_field.required, True) - - with self.admin_access.web_request() as req: - last_login_time_field = guess_field(schema['CWUser'], schema['last_login_time'], req=req) - self.assertIsInstance(last_login_time_field, DateTimeField) - self.assertEqual(last_login_time_field.required, False) - - with self.admin_access.web_request() as req: - in_group_field = guess_field(schema['CWUser'], schema['in_group'], req=req) - self.assertIsInstance(in_group_field, RelationField) - self.assertEqual(in_group_field.required, True) - self.assertEqual(in_group_field.role, 'subject') - self.assertEqual(in_group_field.help, 'groups grant permissions to the user') - - with self.admin_access.web_request() as req: - owned_by_field = guess_field(schema['CWUser'], schema['owned_by'], 'object', req=req) - self.assertIsInstance(owned_by_field, RelationField) - self.assertEqual(owned_by_field.required, False) - self.assertEqual(owned_by_field.role, 'object') - - - def test_file_fields(self): - # data_format_field = guess_field(schema['File'], schema['data_format']) - # self.assertEqual(data_format_field, None) - # data_encoding_field = guess_field(schema['File'], schema['data_encoding']) - # self.assertEqual(data_encoding_field, None) - # data_name_field = guess_field(schema['File'], schema['data_name']) - # self.assertEqual(data_name_field, None) - - with self.admin_access.web_request() as req: - data_field = guess_field(schema['File'], schema['data'], req=req) - self.assertIsInstance(data_field, FileField) - self.assertEqual(data_field.required, True) - self.assertIsInstance(data_field.format_field, StringField) - self.assertIsInstance(data_field.encoding_field, StringField) - self.assertIsInstance(data_field.name_field, StringField) - - def test_constraints_priority(self): - with self.admin_access.web_request() as req: - salesterm_field = guess_field(schema['Salesterm'], schema['reason'], req=req) - constraints = schema['reason'].rdef('Salesterm', 'String').constraints - self.assertEqual([c.__class__ for c in constraints], - [SizeConstraint, StaticVocabularyConstraint]) - self.assertIsInstance(salesterm_field, StringField) - self.assertIsInstance(salesterm_field.widget, Select) - - - def test_bool_field_base(self): - with self.admin_access.web_request() as req: - field = guess_field(schema['CWAttribute'], schema['indexed'], req=req) - self.assertIsInstance(field, BooleanField) - self.assertEqual(field.required, False) - self.assertIsInstance(field.widget, Radio) - self.assertEqual(field.vocabulary(mock(_cw=mock(_=cubicweb._))), - [(u'yes', '1'), (u'no', '')]) - - def test_bool_field_explicit_choices(self): - with self.admin_access.web_request() as req: - field = guess_field(schema['CWAttribute'], schema['indexed'], - choices=[(u'maybe', '1'), (u'no', '')], req=req) - self.assertIsInstance(field.widget, Radio) - self.assertEqual(field.vocabulary(mock(req=mock(_=cubicweb._))), - [(u'maybe', '1'), (u'no', '')]) - - -class MoreFieldsTC(CubicWebTC): - def test_rtf_format_field(self): - with self.admin_access.web_request() as req: - req.use_fckeditor = lambda: False - e = self.vreg['etypes'].etype_class('State')(req) - form = 
EntityFieldsForm(req, entity=e) - description_field = guess_field(schema['State'], schema['description']) - description_format_field = description_field.get_format_field(form) - self.assertEqual(description_format_field.internationalizable, True) - self.assertEqual(description_format_field.sort, True) - # unlike below, initial is bound to form.form_field_format - self.assertEqual(description_format_field.value(form), 'text/plain') - req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"') - req.cnx.commit() - self.assertEqual(description_format_field.value(form), 'text/rest') - - - def test_property_key_field(self): - from cubicweb.web.views.cwproperties import PropertyKeyField - with self.admin_access.web_request() as req: - field = PropertyKeyField(name='test') - e = self.vreg['etypes'].etype_class('CWProperty')(req) - renderer = self.vreg['formrenderers'].select('base', req) - form = EntityFieldsForm(req, entity=e) - form.formvalues = {} - field.render(form, renderer) - - -class CompoundFieldTC(CubicWebTC): - - def test_multipart(self): - """Ensures that compound forms have needs_multipart set if their - children require it""" - class AForm(FieldsForm): - comp = CompoundField([IntField(), StringField()]) - with self.admin_access.web_request() as req: - aform = AForm(req, None) - self.assertFalse(aform.needs_multipart) - class MForm(FieldsForm): - comp = CompoundField([IntField(), FileField()]) - with self.admin_access.web_request() as req: - mform = MForm(req, None) - self.assertTrue(mform.needs_multipart) - - -class UtilsTC(TestCase): - def test_vocab_sort(self): - self.assertEqual(vocab_sort([('Z', 1), ('A', 2), - ('Group 1', None), ('Y', 3), ('B', 4), - ('Group 2', None), ('X', 5), ('C', 6)]), - [('A', 2), ('Z', 1), - ('Group 1', None), ('B', 4), ('Y', 3), - ('Group 2', None), ('C', 6), ('X', 5)] - ) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_formwidgets.py --- a/web/test/unittest_formwidgets.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,71 +0,0 @@ -# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""unittests for cw.web.formwidgets""" - -from logilab.common.testlib import unittest_main, mock_object as mock - -from cubicweb.devtools import fake -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web import formwidgets, formfields -from cubicweb.web.views.forms import FieldsForm - - -class WidgetsTC(CubicWebTC): - - def test_editableurl_widget(self): - field = formfields.guess_field(self.schema['Bookmark'], self.schema['path']) - widget = formwidgets.EditableURLWidget() - req = fake.FakeRequest(form={'path-subjectfqs:A': 'param=value&vid=view'}) - form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A')) - self.assertEqual(widget.process_field_data(form, field), - '?param=value%26vid%3Dview') - - def test_bitselect_widget(self): - field = formfields.guess_field(self.schema['CWAttribute'], self.schema['ordernum']) - field.choices = [('un', '1',), ('deux', '2',)] - widget = formwidgets.BitSelect(settabindex=False) - req = fake.FakeRequest(form={'ordernum-subject:A': ['1', '2']}) - form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A'), - form_previous_values=()) - self.assertMultiLineEqual(widget._render(form, field, None), - '''\ -''') - self.assertEqual(widget.process_field_data(form, field), - 3) - - def test_xml_escape_checkbox(self): - class TestForm(FieldsForm): - bool = formfields.BooleanField(ignore_req_params=True, - choices=[('python >> others', '1')], - widget=formwidgets.CheckBox()) - with self.admin_access.web_request() as req: - form = TestForm(req, None) - form.build_context() - field = form.field_by_name('bool') - widget = field.widget - self.assertMultiLineEqual(widget._render(form, field, None), - '') - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_http.py --- a/web/test/unittest_http.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,418 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -import contextlib - -from logilab.common.testlib import TestCase, unittest_main, tag, Tags - -from cubicweb.devtools.fake import FakeRequest -from cubicweb.devtools.testlib import CubicWebTC - - -def _test_cache(hin, hout, method='GET'): - """forge and process an HTTP request using given headers in/out and method, - then return it once its .is_client_cache_valid() method has been called. - - req.status_out is None if the page should have been calculated. 
- """ - # forge request - req = FakeRequest(method=method) - for key, value in hin: - req._headers_in.addRawHeader(key, str(value)) - for key, value in hout: - req.headers_out.addRawHeader(key, str(value)) - # process - req.status_out = None - req.is_client_cache_valid() - return req - -class HTTPCache(TestCase): - """Check that the http cache logiac work as expected - (as far as we understood the RFC) - - """ - tags = TestCase.tags | Tags('http', 'cache') - - - def assertCache(self, expected, status, situation=''): - """simple assert for nicer message""" - if expected != status: - if expected is None: - expected = "MODIFIED" - if status is None: - status = "MODIFIED" - msg = 'expected %r got %r' % (expected, status) - if situation: - msg = "%s - when: %s" % (msg, situation) - self.fail(msg) - - def test_IN_none_OUT_none(self): - #: test that no caching is requested when not data is available - #: on any side - req =_test_cache((), ()) - self.assertIsNone(req.status_out) - - def test_IN_Some_OUT_none(self): - #: test that no caching is requested when no data is available - #: server (origin) side - hin = [('if-modified-since','Sat, 14 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, ()) - self.assertIsNone(req.status_out) - hin = [('if-none-match','babar/huitre'), - ] - req = _test_cache(hin, ()) - self.assertIsNone(req.status_out) - hin = [('if-modified-since','Sat, 14 Apr 2012 14:39:32 GM'), - ('if-none-match','babar/huitre'), - ] - req = _test_cache(hin, ()) - self.assertIsNone(req.status_out) - - def test_IN_none_OUT_Some(self): - #: test that no caching is requested when no data is provided - #: by the client - hout = [('last-modified','Sat, 14 Apr 2012 14:39:32 GM'), - ] - req = _test_cache((), hout) - self.assertIsNone(req.status_out) - hout = [('etag','babar/huitre'), - ] - req = _test_cache((), hout) - self.assertIsNone(req.status_out) - hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), - ('etag','babar/huitre'), - ] - req = _test_cache((), hout) - self.assertIsNone(req.status_out) - - @tag('last_modified') - def test_last_modified_newer(self): - #: test the proper behavior of modification date only - # newer - hin = [('if-modified-since', 'Sat, 13 Apr 2012 14:39:32 GM'), - ] - hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'origin is newer than client') - - @tag('last_modified') - def test_last_modified_older(self): - # older - hin = [('if-modified-since', 'Sat, 15 Apr 2012 14:39:32 GM'), - ] - hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'origin is older than client') - - @tag('last_modified') - def test_last_modified_same(self): - # same - hin = [('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - hout = [('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'origin is equal to client') - - @tag('etag') - def test_etag_mismatch(self): - #: test the proper behavior of etag only - # etag mismatch - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'celestine'), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'etag mismatch') - - @tag('etag') - def test_etag_match(self): - # etag match - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'babar'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'etag match') - # etag match in multiple - hin = 
[('if-none-match', 'loutre'), - ('if-none-match', 'babar'), - ] - hout = [('etag', 'babar'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'etag match in multiple') - # client use "*" as etag - hin = [('if-none-match', '*'), - ] - hout = [('etag', 'babar'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'client use "*" as etag') - - @tag('etag', 'last_modified') - def test_both(self): - #: test the proper behavior of etag only - # both wrong - hin = [('if-none-match', 'babar'), - ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - hout = [('etag', 'loutre'), - ('last-modified', 'Sat, 15 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'both wrong') - - @tag('etag', 'last_modified') - def test_both_etag_mismatch(self): - # both etag mismatch - hin = [('if-none-match', 'babar'), - ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - hout = [('etag', 'loutre'), - ('last-modified', 'Sat, 13 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'both but etag mismatch') - - @tag('etag', 'last_modified') - def test_both_but_modified(self): - # both but modified - hin = [('if-none-match', 'babar'), - ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - hout = [('etag', 'babar'), - ('last-modified', 'Sat, 15 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'both but modified') - - @tag('etag', 'last_modified') - def test_both_ok(self): - # both ok - hin = [('if-none-match', 'babar'), - ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'), - ] - hout = [('etag', 'babar'), - ('last-modified', 'Sat, 13 Apr 2012 14:39:32 GM'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'both ok') - - @tag('etag', 'HEAD') - def test_head_verb(self): - #: check than FOUND 200 is properly raise without content on HEAD request - #: This logic does not really belong here :-/ - # modified - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'rhino/really-not-babar'), - ] - req = _test_cache(hin, hout, method='HEAD') - self.assertCache(None, req.status_out, 'modifier HEAD verb') - # not modified - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'babar'), - ] - req = _test_cache(hin, hout, method='HEAD') - self.assertCache(304, req.status_out, 'not modifier HEAD verb') - - @tag('etag', 'POST') - def test_post_verb(self): - # modified - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'rhino/really-not-babar'), - ] - req = _test_cache(hin, hout, method='POST') - self.assertCache(None, req.status_out, 'modifier HEAD verb') - # not modified - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'babar'), - ] - req = _test_cache(hin, hout, method='POST') - self.assertCache(412, req.status_out, 'not modifier HEAD verb') - - -alloworig = 'access-control-allow-origin' -allowmethods = 'access-control-allow-methods' -allowheaders = 'access-control-allow-headers' -allowcreds = 'access-control-allow-credentials' -exposeheaders = 'access-control-expose-headers' -maxage = 'access-control-max-age' - -requestmethod = 'access-control-request-method' -requestheaders = 'access-control-request-headers' - -class _BaseAccessHeadersTC(CubicWebTC): - - @contextlib.contextmanager - def options(self, **options): - for k, values in options.items(): - self.config.set_option(k, values) - try: - yield - finally: - for k in options: - self.config.set_option(k, '') - 
def check_no_cors(self, req): - self.assertEqual(None, req.get_response_header(alloworig)) - self.assertEqual(None, req.get_response_header(allowmethods)) - self.assertEqual(None, req.get_response_header(allowheaders)) - self.assertEqual(None, req.get_response_header(allowcreds)) - self.assertEqual(None, req.get_response_header(exposeheaders)) - self.assertEqual(None, req.get_response_header(maxage)) - - -class SimpleAccessHeadersTC(_BaseAccessHeadersTC): - - def test_noaccess(self): - with self.admin_access.web_request() as req: - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_noorigin(self): - with self.options(**{alloworig: '*'}): - with self.admin_access.web_request() as req: - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_origin_noaccess(self): - with self.admin_access.web_request() as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_origin_noaccess_bad_host(self): - with self.options(**{alloworig: '*'}): - with self.admin_access.web_request() as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'badhost.net') - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_explicit_origin_noaccess(self): - with self.options(**{alloworig: ['http://www.toto.org', 'http://othersite.fr']}): - with self.admin_access.web_request() as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'testing.fr') - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_origin_access(self): - with self.options(**{alloworig: '*'}): - with self.admin_access.web_request() as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'testing.fr') - data = self.app_handle_request(req) - self.assertEqual('http://www.cubicweb.org', - req.get_response_header(alloworig)) - - def test_explicit_origin_access(self): - with self.options(**{alloworig: ['http://www.cubicweb.org', 'http://othersite.fr']}): - with self.admin_access.web_request() as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'testing.fr') - data = self.app_handle_request(req) - self.assertEqual('http://www.cubicweb.org', - req.get_response_header(alloworig)) - - def test_origin_access_headers(self): - with self.options(**{alloworig: '*', - exposeheaders: ['ExposeHead1', 'ExposeHead2'], - allowheaders: ['AllowHead1', 'AllowHead2'], - allowmethods: ['GET', 'POST', 'OPTIONS']}): - with self.admin_access.web_request() as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'testing.fr') - data = self.app_handle_request(req) - self.assertEqual('http://www.cubicweb.org', - req.get_response_header(alloworig)) - self.assertEqual("true", - req.get_response_header(allowcreds)) - self.assertEqual(['ExposeHead1', 'ExposeHead2'], - req.get_response_header(exposeheaders)) - self.assertEqual(None, req.get_response_header(allowmethods)) - self.assertEqual(None, req.get_response_header(allowheaders)) - - -class PreflightAccessHeadersTC(_BaseAccessHeadersTC): 
- - def test_noaccess(self): - with self.admin_access.web_request(method='OPTIONS') as req: - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_noorigin(self): - with self.options(**{alloworig: '*'}): - with self.admin_access.web_request(method='OPTIONS') as req: - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_origin_noaccess(self): - with self.admin_access.web_request(method='OPTIONS') as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_origin_noaccess_bad_host(self): - with self.options(**{alloworig: '*'}): - with self.admin_access.web_request(method='OPTIONS') as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'badhost.net') - data = self.app_handle_request(req) - self.check_no_cors(req) - - def test_origin_access(self): - with self.options(**{alloworig: '*', - exposeheaders: ['ExposeHead1', 'ExposeHead2'], - allowheaders: ['AllowHead1', 'AllowHead2'], - allowmethods: ['GET', 'POST', 'OPTIONS']}): - with self.admin_access.web_request(method='OPTIONS') as req: - req.set_request_header('Origin', 'http://www.cubicweb.org') - # in these tests, base_url is http://testing.fr/cubicweb/ - req.set_request_header('Host', 'testing.fr') - req.set_request_header(requestmethod, 'GET') - - data = self.app_handle_request(req) - self.assertEqual(200, req.status_out) - self.assertEqual('http://www.cubicweb.org', - req.get_response_header(alloworig)) - self.assertEqual("true", - req.get_response_header(allowcreds)) - self.assertEqual(set(['GET', 'POST', 'OPTIONS']), - req.get_response_header(allowmethods)) - self.assertEqual(set(['AllowHead1', 'AllowHead2']), - req.get_response_header(allowheaders)) - self.assertEqual(None, - req.get_response_header(exposeheaders)) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_http_headers.py --- a/web/test/unittest_http_headers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -import unittest - -from cubicweb.web import http_headers - - -class TestGenerators(unittest.TestCase): - def test_generate_true_false(self): - for v in (True, 1, 'true', 'True', 'TRUE'): - self.assertEqual('true', http_headers.generateTrueFalse(v)) - for v in (False, 0, 'false', 'False', 'FALSE'): - self.assertEqual('false', http_headers.generateTrueFalse(v)) - - with self.assertRaises(ValueError): - http_headers.generateTrueFalse('any value') - -if __name__ == '__main__': - from unittest import main - main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_idownloadable.py --- a/web/test/unittest_idownloadable.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -from functools import partial - -from logilab.common.testlib import unittest_main - -from cubicweb.devtools.testlib import CubicWebTC, real_error_handling -from cubicweb import view -from cubicweb.predicates import is_instance - -class IDownloadableUser(view.EntityAdapter): - __regid__ = 'IDownloadable' - __select__ = is_instance('CWUser') - - def download_content_type(self): - """return MIME type of the downloadable content""" - return 'text/plain' - - def download_encoding(self): - """return encoding of the downloadable content""" - return 'ascii' - - def download_file_name(self): - """return file name of the downloadable content""" - return self.entity.name() + '.txt' - - def download_data(self): - return b'Babar is not dead!' - - -class BrokenIDownloadableGroup(IDownloadableUser): - __regid__ = 'IDownloadable' - __select__ = is_instance('CWGroup') - - def download_file_name(self): - return self.entity.name + '.txt' - - def download_data(self): - raise IOError() - -class IDownloadableTC(CubicWebTC): - - def setUp(self): - super(IDownloadableTC, self).setUp() - self.vreg.register(IDownloadableUser) - self.addCleanup(partial(self.vreg.unregister, IDownloadableUser)) - - def test_header_simple_case(self): - with self.admin_access.web_request() as req: - req.form['vid'] = 'download' - req.form['eid'] = str(req.user.eid) - data = self.ctrl_publish(req, 'view') - get = req.headers_out.getRawHeaders - self.assertEqual(['attachment;filename="admin.txt"'], - get('content-disposition')) - self.assertEqual(['text/plain;charset=ascii'], - get('content-type')) - self.assertEqual(b'Babar is not dead!', data) - - def test_header_with_space(self): - with self.admin_access.web_request() as req: - self.create_user(req, login=u'c c l a', password='babar') - req.cnx.commit() - with self.new_access(u'c c l a').web_request() as req: - req.form['vid'] = 'download' - req.form['eid'] = str(req.user.eid) - data = self.ctrl_publish(req,'view') - get = req.headers_out.getRawHeaders - self.assertEqual(['attachment;filename="c c l a.txt"'], - get('content-disposition')) - self.assertEqual(['text/plain;charset=ascii'], - get('content-type')) - self.assertEqual(b'Babar is not dead!', data) - - def test_header_with_space_and_comma(self): - with self.admin_access.web_request() as req: - self.create_user(req, login=u'c " l\\ a', password='babar') - req.cnx.commit() - with self.new_access(u'c " l\\ a').web_request() as req: - req.form['vid'] = 'download' - req.form['eid'] = str(req.user.eid) - data = self.ctrl_publish(req,'view') - get = req.headers_out.getRawHeaders - self.assertEqual([r'attachment;filename="c \" l\\ a.txt"'], - get('content-disposition')) - self.assertEqual(['text/plain;charset=ascii'], - get('content-type')) - self.assertEqual(b'Babar is not dead!', data) - - def test_header_unicode_filename(self): - with self.admin_access.web_request() as req: - self.create_user(req, login=u'cécilia', password='babar') - req.cnx.commit() - with self.new_access(u'cécilia').web_request() as req: - req.form['vid'] = 'download' - req.form['eid'] = str(req.user.eid) - self.ctrl_publish(req,'view') - get = req.headers_out.getRawHeaders - 
self.assertEqual(['''attachment;filename="ccilia.txt";filename*=utf-8''c%C3%A9cilia.txt'''], - get('content-disposition')) - - def test_header_unicode_long_filename(self): - name = u'Bèrte_hô_grand_nôm_ça_va_totallement_déborder_de_la_limite_là' - with self.admin_access.web_request() as req: - self.create_user(req, login=name, password='babar') - req.cnx.commit() - with self.new_access(name).web_request() as req: - req.form['vid'] = 'download' - req.form['eid'] = str(req.user.eid) - self.ctrl_publish(req,'view') - get = req.headers_out.getRawHeaders - self.assertEqual(["""attachment;filename="Brte_h_grand_nm_a_va_totallement_dborder_de_la_limite_l.txt";filename*=utf-8''B%C3%A8rte_h%C3%B4_grand_n%C3%B4m_%C3%A7a_va_totallement_d%C3%A9border_de_la_limite_l%C3%A0.txt"""], - get('content-disposition')) - - - def test_download_data_error(self): - self.vreg.register(BrokenIDownloadableGroup) - self.addCleanup(partial(self.vreg.unregister, BrokenIDownloadableGroup)) - with self.admin_access.web_request() as req: - req.form['vid'] = 'download' - req.form['eid'] = str(req.execute('CWGroup X WHERE X name "managers"')[0][0]) - with real_error_handling(self.app): - data = self.app_handle_request(req) - get = req.headers_out.getRawHeaders - self.assertEqual(['text/html;charset=UTF-8'], - get('content-type')) - self.assertEqual(None, - get('content-disposition')) - self.assertEqual(req.status_out, 500) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_magicsearch.py --- a/web/test/unittest_magicsearch.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,366 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Unit tests for cw.web.views.magicsearch""" - -import sys -from contextlib import contextmanager - -from six.moves import range - -from logilab.common.testlib import TestCase, unittest_main - -from rql import BadRQLQuery, RQLSyntaxError - -from cubicweb.devtools.testlib import CubicWebTC - - -translations = { - u'CWUser' : u"Utilisateur", - u'EmailAddress' : u"Adresse", - u'name' : u"nom", - u'alias' : u"nom", - u'surname' : u"nom", - u'firstname' : u"prénom", - u'state' : u"état", - u'address' : u"adresse", - u'use_email' : u"adel", - } - -def _translate(msgid): - return translations.get(msgid, msgid) - -def _ctxtranslate(ctx, msgid): - return _translate(msgid) - -from cubicweb.web.views.magicsearch import translate_rql_tree, QSPreProcessor, QueryTranslator - -class QueryTranslatorTC(CubicWebTC): - """test suite for QueryTranslatorTC""" - - @contextmanager - def proc(self): - with self.admin_access.web_request() as req: - self.vreg.config.translations = {'en': (_translate, _ctxtranslate)} - proc = self.vreg['components'].select('magicsearch', req) - proc = [p for p in proc.processors if isinstance(p, QueryTranslator)][0] - yield proc - - def test_basic_translations(self): - """tests basic translations (no ambiguities)""" - with self.proc() as proc: - rql = u"Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'" - rql, = proc.preprocess_query(rql) - self.assertEqual(rql, 'Any C WHERE C is EmailAddress, P use_email C, C address "Logilab"') - - def test_ambiguous_translations(self): - """tests possibly ambiguous translations""" - with self.proc() as proc: - rql = u"Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'" - rql, = proc.preprocess_query(rql) - self.assertEqual(rql, 'Any P WHERE P use_email C, C is EmailAddress, C alias "Logilab"') - rql = u"Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'" - rql, = proc.preprocess_query(rql) - self.assertEqual(rql, 'Any P WHERE P is CWUser, P use_email C, P surname "Smith"') - - -class QSPreProcessorTC(CubicWebTC): - """test suite for QSPreProcessor""" - - @contextmanager - def proc(self): - self.vreg.config.translations = {'en': (_translate, _ctxtranslate)} - with self.admin_access.web_request() as req: - proc = self.vreg['components'].select('magicsearch', req) - proc = [p for p in proc.processors if isinstance(p, QSPreProcessor)][0] - proc._cw = req - yield proc - - def test_entity_translation(self): - """tests QSPreProcessor._get_entity_name()""" - with self.proc() as proc: - translate = proc._get_entity_type - self.assertEqual(translate(u'EmailAddress'), "EmailAddress") - self.assertEqual(translate(u'emailaddress'), "EmailAddress") - self.assertEqual(translate(u'Adresse'), "EmailAddress") - self.assertEqual(translate(u'adresse'), "EmailAddress") - self.assertRaises(BadRQLQuery, translate, 'whatever') - - def test_attribute_translation(self): - """tests QSPreProcessor._get_attribute_name""" - with self.proc() as proc: - translate = proc._get_attribute_name - eschema = self.schema.eschema('CWUser') - self.assertEqual(translate(u'prénom', eschema), "firstname") - self.assertEqual(translate(u'nom', eschema), 'surname') - eschema = self.schema.eschema('EmailAddress') - self.assertEqual(translate(u'adresse', eschema), "address") - self.assertEqual(translate(u'nom', eschema), 'alias') - # should fail if the name is not an attribute for the given entity schema - self.assertRaises(BadRQLQuery, translate, 'whatever', eschema) - self.assertRaises(BadRQLQuery, translate, 'prénom', eschema) - - def test_one_word_query(self): - """tests the 
'one word shortcut queries'""" - with self.proc() as proc: - transform = proc._one_word_query - self.assertEqual(transform('123'), - ('Any X WHERE X eid %(x)s', {'x': 123}, 'x')) - self.assertEqual(transform('CWUser'), - ('CWUser C',)) - self.assertEqual(transform('Utilisateur'), - ('CWUser C',)) - self.assertEqual(transform('Adresse'), - ('EmailAddress E',)) - self.assertEqual(transform('adresse'), - ('EmailAddress E',)) - self.assertRaises(BadRQLQuery, transform, 'Workcases') - - def test_two_words_query(self): - """tests the 'two words shortcut queries'""" - with self.proc() as proc: - transform = proc._two_words_query - self.assertEqual(transform('CWUser', 'E'), - ("CWUser E",)) - self.assertEqual(transform('CWUser', 'Smith'), - ('CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': 'Smith'})) - self.assertEqual(transform('utilisateur', 'Smith'), - ('CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': 'Smith'})) - self.assertEqual(transform(u'adresse', 'Logilab'), - ('EmailAddress E ORDERBY FTIRANK(E) DESC WHERE E has_text %(text)s', {'text': 'Logilab'})) - self.assertEqual(transform(u'adresse', 'Logi%'), - ('EmailAddress E WHERE E alias LIKE %(text)s', {'text': 'Logi%'})) - self.assertRaises(BadRQLQuery, transform, "pers", "taratata") - - def test_three_words_query(self): - """tests the 'three words shortcut queries'""" - with self.proc() as proc: - transform = proc._three_words_query - self.assertEqual(transform('utilisateur', u'prénom', 'cubicweb'), - ('CWUser C WHERE C firstname %(text)s', {'text': 'cubicweb'})) - self.assertEqual(transform('utilisateur', 'nom', 'cubicweb'), - ('CWUser C WHERE C surname %(text)s', {'text': 'cubicweb'})) - self.assertEqual(transform(u'adresse', 'nom', 'cubicweb'), - ('EmailAddress E WHERE E alias %(text)s', {'text': 'cubicweb'})) - self.assertEqual(transform('EmailAddress', 'nom', 'cubicweb'), - ('EmailAddress E WHERE E alias %(text)s', {'text': 'cubicweb'})) - self.assertEqual(transform('utilisateur', u'prénom', 'cubicweb%'), - ('CWUser C WHERE C firstname LIKE %(text)s', {'text': 'cubicweb%'})) - # expanded shortcuts - self.assertEqual(transform('CWUser', 'use_email', 'Logilab'), - ('CWUser C ORDERBY FTIRANK(C1) DESC WHERE C use_email C1, C1 has_text %(text)s', {'text': 'Logilab'})) - self.assertEqual(transform('CWUser', 'use_email', '%Logilab'), - ('CWUser C WHERE C use_email C1, C1 alias LIKE %(text)s', {'text': '%Logilab'})) - self.assertRaises(BadRQLQuery, transform, 'word1', 'word2', 'word3') - - def test_quoted_queries(self): - """tests how quoted queries are handled""" - queries = [ - (u'Adresse "My own EmailAddress"', ('EmailAddress E ORDERBY FTIRANK(E) DESC WHERE E has_text %(text)s', {'text': u'My own EmailAddress'})), - (u'Utilisateur prénom "Jean Paul"', ('CWUser C WHERE C firstname %(text)s', {'text': 'Jean Paul'})), - (u'Utilisateur firstname "Jean Paul"', ('CWUser C WHERE C firstname %(text)s', {'text': 'Jean Paul'})), - (u'CWUser firstname "Jean Paul"', ('CWUser C WHERE C firstname %(text)s', {'text': 'Jean Paul'})), - ] - with self.proc() as proc: - transform = proc._quoted_words_query - for query, expected in queries: - self.assertEqual(transform(query), expected) - self.assertRaises(BadRQLQuery, transform, "unquoted rql") - self.assertRaises(BadRQLQuery, transform, 'pers "Jean Paul"') - self.assertRaises(BadRQLQuery, transform, 'CWUser firstname other "Jean Paul"') - - def test_process_query(self): - """tests how queries are processed""" - queries = [ - (u'Utilisateur', (u"CWUser C",)), - 
(u'Utilisateur P', (u"CWUser P",)), - (u'Utilisateur cubicweb', (u'CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': u'cubicweb'})), - (u'CWUser prénom cubicweb', (u'CWUser C WHERE C firstname %(text)s', {'text': 'cubicweb'},)), - ] - with self.proc() as proc: - for query, expected in queries: - self.assertEqual(proc.preprocess_query(query), expected) - self.assertRaises(BadRQLQuery, - proc.preprocess_query, 'Any X WHERE X is Something') - - - -## Processor Chains tests ############################################ - -class ProcessorChainTC(CubicWebTC): - """test suite for magic_search's processor chains""" - - @contextmanager - def proc(self): - self.vreg.config.translations = {'en': (_translate, _ctxtranslate)} - with self.admin_access.web_request() as req: - proc = self.vreg['components'].select('magicsearch', req) - yield proc - - def test_main_preprocessor_chain(self): - """tests QUERY_PROCESSOR""" - queries = [ - (u'foo', - ("Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s", {'text': u'foo'})), - # XXX this sounds like a language translator test... - # and it fails - (u'Utilisateur Smith', - ('CWUser C ORDERBY FTIRANK(C) DESC WHERE C has_text %(text)s', {'text': u'Smith'})), - (u'utilisateur nom Smith', - ('CWUser C WHERE C surname %(text)s', {'text': u'Smith'})), - (u'Any P WHERE P is Utilisateur, P nom "Smith"', - ('Any P WHERE P is CWUser, P surname "Smith"', None)), - ] - with self.proc() as proc: - for query, expected in queries: - rset = proc.process_query(query) - self.assertEqual((rset.rql, rset.args), expected) - - def test_accentuated_fulltext(self): - """we must be able to type accentuated characters in the search field""" - with self.proc() as proc: - rset = proc.process_query(u'écrire') - self.assertEqual(rset.rql, "Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s") - self.assertEqual(rset.args, {'text': u'écrire'}) - - def test_explicit_component(self): - with self.proc() as proc: - self.assertRaises(RQLSyntaxError, - proc.process_query, u'rql: CWUser E WHERE E noattr "Smith",') - self.assertRaises(BadRQLQuery, - proc.process_query, u'rql: CWUser E WHERE E noattr "Smith"') - rset = proc.process_query(u'text: utilisateur Smith') - self.assertEqual(rset.rql, 'Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s') - self.assertEqual(rset.args, {'text': u'utilisateur Smith'}) - - -class RQLSuggestionsBuilderTC(CubicWebTC): - def suggestions(self, rql): - with self.admin_access.web_request() as req: - rbs = self.vreg['components'].select('rql.suggestions', req) - return rbs.build_suggestions(rql) - - def test_no_restrictions_rql(self): - self.assertListEqual([], self.suggestions('')) - self.assertListEqual([], self.suggestions('An')) - self.assertListEqual([], self.suggestions('Any X')) - self.assertListEqual([], self.suggestions('Any X, Y')) - - def test_invalid_rql(self): - self.assertListEqual([], self.suggestions('blabla')) - self.assertListEqual([], self.suggestions('Any X WHERE foo, bar')) - - def test_is_rql(self): - self.assertListEqual(['Any X WHERE X is %s' % eschema - for eschema in sorted(self.vreg.schema.entities()) - if not eschema.final], - self.suggestions('Any X WHERE X is')) - - self.assertListEqual(['Any X WHERE X is Personne', 'Any X WHERE X is Project'], - self.suggestions('Any X WHERE X is P')) - - self.assertListEqual(['Any X WHERE X is Personne, Y is Personne', - 'Any X WHERE X is Personne, Y is Project'], - self.suggestions('Any X WHERE X is Personne, Y is P')) - - - def test_relations_rql(self): - 
self.assertListEqual(['Any X WHERE X is Personne, X ass A', - 'Any X WHERE X is Personne, X datenaiss A', - 'Any X WHERE X is Personne, X description A', - 'Any X WHERE X is Personne, X fax A', - 'Any X WHERE X is Personne, X nom A', - 'Any X WHERE X is Personne, X prenom A', - 'Any X WHERE X is Personne, X promo A', - 'Any X WHERE X is Personne, X salary A', - 'Any X WHERE X is Personne, X sexe A', - 'Any X WHERE X is Personne, X tel A', - 'Any X WHERE X is Personne, X test A', - 'Any X WHERE X is Personne, X titre A', - 'Any X WHERE X is Personne, X travaille A', - 'Any X WHERE X is Personne, X web A', - ], - self.suggestions('Any X WHERE X is Personne, X ')) - self.assertListEqual(['Any X WHERE X is Personne, X tel A', - 'Any X WHERE X is Personne, X test A', - 'Any X WHERE X is Personne, X titre A', - 'Any X WHERE X is Personne, X travaille A', - ], - self.suggestions('Any X WHERE X is Personne, X t')) - # try completion on selected - self.assertListEqual(['Any X WHERE X is Personne, Y is Societe, X tel A', - 'Any X WHERE X is Personne, Y is Societe, X test A', - 'Any X WHERE X is Personne, Y is Societe, X titre A', - 'Any X WHERE X is Personne, Y is Societe, X travaille Y', - ], - self.suggestions('Any X WHERE X is Personne, Y is Societe, X t')) - # invalid relation should not break - self.assertListEqual([], - self.suggestions('Any X WHERE X is Personne, X asdasd')) - - def test_attribute_vocabulary_rql(self): - self.assertListEqual(['Any X WHERE X is Personne, X promo "bon"', - 'Any X WHERE X is Personne, X promo "pasbon"', - ], - self.suggestions('Any X WHERE X is Personne, X promo "')) - self.assertListEqual(['Any X WHERE X is Personne, X promo "pasbon"', - ], - self.suggestions('Any X WHERE X is Personne, X promo "p')) - # "bon" should be considered complete, hence no suggestion - self.assertListEqual([], - self.suggestions('Any X WHERE X is Personne, X promo "bon"')) - # no valid vocabulary starts with "po" - self.assertListEqual([], - self.suggestions('Any X WHERE X is Personne, X promo "po')) - - def test_attribute_value_rql(self): - # suggestions should contain any possible value for - # a given attribute (limited to 10) - with self.admin_access.web_request() as req: - for i in range(15): - req.create_entity('Personne', nom=u'n%s' % i, prenom=u'p%s' % i) - req.cnx.commit() - self.assertListEqual(['Any X WHERE X is Personne, X nom "n0"', - 'Any X WHERE X is Personne, X nom "n1"', - 'Any X WHERE X is Personne, X nom "n10"', - 'Any X WHERE X is Personne, X nom "n11"', - 'Any X WHERE X is Personne, X nom "n12"', - 'Any X WHERE X is Personne, X nom "n13"', - 'Any X WHERE X is Personne, X nom "n14"', - 'Any X WHERE X is Personne, X nom "n2"', - 'Any X WHERE X is Personne, X nom "n3"', - 'Any X WHERE X is Personne, X nom "n4"', - 'Any X WHERE X is Personne, X nom "n5"', - 'Any X WHERE X is Personne, X nom "n6"', - 'Any X WHERE X is Personne, X nom "n7"', - 'Any X WHERE X is Personne, X nom "n8"', - 'Any X WHERE X is Personne, X nom "n9"', - ], - self.suggestions('Any X WHERE X is Personne, X nom "')) - self.assertListEqual(['Any X WHERE X is Personne, X nom "n1"', - 'Any X WHERE X is Personne, X nom "n10"', - 'Any X WHERE X is Personne, X nom "n11"', - 'Any X WHERE X is Personne, X nom "n12"', - 'Any X WHERE X is Personne, X nom "n13"', - 'Any X WHERE X is Personne, X nom "n14"', - ], - self.suggestions('Any X WHERE X is Personne, X nom "n1')) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_propertysheet.py --- 
a/web/test/unittest_propertysheet.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,63 +0,0 @@ -import os -from os.path import join, dirname -from shutil import rmtree -import errno -import tempfile -from unittest import TestCase, main - -from cubicweb.web.propertysheet import PropertySheet, lazystr - - -DATADIR = join(dirname(__file__), 'data') - - -class PropertySheetTC(TestCase): - - def setUp(self): - uicache = join(DATADIR, 'uicache') - try: - os.makedirs(uicache) - except OSError as err: - if err.errno != errno.EEXIST: - raise - self.cachedir = tempfile.mkdtemp(dir=uicache) - - def tearDown(self): - rmtree(self.cachedir) - - def data(self, filename): - return join(DATADIR, filename) - - def test(self): - ps = PropertySheet(self.cachedir, datadir_url='http://cwtest.com') - ps.load(self.data('sheet1.py')) - ps.load(self.data('sheet2.py')) - # defined by sheet1 - self.assertEqual(ps['logo'], 'http://cwtest.com/logo.png') - # defined by sheet1, overriden by sheet2 - self.assertEqual(ps['bgcolor'], '#FFFFFF') - # defined by sheet2 - self.assertEqual(ps['fontcolor'], 'black') - # defined by sheet1, extended by sheet2 - self.assertEqual(ps['stylesheets'], ['http://cwtest.com/cubicweb.css', - 'http://cwtest.com/mycube.css']) - # lazy string defined by sheet1 - self.assertIsInstance(ps['lazy'], lazystr) - self.assertEqual(str(ps['lazy']), '#FFFFFF') - # test compilation - self.assertEqual(ps.compile('a {bgcolor: %(bgcolor)s; size: 1%;}'), - 'a {bgcolor: #FFFFFF; size: 1%;}') - self.assertEqual(ps.process_resource(DATADIR, 'pouet.css'), - self.cachedir) - self.assertFalse(ps.need_reload()) - os.utime(self.data('sheet1.py'), None) - self.assertTrue(ps.need_reload()) - ps.reload() - self.assertFalse(ps.need_reload()) - ps.process_resource(DATADIR, 'pouet.css') # put in cache - os.utime(self.data('pouet.css'), None) - self.assertFalse(ps.need_reload()) - - -if __name__ == '__main__': - main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_reledit.py --- a/web/test/unittest_reledit.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,225 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" -mainly regression-preventing tests for reledit views -""" - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.views.uicfg import reledit_ctrl - -class ReleditMixinTC(object): - - def setup_database(self): - with self.admin_access.client_cnx() as cnx: - self.proj = cnx.create_entity('Project', title=u'cubicweb-world-domination').eid - self.tick = cnx.create_entity('Ticket', title=u'write the code').eid - self.toto = cnx.create_entity('Personne', nom=u'Toto').eid - cnx.commit() - -class ClickAndEditFormTC(ReleditMixinTC, CubicWebTC): - - def test_default_config(self): - reledit = {'title': '''
              cubicweb-world-domination
              ''', - 'long_desc': '''
              <not specified>
              ''', - 'manager': '''
              <not specified>
              ''', - 'composite_card11_2ttypes': """<not specified>""", - 'concerns': """<not specified>"""} - - with self.admin_access.web_request() as req: - proj = req.entity_from_eid(self.proj) - - for rschema, ttypes, role in proj.e_schema.relation_definitions(includefinal=True): - if rschema not in reledit: - continue - rtype = rschema.type - self.assertMultiLineEqual(reledit[rtype] % {'eid': self.proj}, - proj.view('reledit', rtype=rtype, role=role), - rtype) - - def test_default_forms(self): - self.skipTest('Need to check if this test should still run post reledit/doreledit merge') - doreledit = {'title': """
              cubicweb-world-domination
              """, - - 'long_desc': """
              <not specified>
              """, - - 'manager': """
              <not specified>
              """, - 'composite_card11_2ttypes': """<not specified>""", - 'concerns': """<not specified>""" - } - for rschema, ttypes, role in self.proj.e_schema.relation_definitions(includefinal=True): - if rschema not in doreledit: - continue - rtype = rschema.type - self.assertMultiLineEqual(doreledit[rtype] % {'eid': self.proj.eid, 'toto': self.toto.eid}, - self.proj.view('doreledit', rtype=rtype, role=role, - formid='edition' if rtype == 'long_desc' else 'base'), - rtype) - -class ClickAndEditFormUICFGTC(ReleditMixinTC, CubicWebTC): - - def setup_database(self): - super(ClickAndEditFormUICFGTC, self).setup_database() - with self.admin_access.client_cnx() as cnx: - cnx.execute('SET T concerns P WHERE T eid %(t)s, P eid %(p)s', {'t': self.tick, 'p': self.proj}) - cnx.execute('SET P manager T WHERE P eid %(p)s, T eid %(t)s', {'p': self.proj, 't': self.toto}) - cnx.commit() - - def test_with_uicfg(self): - old_rctl = reledit_ctrl._tagdefs.copy() - reledit_ctrl.tag_attribute(('Project', 'title'), - {'novalue_label': '', 'reload': True}) - reledit_ctrl.tag_subject_of(('Project', 'long_desc', '*'), - {'reload': True, 'edit_target': 'rtype', - 'novalue_label': u'<long_desc is required>'}) - reledit_ctrl.tag_subject_of(('Project', 'manager', '*'), - {'edit_target': 'related'}) - reledit_ctrl.tag_subject_of(('Project', 'composite_card11_2ttypes', '*'), - {'edit_target': 'related'}) - reledit_ctrl.tag_object_of(('Ticket', 'concerns', 'Project'), - {'edit_target': 'rtype'}) - reledit = { - 'title': """<div id="title-subject-%(eid)s-reledit" onmouseout="jQuery('#title-subject-%(eid)s').addClass('invisible')" onmouseover="jQuery('#title-subject-%(eid)s').removeClass('invisible')" class="releditField"><div id="title-subject-%(eid)s-value" class="editableFieldValue">cubicweb-world-domination</div><div id="title-subject-%(eid)s" class="editableField invisible"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm('base', %(eid)s, 'title', 'subject', 'title-subject-%(eid)s', true, '', 'edit_rtype');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""", - 'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('invisible')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('invisible')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue"><long_desc is required></div><div id="long_desc-subject-%(eid)s" class="editableField invisible"><div id="long_desc-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm('base', %(eid)s, 'long_desc', 'subject', 'long_desc-subject-%(eid)s', true, 'autolimited', 'edit_rtype');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""", - 'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('invisible')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('invisible')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/personne/%(toto)s" title="">Toto</a></div><div id="manager-subject-%(eid)s" class="editableField invisible"><div id="manager-subject-%(eid)s-update" class="editableField" 
onclick="cw.reledit.loadInlineEditionForm('edition', %(eid)s, 'manager', 'subject', 'manager-subject-%(eid)s', false, 'autolimited', 'edit_related');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div><div id="manager-subject-%(eid)s-delete" class="editableField" onclick="cw.reledit.loadInlineEditionForm('deleteconf', %(eid)s, 'manager', 'subject', 'manager-subject-%(eid)s', false, 'autolimited', 'delete');" title="click to delete this value"><img title="click to delete this value" src="http://testing.fr/cubicweb/data/cancel.png" alt="click to delete this value"/></div></div></div>""", - 'composite_card11_2ttypes': """<not specified>""", - 'concerns': """<div id="concerns-object-%(eid)s-reledit" onmouseout="jQuery('#concerns-object-%(eid)s').addClass('invisible')" onmouseover="jQuery('#concerns-object-%(eid)s').removeClass('invisible')" class="releditField"><div id="concerns-object-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/ticket/%(tick)s" title="">write the code</a></div><div id="concerns-object-%(eid)s" class="editableField invisible"><div id="concerns-object-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm('base', %(eid)s, 'concerns', 'object', 'concerns-object-%(eid)s', false, 'autolimited', 'edit_rtype');" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""" - } - with self.admin_access.web_request() as req: - proj = req.entity_from_eid(self.proj) - for rschema, ttypes, role in proj.e_schema.relation_definitions(includefinal=True): - if rschema not in reledit: - continue - rtype = rschema.type - self.assertMultiLineEqual(reledit[rtype] % {'eid': self.proj, 'toto': self.toto, 'tick': self.tick}, - proj.view('reledit', rtype=rtype, role=role), - rtype) - reledit_ctrl.clear() - reledit_ctrl._tagdefs.update(old_rctl) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_request.py --- a/web/test/unittest_request.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,89 +0,0 @@ -"""misc. 
unittests for utility functions -""" - -from logilab.common.testlib import TestCase, unittest_main - -from functools import partial - -from cubicweb.devtools.fake import FakeConfig - -from cubicweb.web.request import (CubicWebRequestBase, _parse_accept_header, - _mimetype_sort_key, _mimetype_parser, _charset_sort_key) - - - -class AcceptParserTC(TestCase): - - def test_parse_accept(self): - parse_accept_header = partial(_parse_accept_header, - value_parser=_mimetype_parser, - value_sort_key=_mimetype_sort_key) - # compare scores - self.assertEqual(parse_accept_header("audio/*;q=0.2, audio/basic"), - [( ('audio/basic', ('audio', 'basic', {}), 1.0 ) ), - ( ('audio/*', ('audio', '*', {}), 0.2 ) )]) - self.assertEqual(parse_accept_header("text/plain;q=0.5, text/html, text/x-dvi;q=0.8, text/x-c"), - [( ('text/html', ('text', 'html', {}), 1.0 ) ), - ( ('text/x-c', ('text', 'x-c', {}), 1.0 ) ), - ( ('text/x-dvi', ('text', 'x-dvi', {}), 0.8 ) ), - ( ('text/plain', ('text', 'plain', {}), 0.5 ) )]) - # compare mimetype precedence for a same given score - self.assertEqual(parse_accept_header("audio/*, audio/basic"), - [( ('audio/basic', ('audio', 'basic', {}), 1.0 ) ), - ( ('audio/*', ('audio', '*', {}), 1.0 ) )]) - self.assertEqual(parse_accept_header("text/*, text/html, text/html;level=1, */*"), - [( ('text/html', ('text', 'html', {'level': '1'}), 1.0 ) ), - ( ('text/html', ('text', 'html', {}), 1.0 ) ), - ( ('text/*', ('text', '*', {}), 1.0 ) ), - ( ('*/*', ('*', '*', {}), 1.0 ) )]) - # free party - self.assertEqual(parse_accept_header("text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"), - [( ('text/html', ('text', 'html', {'level': '1'}), 1.0 ) ), - ( ('text/html', ('text', 'html', {}), 0.7 ) ), - ( ('*/*', ('*', '*', {}), 0.5 ) ), - ( ('text/html', ('text', 'html', {'level': '2'}), 0.4 ) ), - ( ('text/*', ('text', '*', {}), 0.3 ) ) - ]) - # chrome sample header - self.assertEqual(parse_accept_header("application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"), - [( ('application/xhtml+xml', ('application', 'xhtml+xml', {}), 1.0 ) ), - ( ('application/xml', ('application', 'xml', {}), 1.0 ) ), - ( ('image/png', ('image', 'png', {}), 1.0 ) ), - ( ('text/html', ('text', 'html', {}), 0.9 ) ), - ( ('text/plain', ('text', 'plain', {}), 0.8 ) ), - ( ('*/*', ('*', '*', {}), 0.5 ) ), - ]) - - def test_parse_accept_language(self): - self.assertEqual(_parse_accept_header('fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3'), - [('fr', 'fr', 1.0), ('fr-fr', 'fr-fr', 0.8), - ('en-us', 'en-us', 0.5), ('en', 'en', 0.3)]) - - def test_parse_accept_charset(self): - parse_accept_header = partial(_parse_accept_header, - value_sort_key=_charset_sort_key) - self.assertEqual(parse_accept_header('ISO-8859-1,utf-8;q=0.7,*;q=0.7'), - [('ISO-8859-1', 'ISO-8859-1', 1.0), - ('utf-8', 'utf-8', 0.7), - ('*', '*', 0.7)]) - - def test_base_url(self): - dummy_vreg = type('DummyVreg', (object,), {})() - dummy_vreg.config = FakeConfig() - dummy_vreg.config['base-url'] = 'http://babar.com/' - dummy_vreg.config['https-url'] = 'https://toto.com/' - - req = CubicWebRequestBase(dummy_vreg, https=False) - self.assertEqual('http://babar.com/', req.base_url()) - self.assertEqual('http://babar.com/', req.base_url(False)) - self.assertEqual('https://toto.com/', req.base_url(True)) - - req = CubicWebRequestBase(dummy_vreg, https=True) - self.assertEqual('https://toto.com/', req.base_url()) - self.assertEqual('http://babar.com/', req.base_url(False)) - 
self.assertEqual('https://toto.com/', req.base_url(True)) - - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_uicfg.py --- a/web/test/unittest_uicfg.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,139 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -import copy -from logilab.common.testlib import tag -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web import uihelper, formwidgets as fwdgs -from cubicweb.web.views import uicfg - -abaa = uicfg.actionbox_appearsin_addmenu - -class UICFGTC(CubicWebTC): - - def test_default_actionbox_appearsin_addmenu_config(self): - self.assertFalse(abaa.etype_get('TrInfo', 'wf_info_for', 'object', 'CWUser')) - - - -class DefinitionOrderTC(CubicWebTC): - """This test check that when multiple definition could match a key, only - the more accurate apply""" - - def setUp(self): - super(DefinitionOrderTC, self).setUp() - for rtag in (uicfg.autoform_section, uicfg.autoform_field_kwargs): - rtag._old_tagdefs = copy.deepcopy(rtag._tagdefs) - new_def = ( - (('*', 'login', '*'), - {'formtype':'main', 'section':'hidden'}), - (('*', 'login', '*'), - {'formtype':'muledit', 'section':'hidden'}), - (('CWUser', 'login', '*'), - {'formtype':'main', 'section':'attributes'}), - (('CWUser', 'login', '*'), - {'formtype':'muledit', 'section':'attributes'}), - (('CWUser', 'login', 'String'), - {'formtype':'main', 'section':'inlined'}), - (('CWUser', 'login', 'String'), - {'formtype':'inlined', 'section':'attributes'}), - ) - for key, kwargs in new_def: - uicfg.autoform_section.tag_subject_of(key, **kwargs) - - def tearDown(self): - super(DefinitionOrderTC, self).tearDown() - for rtag in (uicfg.autoform_section, uicfg.autoform_field_kwargs): - rtag._tagdefs = rtag._old_tagdefs - - @tag('uicfg') - def test_definition_order_hidden(self): - result = uicfg.autoform_section.get('CWUser', 'login', 'String', 'subject') - expected = set(['main_inlined', 'muledit_attributes', 'inlined_attributes']) - self.assertSetEqual(result, expected) - - @tag('uihelper', 'order', 'func') - def test_uihelper_set_fields_order(self): - afk_get = uicfg.autoform_field_kwargs.get - self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {}) - uihelper.set_fields_order('CWUser', ('login', 'firstname', 'surname')) - self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {'order': 1}) - - @tag('uicfg', 'order', 'func') - def test_uicfg_primaryview_set_fields_order(self): - pvdc = uicfg.primaryview_display_ctrl - pvdc.set_fields_order('CWUser', ('login', 'firstname', 'surname')) - self.assertEqual(pvdc.get('CWUser', 'login', 'String', 'subject'), {'order': 0}) - self.assertEqual(pvdc.get('CWUser', 
'firstname', 'String', 'subject'), {'order': 1}) - self.assertEqual(pvdc.get('CWUser', 'surname', 'String', 'subject'), {'order': 2}) - - @tag('uihelper', 'kwargs', 'func') - def test_uihelper_set_field_kwargs(self): - afk_get = uicfg.autoform_field_kwargs.get - self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {}) - wdg = fwdgs.TextInput({'size': 30}) - uihelper.set_field_kwargs('CWUser', 'firstname', widget=wdg) - self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {'widget': wdg}) - - @tag('uihelper', 'hidden', 'func') - def test_uihelper_hide_fields(self): - # original conf : in_group is edited in 'attributes' section everywhere - section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') - self.assertCountEqual(section_conf, ['main_attributes', 'muledit_attributes']) - # hide field in main form - uihelper.hide_fields('CWUser', ('login', 'in_group')) - section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') - self.assertCountEqual(section_conf, ['main_hidden', 'muledit_attributes']) - # hide field in muledit form - uihelper.hide_fields('CWUser', ('login', 'in_group'), formtype='muledit') - section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') - self.assertCountEqual(section_conf, ['main_hidden', 'muledit_hidden']) - - @tag('uihelper', 'hidden', 'formconfig') - def test_uihelper_formconfig(self): - afk_get = uicfg.autoform_field_kwargs.get - class CWUserFormConfig(uihelper.FormConfig): - etype = 'CWUser' - hidden = ('in_group',) - fields_order = ('login', 'firstname') - section_conf = uicfg.autoform_section.get('CWUser', 'in_group', '*', 'subject') - self.assertCountEqual(section_conf, ['main_hidden', 'muledit_attributes']) - self.assertEqual(afk_get('CWUser', 'firstname', 'String', 'subject'), {'order': 1}) - - -class UicfgRegistryTC(CubicWebTC): - - def test_default_uicfg_object(self): - 'CW default ui config objects must be registered in uicfg registry' - onames = ('autoform_field', 'autoform_section', 'autoform_field_kwargs') - for oname in onames: - obj = self.vreg['uicfg'].select_or_none(oname) - self.assertTrue(obj is not None, '%s not found in uicfg registry' - % oname) - - def test_custom_uicfg(self): - ASRT = uicfg.AutoformSectionRelationTags - custom_afs = ASRT() - custom_afs.__select__ = ASRT.__select__ & ASRT.__select__ - self.vreg['uicfg'].register(custom_afs) - obj = self.vreg['uicfg'].select_or_none('autoform_section') - self.assertTrue(obj is custom_afs) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_urlpublisher.py --- a/web/test/unittest_urlpublisher.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""Unit tests for url publishing service""" - -import re - -from logilab.common.testlib import unittest_main - -from cubicweb.rset import ResultSet -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.fake import FakeRequest -from cubicweb.web import NotFound, Redirect, views -from cubicweb.web.views.urlrewrite import SimpleReqRewriter - - -class URLPublisherTC(CubicWebTC): - """test suite for QSPreProcessor""" - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, u'ÿsaÿe') - b = cnx.create_entity('BlogEntry', title=u'hell\'o', content=u'blabla') - # take care: Tag's name normalized to lower case - c = cnx.create_entity('Tag', name=u'yo') - cnx.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', - {'c':c.eid, 'b':b.eid}) - cnx.commit() - - def process(self, req, url): - return self.app.url_resolver.process(req, url) - - def test_raw_path(self): - """tests raw path resolution'""" - with self.admin_access.web_request() as req: - self.assertEqual(self.process(req, 'view'), ('view', None)) - self.assertEqual(self.process(req, 'edit'), ('edit', None)) - self.assertRaises(NotFound, self.process, req, 'whatever') - - def test_eid_path(self): - """tests eid path resolution""" - with self.admin_access.web_request() as req: - self.assertIsInstance(self.process(req, '123')[1], ResultSet) - self.assertEqual(len(self.process(req, '123')[1]), 1) - self.assertRaises(NotFound, self.process, req, '123/345') - self.assertRaises(NotFound, self.process, req, 'not_eid') - - def test_rest_path_etype(self): - """tests the rest path resolution""" - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'CWEType') - self.assertEqual(ctrl, 'view') - self.assertEqual(rset.description[0][0], 'CWEType') - self.assertEqual("Any X,AA,AB ORDERBY AB WHERE X is_instance_of CWEType, " - "X modification_date AA, X name AB", - rset.printable_rql()) - self.assertEqual(req.form['vid'], 'sameetypelist') - - def test_rest_path_by_attr(self): - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'CWUser/login/admin') - self.assertEqual(ctrl, 'view') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.description[0][0], 'CWUser') - self.assertEqual('Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' - 'X firstname AA, X login AB, X modification_date AC, ' - 'X surname AD, X login "admin"', - rset.printable_rql()) - - def test_rest_path_unique_attr(self): - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'cwuser/admin') - self.assertEqual(ctrl, 'view') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.description[0][0], 'CWUser') - self.assertEqual('Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' - 'X firstname AA, X login AB, X modification_date AC, ' - 'X surname AD, X login "admin"', - rset.printable_rql()) - self.assertEqual(req.form['vid'], 'primary') - - def test_rest_path_eid(self): - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'cwuser/eid/%s' % req.user.eid) - self.assertEqual(ctrl, 'view') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.description[0][0], 'CWUser') - self.assertEqual('Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' - 'X firstname AA, X login AB, X modification_date AC, ' - 'X surname AD, X eid %s' % rset[0][0], - rset.printable_rql()) - - def 
test_rest_path_non_ascii_paths(self): - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'CWUser/login/%C3%BFsa%C3%BFe') - self.assertEqual(ctrl, 'view') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.description[0][0], 'CWUser') - self.assertEqual(u'Any X,AA,AB,AC,AD WHERE X is_instance_of CWUser, ' - u'X firstname AA, X login AB, X modification_date AC, ' - u'X surname AD, X login "\xffsa\xffe"', - rset.printable_rql()) - - def test_rest_path_quoted_paths(self): - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'BlogEntry/title/hell%27o') - self.assertEqual(ctrl, 'view') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.description[0][0], 'BlogEntry') - self.assertEqual(u'Any X,AA,AB,AC WHERE X is_instance_of BlogEntry, ' - 'X creation_date AA, X modification_date AB, X title AC, ' - 'X title "hell\'o"', - rset.printable_rql()) - - def test_rest_path_use_vid_from_rset(self): - with self.admin_access.web_request(headers={'Accept': 'application/rdf+xml'}) as req: - views.VID_BY_MIMETYPE['application/rdf+xml'] = 'rdf' - try: - ctrl, rset = self.process(req, 'CWEType') - finally: - views.VID_BY_MIMETYPE.pop('application/rdf+xml') - self.assertEqual(req.form['vid'], 'rdf') - - def test_rest_path_errors(self): - with self.admin_access.web_request() as req: - self.assertRaises(NotFound, self.process, req, 'CWUser/eid/30000') - self.assertRaises(NotFound, self.process, req, 'Workcases') - self.assertRaises(NotFound, self.process, req, 'CWUser/inexistant_attribute/joe') - - def test_action_path(self): - """tests the action path resolution""" - with self.admin_access.web_request() as req: - self.assertRaises(Redirect, self.process, req, '1/edit') - self.assertRaises(Redirect, self.process, req, 'Tag/name/yo/edit') - self.assertRaises(Redirect, self.process, req, 'Tag/yo/edit') - self.assertRaises(NotFound, self.process, req, 'view/edit') - self.assertRaises(NotFound, self.process, req, '1/non_action') - self.assertRaises(NotFound, self.process, req, 'CWUser/login/admin/non_action') - - def test_regexp_path(self): - """tests the regexp path resolution""" - with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'add/Task') - self.assertEqual(ctrl, 'view') - self.assertEqual(rset, None) - self.assertEqual(req.form, {'etype': "Task", 'vid': "creation"}) - self.assertRaises(NotFound, self.process, req, 'add/foo/bar') - - def test_nonascii_path(self): - oldrules = SimpleReqRewriter.rules - SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo'))] - with self.admin_access.web_request() as req: - try: - path = str(FakeRequest().url_quote(u'été')) - ctrl, rset = self.process(req, path) - self.assertEqual(rset, None) - self.assertEqual(req.form, {'vid': "foo"}) - finally: - SimpleReqRewriter.rules = oldrules - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_urlrewrite.py --- a/web/test/unittest_urlrewrite.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,223 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. - -from six import text_type - -from logilab.common import tempattr - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.fake import FakeRequest - -from cubicweb.web.views.urlrewrite import (SimpleReqRewriter, SchemaBasedRewriter, - rgx, rgx_action) - - -class UrlRewriteTC(CubicWebTC): - - def test_auto_extend_rules(self): - class Rewriter(SimpleReqRewriter): - rules = [ - ('foo', dict(rql='Foo F')), - ('/index', dict(vid='index2')), - ] - rules = [] - for pattern, values in Rewriter.rules: - if hasattr(pattern, 'pattern'): - pattern = pattern.pattern - rules.append((pattern, values)) - self.assertListEqual(rules, [ - ('foo' , dict(rql='Foo F')), - ('/index' , dict(vid='index2')), - ('/_', dict(vid='manage')), - ('/_registry', dict(vid='registry')), - ('/schema', dict(vid='schema')), - ('/myprefs', dict(vid='propertiesform')), - ('/siteconfig', dict(vid='systempropertiesform')), - ('/siteinfo', dict(vid='siteinfo')), - ('/manage', dict(vid='manage')), - ('/notfound', dict(vid='404')), - ('/error', dict(vid='error')), - ('/sparql', dict(vid='sparql')), - ('/processinfo', dict(vid='processinfo')), - ('/cwuser$', {'vid': 'cw.users-and-groups-management', - 'tab': 'cw_users_management'}), - ('/cwgroup$', {'vid': 'cw.users-and-groups-management', - 'tab': 'cw_groups_management'}), - ('/cwsource$', {'vid': 'cw.sources-management'}), - ('/schema/([^/]+?)/?$', {'rql': r'Any X WHERE X is CWEType, X name "\1"', 'vid': 'primary'}), - ('/add/([^/]+?)/?$' , dict(vid='creation', etype=r'\1')), - ('/doc/images/(.+?)/?$', dict(fid='\\1', vid='wdocimages')), - ('/doc/?$', dict(fid='main', vid='wdoc')), - ('/doc/(.+?)/?$', dict(fid='\\1', vid='wdoc')), - # now in SchemaBasedRewriter - #('/search/(.+)$', dict(rql=r'Any X WHERE X has_text "\1"')), - ]) - - - def test_no_extend_rules(self): - class Rewriter(SimpleReqRewriter): - ignore_baseclass_rules = True - rules = [ - ('foo', dict(rql='Foo F')), - ('/index', dict(vid='index2')), - ] - self.assertListEqual(Rewriter.rules, [ - ('foo' , dict(rql='Foo F')), - ('/index' , dict(vid='index2')), - ]) - - def test_basic_transformation(self): - """test simple string-based rewrite""" - req = FakeRequest() - rewriter = SimpleReqRewriter(req) - self.assertRaises(KeyError, rewriter.rewrite, req, '/view?vid=whatever') - self.assertEqual(req.form, {}) - rewriter.rewrite(req, '/index') - self.assertEqual(req.form, {'vid' : "index"}) - - def test_regexp_transformation(self): - """test regexp-based rewrite""" - req = FakeRequest() - rewriter = SimpleReqRewriter(req) - rewriter.rewrite(req, '/add/Task') - self.assertEqual(req.form, {'vid' : "creation", 'etype' : "Task"}) - req = FakeRequest() - rewriter.rewrite(req, '/add/Task/') - self.assertEqual(req.form, {'vid' : "creation", 'etype' : "Task"}) - - def test_inheritance(self): - BaseTransition = self.vreg['etypes'].etype_class('BaseTransition') - with self.admin_access.web_request() as req: - x = req.create_entity('WorkflowTransition', name=u'test') - ctrlid, rset = self.app.url_resolver.process(req, 'basetransition/%s' % x.eid) - self.assertEqual(ctrlid, 'view') - self.assertEqual(x.eid, rset[0][0]) - # 
cw_rest_attr_info is cached but clear_cache doesn't like cached class - # method - del BaseTransition._cw_rest_attr_info_cache_ - try: - with tempattr(BaseTransition, 'rest_attr', 'name'): - - ctrlid, rset = self.app.url_resolver.process(req, 'basetransition/%s' % x.name) - self.assertEqual(ctrlid, 'view') - self.assertEqual(x.eid, rset[0][0]) - finally: - del BaseTransition._cw_rest_attr_info_cache_ - - - -class RgxActionRewriteTC(CubicWebTC): - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - p1 = self.create_user(cnx, u'user1') - p1.cw_set(firstname=u'joe', surname=u'Dalton') - p2 = self.create_user(cnx, u'user2') - p2.cw_set(firstname=u'jack', surname=u'Dalton') - self.p1eid = p1.eid - cnx.commit() - - def test_rgx_action_with_transforms(self): - class TestSchemaBasedRewriter(SchemaBasedRewriter): - rules = [ - (rgx('/(?P<sn>\w+)/(?P<fn>\w+)'), - rgx_action(r'Any X WHERE X surname %(sn)s, ' - 'X firstname %(fn)s', - argsgroups=('sn', 'fn'), - transforms={'sn' : text_type.capitalize, - 'fn' : text_type.lower,})), - ] - with self.admin_access.web_request() as req: - rewriter = TestSchemaBasedRewriter(req) - _pmid, rset = rewriter.rewrite(req, u'/DaLToN/JoE') - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], self.p1eid) - self.assertEqual(rset.description[0][0], 'CWUser') - - def test_inheritance_precedence(self): - RQL1 = 'Any C WHERE C is CWEType' - RQL2 = 'Any C WHERE C is CWUser' - - class BaseRewriter(SchemaBasedRewriter): - rules = [ - (rgx('/collector(.*)'), - rgx_action(rql=RQL1, - form=dict(vid='baseindex')), - ), - ] - class Rewriter(BaseRewriter): - rules = [ - (rgx('/collector/something(/?)'), - rgx_action(rql=RQL2, - form=dict(vid='index')), - ), - ] - - with self.admin_access.web_request() as req: - rewriter = Rewriter(req) - _pmid, rset = rewriter.rewrite(req, '/collector') - self.assertEqual(rset.rql, RQL1) - self.assertEqual(req.form, {'vid' : "baseindex"}) - _pmid, rset = rewriter.rewrite(req, '/collector/something') - self.assertEqual(rset.rql, RQL2) - self.assertEqual(req.form, {'vid' : "index"}) - _pmid, rset = rewriter.rewrite(req, '/collector/something/') - self.assertEqual(req.form, {'vid' : "index"}) - self.assertEqual(rset.rql, RQL2) - _pmid, rset = rewriter.rewrite(req, '/collector/somethingelse/') - self.assertEqual(rset.rql, RQL1) - self.assertEqual(req.form, {'vid' : "baseindex"}) - - def test_inheritance_precedence_same_rgx(self): - RQL1 = 'Any C WHERE C is CWEType' - RQL2 = 'Any C WHERE C is CWUser' - - class BaseRewriter(SchemaBasedRewriter): - rules = [ - (rgx('/collector(.*)'), - rgx_action(rql=RQL1, - form=dict(vid='baseindex')), - ), - ] - class Rewriter(BaseRewriter): - rules = [ - (rgx('/collector(.*)'), - rgx_action(rql=RQL2, - form=dict(vid='index')), - ), - ] - - with self.admin_access.web_request() as req: - rewriter = Rewriter(req) - _pmid, rset = rewriter.rewrite(req, '/collector') - self.assertEqual(rset.rql, RQL2) - self.assertEqual(req.form, {'vid' : "index"}) - _pmid, rset = rewriter.rewrite(req, '/collector/something') - self.assertEqual(rset.rql, RQL2) - self.assertEqual(req.form, {'vid' : "index"}) - _pmid, rset = rewriter.rewrite(req, '/collector/something/') - self.assertEqual(req.form, {'vid' : "index"}) - self.assertEqual(rset.rql, RQL2) - _pmid, rset = rewriter.rewrite(req, '/collector/somethingelse/') - self.assertEqual(rset.rql, RQL2) - self.assertEqual(req.form, {'vid' : "index"}) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 
058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_actions.py --- a/web/test/unittest_views_actions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,64 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. - -from logilab.common.testlib import unittest_main - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.views import actions, uicfg - - -class ActionsTC(CubicWebTC): - def test_view_action(self): - with self.admin_access.web_request(vid='rss', rql='CWUser X') as req: - rset = req.execute('CWUser X') - actions = self.vreg['actions'].poss_visible_objects(req, rset=rset) - vaction = [action for action in actions if action.__regid__ == 'view'][0] - self.assertEqual(vaction.url(), 'http://testing.fr/cubicweb/view?rql=CWUser%20X') - - def test_has_editable_relations(self): - """ensure has_editable_relation predicate used by ModifyAction - return positive score if there is only some inlined forms - """ - # The schema only allows the anonymous user to modify his/her own - # EmailAddress if it is set, not to create one. Since the 'anon' CWUser - # entity is created without any associated EmailAddress entities, there - # are no attributes nor relations that can be edited: the "modify" - # action should not appear. - with self.new_access('anon').web_request() as req: - predicate = actions.has_editable_relation() - self.assertEqual(predicate(None, req, rset=req.user.as_rset()), - 0) - # being allowed to 'add' the relation is not enough - use_email = self.schema['use_email'].rdefs['CWUser', 'EmailAddress'] - with self.temporary_permissions((use_email, {'add': ('guests',)})): - with self.new_access('anon').web_request() as req: - predicate = actions.has_editable_relation() - self.assertEqual(predicate(None, req, rset=req.user.as_rset()), - 0) - # if we also allow creating the target etype, then the "modify" action - # should appear - with self.temporary_permissions((use_email, {'add': ('guests',)}), - EmailAddress={'add': ('guests',)}): - with self.new_access('anon').web_request() as req: - predicate = actions.has_editable_relation() - self.assertEqual(predicate(None, req, rset=req.user.as_rset()), - 1) - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_apacherewrite.py --- a/web/test/unittest_views_apacherewrite.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,60 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -""" - -""" -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.web.views.apacherewrite import * - -class ApacheURLRewriteTC(TestCase): - - def test(self): - class MyAppRules(ApacheURLRewrite): - rules = [ - RewriteCond('logilab\.fr', match='host', - rules=[('/(.*)', r'http://www.logilab.fr/\1')], - action='redirect'), - RewriteCond('(www)\.logilab\.fr', match='host', action='stop'), - RewriteCond('/(data|json)/', match='path', action='stop'), - RewriteCond('(?P<cat>.*)\.logilab\.fr', match='host', - rules=[('/(.*)', r'/m_%(cat)s/\1')]), - ] - urlrewriter = MyAppRules() - req = None # not used in the above rules, so keep a simple TestCase here - try: - urlrewriter.rewrite('logilab.fr', '/whatever', req) - self.fail('redirect exception expected') - except Redirect as ex: - self.assertEqual(ex.location, 'http://www.logilab.fr/whatever') - self.assertEqual(urlrewriter.rewrite('www.logilab.fr', '/whatever', req), - '/whatever') - self.assertEqual(urlrewriter.rewrite('www.logilab.fr', '/json/bla', req), - '/json/bla') - self.assertEqual(urlrewriter.rewrite('abcd.logilab.fr', '/json/bla', req), - '/json/bla') - self.assertEqual(urlrewriter.rewrite('abcd.logilab.fr', '/data/bla', req), - '/data/bla') - self.assertEqual(urlrewriter.rewrite('abcd.logilab.fr', '/whatever', req), - '/m_abcd/whatever') - self.assertEqual(urlrewriter.rewrite('abcd.fr', '/whatever', req), - '/whatever') - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_basecontrollers.py --- a/web/test/unittest_views_basecontrollers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1087 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
-"""cubicweb.web.views.basecontrollers unit tests""" - -from six import text_type -from six.moves.urllib.parse import urlsplit, urlunsplit, urljoin, parse_qs - -import lxml - -from logilab.common.testlib import unittest_main -from logilab.common.decorators import monkeypatch - -from cubicweb import Binary, NoSelectableObject, ValidationError -from cubicweb.schema import RRQLExpression -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.webtest import CubicWebTestTC -from cubicweb.utils import json_dumps -from cubicweb.uilib import rql_for_eid -from cubicweb.web import Redirect, RemoteCallFailed -import cubicweb.server.session -from cubicweb.server.session import Connection as OldConnection -from cubicweb.web.views.autoform import get_pending_inserts, get_pending_deletes -from cubicweb.web.views.basecontrollers import JSonController, xhtmlize, jsonize -from cubicweb.web.views.ajaxcontroller import ajaxfunc, AjaxFunction -import cubicweb.transaction as tx -from cubicweb.server.hook import Hook, Operation -from cubicweb.predicates import is_instance - - -class ViewControllerTC(CubicWebTestTC): - def test_view_ctrl_with_valid_cache_headers(self): - resp = self.webapp.get('/manage') - self.assertEqual(resp.etag, 'manage/guests') - self.assertEqual(resp.status_code, 200) - cache_headers = {'if-modified-since': resp.headers['Last-Modified'], - 'if-none-match': resp.etag} - resp = self.webapp.get('/manage', headers=cache_headers) - self.assertEqual(resp.status_code, 304) - self.assertEqual(len(resp.body), 0) - - -def req_form(user): - return {'eid': [str(user.eid)], - '_cw_entity_fields:%s' % user.eid: '_cw_generic_field', - '__type:%s' % user.eid: user.__regid__ - } - - -class EditControllerTC(CubicWebTC): - - def setUp(self): - CubicWebTC.setUp(self) - self.assertIn('users', self.schema.eschema('CWGroup').get_groups('read')) - - def tearDown(self): - CubicWebTC.tearDown(self) - self.assertIn('users', self.schema.eschema('CWGroup').get_groups('read')) - - def test_noparam_edit(self): - """check behaviour of this controller without any form parameter - """ - with self.admin_access.web_request() as req: - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - self.assertEqual(cm.exception.errors, {None: u'no selected entities'}) - - def test_validation_unique(self): - """test creation of two linked entities - """ - with self.admin_access.web_request() as req: - req.form = {'eid': 'X', '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,upassword-subject', - 'login-subject:X': u'admin', - 'upassword-subject:X': u'toto', - 'upassword-subject-confirm:X': u'toto', - } - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - cm.exception.translate(text_type) - self.assertEqual({'login-subject': 'the value "admin" is already used, use another one'}, - cm.exception.errors) - - def test_simultaneous_edition_only_one_commit(self): - """ Allow two simultaneous edit view of the same entity as long as only one commits - """ - with self.admin_access.web_request() as req: - e = req.create_entity('BlogEntry', title=u'cubicweb.org', content=u"hop") - expected_path = e.rest_path() - req.cnx.commit() - form = self.vreg['views'].select('edition', req, rset=e.as_rset(), row=0) - html_form = lxml.html.fromstring(form.render(w=None, action='edit')).forms[0] - - with self.admin_access.web_request() as req2: - form2 = self.vreg['views'].select('edition', req, rset=e.as_rset(), row=0) - - with 
self.admin_access.web_request(**dict(html_form.form_values())) as req: - path, args = self.expect_redirect_handle_request(req, path='edit') - self.assertEqual(path, expected_path) - - def test_simultaneous_edition_refuse_second_commit(self): - """ Disallow committing changes to an entity edited in between """ - with self.admin_access.web_request() as req: - e = req.create_entity('BlogEntry', title=u'cubicweb.org', content=u"hop") - eid = e.eid - req.cnx.commit() - form = self.vreg['views'].select('edition', req, rset=e.as_rset(), row=0) - html_form = lxml.html.fromstring(form.render(w=None, action='edit')).forms[0] - - with self.admin_access.web_request() as req2: - e = req2.entity_from_eid(eid) - e.cw_set(content = u"hip") - req2.cnx.commit() - - form_field_name = "content-subject:%d" % eid - form_values = dict(html_form.form_values()) - assert form_field_name in form_values - form_values[form_field_name] = u'yep' - with self.admin_access.web_request(**form_values) as req: - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - reported_eid, dict_info = cm.exception.args - self.assertEqual(reported_eid, eid) - self.assertIn(None, dict_info) - self.assertIn("has changed since you started to edit it.", dict_info[None]) - - def test_user_editing_itself(self): - """checking that a manager user can edit itself - """ - with self.admin_access.web_request() as req: - user = req.user - groupeids = [eid for eid, in req.execute('CWGroup G WHERE G name ' - 'in ("managers", "users")')] - groups = [text_type(eid) for eid in groupeids] - eid = text_type(user.eid) - req.form = { - 'eid': eid, '__type:'+eid: 'CWUser', - '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject,in_group-subject', - 'login-subject:'+eid: text_type(user.login), - 'surname-subject:'+eid: u'Th\xe9nault', - 'firstname-subject:'+eid: u'Sylvain', - 'in_group-subject:'+eid: groups, - } - self.expect_redirect_handle_request(req, 'edit') - e = req.execute('Any X WHERE X eid %(x)s', - {'x': user.eid}).get_entity(0, 0) - self.assertEqual(e.firstname, u'Sylvain') - self.assertEqual(e.surname, u'Th\xe9nault') - self.assertEqual(e.login, user.login) - self.assertEqual([g.eid for g in e.in_group], groupeids) - - def test_user_can_change_its_password(self): - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, u'user') - cnx.commit() - with self.new_access(u'user').web_request() as req: - eid = text_type(req.user.eid) - req.form = { - 'eid': eid, '__maineid' : eid, - '__type:'+eid: 'CWUser', - '_cw_entity_fields:'+eid: 'upassword-subject', - 'upassword-subject:'+eid: 'tournicoton', - 'upassword-subject-confirm:'+eid: 'tournicoton', - } - path, params = self.expect_redirect_handle_request(req, 'edit') - req.cnx.commit() # commit to check we don't get late validation error for instance - self.assertEqual(path, 'cwuser/user') - self.assertNotIn('vid', params) - - def test_user_editing_itself_no_relation(self): - """checking we can edit an entity without specifying some required - relations (meaning no changes) - """ - with self.admin_access.web_request() as req: - user = req.user - groupeids = [g.eid for g in user.in_group] - eid = text_type(user.eid) - req.form = { - 'eid': eid, - '__type:'+eid: 'CWUser', - '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject', - 'login-subject:'+eid: text_type(user.login), - 'firstname-subject:'+eid: u'Th\xe9nault', - 'surname-subject:'+eid: u'Sylvain', - } - self.expect_redirect_handle_request(req, 'edit') - e = req.execute('Any X 
WHERE X eid %(x)s', - {'x': user.eid}).get_entity(0, 0) - self.assertEqual(e.login, user.login) - self.assertEqual(e.firstname, u'Th\xe9nault') - self.assertEqual(e.surname, u'Sylvain') - self.assertEqual([g.eid for g in e.in_group], groupeids) - self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') - - - def test_create_multiple_linked(self): - with self.admin_access.web_request() as req: - gueid = req.execute('CWGroup G WHERE G name "users"')[0][0] - req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', - '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,upassword-subject,surname-subject,in_group-subject', - 'login-subject:X': u'adim', - 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', - 'surname-subject:X': u'Di Mascio', - 'in_group-subject:X': text_type(gueid), - - '__type:Y': 'EmailAddress', - '_cw_entity_fields:Y': 'address-subject,use_email-object', - 'address-subject:Y': u'dima@logilab.fr', - 'use_email-object:Y': 'X', - } - path, _params = self.expect_redirect_handle_request(req, 'edit') - # should be redirected on the created person - self.assertEqual(path, 'cwuser/adim') - e = req.execute('Any P WHERE P surname "Di Mascio"').get_entity(0, 0) - self.assertEqual(e.surname, 'Di Mascio') - email = e.use_email[0] - self.assertEqual(email.address, 'dima@logilab.fr') - - def test_create_mandatory_inlined(self): - with self.admin_access.web_request() as req: - req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', - - '__type:X': 'Salesterm', - '_cw_entity_fields:X': '', - - '__type:Y': 'File', - '_cw_entity_fields:Y': 'data-subject,described_by_test-object', - 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), - 'described_by_test-object:Y': 'X', - } - path, _params = self.expect_redirect_handle_request(req, 'edit') - self.assertTrue(path.startswith('salesterm/'), path) - eid = path.split('/')[1] - salesterm = req.entity_from_eid(eid) - # The NOT NULL constraint of mandatory relation implies that the File - # must be created before the Salesterm, otherwise Salesterm insertion - # will fail. - # NOTE: sqlite does have NOT NULL constraint, unlike Postgres so the - # insertion does not fail and we have to check dumbly that File is - # created before. - self.assertGreater(salesterm.eid, salesterm.described_by_test[0].eid) - - def test_create_mandatory_inlined2(self): - with self.admin_access.web_request() as req: - req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', - - '__type:X': 'Salesterm', - '_cw_entity_fields:X': 'described_by_test-subject', - 'described_by_test-subject:X': 'Y', - - '__type:Y': 'File', - '_cw_entity_fields:Y': 'data-subject', - 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), - } - path, _params = self.expect_redirect_handle_request(req, 'edit') - self.assertTrue(path.startswith('salesterm/'), path) - eid = path.split('/')[1] - salesterm = req.entity_from_eid(eid) - # The NOT NULL constraint of mandatory relation implies that the File - # must be created before the Salesterm, otherwise Salesterm insertion - # will fail. - # NOTE: sqlite does have NOT NULL constraint, unlike Postgres so the - # insertion does not fail and we have to check dumbly that File is - # created before. - self.assertGreater(salesterm.eid, salesterm.described_by_test[0].eid) - - def test_edit_mandatory_inlined3_object(self): - # non regression test for #3120495. 
Without the fix, leads to - # "unhashable type: 'list'" error - with self.admin_access.web_request() as req: - cwrelation = text_type(req.execute('CWEType X WHERE X name "CWSource"')[0][0]) - req.form = {'eid': [cwrelation], '__maineid' : cwrelation, - - '__type:'+cwrelation: 'CWEType', - '_cw_entity_fields:'+cwrelation: 'to_entity-object', - 'to_entity-object:'+cwrelation: [9999, 9998], - } - with req.cnx.deny_all_hooks_but(): - path, _params = self.expect_redirect_handle_request(req, 'edit') - self.assertTrue(path.startswith('cwetype/CWSource'), path) - - def test_edit_multiple_linked(self): - with self.admin_access.web_request() as req: - peid = text_type(self.create_user(req, u'adim').eid) - req.form = {'eid': [peid, 'Y'], '__maineid': peid, - - '__type:'+peid: u'CWUser', - '_cw_entity_fields:'+peid: u'surname-subject', - 'surname-subject:'+peid: u'Di Masci', - - '__type:Y': u'EmailAddress', - '_cw_entity_fields:Y': u'address-subject,use_email-object', - 'address-subject:Y': u'dima@logilab.fr', - 'use_email-object:Y': peid, - } - path, _params = self.expect_redirect_handle_request(req, 'edit') - # should be redirected on the created person - self.assertEqual(path, 'cwuser/adim') - e = req.execute('Any P WHERE P surname "Di Masci"').get_entity(0, 0) - email = e.use_email[0] - self.assertEqual(email.address, 'dima@logilab.fr') - - # with self.admin_access.web_request() as req: - emaileid = text_type(email.eid) - req.form = {'eid': [peid, emaileid], - - '__type:'+peid: u'CWUser', - '_cw_entity_fields:'+peid: u'surname-subject', - 'surname-subject:'+peid: u'Di Masci', - - '__type:'+emaileid: u'EmailAddress', - '_cw_entity_fields:'+emaileid: u'address-subject,use_email-object', - 'address-subject:'+emaileid: u'adim@logilab.fr', - 'use_email-object:'+emaileid: peid, - } - self.expect_redirect_handle_request(req, 'edit') - email.cw_clear_all_caches() - self.assertEqual(email.address, 'adim@logilab.fr') - - def test_password_confirm(self): - """test creation of two linked entities - """ - with self.admin_access.web_request() as req: - user = req.user - req.form = {'eid': 'X', - '__cloned_eid:X': text_type(user.eid), '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,upassword-subject', - 'login-subject:X': u'toto', - 'upassword-subject:X': u'toto', - } - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - self.assertEqual({'upassword-subject': u'password and confirmation don\'t match'}, - cm.exception.errors) - req.form = {'__cloned_eid:X': text_type(user.eid), - 'eid': 'X', '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,upassword-subject', - 'login-subject:X': u'toto', - 'upassword-subject:X': u'toto', - 'upassword-subject-confirm:X': u'tutu', - } - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - self.assertEqual({'upassword-subject': u'password and confirmation don\'t match'}, - cm.exception.errors) - - - def test_interval_bound_constraint_success(self): - with self.admin_access.repo_cnx() as cnx: - feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s', - {'data': Binary(b'yo')})[0][0] - cnx.commit() - - with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': ['X'], - '__type:X': 'Salesterm', - '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', - 'amount-subject:X': u'-10', - 'described_by_test-subject:X': text_type(feid), - } - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - cm.exception.translate(text_type) - 
self.assertEqual({'amount-subject': 'value -10 must be >= 0'}, - cm.exception.errors) - - with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': ['X'], - '__type:X': 'Salesterm', - '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', - 'amount-subject:X': u'110', - 'described_by_test-subject:X': text_type(feid), - } - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - cm.exception.translate(text_type) - self.assertEqual(cm.exception.errors, {'amount-subject': 'value 110 must be <= 100'}) - - with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': ['X'], - '__type:X': 'Salesterm', - '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', - 'amount-subject:X': u'10', - 'described_by_test-subject:X': text_type(feid), - } - self.expect_redirect_handle_request(req, 'edit') - # should be redirected on the created - #eid = params['rql'].split()[-1] - e = req.execute('Salesterm X').get_entity(0, 0) - self.assertEqual(e.amount, 10) - - def test_interval_bound_constraint_validateform(self): - """Test the FormValidatorController controller on entity with - constrained attributes""" - with self.admin_access.repo_cnx() as cnx: - feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s', - {'data': Binary(b'yo')})[0][0] - seid = cnx.create_entity('Salesterm', amount=0, described_by_test=feid).eid - cnx.commit() - - # ensure a value that violate a constraint is properly detected - with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': [text_type(seid)], - '__type:%s'%seid: 'Salesterm', - '_cw_entity_fields:%s'%seid: 'amount-subject', - 'amount-subject:%s'%seid: u'-10', - } - self.assertMultiLineEqual('''<script type="text/javascript"> - window.parent.handleFormValidationResponse('entityForm', null, null, [false, [%s, {"amount-subject": "value -10 must be >= 0"}], null], null); -</script>'''%seid, self.ctrl_publish(req, 'validateform').decode('ascii')) - - # ensure a value that comply a constraint is properly processed - with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': [text_type(seid)], - '__type:%s'%seid: 'Salesterm', - '_cw_entity_fields:%s'%seid: 'amount-subject', - 'amount-subject:%s'%seid: u'20', - } - self.assertMultiLineEqual('''<script type="text/javascript"> - window.parent.handleFormValidationResponse('entityForm', null, null, [true, "http://testing.fr/cubicweb/view", null], null); -</script>''', self.ctrl_publish(req, 'validateform').decode('ascii')) - self.assertEqual(20, req.execute('Any V WHERE X amount V, X eid %(eid)s', - {'eid': seid})[0][0]) - - with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': ['X'], - '__type:X': 'Salesterm', - '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', - 'amount-subject:X': u'0', - 'described_by_test-subject:X': text_type(feid), - } - - # ensure a value that is modified in an operation on a modify - # hook works as it should (see - # https://www.cubicweb.org/ticket/2509729 ) - class MyOperation(Operation): - def precommit_event(self): - self.entity.cw_set(amount=-10) - class ValidationErrorInOpAfterHook(Hook): - __regid__ = 'valerror-op-after-hook' - __select__ = Hook.__select__ & is_instance('Salesterm') - events = ('after_add_entity',) - def __call__(self): - MyOperation(self._cw, entity=self.entity) - - with self.temporary_appobjects(ValidationErrorInOpAfterHook): - self.assertMultiLineEqual('''<script 
type="text/javascript"> - window.parent.handleFormValidationResponse('entityForm', null, null, [false, ["X", {"amount-subject": "value -10 must be >= 0"}], null], null); -</script>''', self.ctrl_publish(req, 'validateform').decode('ascii')) - - self.assertMultiLineEqual('''<script type="text/javascript"> - window.parent.handleFormValidationResponse('entityForm', null, null, [true, "http://testing.fr/cubicweb/view", null], null); -</script>''', self.ctrl_publish(req, 'validateform').decode('ascii')) - - def test_req_pending_insert(self): - """make sure req's pending insertions are taken into account""" - with self.admin_access.web_request() as req: - tmpgroup = req.create_entity('CWGroup', name=u"test") - user = req.user - req.cnx.commit() - with self.admin_access.web_request(**req_form(user)) as req: - req.session.data['pending_insert'] = set([(user.eid, 'in_group', tmpgroup.eid)]) - self.expect_redirect_handle_request(req, 'edit') - usergroups = [gname for gname, in - req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', - {'u': user.eid})] - self.assertCountEqual(usergroups, ['managers', 'test']) - self.assertEqual(get_pending_inserts(req), []) - - def test_req_pending_delete(self): - """make sure req's pending deletions are taken into account""" - with self.admin_access.web_request() as req: - user = req.user - groupeid = req.execute('INSERT CWGroup G: G name "test", U in_group G WHERE U eid %(x)s', - {'x': user.eid})[0][0] - usergroups = [gname for gname, in - req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', - {'u': user.eid})] - # just make sure everything was set correctly - self.assertCountEqual(usergroups, ['managers', 'test']) - req.cnx.commit() - # now try to delete the relation - with self.admin_access.web_request(**req_form(user)) as req: - req.session.data['pending_delete'] = set([(user.eid, 'in_group', groupeid)]) - self.expect_redirect_handle_request(req, 'edit') - usergroups = [gname for gname, in - req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', - {'u': user.eid})] - self.assertCountEqual(usergroups, ['managers']) - self.assertEqual(get_pending_deletes(req), []) - - def test_redirect_apply_button(self): - with self.admin_access.web_request() as req: - redirectrql = rql_for_eid(4012) # whatever - req.form = { - 'eid': 'A', '__maineid' : 'A', - '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'content-subject,title-subject', - 'content-subject:A': u'"13:03:43"', - 'title-subject:A': u'huuu', - '__redirectrql': redirectrql, - '__redirectvid': 'primary', - '__redirectparams': 'toto=tutu&tata=titi', - '__form_id': 'edition', - '__action_apply': '', - } - path, params = self.expect_redirect_handle_request(req, 'edit') - self.assertTrue(path.startswith('blogentry/')) - eid = path.split('/')[1] - self.assertEqual(params['vid'], 'edition') - self.assertNotEqual(int(eid), 4012) - self.assertEqual(params['__redirectrql'], redirectrql) - self.assertEqual(params['__redirectvid'], 'primary') - self.assertEqual(params['__redirectparams'], 'toto=tutu&tata=titi') - - def test_redirect_ok_button(self): - with self.admin_access.web_request() as req: - redirectrql = rql_for_eid(4012) # whatever - req.form = { - 'eid': 'A', '__maineid' : 'A', - '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'content-subject,title-subject', - 'content-subject:A': u'"13:03:43"', - 'title-subject:A': u'huuu', - '__redirectrql': redirectrql, - '__redirectvid': 'primary', - '__redirectparams': 'toto=tutu&tata=titi', - '__form_id': 'edition', - } - path, params = 
self.expect_redirect_handle_request(req, 'edit') - self.assertEqual(path, 'view') - self.assertEqual(params['rql'], redirectrql) - self.assertEqual(params['vid'], 'primary') - self.assertEqual(params['tata'], 'titi') - self.assertEqual(params['toto'], 'tutu') - - def test_redirect_delete_button(self): - with self.admin_access.web_request() as req: - eid = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid - req.form = {'eid': text_type(eid), '__type:%s'%eid: 'BlogEntry', - '__action_delete': ''} - path, params = self.expect_redirect_handle_request(req, 'edit') - self.assertEqual(path, 'blogentry') - self.assertIn('_cwmsgid', params) - eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid - req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s', - {'x': req.user.eid, 'e': eid}) - req.cnx.commit() - req.form = {'eid': text_type(eid), '__type:%s'%eid: 'EmailAddress', - '__action_delete': ''} - path, params = self.expect_redirect_handle_request(req, 'edit') - self.assertEqual(path, 'cwuser/admin') - self.assertIn('_cwmsgid', params) - eid1 = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid - eid2 = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid - req.form = {'eid': [text_type(eid1), text_type(eid2)], - '__type:%s'%eid1: 'BlogEntry', - '__type:%s'%eid2: 'EmailAddress', - '__action_delete': ''} - path, params = self.expect_redirect_handle_request(req, 'edit') - self.assertEqual(path, 'view') - self.assertIn('_cwmsgid', params) - - def test_simple_copy(self): - with self.admin_access.web_request() as req: - blog = req.create_entity('Blog', title=u'my-blog') - blogentry = req.create_entity('BlogEntry', title=u'entry1', - content=u'content1', entry_of=blog) - req.form = {'__maineid' : 'X', 'eid': 'X', - '__cloned_eid:X': blogentry.eid, '__type:X': 'BlogEntry', - '_cw_entity_fields:X': 'title-subject,content-subject', - 'title-subject:X': u'entry1-copy', - 'content-subject:X': u'content1', - } - self.expect_redirect_handle_request(req, 'edit') - blogentry2 = req.find('BlogEntry', title=u'entry1-copy').one() - self.assertEqual(blogentry2.entry_of[0].eid, blog.eid) - - def test_skip_copy_for(self): - with self.admin_access.web_request() as req: - blog = req.create_entity('Blog', title=u'my-blog') - blogentry = req.create_entity('BlogEntry', title=u'entry1', - content=u'content1', entry_of=blog) - blogentry.__class__.cw_skip_copy_for = [('entry_of', 'subject')] - try: - req.form = {'__maineid' : 'X', 'eid': 'X', - '__cloned_eid:X': blogentry.eid, '__type:X': 'BlogEntry', - '_cw_entity_fields:X': 'title-subject,content-subject', - 'title-subject:X': u'entry1-copy', - 'content-subject:X': u'content1', - } - self.expect_redirect_handle_request(req, 'edit') - blogentry2 = req.find('BlogEntry', title=u'entry1-copy').one() - # entry_of should not be copied - self.assertEqual(len(blogentry2.entry_of), 0) - finally: - blogentry.__class__.cw_skip_copy_for = [] - - def test_nonregr_eetype_etype_editing(self): - """non-regression test checking that a manager user can edit a CWEType entity - """ - with self.admin_access.web_request() as req: - groupeids = sorted(eid - for eid, in req.execute('CWGroup G ' - 'WHERE G name in ("managers", "users")')) - groups = [text_type(eid) for eid in groupeids] - cwetypeeid = req.execute('CWEType X WHERE X name "CWEType"')[0][0] - basegroups = [text_type(eid) - for eid, in req.execute('CWGroup G ' - 'WHERE X read_permission G, X eid %(x)s', - {'x': cwetypeeid})] - cwetypeeid = text_type(cwetypeeid) - 
req.form = { - 'eid': cwetypeeid, - '__type:'+cwetypeeid: 'CWEType', - '_cw_entity_fields:'+cwetypeeid: 'name-subject,final-subject,description-subject,read_permission-subject', - 'name-subject:'+cwetypeeid: u'CWEType', - 'final-subject:'+cwetypeeid: '', - 'description-subject:'+cwetypeeid: u'users group', - 'read_permission-subject:'+cwetypeeid: groups, - } - try: - self.expect_redirect_handle_request(req, 'edit') - e = req.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}).get_entity(0, 0) - self.assertEqual(e.name, 'CWEType') - self.assertEqual(sorted(g.eid for g in e.read_permission), groupeids) - finally: - # restore - req.execute('SET X read_permission Y WHERE X name "CWEType", ' - 'Y eid IN (%s), NOT X read_permission Y' % (','.join(basegroups))) - req.cnx.commit() - - def test_nonregr_strange_text_input(self): - """non-regression test checking text input containing "13:03:43" - - this seems to be postgres (tsearch?) specific - """ - with self.admin_access.web_request() as req: - req.form = { - 'eid': 'A', '__maineid' : 'A', - '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'title-subject,content-subject', - 'title-subject:A': u'"13:03:40"', - 'content-subject:A': u'"13:03:43"',} - path, _params = self.expect_redirect_handle_request(req, 'edit') - self.assertTrue(path.startswith('blogentry/')) - eid = path.split('/')[1] - e = req.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}).get_entity(0, 0) - self.assertEqual(e.title, '"13:03:40"') - self.assertEqual(e.content, '"13:03:43"') - - - def test_nonregr_multiple_empty_email_addr(self): - with self.admin_access.web_request() as req: - gueid = req.execute('CWGroup G WHERE G name "users"')[0][0] - req.form = {'eid': ['X', 'Y'], - - '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,upassword-subject,in_group-subject', - 'login-subject:X': u'adim', - 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', - 'in_group-subject:X': repr(gueid), - - '__type:Y': 'EmailAddress', - '_cw_entity_fields:Y': 'address-subject,alias-subject,use_email-object', - 'address-subject:Y': u'', - 'alias-subject:Y': u'', - 'use_email-object:Y': 'X', - } - with self.assertRaises(ValidationError) as cm: - self.ctrl_publish(req) - self.assertEqual(cm.exception.errors, {'address-subject': u'required field'}) - - def test_nonregr_copy(self): - with self.admin_access.web_request() as req: - user = req.user - req.form = {'__maineid' : 'X', 'eid': 'X', - '__cloned_eid:X': user.eid, '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,upassword-subject', - 'login-subject:X': u'toto', - 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', - } - path, _params = self.expect_redirect_handle_request(req, 'edit') - self.assertEqual(path, 'cwuser/toto') - e = req.execute('Any X WHERE X is CWUser, X login "toto"').get_entity(0, 0) - self.assertEqual(e.login, 'toto') - self.assertEqual(e.in_group[0].name, 'managers') - - - def test_nonregr_rollback_on_validation_error(self): - with self.admin_access.web_request() as req: - p = self.create_user(req, u"doe") - # do not try to skip 'primary_email' for this test - old_skips = p.__class__.skip_copy_for - p.__class__.skip_copy_for = () - try: - e = req.create_entity('EmailAddress', address=u'doe@doe.com') - req.execute('SET P use_email E, P primary_email E WHERE P eid %(p)s, E eid %(e)s', - {'p' : p.eid, 'e' : e.eid}) - req.form = {'eid': 'X', - '__cloned_eid:X': p.eid, '__type:X': 'CWUser', - '_cw_entity_fields:X': 'login-subject,surname-subject', - 
'login-subject': u'dodo', - 'surname-subject:X': u'Boom', - '__errorurl' : "whatever but required", - } - # try to emulate what really happens in the web application - # 1/ validate form => EditController.publish raises a ValidationError - # which fires a Redirect - # 2/ When re-publishing the copy form, the publisher implicitly commits - try: - self.app_handle_request(req, 'edit') - except Redirect: - req.form['rql'] = 'Any X WHERE X eid %s' % p.eid - req.form['vid'] = 'copy' - self.app_handle_request(req, 'view') - rset = req.execute('CWUser P WHERE P surname "Boom"') - self.assertEqual(len(rset), 0) - finally: - p.__class__.skip_copy_for = old_skips - - def test_regr_inlined_forms(self): - with self.admin_access.web_request() as req: - self.schema['described_by_test'].inlined = False - try: - req.data['eidmap'] = {} - req.data['pending_others'] = set() - req.data['pending_inlined'] = {} - req.form = {'eid': ['X', 'Y'], '__maineid' : 'X', - - '__type:X': 'Salesterm', - '_cw_entity_fields:X': 'described_by_test-subject', - 'described_by_test-subject:X': 'Y', - - '__type:Y': 'File', - '_cw_entity_fields:Y': 'data-subject', - 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), - } - values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2)) - for eid in req.edited_eids()) - editctrl = self.vreg['controllers'].select('edit', req) - # don't call publish to enforce select order - editctrl.errors = [] - editctrl._to_create = {} - editctrl.edit_entity(values_by_eid['X']) # #3064653 raise ValidationError - editctrl.edit_entity(values_by_eid['Y']) - finally: - self.schema['described_by_test'].inlined = False - - -class ReportBugControllerTC(CubicWebTC): - - def test_usable_by_guest(self): - with self.new_access(u'anon').web_request() as req: - self.assertRaises(NoSelectableObject, - self.vreg['controllers'].select, 'reportbug', req) - with self.new_access(u'anon').web_request(description='hop') as req: - self.vreg['controllers'].select('reportbug', req) - - -class AjaxControllerTC(CubicWebTC): - tested_controller = 'ajax' - - def ctrl(self, req=None): - req = req or self.request(url='http://whatever.fr/') - return self.vreg['controllers'].select(self.tested_controller, req) - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.pytag = cnx.create_entity('Tag', name=u'python') - self.cubicwebtag = cnx.create_entity('Tag', name=u'cubicweb') - self.john = self.create_user(cnx, u'John') - cnx.commit() - - ## tests ################################################################## - def test_simple_exec(self): - with self.admin_access.web_request(rql='CWUser P WHERE P login "John"', - pageid='123', fname='view') as req: - ctrl = self.ctrl(req) - rset = self.john.as_rset() - rset.req = req - source = ctrl.publish() - self.assertTrue(source.startswith(b'<div>')) - -# def test_json_exec(self): -# rql = 'Any T,N WHERE T is Tag, T name N' -# ctrl = self.ctrl(self.request(mode='json', rql=rql, pageid='123')) -# self.assertEqual(ctrl.publish(), -# json_dumps(self.execute(rql).rows)) - - def test_remote_add_existing_tag(self): - with self.remote_calling('tag_entity', self.john.eid, ['python']) as (_, req): - self.assertCountEqual( - [tname for tname, in req.execute('Any N WHERE T is Tag, T name N')], - ['python', 'cubicweb']) - self.assertEqual( - req.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, - [['python']]) - - def test_remote_add_new_tag(self): - with self.remote_calling('tag_entity', self.john.eid, ['javascript']) as (_, req): - 
self.assertCountEqual( - [tname for tname, in req.execute('Any N WHERE T is Tag, T name N')], - ['python', 'cubicweb', 'javascript']) - self.assertEqual( - req.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, - [['javascript']]) - - def test_maydel_perms(self): - """Check that AjaxEditRelationCtxComponent calls rdef.check with a - sufficient context""" - with self.remote_calling('tag_entity', self.john.eid, ['python']) as (_, req): - req.cnx.commit() - with self.temporary_permissions( - (self.schema['tags'].rdefs['Tag', 'CWUser'], - {'delete': (RRQLExpression('S owned_by U'), )}, )): - with self.admin_access.web_request(rql='CWUser P WHERE P login "John"', - pageid='123', fname='view') as req: - ctrl = self.ctrl(req) - rset = self.john.as_rset() - rset.req = req - source = ctrl.publish() - # maydel jscall - self.assertIn(b'ajaxBoxRemoveLinkedEntity', source) - - def test_pending_insertion(self): - with self.remote_calling('add_pending_inserts', [['12', 'tags', '13']]) as (_, req): - deletes = get_pending_deletes(req) - self.assertEqual(deletes, []) - inserts = get_pending_inserts(req) - self.assertEqual(inserts, ['12:tags:13']) - with self.remote_calling('add_pending_inserts', [['12', 'tags', '14']]) as (_, req): - deletes = get_pending_deletes(req) - self.assertEqual(deletes, []) - inserts = get_pending_inserts(req) - self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14']) - inserts = get_pending_inserts(req, 12) - self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14']) - inserts = get_pending_inserts(req, 13) - self.assertEqual(inserts, ['12:tags:13']) - inserts = get_pending_inserts(req, 14) - self.assertEqual(inserts, ['12:tags:14']) - req.remove_pending_operations() - - def test_pending_deletion(self): - with self.remote_calling('add_pending_delete', ['12', 'tags', '13']) as (_, req): - inserts = get_pending_inserts(req) - self.assertEqual(inserts, []) - deletes = get_pending_deletes(req) - self.assertEqual(deletes, ['12:tags:13']) - with self.remote_calling('add_pending_delete', ['12', 'tags', '14']) as (_, req): - inserts = get_pending_inserts(req) - self.assertEqual(inserts, []) - deletes = get_pending_deletes(req) - self.assertCountEqual(deletes, ['12:tags:13', '12:tags:14']) - deletes = get_pending_deletes(req, 12) - self.assertCountEqual(deletes, ['12:tags:13', '12:tags:14']) - deletes = get_pending_deletes(req, 13) - self.assertEqual(deletes, ['12:tags:13']) - deletes = get_pending_deletes(req, 14) - self.assertEqual(deletes, ['12:tags:14']) - req.remove_pending_operations() - - def test_remove_pending_operations(self): - with self.remote_calling('add_pending_delete', ['12', 'tags', '13']): - pass - with self.remote_calling('add_pending_inserts', [['12', 'tags', '14']]) as (_, req): - inserts = get_pending_inserts(req) - self.assertEqual(inserts, ['12:tags:14']) - deletes = get_pending_deletes(req) - self.assertEqual(deletes, ['12:tags:13']) - req.remove_pending_operations() - self.assertEqual(get_pending_deletes(req), []) - self.assertEqual(get_pending_inserts(req), []) - - def test_add_inserts(self): - with self.remote_calling('add_pending_inserts', - [('12', 'tags', '13'), ('12', 'tags', '14')]) as (_, req): - inserts = get_pending_inserts(req) - self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14']) - req.remove_pending_operations() - - - # silly tests - def test_external_resource(self): - with self.remote_calling('external_resource', 'RSS_LOGO') as (res, _): - self.assertEqual(json_dumps(self.config.uiprops['RSS_LOGO']).encode('ascii'), - res) - 
- def test_i18n(self): - with self.remote_calling('i18n', ['bimboom']) as (res, _): - self.assertEqual(json_dumps(['bimboom']).encode('ascii'), res) - - def test_format_date(self): - with self.remote_calling('format_date', '2007-01-01 12:00:00') as (res, _): - self.assertEqual(json_dumps('2007/01/01').encode('ascii'), res) - - def test_ajaxfunc_noparameter(self): - @ajaxfunc - def foo(self, x, y): - return 'hello' - self.assertEqual(foo(object, 1, 2), 'hello') - appobject = foo.__appobject__ - self.assertTrue(issubclass(appobject, AjaxFunction)) - self.assertEqual(appobject.__regid__, 'foo') - self.assertEqual(appobject.check_pageid, False) - self.assertEqual(appobject.output_type, None) - with self.admin_access.web_request() as req: - f = appobject(req) - self.assertEqual(f(12, 13), 'hello') - - def test_ajaxfunc_checkpageid(self): - @ajaxfunc(check_pageid=True) - def foo(self, x, y): - return 'hello' - self.assertEqual(foo(object, 1, 2), 'hello') - appobject = foo.__appobject__ - self.assertTrue(issubclass(appobject, AjaxFunction)) - self.assertEqual(appobject.__regid__, 'foo') - self.assertEqual(appobject.check_pageid, True) - self.assertEqual(appobject.output_type, None) - # no pageid - with self.admin_access.web_request() as req: - f = appobject(req) - self.assertRaises(RemoteCallFailed, f, 12, 13) - - def test_ajaxfunc_json(self): - @ajaxfunc(output_type='json') - def foo(self, x, y): - return x + y - self.assertEqual(foo(object, 1, 2), 3) - appobject = foo.__appobject__ - self.assertTrue(issubclass(appobject, AjaxFunction)) - self.assertEqual(appobject.__regid__, 'foo') - self.assertEqual(appobject.check_pageid, False) - self.assertEqual(appobject.output_type, 'json') - # no pageid - with self.admin_access.web_request() as req: - f = appobject(req) - self.assertEqual(f(12, 13), '25') - - -class JSonControllerTC(AjaxControllerTC): - # NOTE: this class performs the same tests as AjaxController but with - # deprecated 'json' controller (i.e. 
check backward compatibility) - tested_controller = 'json' - - def setUp(self): - super(JSonControllerTC, self).setUp() - self.exposed_remote_funcs = [fname for fname in dir(JSonController) - if fname.startswith('js_')] - - def tearDown(self): - super(JSonControllerTC, self).tearDown() - for funcname in dir(JSonController): - # remove functions added dynamically during tests - if funcname.startswith('js_') and funcname not in self.exposed_remote_funcs: - delattr(JSonController, funcname) - - def test_monkeypatch_jsoncontroller(self): - with self.assertRaises(RemoteCallFailed): - with self.remote_calling('foo'): - pass - @monkeypatch(JSonController) - def js_foo(self): - return u'hello' - with self.remote_calling('foo') as (res, _): - self.assertEqual(res, b'hello') - - def test_monkeypatch_jsoncontroller_xhtmlize(self): - with self.assertRaises(RemoteCallFailed): - with self.remote_calling('foo'): - pass - @monkeypatch(JSonController) - @xhtmlize - def js_foo(self): - return u'hello' - with self.remote_calling('foo') as (res, _): - self.assertEqual(b'<div>hello</div>', res) - - def test_monkeypatch_jsoncontroller_jsonize(self): - with self.assertRaises(RemoteCallFailed): - with self.remote_calling('foo'): - pass - @monkeypatch(JSonController) - @jsonize - def js_foo(self): - return 12 - with self.remote_calling('foo') as (res, _): - self.assertEqual(res, b'12') - - def test_monkeypatch_jsoncontroller_stdfunc(self): - @monkeypatch(JSonController) - @jsonize - def js_reledit_form(self): - return 12 - with self.remote_calling('reledit_form') as (res, _): - self.assertEqual(res, b'12') - - -class UndoControllerTC(CubicWebTC): - - def setUp(self): - class Connection(OldConnection): - """Force undo feature to be turned on in all case""" - undo_actions = property(lambda tx: True, lambda x, y:None) - cubicweb.server.session.Connection = Connection - super(UndoControllerTC, self).setUp() - - def tearDown(self): - super(UndoControllerTC, self).tearDown() - cubicweb.server.session.Connection = OldConnection - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - self.toto = self.create_user(cnx, u'toto', - password=u'toto', - groups=('users',), - commit=False) - self.txuuid_toto = cnx.commit() - self.toto_email = cnx.create_entity('EmailAddress', - address=u'toto@logilab.org', - reverse_use_email=self.toto) - self.txuuid_toto_email = cnx.commit() - - def test_no_such_transaction(self): - with self.admin_access.web_request() as req: - txuuid = u"12345acbd" - req.form['txuuid'] = txuuid - controller = self.vreg['controllers'].select('undo', req) - with self.assertRaises(tx.NoSuchTransaction) as cm: - result = controller.publish(rset=None) - self.assertEqual(cm.exception.txuuid, txuuid) - - def assertURLPath(self, url, expected_path, expected_params=None): - """ This assert that the path part of `url` matches expected path - - TODO : implement assertion on the expected_params too - """ - with self.admin_access.web_request() as req: - scheme, netloc, path, query, fragment = urlsplit(url) - query_dict = parse_qs(query) - expected_url = urljoin(req.base_url(), expected_path) - self.assertEqual( urlunsplit((scheme, netloc, path, None, None)), expected_url) - - def test_redirect_redirectpath(self): - "Check that the potential __redirectpath is honored" - with self.admin_access.web_request() as req: - txuuid = self.txuuid_toto_email - req.form['txuuid'] = txuuid - rpath = "toto" - req.form['__redirectpath'] = rpath - controller = self.vreg['controllers'].select('undo', req) - with 
self.assertRaises(Redirect) as cm: - result = controller.publish(rset=None) - self.assertURLPath(cm.exception.location, rpath) - - -class LoginControllerTC(CubicWebTC): - - def test_login_with_dest(self): - with self.admin_access.web_request() as req: - req.form = {'postlogin_path': 'elephants/babar'} - with self.assertRaises(Redirect) as cm: - self.ctrl_publish(req, ctrl='login') - self.assertEqual(req.build_url('elephants/babar'), cm.exception.location) - - def test_login_no_dest(self): - with self.admin_access.web_request() as req: - with self.assertRaises(Redirect) as cm: - self.ctrl_publish(req, ctrl='login') - self.assertEqual(req.base_url(), cm.exception.location) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_basetemplates.py --- a/web/test/unittest_views_basetemplates.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.htmlparser import XMLValidator - - -class LogFormTemplateTC(CubicWebTC): - - def _login_labels(self): - valid = self.content_type_validators.get('text/html', XMLValidator)() - req = self.requestcls(self.vreg, url='login') - page = valid.parse_string(self.vreg['views'].main_template(req, 'login')) - return page.find_tag('label') - - def test_label(self): - self.set_option('allow-email-login', 'yes') - self.assertEqual(self._login_labels(), ['login or email', 'password']) - self.set_option('allow-email-login', 'no') - self.assertEqual(self._login_labels(), ['login', 'password']) - - -class MainNoTopTemplateTC(CubicWebTC): - - def test_valid_xhtml(self): - with self.admin_access.web_request() as req: - self.view('index', template='main-no-top', req=req) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_baseviews.py --- a/web/test/unittest_views_baseviews.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,160 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. - -from logilab.common.testlib import unittest_main -from logilab.mtconverter import html_unescape - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.utils import json -from cubicweb.view import StartupView, TRANSITIONAL_DOCTYPE -from cubicweb.web.views import vid_from_rset - -def loadjson(value): - return json.loads(html_unescape(value)) - -class VidFromRsetTC(CubicWebTC): - - def test_no_rset(self): - with self.admin_access.web_request() as req: - self.assertEqual(vid_from_rset(req, None, self.schema), 'index') - - def test_no_entity(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X login "blabla"') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'noresult') - - def test_one_entity(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X login "admin"') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'primary') - rset = req.execute('Any X, L WHERE X login "admin", X login L') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'primary') - req.search_state = ('pasnormal',) - rset = req.execute('Any X WHERE X login "admin"') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'outofcontext-search') - - def test_one_entity_eid(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X eid 1') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'primary') - - def test_more_than_one_entity_same_type(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X is CWUser') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'sameetypelist') - rset = req.execute('Any X, L WHERE X login L') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'sameetypelist') - - def test_more_than_one_entity_diff_type(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X is IN (CWUser, CWGroup)') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'list') - - def test_more_than_one_entity_by_row(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X, G WHERE X in_group G') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') - - def test_more_than_one_entity_by_row_2(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X, GN WHERE X in_group G, G name GN') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') - - def test_aggregat(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X, COUNT(T) GROUPBY X WHERE X is T') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') - rset = req.execute('Any MAX(X) WHERE X is CWUser') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') - - def test_subquery(self): - with self.admin_access.web_request() as req: - rset = req.execute( -'DISTINCT Any X,N ORDERBY N ' -'WITH X,N BEING (' -' (DISTINCT Any P,N WHERE P is CWUser, P login N)' -' UNION' -' (DISTINCT Any W,N WHERE W is CWGroup, W name N))') - self.assertEqual(vid_from_rset(req, rset, self.schema), 'table') - - -class TableViewTC(CubicWebTC): - - def _prepare_entity(self, req): - e = req.create_entity("State", name=u'<toto>', description=u'loo"ong blabla') - rset = req.execute('Any X, D, CD, NOW - CD WHERE X is State, ' - 'X description D, X 
creation_date CD, X eid %(x)s', - {'x': e.eid}) - view = self.vreg['views'].select('table', req, rset=rset) - return e, rset, view - - def test_sortvalue(self): - with self.admin_access.web_request() as req: - e, _, view = self._prepare_entity(req) - colrenderers = view.build_column_renderers()[:3] - self.assertListEqual([renderer.sortvalue(0) for renderer in colrenderers], - [u'<toto>', u'loo"ong blabla', e.creation_date]) - - -class HTMLStreamTests(CubicWebTC): - - def test_set_doctype_reset_xmldecl(self): - """ - tests `cubicweb.web.request.CubicWebRequestBase.set_doctype` - with xmldecl reset - """ - class MyView(StartupView): - __regid__ = 'my-view' - def call(self): - self._cw.set_doctype('<!DOCTYPE html>') - - with self.admin_access.web_request() as req: - with self.temporary_appobjects(MyView): - html_source = self.view('my-view', req=req).source - source_lines = [line.strip() - for line in html_source.splitlines(False) - if line.strip()] - self.assertListEqual([b'<!DOCTYPE html>', - b'<html xmlns:cubicweb="http://www.cubicweb.org" lang="en">'], - source_lines[:2]) - - def test_set_doctype_no_reset_xmldecl(self): - """ - tests `cubicweb.web.request.CubicWebRequestBase.set_doctype` - with no xmldecl reset - """ - html_doctype = TRANSITIONAL_DOCTYPE.strip() - class MyView(StartupView): - __regid__ = 'my-view' - def call(self): - self._cw.set_doctype(html_doctype) - self._cw.main_stream.set_htmlattrs([('lang', 'cz')]) - - with self.admin_access.web_request() as req: - with self.temporary_appobjects(MyView): - html_source = self.view('my-view', req=req).source - source_lines = [line.strip() - for line in html_source.splitlines(False) - if line.strip()] - self.assertListEqual([html_doctype.encode('ascii'), - b'<html xmlns:cubicweb="http://www.cubicweb.org" lang="cz">', - b'<head>'], - source_lines[:3]) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_csv.py --- a/web/test/unittest_views_csv.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
- -from cubicweb.devtools.testlib import CubicWebTC - - -class CSVExportViewsTC(CubicWebTC): - - def test_csvexport(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' - 'WHERE X in_group G, G name GN') - data = self.view('csvexport', rset, req=req) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), - ['text/comma-separated-values;charset=UTF-8']) - expected_data = "String;COUNT(CWUser)\nguests;1\nmanagers;1" - self.assertMultiLineEqual(expected_data, data.decode('utf-8')) - - def test_csvexport_on_empty_rset(self): - """Should return the CSV header. - """ - with self.admin_access.web_request() as req: - rset = req.execute(u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' - 'WHERE X in_group G, G name GN, X login "Miles"') - data = self.view('csvexport', rset, req=req) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), - ['text/comma-separated-values;charset=UTF-8']) - expected_data = "String;COUNT(CWUser)" - self.assertMultiLineEqual(expected_data, data.decode('utf-8')) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_editforms.py --- a/web/test/unittest_views_editforms.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,257 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
-from logilab.common.testlib import unittest_main, mock_object - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.views import uicfg -from cubicweb.web.formwidgets import AutoCompletionWidget -from cubicweb.schema import RRQLExpression - - -AFFK = uicfg.autoform_field_kwargs -AFS = uicfg.autoform_section - -def rbc(entity, formtype, section): - if section in ('attributes', 'metadata', 'hidden'): - permission = 'update' - else: - permission = 'add' - return [(rschema.type, x) - for rschema, tschemas, x in AFS.relations_by_section(entity, - formtype, - section, - permission)] - -class AutomaticEntityFormTC(CubicWebTC): - - def test_custom_widget(self): - with self.admin_access.web_request() as req: - AFFK.tag_subject_of(('CWUser', 'login', '*'), - {'widget': AutoCompletionWidget(autocomplete_initfunc='get_logins')}) - form = self.vreg['forms'].select('edition', req, entity=req.user) - field = form.field_by_name('login', 'subject') - self.assertIsInstance(field.widget, AutoCompletionWidget) - AFFK.del_rtag('CWUser', 'login', '*', 'subject') - - - def test_cwuser_relations_by_category(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('CWUser')(req) - # see custom configuration in views.cwuser - self.assertEqual(rbc(e, 'main', 'attributes'), - [('login', 'subject'), - ('upassword', 'subject'), - ('firstname', 'subject'), - ('surname', 'subject'), - ('in_group', 'subject'), - ]) - self.assertEqual(rbc(e, 'muledit', 'attributes'), - [('login', 'subject'), - ('upassword', 'subject'), - ('in_group', 'subject'), - ]) - self.assertCountEqual(rbc(e, 'main', 'metadata'), - [('last_login_time', 'subject'), - ('cw_source', 'subject'), - ('creation_date', 'subject'), - ('modification_date', 'subject'), - ('created_by', 'subject'), - ('owned_by', 'subject'), - ('bookmarked_by', 'object'), - ]) - # XXX skip 'tags' relation here and in the hidden category because - # of some test interdependancy when pytest is launched on whole cw - # (appears here while expected in hidden - self.assertCountEqual([x for x in rbc(e, 'main', 'relations') - if x != ('tags', 'object')], - [('connait', 'subject'), - ('custom_workflow', 'subject'), - ('primary_email', 'subject'), - ('checked_by', 'object'), - ]) - self.assertListEqual(rbc(e, 'main', 'inlined'), - [('use_email', 'subject'), - ]) - # owned_by is defined both as subject and object relations on CWUser - self.assertListEqual(sorted(x for x in rbc(e, 'main', 'hidden') - if x != ('tags', 'object')), - sorted([('for_user', 'object'), - ('created_by', 'object'), - ('wf_info_for', 'object'), - ('owned_by', 'object'), - ])) - - def test_inlined_view(self): - self.assertIn('main_inlined', - AFS.etype_get('CWUser', 'use_email', 'subject', 'EmailAddress')) - self.assertNotIn('main_inlined', - AFS.etype_get('CWUser', 'primary_email', 'subject', 'EmailAddress')) - self.assertIn('main_relations', - AFS.etype_get('CWUser', 'primary_email', 'subject', 'EmailAddress')) - - def test_personne_relations_by_category(self): - with self.admin_access.web_request() as req: - e = self.vreg['etypes'].etype_class('Personne')(req) - self.assertListEqual(rbc(e, 'main', 'attributes'), - [('nom', 'subject'), - ('prenom', 'subject'), - ('sexe', 'subject'), - ('promo', 'subject'), - ('titre', 'subject'), - ('ass', 'subject'), - ('web', 'subject'), - ('tel', 'subject'), - ('fax', 'subject'), - ('datenaiss', 'subject'), - ('test', 'subject'), - ('description', 'subject'), - ('salary', 'subject'), - ]) - self.assertListEqual(rbc(e, 'muledit', 
'attributes'), - [('nom', 'subject'), - ]) - self.assertCountEqual(rbc(e, 'main', 'metadata'), - [('cw_source', 'subject'), - ('creation_date', 'subject'), - ('modification_date', 'subject'), - ('created_by', 'subject'), - ('owned_by', 'subject'), - ]) - self.assertCountEqual(rbc(e, 'main', 'relations'), - [('travaille', 'subject'), - ('manager', 'object'), - ('connait', 'object'), - ]) - self.assertListEqual(rbc(e, 'main', 'hidden'), - []) - - def test_edition_form(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X LIMIT 1') - form = self.vreg['forms'].select('edition', req, rset=rset, row=0, col=0) - # should be also selectable by specifying entity - self.vreg['forms'].select('edition', req, entity=rset.get_entity(0, 0)) - self.assertFalse(any(f for f in form.fields if f is None)) - - def test_edition_form_with_action(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X LIMIT 1') - form = self.vreg['forms'].select('edition', req, rset=rset, row=0, - col=0, action='my_custom_action') - self.assertEqual(form.form_action(), 'my_custom_action') - - def test_attribute_add_permissions(self): - # https://www.cubicweb.org/ticket/4342844 - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, u'toto') - cnx.commit() - with self.new_access(u'toto').web_request() as req: - e = self.vreg['etypes'].etype_class('Personne')(req) - cform = self.vreg['forms'].select('edition', req, entity=e) - self.assertIn('sexe', - [rschema.type - for rschema, _ in cform.editable_attributes()]) - with self.new_access(u'toto').repo_cnx() as cnx: - person_eid = cnx.create_entity('Personne', nom=u'Robert').eid - cnx.commit() - person = req.entity_from_eid(person_eid) - mform = self.vreg['forms'].select('edition', req, entity=person) - self.assertNotIn('sexe', - [rschema.type - for rschema, _ in mform.editable_attributes()]) - - def test_inlined_relations(self): - with self.admin_access.web_request() as req: - with self.temporary_permissions(EmailAddress={'add': ()}): - autoform = self.vreg['forms'].select('edition', req, entity=req.user) - self.assertEqual(list(autoform.inlined_form_views()), []) - - def test_check_inlined_rdef_permissions(self): - # try to check permissions when creating an entity ('user' below is a - # fresh entity without an eid) - with self.admin_access.web_request() as req: - ttype = 'EmailAddress' - rschema = self.schema['use_email'] - rdef = rschema.rdefs[('CWUser', ttype)] - tschema = self.schema[ttype] - role = 'subject' - with self.temporary_permissions((rdef, {'add': ()})): - user = self.vreg['etypes'].etype_class('CWUser')(req) - autoform = self.vreg['forms'].select('edition', req, entity=user) - self.assertFalse(autoform.check_inlined_rdef_permissions(rschema, role, - tschema, ttype)) - # we actually don't care about the actual expression, - # may_have_permission only checks the presence of such expressions - expr = RRQLExpression('S use_email O') - with self.temporary_permissions((rdef, {'add': (expr,)})): - user = self.vreg['etypes'].etype_class('CWUser')(req) - autoform = self.vreg['forms'].select('edition', req, entity=user) - self.assertTrue(autoform.check_inlined_rdef_permissions(rschema, role, - tschema, ttype)) - - -class FormViewsTC(CubicWebTC): - - def test_delete_conf_formview(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWGroup X') - self.view('deleteconf', rset, template=None, req=req).source - - def test_automatic_edition_formview(self): - with 
self.admin_access.web_request() as req: - rset = req.execute('CWUser X') - self.view('edition', rset, row=0, template=None, req=req).source - - def test_automatic_edition_copyformview(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X') - self.view('copy', rset, row=0, template=None, req=req).source - - def test_automatic_creation_formview(self): - with self.admin_access.web_request() as req: - self.view('creation', None, etype='CWUser', template=None, req=req).source - - def test_automatic_muledit_formview(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X') - self.view('muledit', rset, template=None, req=req).source - - def test_automatic_reledit_formview(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X') - self.view('reledit', rset, row=0, rtype='login', template=None, req=req).source - - def test_automatic_inline_edit_formview(self): - with self.admin_access.web_request() as req: - geid = req.execute('CWGroup X LIMIT 1')[0][0] - rset = req.execute('CWUser X LIMIT 1') - self.view('inline-edition', rset, row=0, col=0, rtype='in_group', - peid=geid, role='object', i18nctx='', pform=MOCKPFORM, - template=None, req=req).source - - def test_automatic_inline_creation_formview(self): - with self.admin_access.web_request() as req: - geid = req.execute('CWGroup X LIMIT 1')[0][0] - self.view('inline-creation', None, etype='CWUser', rtype='in_group', - peid=geid, petype='CWGroup', i18nctx='', role='object', pform=MOCKPFORM, - template=None, req=req) - -MOCKPFORM = mock_object(form_previous_values={}, form_valerror=None) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_errorform.py --- a/web/test/unittest_views_errorform.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
-import re -import sys - -from logilab.common.testlib import unittest_main - -from cubicweb import Forbidden -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.view import StartupView -from cubicweb.web import Redirect - - -class ErrorViewTC(CubicWebTC): - def setUp(self): - super(ErrorViewTC, self).setUp() - self.vreg.config['submit-mail'] = "test@logilab.fr" - self.vreg.config['print-traceback'] = "yes" - - def test_error_generation(self): - """ - tests - """ - - class MyWrongView(StartupView): - __regid__ = 'my-view' - def call(self): - raise ValueError('This is wrong') - - with self.temporary_appobjects(MyWrongView): - with self.admin_access.web_request() as req: - try: - self.view('my-view', req=req) - except Exception as e: - req.data['excinfo'] = sys.exc_info() - req.data['ex'] = e - html = self.view('error', req=req) - self.assertTrue(re.search(b'^<input name="__signature" type="hidden" ' - b'value="[0-9a-f]{32}" />$', - html.source, re.M)) - - - def test_error_submit_nosig(self): - """ - tests that the reportbug controller refuses submission if - there is not content signature - """ - with self.admin_access.web_request() as req: - req.form = {'description': u'toto'} - with self.assertRaises(Forbidden) as cm: - self.ctrl_publish(req, 'reportbug') - - def test_error_submit_wrongsig(self): - """ - tests that the reportbug controller refuses submission if the - content signature is invalid - """ - with self.admin_access.web_request() as req: - req.form = {'__signature': 'X', - 'description': u'toto'} - with self.assertRaises(Forbidden) as cm: - self.ctrl_publish(req, 'reportbug') - - def test_error_submit_ok(self): - """ - tests that the reportbug controller accept the email submission if the - content signature is valid - """ - with self.admin_access.web_request() as req: - sign = self.vreg.config.sign_text('toto') - req.form = {'__signature': sign, - 'description': u'toto'} - with self.assertRaises(Redirect) as cm: - self.ctrl_publish(req, 'reportbug') - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_forms.py --- a/web/test/unittest_views_forms.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
- -from logilab.common import tempattr, attrdict - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.views.autoform import InlinedFormField - -class InlinedFormTC(CubicWebTC): - - def test_linked_to(self): - with self.admin_access.web_request() as req: - formview = req.vreg['views'].select( - 'inline-creation', req, - etype='File', rtype='described_by_test', role='subject', - peid=123, - petype='Salesterm') - self.assertEqual({('described_by_test', 'object'): [123]}, - formview.form.linked_to) - - def test_linked_to_parent_being_created(self): - with self.admin_access.web_request() as req: - formview = req.vreg['views'].select( - 'inline-creation', req, - etype='File', rtype='described_by_test', role='subject', - peid='A', - petype='Salesterm') - self.assertEqual(formview.form.linked_to, {}) - - def test_remove_js_depending_on_cardinality(self): - with self.admin_access.web_request() as req: - formview = req.vreg['views'].select( - 'inline-creation', req, - etype='File', rtype='described_by_test', role='subject', - peid='A', - petype='Salesterm') - # cardinality is 1, can't remove - self.assertIsNone(formview._get_removejs()) - rdef = self.schema['Salesterm'].rdef('described_by_test') - with tempattr(rdef, 'cardinality', '?*'): - self.assertTrue(formview._get_removejs()) - with tempattr(rdef, 'cardinality', '+*'): - # formview has no parent info (pform). This is what happens - # when an inline form is requested through AJAX. - self.assertTrue(formview._get_removejs()) - fakeview = attrdict(dict(rtype='described_by_test', role='subject')) - # formview is first, can't be removed - formview.pform = attrdict(fields=[InlinedFormField(view=formview), - InlinedFormField(view=fakeview)]) - self.assertIsNone(formview._get_removejs()) - # formview isn't first, can be removed - formview.pform = attrdict(fields=[InlinedFormField(view=fakeview), - InlinedFormField(view=formview)]) - self.assertTrue(formview._get_removejs()) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_json.py --- a/web/test/unittest_views_json.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
-from six import binary_type - -from cubicweb.devtools.testlib import CubicWebTC - - -class JsonViewsTC(CubicWebTC): - anonymize = True - res_jsonp_data = b'[["guests", 1]]' - - def setUp(self): - super(JsonViewsTC, self).setUp() - self.config.global_set_option('anonymize-jsonp-queries', self.anonymize) - - def test_json_rsetexport(self): - with self.admin_access.web_request() as req: - rset = req.execute( - 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN') - data = self.view('jsonexport', rset, req=req) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) - self.assertListEqual(data, [["guests", 1], ["managers", 1]]) - - def test_json_rsetexport_empty_rset(self): - with self.admin_access.web_request() as req: - rset = req.execute(u'Any X WHERE X is CWUser, X login "foobarbaz"') - data = self.view('jsonexport', rset, req=req) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) - self.assertListEqual(data, []) - - def test_json_rsetexport_with_jsonp(self): - with self.admin_access.web_request() as req: - req.form.update({'callback': u'foo', - 'rql': u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' - 'WHERE X in_group G, G name GN'}) - data = self.ctrl_publish(req, ctrl='jsonp') - self.assertIsInstance(data, binary_type) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), - ['application/javascript']) - # because jsonp anonymizes data, only 'guests' group should be found - self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')') - - def test_json_rsetexport_with_jsonp_and_bad_vid(self): - with self.admin_access.web_request() as req: - req.form.update({'callback': 'foo', - # "vid" parameter should be ignored by jsonp controller - 'vid': 'table', - 'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' - 'WHERE X in_group G, G name GN'}) - data = self.ctrl_publish(req, ctrl='jsonp') - self.assertEqual(req.headers_out.getRawHeaders('content-type'), - ['application/javascript']) - # result should be plain json, not the table view - self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')') - - def test_json_ersetexport(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any G ORDERBY GN WHERE G is CWGroup, G name GN') - data = self.view('ejsonexport', rset, req=req) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) - self.assertEqual(data[0]['name'], 'guests') - self.assertEqual(data[1]['name'], 'managers') - - rset = req.execute(u'Any G WHERE G is CWGroup, G name "foo"') - data = self.view('ejsonexport', rset, req=req) - self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json']) - self.assertEqual(data, []) - - -class NotAnonymousJsonViewsTC(JsonViewsTC): - anonymize = False - res_jsonp_data = b'[["guests", 1], ["managers", 1]]' - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_navigation.py --- a/web/test/unittest_views_navigation.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,123 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""cubicweb.web.views.navigation unit tests""" - -from logilab.common.testlib import unittest_main - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.views.navigation import (PageNavigation, SortedNavigation, - PageNavigationSelect) -from cubicweb.web.views.ibreadcrumbs import BreadCrumbEntityVComponent - -BreadCrumbEntityVComponent.visible = True - -class NavigationTC(CubicWebTC): - - def test_navigation_selection_whatever(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X,N WHERE X name N') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - self.assertIsInstance(navcomp, PageNavigation) - req.set_search_state('W:X:Y:Z') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - self.assertIsInstance(navcomp, PageNavigation) - req.set_search_state('normal') - - def test_navigation_selection_ordered(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X,N ORDERBY N LIMIT 40 WHERE X name N') - navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) - self.assertIsInstance(navcomp, SortedNavigation) - req.set_search_state('W:X:Y:Z') - navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) - self.assertIsInstance(navcomp, SortedNavigation) - req.set_search_state('normal') - navcomp.render() - - def test_navigation_selection_large_rset(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X,N LIMIT 120 WHERE X name N') - navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) - self.assertIsInstance(navcomp, PageNavigationSelect) - rset = req.execute('Any X,N ORDERBY N LIMIT 120 WHERE X name N') - navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20) - self.assertIsInstance(navcomp, PageNavigationSelect) - - def test_navigation_selection_not_enough_1(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X,N LIMIT 10 WHERE X name N') - navcomp = self.vreg['components'].select_or_none('navigation', req, rset=rset) - self.assertEqual(navcomp, None) - req.set_search_state('W:X:Y:Z') - navcomp = self.vreg['components'].select_or_none('navigation', req, rset=rset) - self.assertEqual(navcomp, None) - req.set_search_state('normal') - - def test_navigation_selection_not_enough_2(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any N, COUNT(RDEF) GROUPBY N ORDERBY N ' - 'WHERE RDEF relation_type RT, RT name N') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - self.assertIsInstance(navcomp, SortedNavigation) - req.set_search_state('W:X:Y:Z') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - self.assertIsInstance(navcomp, SortedNavigation) - - def test_navigation_selection_wrong_boundary(self): - 
with self.admin_access.web_request() as req: - rset = req.execute('Any X,N WHERE X name N') - req.form['__start'] = 1000000 - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - html = navcomp.render() - - def test_sorted_navigation_1(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any RDEF ORDERBY RT WHERE RDEF relation_type RT') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - html = navcomp.render() - - def test_sorted_navigation_2(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any RDEF ORDERBY RDEF WHERE RDEF relation_type RT') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - html = navcomp.render() - - def test_sorted_navigation_3(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWAttribute RDEF ORDERBY RDEF') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - html = navcomp.render() - - def test_sorted_navigation_4(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any RDEF ORDERBY N ' - 'WHERE RDEF relation_type RT, RT name N') - navcomp = self.vreg['components'].select('navigation', req, rset=rset) - html = navcomp.render() - - def test_sorted_navigation_5(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any N, COUNT(RDEF) GROUPBY N ORDERBY N ' - 'WHERE RDEF relation_type RT, RT name N') - navcomp = self.vreg['components'].select('navigation', rset.req, rset=rset) - html = navcomp.render() - - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_pyviews.py --- a/web/test/unittest_views_pyviews.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,45 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
-from logilab.common.testlib import unittest_main -from cubicweb.devtools.testlib import CubicWebTC - -class PyViewsTC(CubicWebTC): - - def test_pyvaltable(self): - with self.admin_access.web_request() as req: - view = self.vreg['views'].select('pyvaltable', req, - pyvalue=[[1, 'a'], [2, 'b']]) - content = view.render(pyvalue=[[1, 'a'], [2, 'b']], - headers=['num', 'char']) - self.assertEqual(content.strip(), '''<table class="listing"><tbody>\ -<tr class="even" onmouseout="$(this).removeClass("highlighted")" onmouseover="$(this).addClass("highlighted");"><td >1</td><td >a</td></tr> -<tr class="odd" onmouseout="$(this).removeClass("highlighted")" onmouseover="$(this).addClass("highlighted");"><td >2</td><td >b</td></tr> -</tbody></table>''') - - def test_pyvallist(self): - with self.admin_access.web_request() as req: - view = self.vreg['views'].select('pyvallist', req, - pyvalue=[1, 'a']) - content = view.render(pyvalue=[1, 'a']) - self.assertEqual(content.strip(), '''<ul> -<li>1</li> -<li>a</li> -</ul>''') - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_searchrestriction.py --- a/web/test/unittest_views_searchrestriction.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,102 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web import facet - - -class InsertAttrRelationTC(CubicWebTC): - - def parse(self, query): - rqlst = self.vreg.parse(self.session, query) - select = rqlst.children[0] - return rqlst - - def _generate(self, rqlst, rel, role, attr): - select = rqlst.children[0] - filtered_variable = facet.get_filtered_variable(select) - facet.prepare_select(select, filtered_variable) - facet.insert_attr_select_relation(select, filtered_variable, - rel, role, attr) - return rqlst.as_string() - - @property - def select(self): - return self.parse(u'Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD ' - 'GROUPBY B,CD,S,V,U,VN,P,BMD ' - 'WHERE B in_state S, B creation_date CD, ' - 'B modification_date BMD, T? tags B, T name TN, ' - 'V? 
bookmarked_by B, V title VN, B created_by U?, ' - 'B in_group P, P name "managers"') - - def test_1(self): - self.assertEqual(self._generate(self.select, 'in_state', 'subject', 'name'), - 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' - 'B in_state A, B is CWUser, A name C') - - def test_2(self): - self.assertEqual(self._generate(self.select, 'tags', 'object', 'name'), - 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' - 'A tags B, B is CWUser, A name C') - - def test_3(self): - self.assertEqual(self._generate(self.select, 'created_by', 'subject', 'login'), - 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' - 'B created_by A, B is CWUser, A login C') - - def test_4(self): - self.assertEqual(self._generate(self.parse(u'Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'), - "DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B") - - def test_5(self): - self.assertEqual(self._generate(self.parse(u'Any X,L WHERE X is CWUser, X login L'), 'created_by', 'subject', 'login'), - "DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B") - - def test_nonregr1(self): - select = self.parse(u'Any T,V WHERE T bookmarked_by V?, ' - 'V in_state VS, VS name "published", T created_by U') - self.assertEqual(self._generate(select, 'created_by', 'subject', 'login'), - "DISTINCT Any A,B ORDERBY B WHERE T created_by U, " - "T created_by A, T is Bookmark, A login B") - - def test_nonregr2(self): - #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N' - select = self.parse(u'DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is CWUser,' - 'NOT V in_state VS, VS name "published", V login L') - rschema = self.schema['connait'] - for rdefs in rschema.rdefs.values(): - rdefs.cardinality = '++' - try: - self.assertEqual(self._generate(select, 'in_state', 'subject', 'name'), - 'DISTINCT Any A,B ORDERBY B WHERE V is CWUser, ' - 'NOT EXISTS(V in_state VS), VS name "published", ' - 'V in_state A, A name B') - finally: - for rdefs in rschema.rdefs.values(): - rdefs.cardinality = '**' - - def test_nonregr3(self): - #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N' - select = self.parse(u'DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is CWUser, Y is Bookmark, X in_group A') - self.assertEqual(self._generate(select, 'in_group', 'subject', 'name'), - "DISTINCT Any B,C ORDERBY C WHERE X is CWUser, X in_group B, B name C") - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_staticcontrollers.py --- a/web/test/unittest_views_staticcontrollers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,175 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -from contextlib import contextmanager - -from logilab.common import tempattr -from logilab.common.testlib import Tags -from cubicweb.devtools.testlib import CubicWebTC - -import os -import os.path as osp -import glob - -from cubicweb.utils import HTMLHead -from cubicweb.web.views.staticcontrollers import ConcatFilesHandler - -class staticfilespublishermixin(object): - - @contextmanager - def _publish_static_files(self, url, header={}): - with self.admin_access.web_request(headers=header) as req: - req._url = url - self.app_handle_request(req, url) - yield req - -class StaticControllerCacheTC(staticfilespublishermixin, CubicWebTC): - tags = CubicWebTC.tags | Tags('static_controller', 'cache', 'http') - - def test_static_file_are_cached(self): - with self._publish_static_files('data/cubicweb.css') as req: - self.assertEqual(200, req.status_out) - self.assertIn('last-modified', req.headers_out) - next_headers = { - 'if-modified-since': req.get_response_header('last-modified', raw=True), - } - with self._publish_static_files('data/cubicweb.css', next_headers) as req: - self.assertEqual(304, req.status_out) - -class StaticDirectoryControllerTC(staticfilespublishermixin, CubicWebTC): - - def test_check_static_dir_access(self): - """write a file in the static directory and test the access""" - staticdir = osp.join(self.session.vreg.config.static_directory) - if not os.path.exists(staticdir): - os.makedirs(staticdir) - filename = osp.join(staticdir, 'test') - with open(filename, 'a') as f: - with self._publish_static_files('static/test') as req: - self.assertEqual(200, req.status_out) - -class DataControllerTC(staticfilespublishermixin, CubicWebTC): - tags = CubicWebTC.tags | Tags('static_controller', 'data', 'http') - - def _check_datafile_ok(self, fname): - with self._publish_static_files(fname) as req: - self.assertEqual(200, req.status_out) - self.assertIn('last-modified', req.headers_out) - self.assertIn('expires', req.headers_out) - self.assertEqual(req.get_response_header('cache-control'), - {'max-age': 604800}) - next_headers = { - 'if-modified-since': req.get_response_header('last-modified', raw=True), - } - with self._publish_static_files(fname, next_headers) as req: - self.assertEqual(304, req.status_out) - - def _check_datafile_redirect(self, fname, expected): - with self._publish_static_files(fname) as req: - self.assertEqual(302, req.status_out) - self.assertEqual(req.get_response_header('location'), - req.base_url() + expected) - - def _check_no_datafile(self, fname): - with self._publish_static_files(fname) as req: - self.assertEqual(404, req.status_out) - - def test_static_data_mode(self): - hash = self.vreg.config.instance_md5_version() - self.assertEqual(32, len(hash)) - - with tempattr(self.vreg.config, 'mode', 'test'): - self._check_datafile_ok('data/cubicweb.css') - self._check_no_datafile('data/does/not/exist') - self._check_no_datafile('data/%s/cubicweb.css' % ('0'*len(hash))) - - with tempattr(self.vreg.config, 'mode', 'notest'): - self.config._init_base_url() # reset config.datadir_url - self._check_datafile_redirect('data/cubicweb.css', 'data/%s/cubicweb.css' % hash) - 
self._check_datafile_ok('data/%s/cubicweb.css' % hash) - self._check_no_datafile('data/%s/does/not/exist' % hash) - self._check_datafile_redirect('data/%s/does/not/exist' % ('0'*len(hash)), - 'data/%s/%s/does/not/exist' % (hash, '0'*len(hash))) - - -class ConcatFilesTC(CubicWebTC): - - tags = CubicWebTC.tags | Tags('static_controller', 'concat') - - def tearDown(self): - super(ConcatFilesTC, self).tearDown() - self._cleanup_concat_cache() - - def _cleanup_concat_cache(self): - uicachedir = osp.join(self.config.apphome, 'uicache') - for fname in glob.glob(osp.join(uicachedir, 'cache_concat_*')): - os.unlink(osp.join(uicachedir, fname)) - - @contextmanager - def _publish_js_files(self, js_files): - with self.admin_access.web_request() as req: - head = HTMLHead(req) - url = head.concat_urls([req.data_url(js_file) - for js_file in js_files])[len(req.base_url()):] - req._url = url - res = self.app_handle_request(req, url) - yield res, req - - def expected_content(self, js_files): - content = b'' - for js_file in js_files: - dirpath, rid = self.config.locate_resource(js_file) - if dirpath is not None: # ignore resources not found - with open(osp.join(dirpath, rid), 'rb') as f: - content += f.read() + b'\n' - return content - - def test_cache(self): - js_files = ('cubicweb.ajax.js', 'jquery.js') - with self._publish_js_files(js_files) as (result, req): - self.assertNotEqual(404, req.status_out) - # check result content - self.assertEqual(result, self.expected_content(js_files)) - # make sure we kept a cached version on filesystem - concat_hander = ConcatFilesHandler(self.config) - filepath = concat_hander.build_filepath(js_files) - self.assertTrue(osp.isfile(filepath)) - - - def test_invalid_file_in_debug_mode(self): - js_files = ('cubicweb.ajax.js', 'dummy.js') - # in debug mode, an error is raised - self.config.debugmode = True - try: - with self._publish_js_files(js_files) as (result, req): - #print result - self.assertEqual(404, req.status_out) - finally: - self.config.debugmode = False - - def test_invalid_file_in_production_mode(self): - js_files = ('cubicweb.ajax.js', 'dummy.js') - with self._publish_js_files(js_files) as (result, req): - self.assertNotEqual(404, req.status_out) - # check result content - self.assertEqual(result, self.expected_content(js_files)) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_treeview.py --- a/web/test/unittest_views_treeview.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,51 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
- -from logilab.common.testlib import unittest_main -from logilab.mtconverter import html_unescape - -from cubicweb.devtools.htmlparser import XMLValidator -from cubicweb.devtools.testlib import CubicWebTC - - -class TreeViewTC(CubicWebTC): - - def test_treeview(self): - with self.admin_access.repo_cnx() as cnx: - ce = cnx.create_entity - root = ce('TreeNode', name=u'root') - node = ce('TreeNode', name=u'node1', parent=root) - ce('TreeNode', name=u'leaf1a', parent=node) - ce('TreeNode', name=u'leaf1b', parent=node) - node = ce('TreeNode', name=u'node2', parent=root) - ce('TreeNode', name=u'leaf2a', parent=node) - ce('TreeNode', name=u'leaf2b', parent=node) - root_eid = root.eid - cnx.commit() - - with self.admin_access.web_request() as req: - root = req.entity_from_eid(root_eid) - valid = self.content_type_validators.get('text/html', XMLValidator)() - page = valid.parse_string(root.view('tree', klass='oh-my-class')) - uls = page.find_tag('ul', gettext=False) - for _, attrib in uls: - self.assertEqual(attrib['class'], 'oh-my-class') - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_views_xmlrss.py --- a/web/test/unittest_views_xmlrss.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,40 +0,0 @@ -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web.views.xmlrss import SERIALIZERS - -class EntityXMLViewTC(CubicWebTC): - """see also cw.sobjects.test.unittest_parsers""" - def test(self): - rels = ['tags-object', 'in_group-subject', - 'in_state-subject', 'use_email-subject'] - with self.admin_access.web_request(relation=rels) as req: - self.assertMultiLineEqual( - req.user.view('xml'), - '''\ -<CWUser eid="6" cwuri="http://testing.fr/cubicweb/6" cwsource="system"> - <creation_date>%(cdate)s</creation_date> - <firstname/> - <last_login_time/> - <login>admin</login> - <modification_date>%(mdate)s</modification_date> - <surname/> - <upassword/> - <tags role="object"> - </tags> - <in_group role="subject"> - <CWGroup eid="%(group_eid)s" cwuri="http://testing.fr/cubicweb/%(group_eid)s"/> - </in_group> - <in_state role="subject"> - <State eid="%(state_eid)s" cwuri="http://testing.fr/cubicweb/%(state_eid)s" name="activated"/> - </in_state> - <use_email role="subject"> - </use_email> -</CWUser> -''' % {'cdate': SERIALIZERS['Datetime'](req.user.creation_date), - 'mdate': SERIALIZERS['Datetime'](req.user.modification_date), - 'state_eid': req.user.in_state[0].eid, - 'group_eid': req.user.in_group[0].eid}) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_viewselector.py --- a/web/test/unittest_viewselector.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,547 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""XXX rename, split, reorganize this""" -from __future__ import print_function - -from logilab.common.testlib import unittest_main - -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb import Binary, UnknownProperty -from cubicweb.predicates import (is_instance, - specified_etype_implements, rql_condition) -from cubicweb.web import NoSelectableObject -from cubicweb.web.action import Action - -from cubicweb.web.views import (primary, baseviews, tableview, - editforms, management, actions, startup, cwuser, schema, xbel, - vcard, owl, treeview, idownloadable, wdoc, debug, cwuser, - cwproperties, cwsources, xmlrss, rdf, csvexport, json, - undohistory) - - -USERACTIONS = [actions.UserPreferencesAction, - actions.UserInfoAction, - actions.LogoutAction] -SITEACTIONS = [actions.ManageAction] -FOOTERACTIONS = [wdoc.HelpAction, - wdoc.AboutAction, - actions.PoweredByAction] -MANAGEACTIONS = [actions.SiteConfigurationAction, - schema.ViewSchemaAction, - cwuser.ManageUsersAction, - cwsources.ManageSourcesAction, - debug.SiteInfoAction] - -if hasattr(rdf, 'RDFView'): # not available if rdflib not installed - RDFVIEWS = [('rdf', rdf.RDFView), ('n3rdf', rdf.RDFN3View)] -else: - RDFVIEWS = [] - -class ViewSelectorTC(CubicWebTC): - - def setup_database(self): - with self.admin_access.repo_cnx() as cnx: - cnx.create_entity('BlogEntry', title=u"une news !", content=u"cubicweb c'est beau") - cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") - cnx.create_entity('EmailAddress', address=u"devel@logilab.fr", alias=u'devel') - cnx.create_entity('Tag', name=u'x') - cnx.commit() - -class VRegistryTC(ViewSelectorTC): - """test the view selector""" - - def _test_registered(self, registry, content): - try: - expected = getattr(self, 'all_%s' % registry) - except AttributeError: - return - if registry == 'hooks': - self.assertEqual(len(content), expected, content) - return - try: - self.assertSetEqual(list(content), expected) - except Exception: - print(registry, sorted(expected), sorted(content)) - print('no more', [v for v in expected if not v in content]) - print('missing', [v for v in content if not v in expected]) - raise - - def setUp(self): - super(VRegistryTC, self).setUp() - assert self.vreg['views']['propertiesform'] - - def test_possible_views_none_rset(self): - with self.admin_access.web_request() as req: - self.assertListEqual(self.pviews(req, None), - [('cw.sources-management', cwsources.CWSourcesManagementView), - ('cw.users-and-groups-management', cwuser.UsersAndGroupsManagementView), - ('gc', debug.GCView), - ('index', startup.IndexView), - ('info', debug.ProcessInformationView), - ('manage', startup.ManageView), - ('owl', owl.OWLView), - ('propertiesform', cwproperties.CWPropertiesForm), - ('registry', debug.RegistryView), - ('schema', schema.SchemaView), - ('siteinfo', debug.SiteInfoView), - ('systempropertiesform', cwproperties.SystemCWPropertiesForm), - ('undohistory', undohistory.UndoHistoryView)]) - - def test_possible_views_noresult(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X eid 999999') - self.assertListEqual(self.pviews(req, rset), - [('csvexport', csvexport.CSVRsetView), - ('ecsvexport', csvexport.CSVEntityView), - ('ejsonexport', json.JsonEntityView), - ('jsonexport', json.JsonRsetView), - ]) - - def test_possible_views_one_egroup(self): - with 
self.admin_access.web_request() as req: - rset = req.execute('CWGroup X WHERE X name "managers"') - self.assertCountEqual(self.pviews(req, rset), - RDFVIEWS + - [('csvexport', csvexport.CSVRsetView), - ('ecsvexport', csvexport.CSVEntityView), - ('ejsonexport', json.JsonEntityView), - ('filetree', treeview.FileTreeView), - ('jsonexport', json.JsonRsetView), - ('list', baseviews.ListView), - ('oneline', baseviews.OneLineView), - ('owlabox', owl.OWLABOXView), - ('primary', cwuser.CWGroupPrimaryView), - ('rsetxml', xmlrss.XMLRsetView), - ('rss', xmlrss.RSSView), - ('sameetypelist', baseviews.SameETypeListView), - ('security', management.SecurityManagementView), - ('table', tableview.RsetTableView), - ('text', baseviews.TextView), - ('treeview', treeview.TreeView), - ('xbel', xbel.XbelView), - ('xml', xmlrss.XMLView)]) - - def test_possible_views_multiple_egroups(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWGroup X') - self.assertCountEqual(self.pviews(req, rset), - RDFVIEWS + - [('csvexport', csvexport.CSVRsetView), - ('ecsvexport', csvexport.CSVEntityView), - ('ejsonexport', json.JsonEntityView), - ('filetree', treeview.FileTreeView), - ('jsonexport', json.JsonRsetView), - ('list', baseviews.ListView), - ('oneline', baseviews.OneLineView), - ('owlabox', owl.OWLABOXView), - ('primary', cwuser.CWGroupPrimaryView), - ('rsetxml', xmlrss.XMLRsetView), - ('rss', xmlrss.RSSView), - ('sameetypelist', baseviews.SameETypeListView), - ('security', management.SecurityManagementView), - ('table', tableview.RsetTableView), - ('text', baseviews.TextView), - ('treeview', treeview.TreeView), - ('xbel', xbel.XbelView), - ('xml', xmlrss.XMLView), - ]) - - def test_propertiesform_admin(self): - assert self.vreg['views']['propertiesform'] - with self.admin_access.web_request() as req: - rset1 = req.execute('CWUser X WHERE X login "admin"') - self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None)) - self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset1)) - rset2 = req.execute('CWUser X WHERE X login "anon"') - self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2)) - - def test_propertiesform_anon(self): - with self.new_access(u'anon').web_request() as req: - rset1 = req.execute('CWUser X WHERE X login "admin"') - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=None) - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1) - rset2 = req.execute('CWUser X WHERE X login "anon"') - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset2) - - def test_propertiesform_jdoe(self): - with self.admin_access.repo_cnx() as cnx: - self.create_user(cnx, u'jdoe') - cnx.commit() - with self.new_access(u'jdoe').web_request() as req: - rset1 = req.execute('CWUser X WHERE X login "admin"') - rset2 = req.execute('CWUser X WHERE X login "jdoe"') - self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None)) - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1) - self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2)) - - def test_possible_views_multiple_different_types(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X') - self.assertCountEqual(self.pviews(req, rset), - RDFVIEWS + - [('csvexport', csvexport.CSVRsetView), - ('ecsvexport', csvexport.CSVEntityView), - ('ejsonexport', 
json.JsonEntityView), - ('filetree', treeview.FileTreeView), - ('jsonexport', json.JsonRsetView), - ('list', baseviews.ListView), - ('oneline', baseviews.OneLineView), - ('owlabox', owl.OWLABOXView), - ('primary', primary.PrimaryView), - ('rsetxml', xmlrss.XMLRsetView), - ('rss', xmlrss.RSSView), - ('security', management.SecurityManagementView), - ('table', tableview.RsetTableView), - ('text', baseviews.TextView), - ('treeview', treeview.TreeView), - ('xbel', xbel.XbelView), - ('xml', xmlrss.XMLView), - ]) - - def test_possible_views_any_rset(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any N, X WHERE X in_group Y, Y name N') - self.assertListEqual(self.pviews(req, rset), - [('csvexport', csvexport.CSVRsetView), - ('jsonexport', json.JsonRsetView), - ('rsetxml', xmlrss.XMLRsetView), - ('table', tableview.RsetTableView), - ]) - - def test_possible_views_multiple_eusers(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWUser X') - self.assertCountEqual(self.pviews(req, rset), - RDFVIEWS + - [('csvexport', csvexport.CSVRsetView), - ('ecsvexport', csvexport.CSVEntityView), - ('ejsonexport', json.JsonEntityView), - ('filetree', treeview.FileTreeView), - ('foaf', cwuser.FoafView), - ('jsonexport', json.JsonRsetView), - ('list', baseviews.ListView), - ('oneline', baseviews.OneLineView), - ('owlabox', owl.OWLABOXView), - ('primary', primary.PrimaryView), - ('rsetxml', xmlrss.XMLRsetView), - ('rss', xmlrss.RSSView), - ('sameetypelist', baseviews.SameETypeListView), - ('security', management.SecurityManagementView), - ('table', tableview.RsetTableView), - ('text', baseviews.TextView), - ('treeview', treeview.TreeView), - ('vcard', vcard.VCardCWUserView), - ('xbel', xbel.XbelView), - ('xml', xmlrss.XMLView), - ]) - - def test_possible_actions_none_rset(self): - with self.admin_access.web_request() as req: - self.assertDictEqual(self.pactionsdict(req, None, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'manage': MANAGEACTIONS, - 'footer': FOOTERACTIONS}) - - def test_possible_actions_no_entity(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X WHERE X eid 999999') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'manage': MANAGEACTIONS, - 'footer': FOOTERACTIONS, - }) - - def test_possible_actions_same_type_entities(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWGroup X') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'manage': MANAGEACTIONS, - 'footer': FOOTERACTIONS, - 'mainactions': [actions.MultipleEditAction], - 'moreactions': [actions.DeleteAction, - actions.AddNewAction]}) - - def test_possible_actions_different_types_entities(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any X') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'manage': MANAGEACTIONS, - 'footer': FOOTERACTIONS, - 'moreactions': [actions.DeleteAction], - }) - - def test_possible_actions_final_entities(self): - with self.admin_access.web_request() as req: - rset = req.execute('Any N, X WHERE X in_group Y, Y name N') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'manage': MANAGEACTIONS, - 'footer': 
FOOTERACTIONS, - }) - - def test_possible_actions_eetype_cwuser_entity(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWEType X WHERE X name "CWUser"') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'manage': MANAGEACTIONS, - 'footer': FOOTERACTIONS, - 'mainactions': [actions.ModifyAction, - actions.ViewSameCWEType], - 'moreactions': [actions.ManagePermissionsAction, - actions.AddRelatedActions, - actions.DeleteAction, - actions.CopyAction, - ], - }) - - - def test_select_creation_form(self): - rset = None - with self.admin_access.web_request() as req: - # creation form - req.form['etype'] = 'CWGroup' - self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset), - editforms.CreationFormView) - - with self.admin_access.web_request() as req: - # custom creation form - class CWUserCreationForm(editforms.CreationFormView): - __select__ = specified_etype_implements('CWUser') - - self.vreg._loadedmods[__name__] = {} - self.vreg.register(CWUserCreationForm) - req.form['etype'] = 'CWUser' - - self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset), - CWUserCreationForm) - - def test_select_view(self): - # no entity - rset = None - with self.admin_access.web_request() as req: - self.assertIsInstance(self.vreg['views'].select('index', req, rset=rset), - startup.IndexView) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'primary', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'table', req, rset=rset) - - # no entity - rset = req.execute('Any X WHERE X eid 999999') - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'index', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'creation', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'primary', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'table', req, rset=rset) - # one entity - rset = req.execute('CWGroup X WHERE X name "managers"') - self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), - primary.PrimaryView) - self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset), - baseviews.ListView) - self.assertIsInstance(self.vreg['views'].select('edition', req, rset=rset), - editforms.EditionFormView) - self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), - tableview.RsetTableView) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'creation', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'index', req, rset=rset) - # list of entities of the same type - rset = req.execute('CWGroup X') - self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), - primary.PrimaryView) - self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset), - baseviews.ListView) - self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), - tableview.RsetTableView) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'creation', req, rset=rset) - # list of entities of different types - rset = req.execute('Any X') - self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), - primary.PrimaryView) - self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset), - baseviews.ListView) - self.assertIsInstance(self.vreg['views'].select('table', req, 
rset=rset), - tableview.RsetTableView) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'creation', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'index', req, rset=rset) - # whatever - rset = req.execute('Any N, X WHERE X in_group Y, Y name N') - self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), - tableview.RsetTableView) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'index', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'creation', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'primary', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'list', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'edition', req, rset=rset) - # mixed query - rset = req.execute('Any U,G WHERE U is CWUser, G is CWGroup') - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'edition', req, rset=rset) - self.assertRaises(NoSelectableObject, - self.vreg['views'].select, 'creation', req, rset=rset) - self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset), - tableview.RsetTableView) - - def test_interface_selector(self): - with self.admin_access.web_request() as req: - req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim')) - # image primary view priority - rset = req.execute('File X WHERE X data_name "bim.png"') - self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), - idownloadable.IDownloadablePrimaryView) - - - def test_score_entity_selector(self): - with self.admin_access.web_request() as req: - req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim')) - # image/ehtml primary view priority - rset = req.execute('File X WHERE X data_name "bim.png"') - self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset), - idownloadable.ImageView) - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset) - - fileobj = req.create_entity('File', data_name=u'bim.html', data=Binary(b'<html>bam</html')) - # image/ehtml primary view priority - rset = req.execute('File X WHERE X data_name "bim.html"') - self.assertIsInstance(self.vreg['views'].select('ehtml', req, rset=rset), - idownloadable.EHTMLView) - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset) - - fileobj = req.create_entity('File', data_name=u'bim.txt', data=Binary(b'boum')) - # image/ehtml primary view priority - rset = req.execute('File X WHERE X data_name "bim.txt"') - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset) - self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset) - - - def _test_view(self, vid, rql, args): - with self.admin_access.web_request() as req: - if rql is None: - rset = None - else: - rset = req.execute(rql) - try: - obj = self.vreg['views'].select(vid, req, rset=rset, **args) - return obj.render(**args) - except Exception: - print(vid, rset, args) - raise - - def test_form(self): - for vid, rql, args in ( - #('creation', 'Any X WHERE X eid 999999', {}), - ('edition', 'CWGroup X WHERE X name "managers"', {}), - ('copy', 'CWGroup X WHERE X name "managers"', {}), - ('muledit', 'CWGroup X', {}), - #('muledit', 'Any X', {}), - ): - self._test_view(vid, rql, args) - - - def test_properties(self): - self.assertEqual(sorted(k for k in self.vreg['propertydefs'] - if 
k.startswith('ctxcomponents.edit_box')), - ['ctxcomponents.edit_box.context', - 'ctxcomponents.edit_box.order', - 'ctxcomponents.edit_box.visible']) - self.assertEqual([k for k in self.vreg['propertyvalues'] - if not k.startswith('system.version')], - []) - self.assertEqual(self.vreg.property_value('ctxcomponents.edit_box.visible'), True) - self.assertEqual(self.vreg.property_value('ctxcomponents.edit_box.order'), 2) - self.assertEqual(self.vreg.property_value('ctxcomponents.possible_views_box.visible'), False) - self.assertEqual(self.vreg.property_value('ctxcomponents.possible_views_box.order'), 10) - self.assertRaises(UnknownProperty, self.vreg.property_value, 'ctxcomponents.actions_box') - - - -class CWETypeRQLAction(Action): - __regid__ = 'testaction' - __select__ = is_instance('CWEType') & rql_condition('X name "CWEType"') - title = 'bla' - - -class RQLActionTC(ViewSelectorTC): - - def setUp(self): - super(RQLActionTC, self).setUp() - self.vreg._loadedmods[__name__] = {} - self.vreg.register(CWETypeRQLAction) - actionsreg = self.vreg['actions'] - actionsreg['testaction'][0].__registered__(actionsreg) - - def tearDown(self): - super(RQLActionTC, self).tearDown() - del self.vreg['actions']['testaction'] - - def test(self): - with self.admin_access.web_request() as req: - rset = req.execute('CWEType X WHERE X name "CWEType"') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'footer': FOOTERACTIONS, - 'manage': MANAGEACTIONS, - 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType], - 'moreactions': [actions.ManagePermissionsAction, - actions.AddRelatedActions, - actions.DeleteAction, - actions.CopyAction, - CWETypeRQLAction, - ], - }) - rset = req.execute('CWEType X WHERE X name "CWRType"') - self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()), - {'useractions': USERACTIONS, - 'siteactions': SITEACTIONS, - 'footer': FOOTERACTIONS, - 'manage': MANAGEACTIONS, - 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType], - 'moreactions': [actions.ManagePermissionsAction, - actions.AddRelatedActions, - actions.DeleteAction, - actions.CopyAction,] - }) - - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_web.py --- a/web/test/unittest_web.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,157 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
- -from json import loads -from os.path import join -import tempfile - -try: - import requests - assert [int(n) for n in requests.__version__.split('.', 2)][:2] >= [1, 2] -except (ImportError, AssertionError): - requests = None - -from logilab.common.testlib import TestCase, unittest_main -from cubicweb.devtools.httptest import CubicWebServerTC -from cubicweb.devtools.fake import FakeRequest - -class AjaxReplaceUrlTC(TestCase): - - def test_ajax_replace_url_1(self): - self._test_arurl("fname=view&rql=Person%20P&vid=list", - rql='Person P', vid='list') - - def test_ajax_replace_url_2(self): - self._test_arurl("age=12&fname=view&name=bar&rql=Person%20P&vid=oneline", - rql='Person P', vid='oneline', name='bar', age=12) - - def _test_arurl(self, qs, **kwargs): - req = FakeRequest() - arurl = req.ajax_replace_url - # NOTE: for the simplest use cases, we could use doctest - url = arurl('foo', **kwargs) - self.assertTrue(url.startswith('javascript:')) - self.assertTrue(url.endswith('()')) - cbname = url.split()[1][:-2] - self.assertMultiLineEqual( - 'function %s() { $("#foo").loadxhtml("http://testing.fr/cubicweb/ajax?%s",' - '{pageid: "%s"},"get","replace"); }' % - (cbname, qs, req.pageid), - req.html_headers.post_inlined_scripts[0]) - - -class FileUploadTC(CubicWebServerTC): - - def setUp(self): - "Skip whole test class if a suitable requests module is not available" - if requests is None: - self.skipTest('Python ``requests`` module is not available') - super(FileUploadTC, self).setUp() - - @property - def _post_url(self): - with self.admin_access.web_request() as req: - return req.build_url('ajax', fname='fileupload') - - def _fobject(self, fname): - return open(join(self.datadir, fname), 'rb') - - def _fcontent(self, fname): - return self._fobject(fname).read() - - def test_single_file_upload(self): - files = {'file': ('schema.py', self._fobject('schema.py'))} - webreq = requests.post(self._post_url, files=files) - # check backward compat : a single uploaded file leads to a single - # 2-uple in the request form - expect = {'fname': u'fileupload', - 'file': ['schema.py', self._fcontent('schema.py')]} - self.assertEqual(webreq.status_code, 200) - self.assertDictEqual(expect, loads(webreq.content)) - - def test_multiple_file_upload(self): - files = [('files', ('schema.py', self._fobject('schema.py'))), - ('files', ('views.py', self._fobject('views.py')))] - webreq = requests.post(self._post_url, files=files,) - expect = {'fname': u'fileupload', - 'files': [['schema.py', self._fcontent('schema.py')], - ['views.py', self._fcontent('views.py')]],} - self.assertEqual(webreq.status_code, 200) - self.assertDictEqual(expect, loads(webreq.content)) - - -class LanguageTC(CubicWebServerTC): - - def test_language_neg(self): - headers = {'Accept-Language': 'fr'} - webreq = self.web_request(headers=headers) - self.assertIn('lang="fr"', webreq.read()) - vary = [h.lower().strip() for h in webreq.getheader('Vary').split(',')] - self.assertIn('accept-language', vary) - headers = {'Accept-Language': 'en'} - webreq = self.web_request(headers=headers) - self.assertIn('lang="en"', webreq.read()) - vary = [h.lower().strip() for h in webreq.getheader('Vary').split(',')] - self.assertIn('accept-language', vary) - - def test_response_codes(self): - with self.admin_access.client_cnx() as cnx: - admin_eid = cnx.user.eid - # guest can't see admin - webreq = self.web_request('/%d' % admin_eid) - self.assertEqual(webreq.status, 403) - - # but admin can - self.web_login() - webreq = self.web_request('/%d' % admin_eid) - 
self.assertEqual(webreq.status, 200) - - def test_session_cookie_httponly(self): - webreq = self.web_request() - self.assertIn('HttpOnly', webreq.getheader('set-cookie')) - - -class MiscOptionsTC(CubicWebServerTC): - @classmethod - def setUpClass(cls): - super(MiscOptionsTC, cls).setUpClass() - cls.logfile = tempfile.NamedTemporaryFile() - - def setUp(self): - super(MiscOptionsTC, self).setUp() - self.config.global_set_option('query-log-file', self.logfile.name) - self.config.global_set_option('datadir-url', '//static.testing.fr/') - # call load_configuration again to let the config reset its datadir_url - self.config.load_configuration() - - def test_log_queries(self): - self.web_request() - self.assertTrue(self.logfile.read()) - - def test_datadir_url(self): - webreq = self.web_request() - self.assertNotIn('/data/', webreq.read()) - - @classmethod - def tearDownClass(cls): - super(MiscOptionsTC, cls).tearDownClass() - cls.logfile.close() - - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/test/unittest_webconfig.py --- a/web/test/unittest_webconfig.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
-"""cubicweb.web.webconfig unit tests""" - -import os - -from logilab.common.testlib import TestCase, unittest_main -from cubicweb.devtools import ApptestConfiguration, fake - -class WebconfigTC(TestCase): - def setUp(self): - # need explicit None if dirname(__file__) is empty, see - # ApptestConfiguration.__init__ - self.config = ApptestConfiguration('data', apphome=os.path.dirname(__file__) or None) - self.config._cubes = ['file'] - self.config.load_configuration() - - def test_nonregr_print_css_as_list(self): - """make sure PRINT_CSS *must* is a list""" - config = self.config - print_css = config.uiprops['STYLESHEETS_PRINT'] - self.assertTrue(isinstance(print_css, list)) - ie_css = config.uiprops['STYLESHEETS_IE'] - self.assertTrue(isinstance(ie_css, list)) - - def test_locate_resource(self): - self.assertIn('FILE_ICON', self.config.uiprops) - rname = self.config.uiprops['FILE_ICON'].replace(self.config.datadir_url, '') - self.assertIn('file', self.config.locate_resource(rname)[0].split(os.sep)) - cubicwebcsspath = self.config.locate_resource('cubicweb.css')[0].split(os.sep) - - # 'shared' if tests under apycot - self.assertTrue('web' in cubicwebcsspath or 'shared' in cubicwebcsspath, - 'neither "web" nor "shared" found in cubicwebcsspath (%s)' - % cubicwebcsspath) - - def test_sign_text(self): - signature = self.config.sign_text(u'hôp') - self.assertTrue(self.config.check_text_sign(u'hôp', signature)) - -if __name__ == '__main__': - unittest_main() diff -r 058bb3dc685f -r 0b59724cb3f2 web/uicfg.py --- a/web/uicfg.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,28 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -""" -This module has been moved to web.views.uicfg. -""" -__docformat__ = "restructuredtext en" - -from warnings import warn -from cubicweb.web.views.uicfg import * - - -warn('[3.16] moved to cubicweb.web.views.uicfg', - DeprecationWarning, stacklevel=2) diff -r 058bb3dc685f -r 0b59724cb3f2 web/uihelper.py --- a/web/uihelper.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,177 +0,0 @@ -# copyright 2011-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""This module provide highlevel helpers to avoid uicfg boilerplate -for most common tasks such as fields ordering, widget customization, etc. - - -Here are a few helpers to customize *action box* rendering: - -.. autofunction:: cubicweb.web.uihelper.append_to_addmenu -.. autofunction:: cubicweb.web.uihelper.remove_from_addmenu - - -and a few other ones for *form configuration*: - -.. autofunction:: cubicweb.web.uihelper.set_fields_order -.. autofunction:: cubicweb.web.uihelper.hide_field -.. autofunction:: cubicweb.web.uihelper.hide_fields -.. autofunction:: cubicweb.web.uihelper.set_field_kwargs -.. autofunction:: cubicweb.web.uihelper.set_field -.. autofunction:: cubicweb.web.uihelper.edit_inline -.. autofunction:: cubicweb.web.uihelper.edit_as_attr -.. autofunction:: cubicweb.web.uihelper.set_muledit_editable - -The module also provides a :class:`FormConfig` base class that lets you gather -uicfg declaration in the scope of a single class, which can sometimes -be clearer to read than a bunch of sequential function calls. - -.. autoclass:: cubicweb.web.uihelper.FormConfig - -""" -__docformat__ = "restructuredtext en" - -from six import add_metaclass - -from logilab.common.deprecation import deprecated -from cubicweb.web.views import uicfg - - -## generic uicfg helpers ###################################################### - -backward_compat_funcs = (('append_to_addmenu', uicfg.actionbox_appearsin_addmenu), - ('remove_from_addmenu', uicfg.actionbox_appearsin_addmenu), - ('set_fields_order', uicfg.autoform_field_kwargs), - ('hide_field', uicfg.autoform_section), - ('hide_fields', uicfg.autoform_section), - ('set_field_kwargs', uicfg.autoform_field_kwargs), - ('set_field', uicfg.autoform_field), - ('edit_inline', uicfg.autoform_section), - ('edit_as_attr', uicfg.autoform_section), - ('set_muledit_editable', uicfg.autoform_section), - ) - -for funcname, tag in backward_compat_funcs: - msg = ('[3.16] uihelper.%(name)s is deprecated, please use ' - 'web.views.uicfg.%(rtagid)s.%(name)s' % dict( - name=funcname, rtagid=tag.__regid__)) - globals()[funcname] = deprecated(msg)(getattr(tag, funcname)) - - -class meta_formconfig(type): - """metaclass of FormConfig classes, only for easier declaration purpose""" - def __init__(cls, name, bases, classdict): - if cls.etype is None: - return - uicfg_afs = cls.uicfg_afs or uicfg.autoform_section - uicfg_aff = cls.uicfg_aff or uicfg.autoform_field - uicfg_affk = cls.uicfg_affk or uicfg.autoform_field_kwargs - for attr_role in cls.hidden: - uicfg_afs.hide_field(cls.etype, attr_role, formtype=cls.formtype) - for attr_role in cls.rels_as_attrs: - uicfg_afs.edit_as_attr(cls.etype, attr_role, formtype=cls.formtype) - for attr_role in cls.inlined: - uicfg_afs.edit_inline(cls.etype, attr_role, formtype=cls.formtype) - for rtype, widget in cls.widgets.items(): - uicfg_affk.set_field_kwargs(cls.etype, rtype, widget=widget) - for rtype, field in cls.fields.items(): - uicfg_aff.set_field(cls.etype, rtype, field) - uicfg_affk.set_fields_order(cls.etype, cls.fields_order) - super(meta_formconfig, cls).__init__(name, bases, classdict) - - -@add_metaclass(meta_formconfig) -class FormConfig: - """helper base class to define uicfg rules on a given entity type. 
- - In all descriptions below, attributes list can either be a list of - attribute names of a list of 2-tuples (relation name, role of - the edited entity in the relation). - - **Attributes** - - :attr:`etype` - which entity type the form config is for. This attribute is **mandatory** - - :attr:`formtype` - the formtype the class tries toc customize (i.e. *main*, *inlined*, or *muledit*), - default is *main*. - - :attr:`hidden` - the list of attributes or relations to hide. - - :attr:`rels_as_attrs` - the list of attributes to edit in the *attributes* section. - - :attr:`inlined` - the list of attributes to edit in the *inlined* section. - - :attr:`fields_order` - the list of attributes to edit, in the desired order. Unspecified - fields will be displayed after specified ones, their order - being consistent with the schema definition. - - :attr:`widgets` - a dictionary mapping attribute names to widget instances. - - :attr:`fields` - a dictionary mapping attribute names to field instances. - - :attr:`uicfg_afs` - an instance of ``cubicweb.web.uicfg.AutoformSectionRelationTags`` - Default is None, meaning ``cubicweb.web.uicfg.autoform_section`` is used. - - :attr:`uicfg_aff` - an instance of ``cubicweb.web.uicfg.AutoformFieldTags`` - Default is None, meaning ``cubicweb.web.uicfg.autoform_field`` is used. - - :attr:`uicfg_affk` - an instance of ``cubicweb.web.uicfg.AutoformFieldKwargsTags`` - Default is None, meaning ``cubicweb.web.uicfg.autoform_field_kwargs`` is used. - - Examples: - -.. sourcecode:: python - - from cubicweb.web import uihelper, formwidgets as fwdgs - - class LinkFormConfig(uihelper.FormConfig): - etype = 'Link' - hidden = ('title', 'description', 'embed') - widgets = dict( - url=fwdgs.TextInput(attrs={'size':40}), - ) - - class UserFormConfig(uihelper.FormConfig): - etype = 'CWUser' - hidden = ('login',) - rels_as_attrs = ('in_group',) - fields_order = ('firstname', 'surname', 'in_group', 'use_email') - inlined = ('use_email',) - - """ - formtype = 'main' - etype = None # must be defined in concrete subclasses - hidden = () - rels_as_attrs = () - inlined = () - fields_order = () - widgets = {} - fields = {} - uicfg_afs = None - uicfg_aff = None - uicfg_affk = None diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/__init__.py --- a/web/views/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,148 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""Views, forms, actions... 
for the CubicWeb web client""" - -__docformat__ = "restructuredtext en" - -import os -import sys -import tempfile - -from six import add_metaclass - -from rql import nodes -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import class_deprecated - - -def need_table_view(rset, schema): - """return True if we think that a table view is more appropriate than a - list or primary view to display the given result set - """ - rqlst = rset.syntax_tree() - if len(rqlst.children) > 1: - # UNION query, use a table - return True - selected = rqlst.children[0].selection - try: - mainvar = selected[0] - except AttributeError: - # not a variable ref, using table view is probably a good option - return True - if not (isinstance(mainvar, nodes.VariableRef) or - (isinstance(mainvar, nodes.Constant) and mainvar.uid)): - return True - for i, etype in enumerate(rset.description[0][1:]): - # etype may be None on outer join - if etype is None: - return True - # check the selected index node is a VariableRef (else we - # won't detect aggregate function - if not isinstance(selected[i+1], nodes.VariableRef): - return True - # if this is not a final entity - if not schema.eschema(etype).final: - return True - # if this is a final entity not linked to the main variable - var = selected[i+1].variable - for vref in var.references(): - rel = vref.relation() - if rel is None: - continue - if mainvar.is_equivalent(rel.children[0]): - break - else: - return True - return False - -# FIXME: VID_BY_MIMETYPE is unfortunately a bit too naive since -# some browsers (e.g. FF2) send a bunch of mimetypes in -# the Accept header, for instance: -# text/xml,application/xml,application/xhtml+xml,text/html;q=0.9, -# text/plain;q=0.8,image/png,*/*;q=0.5 -VID_BY_MIMETYPE = { - #'text/xml': 'xml', - # XXX rss, owl... 
-} -def vid_from_rset(req, rset, schema, check_table=True): - """given a result set, return a view id""" - if rset is None: - return 'index' - for mimetype in req.parse_accept_header('Accept'): - if mimetype in VID_BY_MIMETYPE: - return VID_BY_MIMETYPE[mimetype] - nb_rows = len(rset) - # empty resultset - if nb_rows == 0: - return 'noresult' - # entity result set - if not schema.eschema(rset.description[0][0]).final: - if check_table and need_table_view(rset, schema): - return 'table' - if nb_rows == 1: - if req.search_state[0] == 'normal': - return 'primary' - return 'outofcontext-search' - if len(rset.column_types(0)) == 1: - return 'sameetypelist' - return 'list' - return 'table' - - -def linksearch_select_url(req, rset): - """when searching an entity to create a relation, return a URL to select - entities in the given rset - """ - req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') ) - target, eid, r_type, searchedtype = req.search_state[1] - if target == 'subject': - id_fmt = '%s:%s:%%s' % (eid, r_type) - else: - id_fmt = '%%s:%s:%s' % (r_type, eid) - triplets = '-'.join(id_fmt % row[0] for row in rset.rows) - return "javascript: selectForAssociation('%s', '%s');" % (triplets, eid) - - -def add_etype_button(req, etype, csscls='addButton right', **urlkwargs): - vreg = req.vreg - eschema = vreg.schema.eschema(etype) - if eschema.has_perm(req, 'add'): - url = vreg['etypes'].etype_class(etype).cw_create_url(req, **urlkwargs) - return u'<a href="%s" class="%s">%s</a>' % ( - xml_escape(url), csscls, req.__('New %s' % etype)) - return u'' - - - -@add_metaclass(class_deprecated) -class TmpFileViewMixin(object): - __deprecation_warning__ = '[3.18] %(cls)s is deprecated' - binary = True - content_type = 'application/octet-stream' - cache_max_age = 60*60*2 # stay in http cache for 2 hours by default - - def call(self): - self.cell_call() - - def cell_call(self, row=0, col=0): - self.cw_row, self.cw_col = row, col # in case one needs it - fd, tmpfile = tempfile.mkstemp('.png') - os.close(fd) - self._generate(tmpfile) - self.w(open(tmpfile, 'rb').read()) - os.unlink(tmpfile) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/actions.py --- a/web/views/actions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,429 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
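The fallback order implemented by ``vid_from_rset()`` above is easier to see on plain values. The sketch below is only an illustration (``simplified_vid`` is a hypothetical name, not part of CubicWeb) and deliberately leaves out the ``rset is None`` and Accept-header shortcuts handled by the real function:

.. sourcecode:: python

    def simplified_vid(nb_rows, entity_rset, needs_table, single_etype, searching=False):
        """Illustrative reduction of the vid_from_rset() fallback order."""
        if nb_rows == 0:
            return 'noresult'
        if entity_rset:                  # first selected column is a non-final entity type
            if needs_table:              # stands for need_table_view(rset, schema)
                return 'table'
            if nb_rows == 1:
                return 'outofcontext-search' if searching else 'primary'
            return 'sameetypelist' if single_etype else 'list'
        return 'table'                   # only final values selected: tabular display

    assert simplified_vid(1, True, False, True) == 'primary'
    assert simplified_vid(5, True, False, True) == 'sameetypelist'
    assert simplified_vid(0, True, False, True) == 'noresult'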
-"""Set of HTML base actions""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from logilab.mtconverter import xml_escape -from logilab.common.registry import objectify_predicate, yes - -from cubicweb.schema import display_name -from cubicweb.predicates import (EntityPredicate, - one_line_rset, multi_lines_rset, one_etype_rset, relation_possible, - nonempty_rset, non_final_entity, score_entity, - authenticated_user, match_user_groups, match_search_state, - has_permission, has_add_permission, is_instance, debug_mode, - ) -from cubicweb.web import controller, action -from cubicweb.web.views import uicfg, linksearch_select_url, vid_from_rset - - -class has_editable_relation(EntityPredicate): - """accept if some relations for an entity found in the result set is - editable by the logged user. - - See `EntityPredicate` documentation for behaviour when row is not specified. - """ - - def score_entity(self, entity): - # if user has no update right but it can modify some relation, - # display action anyway - form = entity._cw.vreg['forms'].select('edition', entity._cw, - entity=entity, mainform=False) - for dummy in form.editable_relations(): - return 1 - for dummy in form.inlined_form_views(): - return 1 - for dummy in form.editable_attributes(strict=True): - return 1 - return 0 - -@objectify_predicate -def match_searched_etype(cls, req, rset=None, **kwargs): - return req.match_search_state(rset) - -@objectify_predicate -def view_is_not_default_view(cls, req, rset=None, **kwargs): - # interesting if it propose another view than the current one - vid = req.form.get('vid') - if vid and vid != vid_from_rset(req, rset, req.vreg.schema): - return 1 - return 0 - -@objectify_predicate -def addable_etype_empty_rset(cls, req, rset=None, **kwargs): - if rset is not None and not rset.rowcount: - rqlst = rset.syntax_tree() - if len(rqlst.children) > 1: - return 0 - select = rqlst.children[0] - if len(select.defined_vars) == 1 and len(select.solutions) == 1: - rset._searched_etype = next(iter(select.solutions[0].values())) - eschema = req.vreg.schema.eschema(rset._searched_etype) - if not (eschema.final or eschema.is_subobject(strict=True)) \ - and eschema.has_perm(req, 'add'): - return 1 - return 0 - -class has_undoable_transactions(EntityPredicate): - "Select entities having public (i.e. end-user) undoable transactions." - - def score_entity(self, entity): - if not entity._cw.vreg.config['undo-enabled']: - return 0 - if entity._cw.cnx.undoable_transactions(eid=entity.eid): - return 1 - else: - return 0 - - -# generic 'main' actions ####################################################### - -class SelectAction(action.Action): - """base class for link search actions. By default apply on - any size entity result search it the current state is 'linksearch' - if accept match. 
- """ - __regid__ = 'select' - __select__ = (match_search_state('linksearch') & nonempty_rset() - & match_searched_etype()) - - title = _('select') - category = 'mainactions' - order = 0 - - def url(self): - return linksearch_select_url(self._cw, self.cw_rset) - - -class CancelSelectAction(action.Action): - __regid__ = 'cancel' - __select__ = match_search_state('linksearch') - - title = _('cancel select') - category = 'mainactions' - order = 10 - - def url(self): - target, eid, r_type, searched_type = self._cw.search_state[1] - return self._cw.build_url(str(eid), - vid='edition', __mode='normal') - - -class ViewAction(action.Action): - __regid__ = 'view' - __select__ = (action.Action.__select__ & - match_user_groups('users', 'managers') & - view_is_not_default_view() & - non_final_entity()) - - title = _('view') - category = 'mainactions' - order = 0 - - def url(self): - params = self._cw.form.copy() - for param in ('vid', '__message') + controller.NAV_FORM_PARAMETERS: - params.pop(param, None) - if self._cw.ajax_request: - path = 'view' - if self.cw_rset is not None: - params = {'rql': self.cw_rset.printable_rql()} - else: - path = self._cw.relative_path(includeparams=False) - return self._cw.build_url(path, **params) - - -class ModifyAction(action.Action): - __regid__ = 'edit' - __select__ = (action.Action.__select__ - & one_line_rset() & has_editable_relation()) - - title = _('modify') - category = 'mainactions' - order = 10 - - def url(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return entity.absolute_url(vid='edition') - - -class MultipleEditAction(action.Action): - __regid__ = 'muledit' # XXX get strange conflicts if id='edit' - __select__ = (action.Action.__select__ & multi_lines_rset() & - one_etype_rset() & has_permission('update')) - - title = _('modify') - category = 'mainactions' - order = 10 - - def url(self): - return self._cw.build_url('view', rql=self.cw_rset.printable_rql(), vid='muledit') - - -# generic "more" actions ####################################################### - -class ManagePermissionsAction(action.Action): - __regid__ = 'managepermission' - __select__ = (action.Action.__select__ & one_line_rset() & - non_final_entity() & match_user_groups('managers')) - - title = _('manage permissions') - category = 'moreactions' - order = 15 - - def url(self): - return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).absolute_url(vid='security') - - -class DeleteAction(action.Action): - __regid__ = 'delete' - __select__ = action.Action.__select__ & has_permission('delete') - - title = _('delete') - category = 'moreactions' - order = 20 - - def url(self): - if len(self.cw_rset) == 1: - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return self._cw.build_url(entity.rest_path(), vid='deleteconf') - return self._cw.build_url(rql=self.cw_rset.printable_rql(), vid='deleteconf') - - -class CopyAction(action.Action): - __regid__ = 'copy' - __select__ = (action.Action.__select__ & one_line_rset() - & has_permission('add')) - - title = _('copy') - category = 'moreactions' - order = 30 - - def url(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return entity.absolute_url(vid='copy') - - -class AddNewAction(MultipleEditAction): - """when we're seeing more than one entity with the same type, propose to - add a new one - """ - __regid__ = 'addentity' - __select__ = (action.Action.__select__ & - (addable_etype_empty_rset() - | (multi_lines_rset() & one_etype_rset() & 
has_add_permission())) - ) - - category = 'moreactions' - order = 40 - - @property - def rsettype(self): - if self.cw_rset: - return self.cw_rset.description[0][0] - return self.cw_rset._searched_etype - - @property - def title(self): - return self._cw.__('add a %s' % self.rsettype) # generated msgid - - def url(self): - return self._cw.vreg["etypes"].etype_class(self.rsettype).cw_create_url(self._cw) - - -class AddRelatedActions(action.Action): - """fill 'addrelated' sub-menu of the actions box""" - __regid__ = 'addrelated' - __select__ = action.Action.__select__ & one_line_rset() & non_final_entity() - - submenu = _('addrelated') - order = 17 - - def fill_menu(self, box, menu): - # when there is only one item in the sub-menu, replace the sub-menu by - # item's title prefixed by 'add' - menu.label_prefix = self._cw._('add') - super(AddRelatedActions, self).fill_menu(box, menu) - - def redirect_params(self, entity): - return {'__redirectpath': entity.rest_path(), # should not be url quoted! - '__redirectvid': self._cw.form.get('vid', '')} - - def actual_actions(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - eschema = entity.e_schema - params = self.redirect_params(entity) - for rschema, teschema, role in self.add_related_schemas(entity): - if rschema.role_rdef(eschema, teschema, role).role_cardinality(role) in '1?': - if entity.related(rschema, role): - continue - if role == 'subject': - label = 'add %s %s %s %s' % (eschema, rschema, teschema, role) - url = self.linkto_url(entity, rschema, teschema, 'object', **params) - else: - label = 'add %s %s %s %s' % (teschema, rschema, eschema, role) - url = self.linkto_url(entity, rschema, teschema, 'subject', **params) - yield self.build_action(self._cw._(label), url) - - def add_related_schemas(self, entity): - """this is actually used ui method to generate 'addrelated' actions from - the schema. - - If you don't want any auto-generated actions, you should overrides this - method to return an empty list. If you only want some, you can configure - them by using uicfg.actionbox_appearsin_addmenu - """ - appearsin_addmenu = self._cw.vreg['uicfg'].select( - 'actionbox_appearsin_addmenu', self._cw, entity=entity) - req = self._cw - eschema = entity.e_schema - for role, rschemas in (('subject', eschema.subject_relations()), - ('object', eschema.object_relations())): - for rschema in rschemas: - if rschema.final: - continue - for teschema in rschema.targets(eschema, role): - if not appearsin_addmenu.etype_get(eschema, rschema, - role, teschema): - continue - rdef = rschema.role_rdef(eschema, teschema, role) - # check the relation can be added - # XXX consider autoform_permissions_overrides? 
- if role == 'subject'and not rdef.has_perm( - req, 'add', fromeid=entity.eid): - continue - if role == 'object'and not rdef.has_perm( - req, 'add', toeid=entity.eid): - continue - # check the target types can be added as well - if teschema.may_have_permission('add', req): - yield rschema, teschema, role - - def linkto_url(self, entity, rtype, etype, target, **kwargs): - return self._cw.vreg["etypes"].etype_class(etype).cw_create_url( - self._cw, __linkto='%s:%s:%s' % (rtype, entity.eid, target), - **kwargs) - - -class ViewSameCWEType(action.Action): - """when displaying the schema of a CWEType, offer to list entities of that type - """ - __regid__ = 'entitiesoftype' - __select__ = one_line_rset() & is_instance('CWEType') & score_entity(lambda x: not x.final) - category = 'mainactions' - order = 40 - - @property - def etype(self): - return self.cw_rset.get_entity(0,0).name - - @property - def title(self): - return self._cw.__('view all %s') % display_name(self._cw, self.etype, 'plural').lower() - - def url(self): - return self._cw.build_url(self.etype) - -# logged user actions ######################################################### - -class UserPreferencesAction(action.Action): - __regid__ = 'myprefs' - __select__ = authenticated_user() - - title = _('user preferences') - category = 'useractions' - order = 10 - - def url(self): - return self._cw.build_url(self.__regid__) - - -class UserInfoAction(action.Action): - __regid__ = 'myinfos' - __select__ = authenticated_user() - - title = _('profile') - category = 'useractions' - order = 20 - - def url(self): - return self._cw.build_url('cwuser/%s'%self._cw.user.login, vid='edition') - - -class LogoutAction(action.Action): - __regid__ = 'logout' - __select__ = authenticated_user() - - title = _('logout') - category = 'useractions' - order = 30 - - def url(self): - return self._cw.build_url(self.__regid__) - - -# site actions ################################################################ - -class ManagersAction(action.Action): - __abstract__ = True - __select__ = match_user_groups('managers') - - category = 'siteactions' - - def url(self): - return self._cw.build_url(self.__regid__) - - -class SiteConfigurationAction(ManagersAction): - __regid__ = 'siteconfig' - title = _('site configuration') - order = 10 - category = 'manage' - - -class ManageAction(ManagersAction): - __regid__ = 'manage' - title = _('manage') - order = 20 - - -# footer actions ############################################################### - -class PoweredByAction(action.Action): - __regid__ = 'poweredby' - __select__ = yes() - - category = 'footer' - order = 3 - title = _('Powered by CubicWeb') - - def url(self): - return 'http://www.cubicweb.org' - -## default actions ui configuration ########################################### - -addmenu = uicfg.actionbox_appearsin_addmenu -addmenu.tag_object_of(('*', 'relation_type', 'CWRType'), True) -addmenu.tag_object_of(('*', 'from_entity', 'CWEType'), False) -addmenu.tag_object_of(('*', 'to_entity', 'CWEType'), False) -addmenu.tag_object_of(('*', 'in_group', 'CWGroup'), True) -addmenu.tag_object_of(('*', 'bookmarked_by', 'CWUser'), True) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/ajaxcontroller.py --- a/web/views/ajaxcontroller.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,458 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -# -# (disable pylint msg for client obj access to protected member as in obj._cw) -# pylint: disable=W0212 -"""The ``ajaxcontroller`` module defines the :class:`AjaxController` -controller and the ``ajax-func`` cubicweb registry. - -.. autoclass:: cubicweb.web.views.ajaxcontroller.AjaxController - :members: - -``ajax-func`` registry hosts exposed remote functions, that is -functions that can be called from the javascript world. - -To register a new remote function, either decorate your function -with the :func:`~cubicweb.web.views.ajaxcontroller.ajaxfunc` decorator: - -.. sourcecode:: python - - from cubicweb.predicates import match_user_groups - from cubicweb.web.views.ajaxcontroller import ajaxfunc - - @ajaxfunc(output_type='json', selector=match_user_groups('managers')) - def list_users(self): - return [u for (u,) in self._cw.execute('Any L WHERE U login L')] - -or inherit from :class:`~cubicweb.web.views.ajaxcontroller.AjaxFunction` and -implement the ``__call__`` method: - -.. sourcecode:: python - - from cubicweb.web.views.ajaxcontroller import AjaxFunction - class ListUser(AjaxFunction): - __regid__ = 'list_users' # __regid__ is the name of the exposed function - __select__ = match_user_groups('managers') - output_type = 'json' - - def __call__(self): - return [u for (u, ) in self._cw.execute('Any L WHERE U login L')] - - -.. autoclass:: cubicweb.web.views.ajaxcontroller.AjaxFunction - :members: - -.. autofunction:: cubicweb.web.views.ajaxcontroller.ajaxfunc - -""" - -__docformat__ = "restructuredtext en" - -from warnings import warn -from functools import partial - -from six import PY2, text_type - -from logilab.common.date import strptime -from logilab.common.registry import yes -from logilab.common.deprecation import deprecated - -from cubicweb import ObjectNotFound, NoSelectableObject -from cubicweb.appobject import AppObject -from cubicweb.utils import json, json_dumps, UStringIO -from cubicweb.uilib import exc_message -from cubicweb.web import RemoteCallFailed, DirectResponse -from cubicweb.web.controller import Controller -from cubicweb.web.views import vid_from_rset -from cubicweb.web.views import basecontrollers - - -def optional_kwargs(extraargs): - if extraargs is None: - return {} - # we receive unicode keys which is not supported by the **syntax - return dict((str(key), value) for key, value in extraargs.items()) - - -class AjaxController(Controller): - """AjaxController handles ajax remote calls from javascript - - The following javascript function call: - - ..
sourcecode:: javascript - - var d = asyncRemoteExec('foo', 12, "hello"); - d.addCallback(function(result) { - alert('server response is: ' + result); - }); - - will generate an ajax HTTP GET on the following url:: - - BASE_URL/ajax?fname=foo&arg=12&arg="hello" - - The AjaxController controller will therefore be selected to handle those URLs - and will itself select the :class:`cubicweb.web.views.ajaxcontroller.AjaxFunction` - matching the *fname* parameter. - """ - __regid__ = 'ajax' - - def publish(self, rset=None): - self._cw.ajax_request = True - try: - fname = self._cw.form['fname'] - except KeyError: - raise RemoteCallFailed('no method specified') - # 1/ check first for old-style (JSonController) ajax func for bw compat - try: - func = getattr(basecontrollers.JSonController, 'js_%s' % fname) - if PY2: - func = func.__func__ - func = partial(func, self) - except AttributeError: - # 2/ check for new-style (AjaxController) ajax func - try: - func = self._cw.vreg['ajax-func'].select(fname, self._cw) - except ObjectNotFound: - raise RemoteCallFailed('no %s method' % fname) - else: - warn('[3.15] remote function %s found on JSonController, ' - 'use AjaxFunction / @ajaxfunc instead' % fname, - DeprecationWarning, stacklevel=2) - # no <arg> attribute means the callback takes no argument - args = self._cw.form.get('arg', ()) - if not isinstance(args, (list, tuple)): - args = (args,) - try: - args = [json.loads(arg) for arg in args] - except ValueError as exc: - self.exception('error while decoding json arguments for ' - 'js_%s: %s (err: %s)', fname, args, exc) - raise RemoteCallFailed(exc_message(exc, self._cw.encoding)) - try: - result = func(*args) - except (RemoteCallFailed, DirectResponse): - raise - except Exception as exc: - self.exception('an exception occurred while calling js_%s(%s): %s', - fname, args, exc) - raise RemoteCallFailed(exc_message(exc, self._cw.encoding)) - if result is None: - return '' - # get unicode on @htmlize methods, encoded string on @jsonize methods - elif isinstance(result, text_type): - return result.encode(self._cw.encoding) - return result - -class AjaxFunction(AppObject): - """ - Attributes on this base class are: - - :attr: `check_pageid`: make sure the pageid received is valid before proceeding - :attr: `output_type`: - - - *None*: no processing, no change on content-type - - - *json*: serialize with `json_dumps` and set *application/json* - content-type - - - *xhtml*: wrap result in an XML node and forces HTML / XHTML - content-type (use ``_cw.html_content_type()``) - - """ - __registry__ = 'ajax-func' - __select__ = yes() - __abstract__ = True - - check_pageid = False - output_type = None - - @staticmethod - def _rebuild_posted_form(names, values, action=None): - form = {} - for name, value in zip(names, values): - # remove possible __action_xxx inputs - if name.startswith('__action'): - if action is None: - # strip '__action_' to get the actual action name - action = name[9:] - continue - # form.setdefault(name, []).append(value) - if name in form: - curvalue = form[name] - if isinstance(curvalue, list): - curvalue.append(value) - else: - form[name] = [curvalue, value] - else: - form[name] = value - # simulate click on __action_%s button to help the controller - if action: - form['__action_%s' % action] = u'whatever' - return form - - def validate_form(self, action, names, values): - self._cw.form = self._rebuild_posted_form(names, values, action) - return basecontrollers._validate_form(self._cw, self._cw.vreg) - - def _exec(self, rql, args=None, 
rocheck=True): - """json mode: execute RQL and return resultset as json""" - rql = rql.strip() - if rql.startswith('rql:'): - rql = rql[4:] - if rocheck: - self._cw.ensure_ro_rql(rql) - try: - return self._cw.execute(rql, args) - except Exception as ex: - self.exception("error in _exec(rql=%s): %s", rql, ex) - return None - return None - - def _call_view(self, view, paginate=False, **kwargs): - divid = self._cw.form.get('divid') - # we need to call pagination before with the stream set - try: - stream = view.set_stream() - except AttributeError: - stream = UStringIO() - kwargs['w'] = stream.write - assert not paginate - if divid == 'pageContent': - # ensure divid isn't reused by the view (e.g. table view) - del self._cw.form['divid'] - # mimick main template behaviour - stream.write(u'<div id="pageContent">') - vtitle = self._cw.form.get('vtitle') - if vtitle: - stream.write(u'<h1 class="vtitle">%s</h1>\n' % vtitle) - paginate = True - nav_html = UStringIO() - if paginate and not view.handle_pagination: - view.paginate(w=nav_html.write) - stream.write(nav_html.getvalue()) - if divid == 'pageContent': - stream.write(u'<div id="contentmain">') - view.render(**kwargs) - extresources = self._cw.html_headers.getvalue(skiphead=True) - if extresources: - stream.write(u'<div class="ajaxHtmlHead">\n') # XXX use a widget? - stream.write(extresources) - stream.write(u'</div>\n') - if divid == 'pageContent': - stream.write(u'</div>%s</div>' % nav_html.getvalue()) - return stream.getvalue() - - -def _ajaxfunc_factory(implementation, selector=yes(), _output_type=None, - _check_pageid=False, regid=None): - """converts a standard python function into an AjaxFunction appobject""" - class AnAjaxFunc(AjaxFunction): - __regid__ = regid or implementation.__name__ - __select__ = selector - output_type = _output_type - check_pageid = _check_pageid - - def serialize(self, content): - if self.output_type is None: - return content - elif self.output_type == 'xhtml': - self._cw.set_content_type(self._cw.html_content_type()) - return ''.join((u'<div>', - content.strip(), u'</div>')) - elif self.output_type == 'json': - self._cw.set_content_type('application/json') - return json_dumps(content) - raise RemoteCallFailed('no serializer found for output type %s' - % self.output_type) - - def __call__(self, *args, **kwargs): - if self.check_pageid: - data = self._cw.session.data.get(self._cw.pageid) - if data is None: - raise RemoteCallFailed(self._cw._('pageid-not-found')) - return self.serialize(implementation(self, *args, **kwargs)) - - AnAjaxFunc.__name__ = implementation.__name__ - # make sure __module__ refers to the original module otherwise - # vreg.register(obj) will ignore ``obj``. - AnAjaxFunc.__module__ = implementation.__module__ - # relate the ``implementation`` object to its wrapper appobject - # will be used by e.g.: - # import base_module - # @ajaxfunc - # def foo(self): - # return 42 - # assert foo(object) == 42 - # vreg.register_and_replace(foo, base_module.older_foo) - implementation.__appobject__ = AnAjaxFunc - return implementation - - -def ajaxfunc(implementation=None, selector=yes(), output_type=None, - check_pageid=False, regid=None): - """promote a standard function to an ``AjaxFunction`` appobject. - - All parameters are optional: - - :param selector: a custom selector object if needed, default is ``yes()`` - - :param output_type: either None, 'json' or 'xhtml' to customize output - content-type. 
Default is None - - :param check_pageid: whether the function requires a valid `pageid` or not - to proceed. Default is False. - - :param regid: a custom __regid__ for the created ``AjaxFunction`` object. Default - is to keep the wrapped function name. - - ``ajaxfunc`` can be used both as a standalone decorator: - - .. sourcecode:: python - - @ajaxfunc - def my_function(self): - return 42 - - or as a parametrizable decorator: - - .. sourcecode:: python - - @ajaxfunc(output_type='json') - def my_function(self): - return 42 - - """ - # if used as a parametrized decorator (e.g. @ajaxfunc(output_type='json')) - if implementation is None: - def _decorator(func): - return _ajaxfunc_factory(func, selector=selector, - _output_type=output_type, - _check_pageid=check_pageid, - regid=regid) - return _decorator - # else, used as a standalone decorator (i.e. @ajaxfunc) - return _ajaxfunc_factory(implementation, selector=selector, - _output_type=output_type, - _check_pageid=check_pageid, regid=regid) - - - -############################################################################### -# Cubicweb remote functions for : # -# - appobject rendering # -# - user / page session data management # -############################################################################### -@ajaxfunc(output_type='xhtml') -def view(self): - # XXX try to use the page-content template - req = self._cw - rql = req.form.get('rql') - if rql: - rset = self._exec(rql) - elif 'eid' in req.form: - rset = self._cw.eid_rset(req.form['eid']) - else: - rset = None - vid = req.form.get('vid') or vid_from_rset(req, rset, self._cw.vreg.schema) - try: - viewobj = self._cw.vreg['views'].select(vid, req, rset=rset) - except NoSelectableObject: - vid = req.form.get('fallbackvid', 'noresult') - viewobj = self._cw.vreg['views'].select(vid, req, rset=rset) - viewobj.set_http_cache_headers() - if req.is_client_cache_valid(): - return '' - return self._call_view(viewobj, paginate=req.form.pop('paginate', False)) - - -@ajaxfunc(output_type='xhtml') -def component(self, compid, rql, registry='components', extraargs=None): - if rql: - rset = self._exec(rql) - else: - rset = None - # XXX while it sounds good, addition of the try/except below cause pb: - # when filtering using facets return an empty rset, the edition box - # isn't anymore selectable, as expected. The pb is that with the - # try/except below, we see a "an error occurred" message in the ui, while - # we don't see it without it. Proper fix would probably be to deal with - # this by allowing facet handling code to tell to js_component that such - # error is expected and should'nt be reported. 
- #try: - comp = self._cw.vreg[registry].select(compid, self._cw, rset=rset, - **optional_kwargs(extraargs)) - #except NoSelectableObject: - # raise RemoteCallFailed('unselectable') - return self._call_view(comp, **optional_kwargs(extraargs)) - -@ajaxfunc(output_type='xhtml') -def render(self, registry, oid, eid=None, - selectargs=None, renderargs=None): - if eid is not None: - rset = self._cw.eid_rset(eid) - # XXX set row=0 - elif self._cw.form.get('rql'): - rset = self._cw.execute(self._cw.form['rql']) - else: - rset = None - viewobj = self._cw.vreg[registry].select(oid, self._cw, rset=rset, - **optional_kwargs(selectargs)) - return self._call_view(viewobj, **optional_kwargs(renderargs)) - - -@ajaxfunc(output_type='json') -def i18n(self, msgids): - """returns the translation of `msgid`""" - return [self._cw._(msgid) for msgid in msgids] - -@ajaxfunc(output_type='json') -def format_date(self, strdate): - """returns the formatted date for `msgid`""" - date = strptime(strdate, '%Y-%m-%d %H:%M:%S') - return self._cw.format_date(date) - -@ajaxfunc(output_type='json') -def external_resource(self, resource): - """returns the URL of the external resource named `resource`""" - return self._cw.uiprops[resource] - -@ajaxfunc -def unload_page_data(self): - """remove user's session data associated to current pageid""" - self._cw.session.data.pop(self._cw.pageid, None) - -@ajaxfunc(output_type='json') -@deprecated("[3.13] use jQuery.cookie(cookiename, cookievalue, {path: '/'}) in js land instead") -def set_cookie(self, cookiename, cookievalue): - """generates the Set-Cookie HTTP reponse header corresponding - to `cookiename` / `cookievalue`. - """ - cookiename, cookievalue = str(cookiename), str(cookievalue) - self._cw.set_cookie(cookiename, cookievalue) - - - -@ajaxfunc -def delete_relation(self, rtype, subjeid, objeid): - rql = 'DELETE S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype - self._cw.execute(rql, {'s': subjeid, 'o': objeid}) - -@ajaxfunc -def add_relation(self, rtype, subjeid, objeid): - rql = 'SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype - self._cw.execute(rql, {'s': subjeid, 'o': objeid}) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/ajaxedit.py --- a/web/views/ajaxedit.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,111 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. 
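The remote functions above (``delete_relation``, ``add_relation`` and friends) are reached through the URL convention documented on ``AjaxController``: a GET on ``BASE_URL/ajax`` with an ``fname`` parameter and one JSON-encoded ``arg`` parameter per positional argument. The helper below is purely illustrative (``ajax_call_url`` is a hypothetical name, Python 3 only, not part of CubicWeb) and simply mirrors that wire format:

.. sourcecode:: python

    from json import dumps
    from urllib.parse import urlencode

    def ajax_call_url(base_url, fname, *args):
        """Build the GET URL that an asyncRemoteExec-style call would request.

        Each positional argument is JSON-encoded, matching the json.loads()
        decoding performed in AjaxController.publish().
        """
        query = [('fname', fname)] + [('arg', dumps(arg)) for arg in args]
        return '%sajax?%s' % (base_url, urlencode(query))

    # e.g. the request behind the delete_relation ajax-func defined above
    print(ajax_call_url('https://example.org/', 'delete_relation', 'in_group', 1234, 5678))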
-"""Set of views allowing edition of entities/relations using ajax""" - -__docformat__ = "restructuredtext en" - -from cubicweb import role -from cubicweb.view import View -from cubicweb.predicates import match_form_params, match_kwargs -from cubicweb.web import component, stdmsgs, formwidgets as fw - -class AddRelationView(component.EditRelationMixIn, View): - """base class for view which let add entities linked by a given relation - - subclasses should define at least id, rtype and target class attributes. - """ - __registry__ = 'views' - __regid__ = 'xaddrelation' - __select__ = (match_form_params('rtype', 'target') - | match_kwargs('rtype', 'target')) - cw_property_defs = {} # don't want to inherit this from Box - expected_kwargs = form_params = ('rtype', 'target') - - def cell_call(self, row, col, rtype=None, target=None, etype=None): - self.rtype = rtype or self._cw.form['rtype'] - self.target = target or self._cw.form['target'] - self.etype = etype or self._cw.form.get('etype') - entity = self.cw_rset.get_entity(row, col) - rschema = self._cw.vreg.schema.rschema(self.rtype) - if not self.etype: - if self.target == 'object': - etypes = rschema.objects(entity.e_schema) - else: - etypes = rschema.subjects(entity.e_schema) - if len(etypes) == 1: - self.etype = etypes[0] - self.w(u'<div id="%s">' % self.domid) - self.w(u'<h1>%s</h1>' % self._cw._('relation %(relname)s of %(ent)s') - % {'relname': rschema.display_name(self._cw, role(self)), - 'ent': entity.view('incontext')}) - self.w(u'<ul class="list-unstyled">') - for boxitem in self.unrelated_boxitems(entity): - self.w('<li>%s</li>' % boxitem) - self.w(u'</ul></div>') - - def unrelated_entities(self, entity): - """returns the list of unrelated entities - - if etype is not defined on the Box's class, the default - behaviour is to use the entity's appropraite vocabulary function - """ - # use entity.unrelated if we've been asked for a particular etype - if getattr(self, 'etype', None): - rset = entity.unrelated(self.rtype, self.etype, role(self), - ordermethod='fetch_order') - self.paginate(self._cw, rset=rset, w=self.w) - return rset.entities() - super(AddRelationView, self).unrelated_entities(self) - - -def ajax_composite_form(container, entity, rtype, okjs, canceljs, - entityfkwargs=None): - """ - * if entity is None, edit container (assert container.has_eid()) - * if entity has not eid, will be created - * if container has not eid, will be created (see vcreview InsertionPoint) - """ - req = container._cw - parentexists = entity is None or container.has_eid() - buttons = [fw.Button(onclick=okjs), - fw.Button(stdmsgs.BUTTON_CANCEL, onclick=canceljs)] - freg = req.vreg['forms'] - # main form kwargs - mkwargs = dict(action='#', domid='%sForm%s' % (rtype, container.eid), - form_buttons=buttons, - onsubmit='javascript: %s; return false' % okjs) - # entity form kwargs - # use formtype=inlined to skip the generic relations edition section - fkwargs = dict(entity=entity or container, formtype='inlined') - if entityfkwargs is not None: - fkwargs.update(entityfkwargs) - # form values - formvalues = {} - if entity is not None: # creation - formvalues[rtype] = container.eid - if parentexists: # creation / edition - mkwargs.update(fkwargs) - # use formtype=inlined to avoid viewing the relation edition section - form = freg.select('edition', req, **mkwargs) - else: # creation of both container and comment entities - form = freg.select('composite', req, form_renderer_id='default', - **mkwargs) - form.add_subform(freg.select('edition', req, 
entity=container, - mainform=False, mainentity=True)) - form.add_subform(freg.select('edition', req, mainform=False, **fkwargs)) - return form, formvalues diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/apacherewrite.py --- a/web/views/apacherewrite.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,111 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""provide class to do Apache rewrite rules'job inside cubicweb (though functionnalities -are much more limited for the moment) - -""" - -__docformat__ = "restructuredtext en" - -from re import compile - -from cubicweb.web import Redirect -from cubicweb.web.component import Component - -class RewriteCond(object): - def __init__(self, condition, match='host', rules=(), action='rewrite'): - self.condition = compile(condition) - assert match in ('host', 'path'), match - self.match_part = match - self.rules = [] - for rule, replace in rules: - rulergx = compile(rule) - self.rules.append( (rulergx, replace) ) - assert action in ('rewrite', 'redirect', 'stop'), action - self.process = getattr(self, 'action_%s' % action) - - def match(self, **kwargs): - self._match = self.condition.match(kwargs[self.match_part]) - return not self._match is None - - def action_rewrite(self, path): - for rgx, replace in self.rules: - if not rgx.match(path) is None: - matchdict = self._match.groupdict() or None - if not matchdict is None: - replace = replace % matchdict - return rgx.sub(replace, path) - return path - - def action_redirect(self, path): - url = self.action_rewrite(path) - raise Redirect(url) - - def action_stop(self, path): - return path - - -class ApacheURLRewrite(Component): - """inherit from this class with actual rules to activate apache style rewriting - - rules should have the form : - - [('condition pattern 1', [('rule1 pattern', 'replace expression'), - ('rule2 pattern', 'replace expression')], - ('condition pattern 2', [('rule1 pattern', 'replace expression'), - ('rule2 pattern', 'replace expression')] - ] - - for instance the equivalent of the following apache rules: - - RewriteCond %{HTTP_HOST} ^logilab\.fr - RewriteRule ^/(.*) http://www.logilab.fr/$1 [L,R=301] - - RewriteCond %{HTTP_HOST} ^www\.logilab\.fr - RewriteRule ^/(.*) http://localhost:8080/$1 [L,P] - - RewriteCond %{HTTP_HOST} ^(.+)\.logilab\.fr - RewriteRule ^/(data/.*) http://localhost:8080/$1 [L,P] - RewriteRule ^/(json.*) http://localhost:8080/$1 [L,P] - RewriteRule ^/(.*) http://localhost:8080/m_%1/$1 [L,P] - - could be written (considering that no "host rewritting" is necessary): - - class MyAppRules(ApacheURLRewrite): - rules = [ - RewriteCond('logilab\.fr', match='host', - rules=[('/(.*)', r'http://www.logilab.fr/\1')], - action='redirect'), - 
RewriteCond('(www)\.logilab\.fr', match='host', action='stop'), - RewriteCond('/(data|json)/', match='path', action='stop'), - RewriteCond('(?P<cat>.*)\.logilab\.fr', match='host', - rules=[('/(.*)', r'/m_%(cat)s/\1')]), - ] - """ - __abstract__ = True - __regid__ = 'urlrewriter' - rules = [] - - def get_rules(self, req): - return self.rules - - def rewrite(self, host, path, req): - for cond in self.get_rules(req): - if cond.match(host=host, path=path): - return cond.process(path) - return path diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/authentication.py --- a/web/views/authentication.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,174 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""user authentication component""" - -__docformat__ = "restructuredtext en" - -from logilab.common.deprecation import class_renamed - -from cubicweb import AuthenticationError -from cubicweb.view import Component -from cubicweb.web import InvalidSession - - -class NoAuthInfo(Exception): pass - - -class WebAuthInfoRetriever(Component): - __registry__ = 'webauth' - order = None - __abstract__ = True - - def authentication_information(self, req): - """retrieve authentication information from the given request, raise - NoAuthInfo if expected information is not found. - """ - raise NotImplementedError() - - def authenticated(self, retriever, req, session, login, authinfo): - """callback when return authentication information have opened a - repository connection successfully. Take care req has no session - attached yet, hence req.execute isn't available. - """ - pass - - def request_has_auth_info(self, req): - """tells from the request if it has enough information - to proceed to authentication, would the current session - be invalidated - """ - raise NotImplementedError() - - def revalidate_login(self, req): - """returns a login string or None, for repository session validation - purposes - """ - raise NotImplementedError() - - def cleanup_authentication_information(self, req): - """called when the retriever has returned some authentication - information but we get an authentication error when using them, so it - get a chance to clean things up (e.g. remove cookie) - """ - pass - -WebAuthInfoRetreiver = class_renamed( - 'WebAuthInfoRetreiver', WebAuthInfoRetriever, - '[3.17] WebAuthInfoRetreiver had been renamed into WebAuthInfoRetriever ' - '("ie" instead of "ei")') - - -class LoginPasswordRetriever(WebAuthInfoRetriever): - __regid__ = 'loginpwdauth' - order = 10 - - def authentication_information(self, req): - """retreive authentication information from the given request, raise - NoAuthInfo if expected information is not found. 
- """ - login, password = req.get_authorization() - if not login: - raise NoAuthInfo() - return login, {'password': password} - - def request_has_auth_info(self, req): - return req.get_authorization()[0] is not None - - def revalidate_login(self, req): - return req.get_authorization()[0] - -LoginPasswordRetreiver = class_renamed( - 'LoginPasswordRetreiver', LoginPasswordRetriever, - '[3.17] LoginPasswordRetreiver had been renamed into LoginPasswordRetriever ' - '("ie" instead of "ei")') - - - -class RepositoryAuthenticationManager(object): - """authenticate user associated to a request and check session validity""" - - def __init__(self, repo): - self.repo = repo - vreg = repo.vreg - self.log_queries = vreg.config['query-log-file'] - self.authinforetrievers = sorted(vreg['webauth'].possible_objects(vreg), - key=lambda x: x.order) - # 2-uple login / password, login is None when no anonymous access - # configured - self.anoninfo = vreg.config.anonymous_user() - if self.anoninfo[0]: - self.anoninfo = (self.anoninfo[0], {'password': self.anoninfo[1]}) - - def validate_session(self, req, session): - """check session validity and return the connected user on success. - - raise :exc:`InvalidSession` if session is corrupted for a reason or - another and should be closed - - also invoked while going from anonymous to logged in - """ - for retriever in self.authinforetrievers: - if retriever.request_has_auth_info(req): - login = retriever.revalidate_login(req) - return self._validate_session(req, session, login) - # let's try with the current session - return self._validate_session(req, session, None) - - def _validate_session(self, req, session, login): - # check session.login and not user.login, since in case of login by - # email, login and cnx.login are the email while user.login is the - # actual user login - if login and session.login != login: - raise InvalidSession('login mismatch') - - def authenticate(self, req): - """authenticate user using connection information found in the request, - and return corresponding a :class:`~cubicweb.dbapi.Connection` instance, - as well as login used to open the connection. - - raise :exc:`cubicweb.AuthenticationError` if authentication failed - (no authentication info found or wrong user/password) - """ - has_auth = False - for retriever in self.authinforetrievers: - try: - login, authinfo = retriever.authentication_information(req) - except NoAuthInfo: - continue - has_auth = True - try: - session = self._authenticate(login, authinfo) - except AuthenticationError: - retriever.cleanup_authentication_information(req) - continue # the next one may succeed - for retriever_ in self.authinforetrievers: - retriever_.authenticated(retriever, req, session, login, authinfo) - return session, login - # false if no authentication info found, i.e. this is not an - # authentication failure - if has_auth: - req.set_message(req._('authentication failure')) - login, authinfo = self.anoninfo - if login: - session = self._authenticate(login, authinfo) - return session, login - raise AuthenticationError() - - def _authenticate(self, login, authinfo): - sessionid = self.repo.connect(login, **authinfo) - return self.repo._sessions[sessionid] diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/autoform.py --- a/web/views/autoform.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1057 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -""" -.. autodocstring:: cubicweb.web.views.autoform::AutomaticEntityForm - -Configuration through uicfg -``````````````````````````` - -It is possible to manage which and how an entity's attributes and relations -will be edited in the various contexts where the automatic entity form is used -by using proper uicfg tags. - -The details of the uicfg syntax can be found in the :ref:`uicfg` chapter. - -Possible relation tags that apply to entity forms are detailled below. -They are all in the :mod:`cubicweb.web.uicfg` module. - -Attributes/relations display location -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -``autoform_section`` specifies where to display a relation in form for a given -form type. :meth:`tag_attribute`, :meth:`tag_subject_of` and -:meth:`tag_object_of` methods for this relation tag expect two arguments -additionally to the relation key: a `formtype` and a `section`. - -`formtype` may be one of: - -* 'main', the main entity form (e.g. the one you get when creating or editing an - entity) - -* 'inlined', the form for an entity inlined into another form - -* 'muledit', the table form when editing multiple entities of the same type - - -section may be one of: - -* 'hidden', don't display (not even in a hidden input) - -* 'attributes', display in the attributes section - -* 'relations', display in the relations section, using the generic relation - selector combobox (available in main form only, and not usable for attributes) - -* 'inlined', display target entity of the relation into an inlined form - (available in main form only, and not for attributes) - -By default, mandatory relations are displayed in the 'attributes' section, -others in 'relations' section. - - -Change default fields -^^^^^^^^^^^^^^^^^^^^^ - -Use ``autoform_field`` to replace the default field class to use for a relation -or attribute. You can put either a field class or instance as value (put a class -whenether it's possible). - -.. Warning:: - - `autoform_field_kwargs` should usually be used instead of - `autoform_field`. If you put a field instance into `autoform_field`, - `autoform_field_kwargs` values for this relation will be ignored. - - -Customize field options -^^^^^^^^^^^^^^^^^^^^^^^ - -In order to customize field options (see :class:`~cubicweb.web.formfields.Field` -for a detailed list of options), use `autoform_field_kwargs`. This rtag takes -a dictionary as arguments, that will be given to the field's contructor. - -You can then put in that dictionary any arguments supported by the field -class. For instance: - -.. sourcecode:: python - - # Change the content of the combobox. 
Here `ticket_done_in_choices` is a - # function which returns a list of elements to populate the combobox - autoform_field_kwargs.tag_subject_of(('Ticket', 'done_in', '*'), - {'sort': False, - 'choices': ticket_done_in_choices}) - - # Force usage of a TextInput widget for the expression attribute of - # RQLExpression entities - autoform_field_kwargs.tag_attribute(('RQLExpression', 'expression'), - {'widget': fw.TextInput}) - -.. note:: - - the widget argument can be either a class or an instance (the later - case being convenient to pass the Widget specific initialisation - options) - -Overriding permissions -^^^^^^^^^^^^^^^^^^^^^^ - -The `autoform_permissions_overrides` rtag provides a way to by-pass security -checking for dark-corner case where it can't be verified properly. - - -.. More about inlined forms -.. Controlling the generic relation fields -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from six.moves import range - -from logilab.mtconverter import xml_escape -from logilab.common.decorators import iclassmethod, cached -from logilab.common.deprecation import deprecated -from logilab.common.registry import NoSelectableObject - -from cubicweb import neg_role, uilib -from cubicweb.schema import display_name -from cubicweb.view import EntityView -from cubicweb.predicates import ( - match_kwargs, match_form_params, non_final_entity, - specified_etype_implements) -from cubicweb.utils import json_dumps -from cubicweb.web import (stdmsgs, eid_param, - form as f, formwidgets as fw, formfields as ff) -from cubicweb.web.views import uicfg, forms -from cubicweb.web.views.ajaxcontroller import ajaxfunc - - -# inlined form handling ######################################################## - -class InlinedFormField(ff.Field): - def __init__(self, view=None, **kwargs): - kwargs.setdefault('label', None) - # don't add eidparam=True since this field doesn't actually hold the - # relation value (the subform does) hence should not be listed in - # _cw_entity_fields - super(InlinedFormField, self).__init__(name=view.rtype, role=view.role, - **kwargs) - self.view = view - - def render(self, form, renderer): - """render this field, which is part of form, using the given form - renderer - """ - view = self.view - i18nctx = 'inlined:%s.%s.%s' % (form.edited_entity.e_schema, - view.rtype, view.role) - return u'<div class="inline-%s-%s-slot">%s</div>' % ( - view.rtype, view.role, - view.render(i18nctx=i18nctx, row=view.cw_row, col=view.cw_col)) - - def form_init(self, form): - """method called before by build_context to trigger potential field - initialization requiring the form instance - """ - if self.view.form: - self.view.form.build_context(form.formvalues) - - @property - def needs_multipart(self): - if self.view.form: - # take a look at inlined forms to check (recursively) if they need - # multipart handling. 
- return self.view.form.needs_multipart - return False - - def has_been_modified(self, form): - return False - - def process_posted(self, form): - pass # handled by the subform - - -class InlineEntityEditionFormView(f.FormViewMixIn, EntityView): - """ - :attr peid: the parent entity's eid hosting the inline form - :attr rtype: the relation bridging `etype` and `peid` - :attr role: the role played by the `peid` in the relation - :attr pform: the parent form where this inlined form is being displayed - """ - __regid__ = 'inline-edition' - __select__ = non_final_entity() & match_kwargs('peid', 'rtype') - - _select_attrs = ('peid', 'rtype', 'role', 'pform', 'etype') - removejs = "removeInlinedEntity('%s', '%s', '%s')" - - # make pylint happy - peid = rtype = role = pform = etype = None - - def __init__(self, *args, **kwargs): - for attr in self._select_attrs: - # don't pop attributes from kwargs, so the end-up in - # self.cw_extra_kwargs which is then passed to the edition form (see - # the .form method) - setattr(self, attr, kwargs.get(attr)) - super(InlineEntityEditionFormView, self).__init__(*args, **kwargs) - - def _entity(self): - assert self.cw_row is not None, self - return self.cw_rset.get_entity(self.cw_row, self.cw_col) - - @property - def petype(self): - assert isinstance(self.peid, int) - pentity = self._cw.entity_from_eid(self.peid) - return pentity.e_schema.type - - @property - @cached - def form(self): - entity = self._entity() - form = self._cw.vreg['forms'].select('edition', self._cw, - entity=entity, - formtype='inlined', - form_renderer_id='inline', - copy_nav_params=False, - mainform=False, - parent_form=self.pform, - **self.cw_extra_kwargs) - if self.pform is None: - form.restore_previous_post(form.session_key()) - #assert form.parent_form - self.add_hiddens(form, entity) - return form - - def cell_call(self, row, col, i18nctx, **kwargs): - """ - :param peid: the parent entity's eid hosting the inline form - :param rtype: the relation bridging `etype` and `peid` - :param role: the role played by the `peid` in the relation - """ - entity = self._entity() - divonclick = "restoreInlinedEntity('%s', '%s', '%s')" % ( - self.peid, self.rtype, entity.eid) - self.render_form(i18nctx, divonclick=divonclick, **kwargs) - - def _get_removejs(self): - """ - Don't display the remove link in edition form if the - cardinality is 1. Handled in InlineEntityCreationFormView for - creation form. - """ - entity = self._entity() - rdef = entity.e_schema.rdef(self.rtype, neg_role(self.role), self.petype) - card = rdef.role_cardinality(self.role) - if card == '1': # don't display remove link - return None - # if cardinality is 1..n (+), dont display link to remove an inlined form for the first form - # allowing to edit the relation. 
To detect so: - # - # * if parent form (pform) is None, we're generated through an ajax call and so we know this - # is not the first form - # - # * if parent form is not None, look for previous InlinedFormField in the parent's form - # fields - if card == '+' and self.pform is not None: - # retrieve all field'views handling this relation and return None if we're the first of - # them - first_view = next(iter((f.view for f in self.pform.fields - if isinstance(f, InlinedFormField) - and f.view.rtype == self.rtype and f.view.role == self.role))) - if self == first_view: - return None - return self.removejs and self.removejs % ( - self.peid, self.rtype, entity.eid) - - def render_form(self, i18nctx, **kwargs): - """fetch and render the form""" - entity = self._entity() - divid = '%s-%s-%s' % (self.peid, self.rtype, entity.eid) - title = self.form_title(entity, i18nctx) - removejs = self._get_removejs() - countkey = '%s_count' % self.rtype - try: - self._cw.data[countkey] += 1 - except KeyError: - self._cw.data[countkey] = 1 - self.form.render(w=self.w, divid=divid, title=title, removejs=removejs, - i18nctx=i18nctx, counter=self._cw.data[countkey] , - **kwargs) - - def form_title(self, entity, i18nctx): - return self._cw.pgettext(i18nctx, entity.cw_etype) - - def add_hiddens(self, form, entity): - """to ease overriding (see cubes.vcsfile.views.forms for instance)""" - iid = 'rel-%s-%s-%s' % (self.peid, self.rtype, entity.eid) - # * str(self.rtype) in case it's a schema object - # * neged_role() since role is the for parent entity, we want the role - # of the inlined entity - form.add_hidden(name=str(self.rtype), value=self.peid, - role=neg_role(self.role), eidparam=True, id=iid) - - def keep_entity(self, form, entity): - if not entity.has_eid(): - return True - # are we regenerating form because of a validation error? - if form.form_previous_values: - cdvalues = self._cw.list_form_param(eid_param(self.rtype, self.peid), - form.form_previous_values) - if unicode(entity.eid) not in cdvalues: - return False - return True - - -class InlineEntityCreationFormView(InlineEntityEditionFormView): - """ - :attr etype: the entity type being created in the inline form - """ - __regid__ = 'inline-creation' - __select__ = (match_kwargs('peid', 'petype', 'rtype') - & specified_etype_implements('Any')) - _select_attrs = InlineEntityEditionFormView._select_attrs + ('petype',) - - # make pylint happy - petype = None - - @property - def removejs(self): - entity = self._entity() - rdef = entity.e_schema.rdef(self.rtype, neg_role(self.role), self.petype) - card = rdef.role_cardinality(self.role) - # when one is adding an inline entity for a relation of a single card, - # the 'add a new xxx' link disappears. If the user then cancel the addition, - # we have to make this link appears back. This is done by giving add new link - # id to removeInlineForm. 
- if card == '?': - divid = "addNew%s%s%s:%s" % (self.etype, self.rtype, self.role, self.peid) - return "removeInlineForm('%%s', '%%s', '%s', '%%s', '%s')" % ( - self.role, divid) - elif card in '+*': - return "removeInlineForm('%%s', '%%s', '%s', '%%s')" % self.role - # don't do anything for card == '1' - - @cached - def _entity(self): - try: - cls = self._cw.vreg['etypes'].etype_class(self.etype) - except Exception: - self.w(self._cw._('no such entity type %s') % self.etype) - return - entity = cls(self._cw) - entity.eid = next(self._cw.varmaker) - return entity - - def call(self, i18nctx, **kwargs): - self.render_form(i18nctx, **kwargs) - - -class InlineAddNewLinkView(InlineEntityCreationFormView): - """ - :attr card: the cardinality of the relation according to role of `peid` - """ - __regid__ = 'inline-addnew-link' - __select__ = (match_kwargs('peid', 'petype', 'rtype') - & specified_etype_implements('Any')) - - _select_attrs = InlineEntityCreationFormView._select_attrs + ('card',) - card = None # make pylint happy - form = None # no actual form wrapped - - def call(self, i18nctx, **kwargs): - self._cw.set_varmaker() - divid = "addNew%s%s%s:%s" % (self.etype, self.rtype, self.role, self.peid) - self.w(u'<div class="inlinedform" id="%s" cubicweb:limit="true">' - % divid) - js = "addInlineCreationForm('%s', '%s', '%s', '%s', '%s', '%s')" % ( - self.peid, self.petype, self.etype, self.rtype, self.role, i18nctx) - if self.pform.should_hide_add_new_relation_link(self.rtype, self.card): - js = "toggleVisibility('%s'); %s" % (divid, js) - __ = self._cw.pgettext - self.w(u'<a class="addEntity" id="add%s:%slink" href="javascript: %s" >+ %s.</a>' - % (self.rtype, self.peid, js, __(i18nctx, 'add a %s' % self.etype))) - self.w(u'</div>') - - -# generic relations handling ################################################## - -def relation_id(eid, rtype, role, reid): - """return an identifier for a relation between two entities""" - if role == 'subject': - return u'%s:%s:%s' % (eid, rtype, reid) - return u'%s:%s:%s' % (reid, rtype, eid) - -def toggleable_relation_link(eid, nodeid, label='x'): - """return javascript snippet to delete/undelete a relation between two - entities - """ - js = u"javascript: togglePendingDelete('%s', %s);" % ( - nodeid, xml_escape(json_dumps(eid))) - return u'[<a class="handle" href="%s" id="handle%s">%s</a>]' % ( - js, nodeid, label) - - -def get_pending_inserts(req, eid=None): - """shortcut to access req's pending_insert entry - - This is where are stored relations being added while editing - an entity. This used to be stored in a temporary cookie. - """ - pending = req.session.data.get('pending_insert', ()) - return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending - if eid is None or eid in (subj, obj)] - -def get_pending_deletes(req, eid=None): - """shortcut to access req's pending_delete entry - - This is where are stored relations being removed while editing - an entity. This used to be stored in a temporary cookie. 
- """ - pending = req.session.data.get('pending_delete', ()) - return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending - if eid is None or eid in (subj, obj)] - -def parse_relations_descr(rdescr): - """parse a string describing some relations, in the form - subjeids:rtype:objeids - where subjeids and objeids are eids separeted by a underscore - - return an iterator on (subject eid, relation type, object eid) found - """ - for rstr in rdescr: - subjs, rtype, objs = rstr.split(':') - for subj in subjs.split('_'): - for obj in objs.split('_'): - yield int(subj), rtype, int(obj) - -def delete_relations(req, rdefs): - """delete relations from the repository""" - # FIXME convert to using the syntax subject:relation:eids - execute = req.execute - for subj, rtype, obj in parse_relations_descr(rdefs): - rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}) - req.set_message(req._('relations deleted')) - -def insert_relations(req, rdefs): - """insert relations into the repository""" - execute = req.execute - for subj, rtype, obj in parse_relations_descr(rdefs): - rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}) - - -# ajax edition helpers ######################################################## -@ajaxfunc(output_type='xhtml', check_pageid=True) -def inline_creation_form(self, peid, petype, ttype, rtype, role, i18nctx): - view = self._cw.vreg['views'].select('inline-creation', self._cw, - etype=ttype, rtype=rtype, role=role, - peid=peid, petype=petype) - return self._call_view(view, i18nctx=i18nctx) - -@ajaxfunc(output_type='json') -def validate_form(self, action, names, values): - return self.validate_form(action, names, values) - -@ajaxfunc -def cancel_edition(self, errorurl): - """cancelling edition from javascript - - We need to clear associated req's data : - - errorurl - - pending insertions / deletions - """ - self._cw.cancel_edition(errorurl) - - -def _add_pending(req, eidfrom, rel, eidto, kind): - key = 'pending_%s' % kind - pendings = req.session.data.setdefault(key, set()) - pendings.add( (int(eidfrom), rel, int(eidto)) ) - -def _remove_pending(req, eidfrom, rel, eidto, kind): - key = 'pending_%s' % kind - pendings = req.session.data[key] - pendings.remove( (int(eidfrom), rel, int(eidto)) ) - -@ajaxfunc(output_type='json') -def remove_pending_insert(self, args): - eidfrom, rel, eidto = args - _remove_pending(self._cw, eidfrom, rel, eidto, 'insert') - -@ajaxfunc(output_type='json') -def add_pending_inserts(self, tripletlist): - for eidfrom, rel, eidto in tripletlist: - _add_pending(self._cw, eidfrom, rel, eidto, 'insert') - -@ajaxfunc(output_type='json') -def remove_pending_delete(self, args): - eidfrom, rel, eidto = args - _remove_pending(self._cw, eidfrom, rel, eidto, 'delete') - -@ajaxfunc(output_type='json') -def add_pending_delete(self, args): - eidfrom, rel, eidto = args - _add_pending(self._cw, eidfrom, rel, eidto, 'delete') - - -class GenericRelationsWidget(fw.FieldWidget): - - def render(self, form, field, renderer): - stream = [] - w = stream.append - req = form._cw - _ = req._ - __ = _ - eid = form.edited_entity.eid - w(u'<table id="relatedEntities">') - for rschema, role, related in field.relations_table(form): - # already linked entities - if related: - label = rschema.display_name(req, role, context=form.edited_entity.cw_etype) - w(u'<tr><th class="labelCol">%s</th>' % label) - w(u'<td>') - w(u'<ul class="list-unstyled">') - for viewparams in related: - w(u'<li>%s<span 
id="span%s" class="%s">%s</span></li>' - % (viewparams[1], viewparams[0], viewparams[2], viewparams[3])) - if not form.force_display and form.maxrelitems < len(related): - link = (u'<span>' - '[<a href="javascript: window.location.href+=\'&__force_display=1\'">%s</a>]' - '</span>' % _('view all')) - w(u'<li>%s</li>' % link) - w(u'</ul>') - w(u'</td>') - w(u'</tr>') - pendings = list(field.restore_pending_inserts(form)) - if not pendings: - w(u'<tr><th> </th><td> </td></tr>') - else: - for row in pendings: - # soon to be linked to entities - w(u'<tr id="tr%s">' % row[1]) - w(u'<th>%s</th>' % row[3]) - w(u'<td>') - w(u'<a class="handle" title="%s" href="%s">[x]</a>' % - (_('cancel this insert'), row[2])) - w(u'<a id="a%s" class="editionPending" href="%s">%s</a>' - % (row[1], row[4], xml_escape(row[5]))) - w(u'</td>') - w(u'</tr>') - w(u'<tr id="relationSelectorRow_%s" class="separator">' % eid) - w(u'<th class="labelCol">') - w(u'<select id="relationSelector_%s" tabindex="%s" ' - 'onchange="javascript:showMatchingSelect(this.options[this.selectedIndex].value,%s);">' - % (eid, req.next_tabindex(), xml_escape(json_dumps(eid)))) - w(u'<option value="">%s</option>' % _('select a relation')) - for i18nrtype, rschema, role in field.relations: - # more entities to link to - w(u'<option value="%s_%s">%s</option>' % (rschema, role, i18nrtype)) - w(u'</select>') - w(u'</th>') - w(u'<td id="unrelatedDivs_%s"></td>' % eid) - w(u'</tr>') - w(u'</table>') - return '\n'.join(stream) - - -class GenericRelationsField(ff.Field): - widget = GenericRelationsWidget - - def __init__(self, relations, name='_cw_generic_field', **kwargs): - assert relations - kwargs['eidparam'] = True - super(GenericRelationsField, self).__init__(name, **kwargs) - self.relations = relations - - def process_posted(self, form): - todelete = get_pending_deletes(form._cw) - if todelete: - delete_relations(form._cw, todelete) - toinsert = get_pending_inserts(form._cw) - if toinsert: - insert_relations(form._cw, toinsert) - return () - - def relations_table(self, form): - """yiels 3-tuples (rtype, role, related_list) - where <related_list> itself a list of : - - node_id (will be the entity element's DOM id) - - appropriate javascript's togglePendingDelete() function call - - status 'pendingdelete' or '' - - oneline view of related entity - """ - entity = form.edited_entity - pending_deletes = get_pending_deletes(form._cw, entity.eid) - for label, rschema, role in self.relations: - related = [] - if entity.has_eid(): - rset = entity.related(rschema, role, limit=form.related_limit) - if role == 'subject': - haspermkwargs = {'fromeid': entity.eid} - else: - haspermkwargs = {'toeid': entity.eid} - if rschema.has_perm(form._cw, 'delete', **haspermkwargs): - toggleable_rel_link_func = toggleable_relation_link - else: - toggleable_rel_link_func = lambda x, y, z: u'' - for row in range(rset.rowcount): - nodeid = relation_id(entity.eid, rschema, role, - rset[row][0]) - if nodeid in pending_deletes: - status, label = u'pendingDelete', '+' - else: - status, label = u'', 'x' - dellink = toggleable_rel_link_func(entity.eid, nodeid, label) - eview = form._cw.view('oneline', rset, row=row) - related.append((nodeid, dellink, status, eview)) - yield (rschema, role, related) - - def restore_pending_inserts(self, form): - """used to restore edition page as it was before clicking on - 'search for <some entity type>' - """ - entity = form.edited_entity - pending_inserts = set(get_pending_inserts(form._cw, form.edited_entity.eid)) - for pendingid in 
pending_inserts: - eidfrom, rtype, eidto = pendingid.split(':') - pendingid = 'id' + pendingid - if int(eidfrom) == entity.eid: # subject - label = display_name(form._cw, rtype, 'subject', - entity.cw_etype) - reid = eidto - else: - label = display_name(form._cw, rtype, 'object', - entity.cw_etype) - reid = eidfrom - jscall = "javascript: cancelPendingInsert('%s', 'tr', null, %s);" \ - % (pendingid, entity.eid) - rset = form._cw.eid_rset(reid) - eview = form._cw.view('text', rset, row=0) - yield rtype, pendingid, jscall, label, reid, eview - - -class UnrelatedDivs(EntityView): - __regid__ = 'unrelateddivs' - __select__ = match_form_params('relation') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - relname, role = self._cw.form.get('relation').rsplit('_', 1) - rschema = self._cw.vreg.schema.rschema(relname) - hidden = 'hidden' in self._cw.form - is_cell = 'is_cell' in self._cw.form - self.w(self.build_unrelated_select_div(entity, rschema, role, - is_cell=is_cell, hidden=hidden)) - - def build_unrelated_select_div(self, entity, rschema, role, - is_cell=False, hidden=True): - options = [] - divid = 'div%s_%s_%s' % (rschema.type, role, entity.eid) - selectid = 'select%s_%s_%s' % (rschema.type, role, entity.eid) - if rschema.symmetric or role == 'subject': - targettypes = rschema.objects(entity.e_schema) - etypes = '/'.join(sorted(etype.display_name(self._cw) for etype in targettypes)) - else: - targettypes = rschema.subjects(entity.e_schema) - etypes = '/'.join(sorted(etype.display_name(self._cw) for etype in targettypes)) - etypes = uilib.cut(etypes, self._cw.property_value('navigation.short-line-size')) - options.append('<option>%s %s</option>' % (self._cw._('select a'), etypes)) - options += self._get_select_options(entity, rschema, role) - options += self._get_search_options(entity, rschema, role, targettypes) - relname, role = self._cw.form.get('relation').rsplit('_', 1) - return u"""\ -<div class="%s" id="%s"> - <select id="%s" onchange="javascript: addPendingInsert(this.options[this.selectedIndex], %s, %s, '%s');"> - %s - </select> -</div> -""" % (hidden and 'hidden' or '', divid, selectid, - xml_escape(json_dumps(entity.eid)), is_cell and 'true' or 'null', relname, - '\n'.join(options)) - - def _get_select_options(self, entity, rschema, role): - """add options to search among all entities of each possible type""" - options = [] - pending_inserts = get_pending_inserts(self._cw, entity.eid) - rtype = rschema.type - form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity) - field = form.field_by_name(rschema, role, entity.e_schema) - limit = self._cw.property_value('navigation.combobox-limit') - # NOTE: expect 'limit' arg on choices method of relation field - for eview, reid in field.vocabulary(form, limit=limit): - if reid is None: - if eview: # skip blank value - options.append('<option class="separator">-- %s --</option>' - % xml_escape(eview)) - elif reid != ff.INTERNAL_FIELD_VALUE: - optionid = relation_id(entity.eid, rtype, role, reid) - if optionid not in pending_inserts: - # prefix option's id with letters to make valid XHTML wise - options.append('<option id="id%s" value="%s">%s</option>' % - (optionid, reid, xml_escape(eview))) - return options - - def _get_search_options(self, entity, rschema, role, targettypes): - """add options to search among all entities of each possible type""" - options = [] - _ = self._cw._ - for eschema in targettypes: - mode = '%s:%s:%s:%s' % (role, entity.eid, rschema.type, eschema) - url = 
self._cw.build_url(entity.rest_path(), vid='search-associate', - __mode=mode) - options.append((eschema.display_name(self._cw), - '<option value="%s">%s %s</option>' % ( - xml_escape(url), _('Search for'), eschema.display_name(self._cw)))) - return [o for l, o in sorted(options)] - - -# The automatic entity form #################################################### - -class AutomaticEntityForm(forms.EntityFieldsForm): - """AutomaticEntityForm is an automagic form to edit any entity. It - is designed to be fully generated from schema but highly - configurable through uicfg. - - Of course, as for other forms, you can also customise it by specifying - various standard form parameters on selection, overriding, or - adding/removing fields in selected instances. - """ - __regid__ = 'edition' - - cwtarget = 'eformframe' - cssclass = 'entityForm' - copy_nav_params = True - form_buttons = [fw.SubmitButton(), - fw.Button(stdmsgs.BUTTON_APPLY, cwaction='apply'), - fw.Button(stdmsgs.BUTTON_CANCEL, - {'class': fw.Button.css_class + ' cwjs-edition-cancel'})] - # for attributes selection when searching in uicfg.autoform_section - formtype = 'main' - # set this to a list of [(relation, role)] if you want to explictily tell - # which relations should be edited - display_fields = None - # action on the form tag - _default_form_action_path = 'validateform' - - @iclassmethod - def field_by_name(cls_or_self, name, role=None, eschema=None): - """return field with the given name and role. If field is not explicitly - defined for the form but `eclass` is specified, guess_field will be - called. - """ - try: - return super(AutomaticEntityForm, cls_or_self).field_by_name(name, role, eschema) - except f.FieldNotFound: - if name == '_cw_generic_field' and not isinstance(cls_or_self, type): - return cls_or_self._generic_relations_field() - raise - - # base automatic entity form methods ####################################### - - def __init__(self, *args, **kwargs): - super(AutomaticEntityForm, self).__init__(*args, **kwargs) - self.uicfg_afs = self._cw.vreg['uicfg'].select( - 'autoform_section', self._cw, entity=self.edited_entity) - entity = self.edited_entity - if entity.has_eid(): - entity.complete() - for rtype, role in self.editable_attributes(): - try: - self.field_by_name(str(rtype), role) - continue # explicitly specified - except f.FieldNotFound: - # has to be guessed - try: - field = self.field_by_name(str(rtype), role, - eschema=entity.e_schema) - self.fields.append(field) - except f.FieldNotFound: - # meta attribute such as <attr>_format - continue - if self.fieldsets_in_order: - fsio = list(self.fieldsets_in_order) - else: - fsio = [None] - self.fieldsets_in_order = fsio - # add fields for relation whose target should have an inline form - for formview in self.inlined_form_views(): - field = self._inlined_form_view_field(formview) - self.fields.append(field) - if not field.fieldset in fsio: - fsio.append(field.fieldset) - if self.formtype == 'main': - # add the generic relation field if necessary - if entity.has_eid() and ( - self.display_fields is None or - '_cw_generic_field' in self.display_fields): - try: - field = self.field_by_name('_cw_generic_field') - except f.FieldNotFound: - # no editable relation - pass - else: - self.fields.append(field) - if not field.fieldset in fsio: - fsio.append(field.fieldset) - self.maxrelitems = self._cw.property_value('navigation.related-limit') - self.force_display = bool(self._cw.form.get('__force_display')) - fnum = len(self.fields) - self.fields.sort(key=lambda f: 
f.order is None and fnum or f.order) - - @property - def related_limit(self): - if self.force_display: - return None - return self.maxrelitems + 1 - - # autoform specific fields ################################################# - - def _generic_relations_field(self): - srels_by_cat = self.editable_relations() - if not srels_by_cat: - raise f.FieldNotFound('_cw_generic_field') - fieldset = 'This %s:' % self.edited_entity.e_schema - return GenericRelationsField(self.editable_relations(), - fieldset=fieldset, label=None) - - def _inlined_form_view_field(self, view): - # XXX allow more customization - kwargs = self.uicfg_affk.etype_get(self.edited_entity.e_schema, - view.rtype, view.role, view.etype) - if kwargs is None: - kwargs = {} - return InlinedFormField(view=view, **kwargs) - - # methods mapping edited entity relations to fields in the form ############ - - def _relations_by_section(self, section, permission='add', strict=False): - """return a list of (relation schema, target schemas, role) matching - given category(ies) and permission - """ - return self.uicfg_afs.relations_by_section( - self.edited_entity, self.formtype, section, permission, strict) - - def editable_attributes(self, strict=False): - """return a list of (relation schema, role) to edit for the entity""" - if self.display_fields is not None: - schema = self._cw.vreg.schema - return [(schema[rtype], role) for rtype, role in self.display_fields] - if self.edited_entity.has_eid() and not self.edited_entity.cw_has_perm('update'): - return [] - action = 'update' if self.edited_entity.has_eid() else 'add' - return [(rtype, role) for rtype, _, role in self._relations_by_section( - 'attributes', action, strict)] - - def editable_relations(self): - """return a sorted list of (relation's label, relation'schema, role) for - relations in the 'relations' section - """ - result = [] - for rschema, _, role in self._relations_by_section('relations', - strict=True): - result.append( (rschema.display_name(self.edited_entity._cw, role, - self.edited_entity.cw_etype), - rschema, role) ) - return sorted(result) - - def inlined_relations(self): - """return a list of (relation schema, target schemas, role) matching - given category(ies) and permission - """ - return self._relations_by_section('inlined') - - # inlined forms control #################################################### - - def inlined_form_views(self): - """compute and return list of inlined form views (hosting the inlined - form object) - """ - allformviews = [] - entity = self.edited_entity - for rschema, ttypes, role in self.inlined_relations(): - # show inline forms only if there's one possible target type - # for rschema - if len(ttypes) != 1: - self.warning('entity related by the %s relation should have ' - 'inlined form but there is multiple target types, ' - 'dunno what to do', rschema) - continue - tschema = ttypes[0] - ttype = tschema.type - formviews = list(self.inline_edition_form_view(rschema, ttype, role)) - card = rschema.role_rdef(entity.e_schema, ttype, role).role_cardinality(role) - # there is no related entity and we need at least one: we need to - # display one explicit inline-creation view - if self.should_display_inline_creation_form(rschema, formviews, card): - formviews += self.inline_creation_form_view(rschema, ttype, role) - # we can create more than one related entity, we thus display a link - # to add new related entities - if self.must_display_add_new_relation_link(rschema, role, tschema, - ttype, formviews, card): - addnewlink = 
self._cw.vreg['views'].select( - 'inline-addnew-link', self._cw, - etype=ttype, rtype=rschema, role=role, card=card, - peid=self.edited_entity.eid, - petype=self.edited_entity.e_schema, pform=self) - formviews.append(addnewlink) - allformviews += formviews - return allformviews - - def should_display_inline_creation_form(self, rschema, existant, card): - """return true if a creation form should be inlined - - by default true if there is no related entity and we need at least one - """ - return not existant and card in '1+' - - def should_display_add_new_relation_link(self, rschema, existant, card): - """return true if we should add a link to add a new creation form - (through ajax call) - - by default true if there is no related entity or if the relation has - multiple cardinality - """ - return not existant or card in '+*' - - def must_display_add_new_relation_link(self, rschema, role, tschema, - ttype, existant, card): - """return true if we must add a link to add a new creation form - (through ajax call) - - by default true if there is no related entity or if the relation has - multiple cardinality and it is permitted to add the inlined object and - relation. - """ - return (self.should_display_add_new_relation_link( - rschema, existant, card) and - self.check_inlined_rdef_permissions( - rschema, role, tschema, ttype)) - - def check_inlined_rdef_permissions(self, rschema, role, tschema, ttype): - """return true if permissions are granted on the inlined object and - relation""" - if not tschema.has_perm(self._cw, 'add'): - return False - entity = self.edited_entity - rdef = entity.e_schema.rdef(rschema, role, ttype) - if entity.has_eid(): - if role == 'subject': - rdefkwargs = {'fromeid': entity.eid} - else: - rdefkwargs = {'toeid': entity.eid} - return rdef.has_perm(self._cw, 'add', **rdefkwargs) - return rdef.may_have_permission('add', self._cw) - - - def should_hide_add_new_relation_link(self, rschema, card): - """return true if once an inlined creation form is added, the 'add new' - link should be hidden - - by default true if the relation has single cardinality - """ - return card in '1?' 
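# Hedged summary, derived from should_display_inline_creation_form,
# should_display_add_new_relation_link and should_hide_add_new_relation_link
# above, of how the role cardinality drives the inlined-form UI ('nothing
# related yet' means no target entity currently exists for the relation):
#
#   card   creation form inlined      'add new' link shown     link hidden once a
#          by default                                          form has been added
#   '1'    if nothing related yet     if nothing related yet   yes
#   '+'    if nothing related yet     always                   no
#   '?'    never                      if nothing related yet   yes
#   '*'    never                      always                   no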
- - def inline_edition_form_view(self, rschema, ttype, role): - """yield inline form views for already related entities through the - given relation - """ - entity = self.edited_entity - related = entity.has_eid() and entity.related(rschema, role) - if related: - vvreg = self._cw.vreg['views'] - # display inline-edition view for all existing related entities - for i, relentity in enumerate(related.entities()): - if relentity.cw_has_perm('update'): - yield vvreg.select('inline-edition', self._cw, - rset=related, row=i, col=0, - etype=ttype, rtype=rschema, role=role, - peid=entity.eid, pform=self) - - def inline_creation_form_view(self, rschema, ttype, role): - """yield inline form views to a newly related (hence created) entity - through the given relation - """ - try: - yield self._cw.vreg['views'].select('inline-creation', self._cw, - etype=ttype, rtype=rschema, role=role, - peid=self.edited_entity.eid, - petype=self.edited_entity.e_schema, - pform=self) - except NoSelectableObject: - # may be raised if user doesn't have the permission to add ttype entities (no checked - # earlier) or if there is some custom selector on the view - pass - - -## default form ui configuration ############################################## - -_AFS = uicfg.autoform_section -# use primary and not generated for eid since it has to be an hidden -_AFS.tag_attribute(('*', 'eid'), 'main', 'hidden') -_AFS.tag_attribute(('*', 'eid'), 'muledit', 'attributes') -_AFS.tag_attribute(('*', 'description'), 'main', 'attributes') -_AFS.tag_attribute(('*', 'has_text'), 'main', 'hidden') -_AFS.tag_subject_of(('*', 'in_state', '*'), 'main', 'hidden') -for rtype in ('creation_date', 'modification_date', 'cwuri', - 'owned_by', 'created_by', 'cw_source'): - _AFS.tag_subject_of(('*', rtype, '*'), 'main', 'metadata') - -_AFS.tag_subject_of(('*', 'by_transition', '*'), 'main', 'attributes') -_AFS.tag_subject_of(('*', 'by_transition', '*'), 'muledit', 'attributes') -_AFS.tag_object_of(('*', 'by_transition', '*'), 'main', 'hidden') -_AFS.tag_object_of(('*', 'from_state', '*'), 'main', 'hidden') -_AFS.tag_object_of(('*', 'to_state', '*'), 'main', 'hidden') -_AFS.tag_subject_of(('*', 'wf_info_for', '*'), 'main', 'attributes') -_AFS.tag_subject_of(('*', 'wf_info_for', '*'), 'muledit', 'attributes') -_AFS.tag_object_of(('*', 'wf_info_for', '*'), 'main', 'hidden') -_AFS.tag_attribute(('CWEType', 'final'), 'main', 'hidden') -_AFS.tag_attribute(('CWRType', 'final'), 'main', 'hidden') -_AFS.tag_attribute(('CWUser', 'firstname'), 'main', 'attributes') -_AFS.tag_attribute(('CWUser', 'surname'), 'main', 'attributes') -_AFS.tag_attribute(('CWUser', 'last_login_time'), 'main', 'metadata') -_AFS.tag_subject_of(('CWUser', 'in_group', '*'), 'main', 'attributes') -_AFS.tag_subject_of(('CWUser', 'in_group', '*'), 'muledit', 'attributes') -_AFS.tag_subject_of(('*', 'primary_email', '*'), 'main', 'relations') -_AFS.tag_subject_of(('*', 'use_email', '*'), 'main', 'inlined') -_AFS.tag_subject_of(('CWRelation', 'relation_type', '*'), 'main', 'inlined') -_AFS.tag_subject_of(('CWRelation', 'from_entity', '*'), 'main', 'inlined') -_AFS.tag_subject_of(('CWRelation', 'to_entity', '*'), 'main', 'inlined') - -_AFFK = uicfg.autoform_field_kwargs -_AFFK.tag_attribute(('RQLExpression', 'expression'), - {'widget': fw.TextInput}) -_AFFK.tag_subject_of(('TrInfo', 'wf_info_for', '*'), - {'widget': fw.HiddenInput}) - -def registration_callback(vreg): - global etype_relation_field - - def etype_relation_field(etype, rtype, role='subject'): - try: - eschema = 
vreg.schema.eschema(etype) - return AutomaticEntityForm.field_by_name(rtype, role, eschema) - except (KeyError, f.FieldNotFound): - # catch KeyError raised when etype/rtype not found in schema - AutomaticEntityForm.error('field for %s %s may not be found in schema' % (rtype, role)) - return None - - vreg.register_all(globals().values(), __name__) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/basecomponents.py --- a/web/views/basecomponents.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,228 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""Bases HTML components: - -* the rql input form -* the logged user link -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from logilab.mtconverter import xml_escape -from logilab.common.registry import yes -from logilab.common.deprecation import class_renamed -from rql import parse - -from cubicweb.predicates import (match_form_params, match_context, - multi_etypes_rset, configuration_values, - anonymous_user, authenticated_user) -from cubicweb.schema import display_name -from cubicweb.utils import wrap_on_write -from cubicweb.uilib import toggle_action -from cubicweb.web import component -from cubicweb.web.htmlwidgets import MenuWidget, PopupBoxMenu - -VISIBLE_PROP_DEF = { - _('visible'): dict(type='Boolean', default=True, - help=_('display the component or not')), - } - -class RQLInputForm(component.Component): - """build the rql input form, usually displayed in the header""" - __regid__ = 'rqlinput' - cw_property_defs = VISIBLE_PROP_DEF - visible = False - - def call(self, view=None): - req = self._cw - if hasattr(view, 'filter_box_context_info'): - rset = view.filter_box_context_info()[0] - else: - rset = self.cw_rset - # display multilines query as one line - rql = rset is not None and rset.printable_rql() or req.form.get('rql', '') - rql = rql.replace(u"\n", u" ") - rql_suggestion_comp = self._cw.vreg['components'].select_or_none('rql.suggestions', self._cw) - if rql_suggestion_comp is not None: - # enable autocomplete feature only if the rql - # suggestions builder is available - self._cw.add_css('jquery.ui.css') - self._cw.add_js(('cubicweb.ajax.js', 'jquery.ui.js')) - self._cw.add_onload('$("#rql").autocomplete({source: "%s"});' - % (req.build_url('json', fname='rql_suggest'))) - self.w(u'''<div id="rqlinput" class="%s"><form action="%s"><fieldset> -<input type="text" id="rql" name="rql" value="%s" title="%s" tabindex="%s" accesskey="q" class="searchField" /> -''' % (not self.cw_propval('visible') and 'hidden' or '', - req.build_url('view'), xml_escape(rql), req._('full text or RQL query'), req.next_tabindex())) - if req.search_state[0] != 'normal': - self.w(u'<input type="hidden" name="__mode" value="%s"/>' - 
% ':'.join(req.search_state[1])) - self.w(u'</fieldset></form></div>') - - - -class HeaderComponent(component.CtxComponent): # XXX rename properly along with related context - """if the user is the anonymous user, build a link to login else display a menu - with user'action (preference, logout, etc...) - """ - __abstract__ = True - cw_property_defs = component.override_ctx( - component.CtxComponent, - vocabulary=['header-center', 'header-left', 'header-right', ]) - # don't want user to hide this component using an cwproperty - site_wide = True - context = _('header-center') - - -class ApplLogo(HeaderComponent): - """build the instance logo, usually displayed in the header""" - __regid__ = 'logo' - __select__ = yes() # no need for a cnx - order = -1 - context = _('header-left') - - def render(self, w): - w(u'<a id="logo" href="%s"></a>' % self._cw.base_url()) - - -class ApplicationName(HeaderComponent): - """display the instance name""" - __regid__ = 'appliname' - - # XXX support kwargs for compat with other components which gets the view as - # argument - def render(self, w, **kwargs): - title = self._cw.property_value('ui.site-title') - if title: - w(u'<span id="appliName"><a href="%s">%s</a></span>' % ( - self._cw.base_url(), xml_escape(title))) - - -class CookieLoginComponent(HeaderComponent): - __regid__ = 'anonuserlink' - __select__ = (HeaderComponent.__select__ & anonymous_user() - & configuration_values('auth-mode', 'cookie')) - context = 'header-right' - loginboxid = 'popupLoginBox' - _html = u"""<a class="logout icon-login" title="%s" href="javascript: -cw.htmlhelpers.popupLoginBox('%s', '__login');">%s</a>""" - - def render(self, w): - # XXX bw compat, though should warn about subclasses redefining call - self.w = w - self.call() - - def call(self): - self._cw.add_css('cubicweb.pictograms.css') - self.w(self._html % (self._cw._('login / password'), - self.loginboxid, self._cw._('i18n_login_popup'))) - self._cw.view('logform', rset=self.cw_rset, id=self.loginboxid, - klass='%s hidden' % self.loginboxid, title=False, - showmessage=False, w=self.w) - - -class HTTPLoginComponent(CookieLoginComponent): - __select__ = (HeaderComponent.__select__ & anonymous_user() - & configuration_values('auth-mode', 'http')) - - def render(self, w): - # this redirects to the 'login' controller which in turn - # will raise a 401/Unauthorized - req = self._cw - w(u'[<a class="logout" title="%s" href="%s">%s</a>]' - % (req._('login / password'), req.build_url('login'), req._('login'))) - - -_UserLink = class_renamed('_UserLink', HeaderComponent) -AnonUserLink = class_renamed('AnonUserLink', CookieLoginComponent) -AnonUserLink.__abstract__ = True -AnonUserLink.__select__ &= yes(1) - - -class AnonUserStatusLink(HeaderComponent): - __regid__ = 'userstatus' - __select__ = anonymous_user() - context = _('header-right') - order = HeaderComponent.order - 10 - - def render(self, w): - pass - -class AuthenticatedUserStatus(AnonUserStatusLink): - __select__ = authenticated_user() - - def render(self, w): - # display useractions and siteactions - self._cw.add_css('cubicweb.pictograms.css') - actions = self._cw.vreg['actions'].possible_actions(self._cw, rset=self.cw_rset) - box = MenuWidget('', 'userActionsBox', _class='', islist=False) - menu = PopupBoxMenu(self._cw.user.login, isitem=False, link_class='icon-user') - box.append(menu) - for action in actions.get('useractions', ()): - menu.append(self.action_link(action)) - if actions.get('useractions') and actions.get('siteactions'): - menu.append(self.separator()) 
- for action in actions.get('siteactions', ()): - menu.append(self.action_link(action)) - box.render(w=w) - - -class ApplicationMessage(component.Component): - """display messages given using the __message/_cwmsgid parameter into a - special div section - """ - __select__ = yes() - __regid__ = 'applmessages' - # don't want user to hide this component using a cwproperty - cw_property_defs = {} - - def call(self, msg=None): - if msg is None: - msg = self._cw.message # XXX don't call self._cw.message twice - self.w(u'<div id="appMsg" onclick="%s" class="%s">\n' % - (toggle_action('appMsg'), (msg and ' ' or 'hidden'))) - self.w(u'<div class="message" id="%s">%s</div>' % (self.domid, msg)) - self.w(u'</div>') - - -# contextual components ######################################################## - - -class MetaDataComponent(component.EntityCtxComponent): - __regid__ = 'metadata' - context = 'navbottom' - order = 1 - - def render_body(self, w): - self.entity.view('metadata', w=w) - - -class SectionLayout(component.Layout): - __select__ = match_context('navtop', 'navbottom', - 'navcontenttop', 'navcontentbottom') - cssclass = 'section' - - def render(self, w): - if self.init_rendering(): - view = self.cw_extra_kwargs['view'] - w(u'<div class="%s %s" id="%s">' % (self.cssclass, view.cssclass, - view.domid)) - with wrap_on_write(w, '<h4>') as wow: - view.render_title(wow) - view.render_body(w) - w(u'</div>\n') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/basecontrollers.py --- a/web/views/basecontrollers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,302 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""Set of base controllers, which are directly plugged into the application -object to handle publication. 
-""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from six import text_type - -from logilab.common.deprecation import deprecated - -from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError, - AuthenticationError, UndoTransactionException, - Forbidden) -from cubicweb.utils import json_dumps -from cubicweb.predicates import (authenticated_user, anonymous_user, - match_form_params) -from cubicweb.web import Redirect, RemoteCallFailed -from cubicweb.web.controller import Controller, append_url_params -from cubicweb.web.views import vid_from_rset -import cubicweb.transaction as tx - -@deprecated('[3.15] jsonize is deprecated, use AjaxFunction appobjects instead') -def jsonize(func): - """decorator to sets correct content_type and calls `json_dumps` on - results - """ - def wrapper(self, *args, **kwargs): - self._cw.set_content_type('application/json') - return json_dumps(func(self, *args, **kwargs)) - wrapper.__name__ = func.__name__ - return wrapper - -@deprecated('[3.15] xhtmlize is deprecated, use AjaxFunction appobjects instead') -def xhtmlize(func): - """decorator to sets correct content_type and calls `xmlize` on results""" - def wrapper(self, *args, **kwargs): - self._cw.set_content_type(self._cw.html_content_type()) - result = func(self, *args, **kwargs) - return ''.join((u'<div>', result.strip(), - u'</div>')) - wrapper.__name__ = func.__name__ - return wrapper - -@deprecated('[3.15] check_pageid is deprecated, use AjaxFunction appobjects instead') -def check_pageid(func): - """decorator which checks the given pageid is found in the - user's session data - """ - def wrapper(self, *args, **kwargs): - data = self._cw.session.data.get(self._cw.pageid) - if data is None: - raise RemoteCallFailed(self._cw._('pageid-not-found')) - return func(self, *args, **kwargs) - return wrapper - - -class LoginController(Controller): - __regid__ = 'login' - __select__ = anonymous_user() - - def publish(self, rset=None): - """log in the instance""" - if self._cw.vreg.config['auth-mode'] == 'http': - # HTTP authentication - raise AuthenticationError() - else: - # Cookie authentication - return self.appli.need_login_content(self._cw) - -class LoginControllerForAuthed(Controller): - __regid__ = 'login' - __select__ = ~anonymous_user() - - def publish(self, rset=None): - """log in the instance""" - path = self._cw.form.get('postlogin_path', '') - # Redirect expects a URL, not a path. 
Also path may contain a query - # string, hence should not be given to _cw.build_url() - raise Redirect(self._cw.base_url() + path) - - -class LogoutController(Controller): - __regid__ = 'logout' - - def publish(self, rset=None): - """logout from the instance""" - return self.appli.session_handler.logout(self._cw, self.goto_url()) - - def goto_url(self): - # * in http auth mode, url will be ignored - # * in cookie mode redirecting to the index view is enough : either - # anonymous connection is allowed and the page will be displayed or - # we'll be redirected to the login form - msg = self._cw._('you have been logged out') - return self._cw.build_url('view', vid='loggedout') - - -class ViewController(Controller): - """standard entry point : - - build result set - - select and call main template - """ - __regid__ = 'view' - template = 'main-template' - - def publish(self, rset=None): - """publish a request, returning an encoded string""" - view, rset = self._select_view_and_rset(rset) - view.set_http_cache_headers() - if self._cw.is_client_cache_valid(): - return b'' - template = self.appli.main_template_id(self._cw) - return self._cw.vreg['views'].main_template(self._cw, template, - rset=rset, view=view) - - def _select_view_and_rset(self, rset): - req = self._cw - if rset is None and not hasattr(req, '_rql_processed'): - req._rql_processed = True - if req.cnx: - rset = self.process_rql() - else: - rset = None - vid = req.form.get('vid') or vid_from_rset(req, rset, self._cw.vreg.schema) - try: - view = self._cw.vreg['views'].select(vid, req, rset=rset) - except ObjectNotFound: - self.warning("the view %s could not be found", vid) - req.set_message(req._("The view %s could not be found") % vid) - vid = vid_from_rset(req, rset, self._cw.vreg.schema) - view = self._cw.vreg['views'].select(vid, req, rset=rset) - except NoSelectableObject: - if rset: - req.set_message(req._("The view %s can not be applied to this query") % vid) - else: - req.set_message(req._("You have no access to this view or it can not " - "be used to display the current data.")) - vid = req.form.get('fallbackvid') or vid_from_rset(req, rset, req.vreg.schema) - view = req.vreg['views'].select(vid, req, rset=rset) - return view, rset - - def execute_linkto(self, eid=None): - """XXX __linkto parameter may cause security issue - - defined here since custom application controller inheriting from this - one use this method? - """ - req = self._cw - if not '__linkto' in req.form: - return - if eid is None: - eid = int(req.form['eid']) - for linkto in req.list_form_param('__linkto', pop=True): - rtype, eids, target = linkto.split(':') - assert target in ('subject', 'object') - eids = eids.split('_') - if target == 'subject': - rql = 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype - else: - rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype - for teid in eids: - req.execute(rql, {'x': eid, 'y': int(teid)}) - - -def _validation_error(req, ex): - req.cnx.rollback() - ex.translate(req._) # translate messages using ui language - # XXX necessary to remove existant validation error? 
- # imo (syt), it's not necessary - req.session.data.pop(req.form.get('__errorurl'), None) - foreid = ex.entity - eidmap = req.data.get('eidmap', {}) - for var, eid in eidmap.items(): - if foreid == eid: - foreid = var - break - return (foreid, ex.errors) - - -def _validate_form(req, vreg): - # XXX should use the `RemoteCallFailed` mechanism - try: - ctrl = vreg['controllers'].select('edit', req=req) - except NoSelectableObject: - return (False, {None: req._('not authorized')}, None) - try: - ctrl.publish(None) - except ValidationError as ex: - return (False, _validation_error(req, ex), ctrl._edited_entity) - except Redirect as ex: - try: - txuuid = req.cnx.commit() # ValidationError may be raised on commit - except ValidationError as ex: - return (False, _validation_error(req, ex), ctrl._edited_entity) - except Exception as ex: - req.cnx.rollback() - req.exception('unexpected error while validating form') - return (False, str(ex).decode('utf-8'), ctrl._edited_entity) - else: - if txuuid is not None: - req.data['last_undoable_transaction'] = txuuid - # complete entity: it can be used in js callbacks where we might - # want every possible information - if ctrl._edited_entity: - ctrl._edited_entity.complete() - return (True, ex.location, ctrl._edited_entity) - except Exception as ex: - req.cnx.rollback() - req.exception('unexpected error while validating form') - return (False, text_type(ex), ctrl._edited_entity) - return (False, '???', None) - - -class FormValidatorController(Controller): - __regid__ = 'validateform' - - def response(self, domid, status, args, entity): - callback = str(self._cw.form.get('__onsuccess', 'null')) - errback = str(self._cw.form.get('__onfailure', 'null')) - cbargs = str(self._cw.form.get('__cbargs', 'null')) - self._cw.set_content_type('text/html') - jsargs = json_dumps((status, args, entity)) - return """<script type="text/javascript"> - window.parent.handleFormValidationResponse('%s', %s, %s, %s, %s); -</script>""" % (domid, callback, errback, jsargs, cbargs) - - def publish(self, rset=None): - self._cw.ajax_request = True - # XXX unclear why we have a separated controller here vs - # js_validate_form on the json controller - status, args, entity = _validate_form(self._cw, self._cw.vreg) - domid = self._cw.form.get('__domid', 'entityForm') - return self.response(domid, status, args, entity).encode(self._cw.encoding) - - -class JSonController(Controller): - __regid__ = 'json' - - def publish(self, rset=None): - warn('[3.15] JSONController is deprecated, use AjaxController instead', - DeprecationWarning) - ajax_controller = self._cw.vreg['controllers'].select('ajax', self._cw, appli=self.appli) - return ajax_controller.publish(rset) - - -class MailBugReportController(Controller): - __regid__ = 'reportbug' - __select__ = match_form_params('description') - - def publish(self, rset=None): - req = self._cw - desc = req.form['description'] - # The description is generated and signed by cubicweb itself, check - # description's signature so we don't want to send spam here - sign = req.form.get('__signature', '') - if not (sign and req.vreg.config.check_text_sign(desc, sign)): - raise Forbidden('Invalid content') - self.sendmail(req.vreg.config['submit-mail'], - req._('%s error report') % req.vreg.config.appid, - desc) - raise Redirect(req.build_url(__message=req._('bug report sent'))) - - -class UndoController(Controller): - __regid__ = 'undo' - __select__ = authenticated_user() & match_form_params('txuuid') - - def publish(self, rset=None): - txuuid = 
self._cw.form['txuuid'] - try: - self._cw.cnx.undo_transaction(txuuid) - except UndoTransactionException as exc: - errors = exc.errors - #This will cause a rollback in main_publish - raise ValidationError(None, {None: '\n'.join(errors)}) - else : - self.redirect() # Will raise Redirect - - def redirect(self, msg=None): - req = self._cw - msg = msg or req._("transaction undone") - self._redirect({'_cwmsgid': req.set_redirect_message(msg)}) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/basetemplates.py --- a/web/views/basetemplates.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,530 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""default templates for CubicWeb web client""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import class_renamed -from logilab.common.registry import objectify_predicate -from logilab.common.decorators import classproperty - -from cubicweb.predicates import match_kwargs, no_cnx, anonymous_user -from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW, StartupView -from cubicweb.utils import UStringIO -from cubicweb.schema import display_name -from cubicweb.web import component, formfields as ff, formwidgets as fw -from cubicweb.web.views import forms - -# main templates ############################################################## - -class LogInOutTemplate(MainTemplate): - - def call(self): - self.set_request_content_type() - w = self.w - self.write_doctype() - self.template_header('text/html', self._cw._('login_action')) - w(u'<body>\n') - self.content(w) - w(u'</body>') - - def template_header(self, content_type, view=None, page_title='', additional_headers=()): - w = self.whead - # explictly close the <base> tag to avoid IE 6 bugs while browsing DOM - w(u'<base href="%s"></base>' % xml_escape(self._cw.base_url())) - w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n' - % (content_type, self._cw.encoding)) - w(NOINDEX) - w(NOFOLLOW) - w(u'\n'.join(additional_headers) + u'\n') - self.wview('htmlheader', rset=self.cw_rset) - w(u'<title>%s\n' % xml_escape(page_title)) - - def content(self): - raise NotImplementedError() - - -class LogInTemplate(LogInOutTemplate): - __regid__ = 'login' - __select__ = anonymous_user() - title = 'log in' - - def content(self, w): - self.wview('logform', rset=self.cw_rset, id='loginBox', klass='') - - -class LoggedOutTemplate(StartupView): - __regid__ = 'loggedout' - __select__ = anonymous_user() - title = 'logged out' - - def call(self): - msg = self._cw._('you have been logged out') - if self._cw.cnx: - comp = self._cw.vreg['components'].select('applmessages', self._cw) - comp.render(w=self.w, 
msg=msg) - self.wview('index') - else: - self.w(u'

              %s

              ' % msg) - - -@objectify_predicate -def modal_view(cls, req, rset, *args, **kwargs): - if req.form.get('__modal', None): - return 1 - -@objectify_predicate -def templatable_view(cls, req, rset, *args, **kwargs): - view = kwargs.pop('view', None) - if view is None: - return 1 - if view.binary: - return 0 - if '__notemplate' in req.form: - return 0 - return view.templatable - - -class NonTemplatableViewTemplate(MainTemplate): - """main template for any non templatable views (xml, binaries, etc.)""" - __regid__ = 'main-template' - __select__ = ~templatable_view() - - def call(self, view): - view.set_request_content_type() - view.set_stream() - if (('__notemplate' in self._cw.form) - and view.templatable - and view.content_type == self._cw.html_content_type()): - view.w(u'
              ') - view.render() - view.w(u'
              ') - else: - view.render() - # have to replace our stream by view's stream (which may be a binary - # stream) - self._stream = view._stream - - -class ModalMainTemplate(MainTemplate): - """ a no-decoration main template for standard views - that typically live in a modal context """ - __regid__ = 'main-template' - __select__ = templatable_view() & modal_view() - - def call(self, view): - view.set_request_content_type() - view.render(w=self.w) - - -class TheMainTemplate(MainTemplate): - """default main template : - - - call header / footer templates - """ - __regid__ = 'main-template' - __select__ = templatable_view() - - def call(self, view): - self.set_request_content_type() - self.template_header(self.content_type, view) - w = self.w - w(u'
<div id="pageContent">\n') - vtitle = self._cw.form.get('vtitle') - if vtitle: - w(u'
              %s
\n' % xml_escape(vtitle)) - # display entity type restriction component - etypefilter = self._cw.vreg['components'].select_or_none( - 'etypenavigation', self._cw, rset=self.cw_rset) - if etypefilter and etypefilter.cw_propval('visible'): - etypefilter.render(w=w) - nav_html = UStringIO() - if view and not view.handle_pagination: - view.paginate(w=nav_html.write) - w(nav_html.getvalue()) - w(u'<div id="contentmain">\n') - view.render(w=w) - w(u'</div>\n') # close id=contentmain - w(nav_html.getvalue()) - w(u'</div>
              \n') # closes id=pageContent - self.template_footer(view) - - def template_header(self, content_type, view=None, page_title='', additional_headers=()): - page_title = page_title or view.page_title() - additional_headers = additional_headers or view.html_headers() - self.template_html_header(content_type, page_title, additional_headers) - self.template_body_header(view) - - def template_html_header(self, content_type, page_title, additional_headers=()): - w = self.whead - lang = self._cw.lang - self.write_doctype() - self._cw.html_headers.define_var('BASE_URL', self._cw.base_url()) - self._cw.html_headers.define_var('DATA_URL', self._cw.datadir_url) - w(u'\n' - % (content_type, self._cw.encoding)) - w(u'\n'.join(additional_headers) + u'\n') - self.wview('htmlheader', rset=self.cw_rset) - if page_title: - w(u'%s\n' % xml_escape(page_title)) - - def template_body_header(self, view): - w = self.w - w(u'\n') - self.wview('header', rset=self.cw_rset, view=view) - w(u'
              \n') - self.nav_column(view, 'left') - w(u'\n') - self.nav_column(view, 'right') - self.w(u'
              \n') - components = self._cw.vreg['components'] - rqlcomp = components.select_or_none('rqlinput', self._cw, rset=self.cw_rset) - if rqlcomp: - rqlcomp.render(w=self.w, view=view) - msgcomp = components.select_or_none('applmessages', self._cw, rset=self.cw_rset) - if msgcomp: - msgcomp.render(w=self.w) - self.content_header(view) - - def template_footer(self, view=None): - self.content_footer(view) - self.w(u'
              \n') - self.wview('footer', rset=self.cw_rset) - self.w(u'') - - def nav_column(self, view, context): - boxes = list(self._cw.vreg['ctxcomponents'].poss_visible_objects( - self._cw, rset=self.cw_rset, view=view, context=context)) - if boxes: - getlayout = self._cw.vreg['components'].select - self.w(u'\n') - - def content_header(self, view=None): - """by default, display informal messages in content header""" - self.wview('contentheader', rset=self.cw_rset, view=view) - - def content_footer(self, view=None): - self.wview('contentfooter', rset=self.cw_rset, view=view) - - -class ErrorTemplate(TheMainTemplate): - """fallback template if an internal error occurred during displaying the main - template. This template may be called for authentication error, which means - that req.cnx and req.user may not be set. - """ - __regid__ = 'error-template' - - def call(self): - """display an unexpected error""" - self.set_request_content_type() - self._cw.reset_headers() - view = self._cw.vreg['views'].select('error', self._cw, rset=self.cw_rset) - self.template_header(self.content_type, view, self._cw._('an error occurred'), - [NOINDEX, NOFOLLOW]) - view.render(w=self.w) - self.template_footer(view) - - def template_header(self, content_type, view=None, page_title='', additional_headers=()): - w = self.whead - lang = self._cw.lang - self.write_doctype() - w(u'\n' - % (content_type, self._cw.encoding)) - w(u'\n'.join(additional_headers)) - self.wview('htmlheader', rset=self.cw_rset) - w(u'%s\n' % xml_escape(page_title)) - self.w(u'\n') - - def template_footer(self, view=None): - self.w(u'') - - -class SimpleMainTemplate(TheMainTemplate): - - __regid__ = 'main-no-top' - - def template_header(self, content_type, view=None, page_title='', additional_headers=()): - page_title = page_title or view.page_title() - additional_headers = additional_headers or view.html_headers() - whead = self.whead - lang = self._cw.lang - self.write_doctype() - whead(u'\n' - % (content_type, self._cw.encoding)) - whead(u'\n'.join(additional_headers) + u'\n') - self.wview('htmlheader', rset=self.cw_rset) - w = self.w - whead(u'%s\n' % xml_escape(page_title)) - w(u'\n') - w(u'
              ') - w(u'\n') - w(u'') - w(u'' % (entity.cw_row % 2 and u'even' or u'odd')) - # XXX turn this into a widget used on the eid field - w(u'' % checkbox('eid', entity.eid, - checked=qeid in values)) - for field in fields: - error = form.field_error(field) - if error: - w(u'\n') - w(u'') - else: - self._main_display_fields = fields - - -class EntityFormRenderer(BaseFormRenderer): - """This is the 'default' renderer for entity's form. - - You can still use form_renderer_id = 'base' if you want base FormRenderer - layout even when selected for an entity. - """ - __regid__ = 'default' - # needs some additional points in some case (XXX explain cases) - __select__ = is_instance('Any') & yes() - - _options = FormRenderer._options + ('main_form_title',) - main_form_title = _('main informations') - - def open_form(self, form, values): - attrs_fs_label = '' - if self.main_form_title: - attrs_fs_label += ('
              %s
              ' - % self._cw._(self.main_form_title)) - attrs_fs_label += '
              ' - return attrs_fs_label + super(EntityFormRenderer, self).open_form(form, values) - - def close_form(self, form, values): - """seems dumb but important for consistency w/ close form, and necessary - for form renderers overriding open_form to use something else or more than - and
              - """ - return super(EntityFormRenderer, self).close_form(form, values) + '
              ' - - def render_buttons(self, w, form): - if len(form.form_buttons) == 3: - w("""
              ') - - def topleft_header(self): - logo = self._cw.vreg['components'].select_or_none('logo', self._cw, - rset=self.cw_rset) - if logo and logo.cw_propval('visible'): - w = self.w - w(u'\n') - w(u'\n') - w(u'\n') - - -# page parts templates ######################################################## - -class HTMLHeader(View): - """default html headers""" - __regid__ = 'htmlheader' - - def call(self, **kwargs): - self.favicon() - self.stylesheets() - self.javascripts() - self.alternates() - - def favicon(self): - favicon = self._cw.uiprops.get('FAVICON', None) - if favicon: - self.whead(u'\n' % favicon) - - def stylesheets(self): - req = self._cw - add_css = req.add_css - for css in req.uiprops['STYLESHEETS']: - add_css(css, localfile=False) - for css in req.uiprops['STYLESHEETS_PRINT']: - add_css(css, u'print', localfile=False) - for css in req.uiprops['STYLESHEETS_IE']: - add_css(css, localfile=False, ieonly=True) - - def javascripts(self): - for jscript in self._cw.uiprops['JAVASCRIPTS']: - self._cw.add_js(jscript, localfile=False) - - def alternates(self): - urlgetter = self._cw.vreg['components'].select_or_none('rss_feed_url', - self._cw, rset=self.cw_rset) - if urlgetter is not None: - self.whead(u'\n' - % xml_escape(urlgetter.feed_url())) - - -class HTMLPageHeader(View): - """default html page header""" - __regid__ = 'header' - main_cell_components = ('appliname', 'breadcrumbs') - headers = (('headtext', 'header-left'), - ('header-center', 'header-center'), - ('header-right', 'header-right') - ) - - def call(self, view, **kwargs): - self.main_header(view) - self.w(u'
              ') - self.state_header() - self.w(u'
') - - def main_header(self, view): - """build the top menu with authentication info and the rql box""" - w = self.w - w(u'\n') - for colid, context in self.headers: - w(u'') - w(u'\n') - - def state_header(self): - state = self._cw.search_state - if state[0] == 'normal': - return - _ = self._cw._ - value = self._cw.view('oneline', self._cw.eid_rset(state[1][1])) - msg = ' '.join((_("searching for"), - display_name(self._cw, state[1][3]), - _("to associate with"), value, - _("by relation"), '"', - display_name(self._cw, state[1][2], state[1][0]), - '"')) - return self.w(u'
              %s
              ' % msg) - - -class HTMLPageFooter(View): - """default html page footer: include footer actions""" - __regid__ = 'footer' - - def call(self, **kwargs): - self.w(u'') - - def footer_content(self): - actions = self._cw.vreg['actions'].possible_actions(self._cw, - rset=self.cw_rset) - footeractions = actions.get('footer', ()) - for i, action in enumerate(footeractions): - self.w(u'%s' % (action.url(), - self._cw._(action.title))) - if i < (len(footeractions) - 1): - self.w(u' | ') - -class HTMLContentHeader(View): - """default html page content header: - * include message component if selectable for this request - * include selectable content navigation components - """ - __regid__ = 'contentheader' - - def call(self, view, **kwargs): - """by default, display informal messages in content header""" - components = self._cw.vreg['ctxcomponents'].poss_visible_objects( - self._cw, rset=self.cw_rset, view=view, context='navtop') - if components: - self.w(u'
              ') - for comp in components: - comp.render(w=self.w, view=view) - self.w(u'
              ') - - -class HTMLContentFooter(View): - """default html page content footer: include selectable content navigation - components - """ - __regid__ = 'contentfooter' - - def call(self, view, **kwargs): - components = self._cw.vreg['ctxcomponents'].poss_visible_objects( - self._cw, rset=self.cw_rset, view=view, context='navbottom') - if components: - self.w(u'
              ') - for comp in components: - comp.render(w=self.w, view=view) - self.w(u'
              ') - -class BaseLogForm(forms.FieldsForm): - """Abstract Base login form to be used by any login form - """ - __abstract__ = True - - __regid__ = 'logform' - domid = 'loginForm' - needs_css = ('cubicweb.login.css',) - - onclick_base = "javascript: cw.htmlhelpers.popupLoginBox('%s', '%s');" - onclick_args = (None, None) - - @classproperty - def form_buttons(cls): - # we use a property because sub class will need to define their own onclick_args. - # Therefor we can't juste make the string formating when instanciating this class - onclick = cls.onclick_base % cls.onclick_args - form_buttons = [fw.SubmitButton(label=_('log in'), - attrs={'class': 'loginButton'}), - fw.ResetButton(label=_('cancel'), - attrs={'class': 'loginButton', - 'onclick': onclick}),] - ## Can't shortcut next access because __dict__ is a "dictproxy" which - ## does not support items assignement. - # cls.__dict__['form_buttons'] = form_buttons - return form_buttons - - def form_action(self): - if self.action is None: - # reuse existing redirection if it exists - target = self._cw.form.get('postlogin_path', - self._cw.relative_path()) - url_args = {} - if target and target != '/': - url_args['postlogin_path'] = target - return self._cw.build_url('login', __secure__=True, **url_args) - return super(BaseLogForm, self).form_action() - -class LogForm(BaseLogForm): - """Simple login form that send username and password - """ - __regid__ = 'logform' - domid = 'loginForm' - needs_css = ('cubicweb.login.css',) - # XXX have to recall fields name since python is mangling __login/__password - __login = ff.StringField('__login', widget=fw.TextInput({'class': 'data'})) - __password = ff.StringField('__password', label=_('password'), - widget=fw.PasswordSingleInput({'class': 'data'})) - - onclick_args = ('popupLoginBox', '__login') - - -class LogFormView(View): - # XXX an awful lot of hardcoded assumptions there - # makes it unobvious to reuse/specialize - __regid__ = 'logform' - __select__ = match_kwargs('id', 'klass') - - title = 'log in' - - def call(self, id, klass, title=True, showmessage=True): - w = self.w - w(u'
              ' % (id, klass)) - if title: - stitle = self._cw.property_value('ui.site-title') - if stitle: - stitle = xml_escape(stitle) - else: - stitle = u' ' - w(u'
              %s
              ' % stitle) - w(u'
              \n') - if showmessage and self._cw.message: - w(u'
              %s
              \n' % self._cw.message) - config = self._cw.vreg.config - if config['auth-mode'] != 'http': - self.login_form(id) # Cookie authentication - w(u'
              ') - w(u'
              \n') - - def login_form(self, id): - cw = self._cw - form = cw.vreg['forms'].select('logform', cw) - if cw.vreg.config['allow-email-login']: - label = cw._('login or email') - else: - label = cw.pgettext('CWUser', 'login') - form.field_by_name('__login').label = label - form.render(w=self.w, table_class='', display_progress_div=False) - cw.html_headers.add_onload('jQuery("#__login:visible").focus()') - -LogFormTemplate = class_renamed('LogFormTemplate', LogFormView) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/baseviews.py --- a/web/views/baseviews.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,644 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -HTML views -~~~~~~~~~~ - -Special views -````````````` - -.. autoclass:: NullView -.. autoclass:: NoResultView -.. autoclass:: FinalView - - -Base entity views -````````````````` - -.. autoclass:: InContextView -.. autoclass:: OutOfContextView -.. autoclass:: OneLineView - -Those are used to display a link to an entity, whose label depends on the entity -having to be displayed in or out of context (of another entity): some entities -make sense in the context of another entity. For instance, the `Version` of a -`Project` in forge. So one may expect that 'incontext' will be called when -display a version from within the context of a project, while 'outofcontext"' -will be called in other cases. In our example, the 'incontext' view of the -version would be something like '0.1.2', while the 'outofcontext' view would -include the project name, e.g. 'baz 0.1.2' (since only a version number without -the associated project doesn't make sense if you don't know yet that you're -talking about the famous 'baz' project. |cubicweb| tries to make guess and call -'incontext'/'outofcontext' nicely. When it can't know, the 'oneline' view should -be used. - - -List entity views -````````````````` - -.. autoclass:: ListView -.. autoclass:: SimpleListView -.. autoclass:: SameETypeListView -.. autoclass:: CSVView - -Those list views can be given a 'subvid' arguments, telling the view to use of -each item in the list. When not specified, the value of the 'redirect_vid' -attribute of :class:`ListItemView` (for 'listview') or of -:class:`SimpleListView` will be used. This default to 'outofcontext' for 'list' -/ 'incontext' for 'simplelist' - - -Text entity views -~~~~~~~~~~~~~~~~~ - -Basic HTML view have some variants to be used when generating raw text, not HTML -(for notifications for instance). Also, as explained above, some of the HTML -views use those text views as a basis. - -.. autoclass:: TextView -.. autoclass:: InContextTextView -.. 
autoclass:: OutOfContextView -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from datetime import timedelta -from warnings import warn - -from six.moves import range - -from rql import nodes - -from logilab.mtconverter import TransformError, xml_escape -from logilab.common.registry import yes - -from cubicweb import NoSelectableObject, tags -from cubicweb.predicates import empty_rset, one_etype_rset, match_kwargs -from cubicweb.schema import display_name -from cubicweb.view import EntityView, AnyRsetView, View -from cubicweb.uilib import cut -from cubicweb.web.views import calendar - - -class NullView(AnyRsetView): - """:__regid__: *null* - - This view is the default view used when nothing needs to be rendered. It is - always applicable and is usually used as fallback view when calling - :meth:`_cw.view` to display nothing if the result set is empty. - """ - __regid__ = 'null' - __select__ = yes() - def call(self, **kwargs): - pass - cell_call = call - - -class NoResultView(View): - """:__regid__: *noresult* - - This view is the default view to be used when no result has been found - (i.e. empty result set). - - It's usually used as fallback view when calling :meth:`_cw.view` to display - "no results" if the result set is empty. - """ - __regid__ = 'noresult' - __select__ = empty_rset() - - def call(self, **kwargs): - self.w(u'
              %s
              \n' - % self._cw._('No result matching query')) - - -class FinalView(AnyRsetView): - """:__regid__: *final* - - Display the value of a result set cell with minimal transformations - (i.e. you'll get a number for entities). It is applicable on any result set, - though usually dedicated for cells containing an attribute's value. - """ - __regid__ = 'final' - - def cell_call(self, row, col, props=None, format='text/html'): - value = self.cw_rset.rows[row][col] - if value is None: - self.w(u'') - return - etype = self.cw_rset.description[row][col] - if etype == 'String': - entity, rtype = self.cw_rset.related_entity(row, col) - if entity is not None: - # call entity's printable_value which may have more information - # about string format & all - self.w(entity.printable_value(rtype, value, format=format)) - return - value = self._cw.printable_value(etype, value, props) - if etype in ('Time', 'Interval'): - self.w(value.replace(' ', ' ')) - else: - self.wdata(value) - - -class InContextView(EntityView): - """:__regid__: *incontext* - - This view is used when the entity should be considered as displayed in its - context. By default it produces the result of ``entity.dc_title()`` wrapped in a - link leading to the primary view of the entity. - """ - __regid__ = 'incontext' - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - desc = cut(entity.dc_description(), 50) - self.w(u'%s' % ( - xml_escape(entity.absolute_url()), xml_escape(desc), - xml_escape(entity.dc_title()))) - -class OutOfContextView(EntityView): - """:__regid__: *outofcontext* - - This view is used when the entity should be considered as displayed out of - its context. By default it produces the result of ``entity.dc_long_title()`` - wrapped in a link leading to the primary view of the entity. - """ - __regid__ = 'outofcontext' - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - desc = cut(entity.dc_description(), 50) - self.w(u'%s' % ( - xml_escape(entity.absolute_url()), xml_escape(desc), - xml_escape(entity.dc_long_title()))) - - -class OneLineView(EntityView): - """:__regid__: *oneline* - - This view is used when we can't tell if the entity should be considered as - displayed in or out of context. By default it produces the result of the - `text` view in a link leading to the primary view of the entity. - """ - __regid__ = 'oneline' - title = _('oneline') - - def cell_call(self, row, col, **kwargs): - """the one line view for an entity: linked text view - """ - entity = self.cw_rset.get_entity(row, col) - desc = cut(entity.dc_description(), 50) - title = cut(entity.dc_title(), - self._cw.property_value('navigation.short-line-size')) - self.w(u'%s' % ( - xml_escape(entity.absolute_url()), xml_escape(desc), - xml_escape(title))) - - -# text views ################################################################### - -class TextView(EntityView): - """:__regid__: *text* - - This is the simplest text view for an entity. By default it returns the - result of the entity's `dc_title()` method, which is cut to fit the - `navigation.short-line-size` property if necessary. - """ - __regid__ = 'text' - title = _('text') - content_type = 'text/plain' - - def call(self, **kwargs): - """The view is called for an entire result set, by default loop other - rows of the result set and call the same view on the particular row. - - Subclasses views that are applicable on None result sets will have to - override this method. 
- """ - rset = self.cw_rset - if rset is None: - raise NotImplementedError(self) - for i in range(len(rset)): - self.wview(self.__regid__, rset, row=i, **kwargs) - if len(rset) > 1: - self.w(u"\n") - - def cell_call(self, row, col=0, **kwargs): - entity = self.cw_rset.get_entity(row, col) - self.w(cut(entity.dc_title(), - self._cw.property_value('navigation.short-line-size'))) - - -class InContextTextView(TextView): - """:__regid__: *textincontext* - - Similar to the `text` view, but called when an entity is considered in - context (see description of incontext HTML view for more information on - this). By default it displays what's returned by the `dc_title()` method of - the entity. - """ - __regid__ = 'textincontext' - title = None # not listed as a possible view - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - self.w(entity.dc_title()) - - -class OutOfContextTextView(InContextTextView): - """:__regid__: *textoutofcontext* - - Similar to the `text` view, but called when an entity is considered out of - context (see description of outofcontext HTML view for more information on - this). By default it displays what's returned by the `dc_long_title()` - method of the entity. - """ - __regid__ = 'textoutofcontext' - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - self.w(entity.dc_long_title()) - - -# list views ################################################################## - -class ListView(EntityView): - """:__regid__: *list* - - This view displays a list of entities by creating a HTML list (`
`) and - calls the view `listitem` for each entity of the result set. The 'list' view - will generate HTML like: - - .. sourcecode:: html - -
                  -
                • "result of 'subvid' view for a row
                • - ... -
                - - If you wish to use a different view for each entity, either subclass and - change the :attr:`item_vid` class attribute or specify a `subvid` argument - when calling this view. - """ - __regid__ = 'list' - title = _('list') - item_vid = 'listitem' - - def call(self, klass=None, title=None, subvid=None, listid=None, **kwargs): - """display a list of entities by calling their view - - :param listid: the DOM id to use for the root element - """ - # XXX much of the behaviour here should probably be outside this view - if subvid is None and 'subvid' in self._cw.form: - subvid = self._cw.form.pop('subvid') # consume it - if listid: - listid = u' id="%s"' % listid - else: - listid = u'' - if title: - self.w(u'

                %s

                \n' % (listid, klass or 'section', title)) - self.w(u'
                  \n') - else: - self.w(u'\n' % (listid, klass or 'section')) - for i in range(self.cw_rset.rowcount): - self.cell_call(row=i, col=0, vid=subvid, klass=klass, **kwargs) - self.w(u'
                \n') - if title: - self.w(u'\n') - - def cell_call(self, row, col=0, vid=None, klass=None, **kwargs): - self.w(u'
              • ') - self.wview(self.item_vid, self.cw_rset, row=row, col=col, vid=vid, **kwargs) - self.w(u'
              • \n') - - -class ListItemView(EntityView): - __regid__ = 'listitem' - - @property - def redirect_vid(self): - if self._cw.search_state[0] == 'normal': - return 'outofcontext' - return 'outofcontext-search' - - def cell_call(self, row, col, vid=None, **kwargs): - if not vid: - vid = self.redirect_vid - try: - self.wview(vid, self.cw_rset, row=row, col=col, **kwargs) - except NoSelectableObject: - if vid == self.redirect_vid: - raise - self.wview(self.redirect_vid, self.cw_rset, row=row, col=col, **kwargs) - - -class SimpleListView(ListItemView): - """:__regid__: *simplelist* - - Similar to :class:~cubicweb.web.views.baseviews.ListView but using '
                ' - instead of '
'. It relies on '
                  ' behaviour to separate items. HTML will - look like - - .. sourcecode:: html - -
                  "result of 'subvid' view for a row
                  - ... - - - It relies on base :class:`~cubicweb.view.View` class implementation of the - :meth:`call` method to insert those
                  . - """ - __regid__ = 'simplelist' - redirect_vid = 'incontext' - - def call(self, subvid=None, **kwargs): - """display a list of entities by calling their view - - :param listid: the DOM id to use for the root element - """ - if subvid is None and 'vid' in kwargs: - warn("should give a 'subvid' argument instead of 'vid'", - DeprecationWarning, stacklevel=2) - else: - kwargs['vid'] = subvid - return super(SimpleListView, self).call(**kwargs) - - -class SameETypeListView(EntityView): - """:__regid__: *sameetypelist* - - This view displays a list of entities of the same type, in HTML section - ('
') and calls the view `sameetypelistitem` for each entity of the - result set. It's designed to provide a better adapted global list when the displayed - entities are all of the same type (for instance, display a gallery if there - are only image entities). - """ - __regid__ = 'sameetypelist' - __select__ = EntityView.__select__ & one_etype_rset() - item_vid = 'sameetypelistitem' - - @property - def title(self): - etype = next(iter(self.cw_rset.column_types(0))) - return display_name(self._cw, etype, form='plural') - - def call(self, **kwargs): - """display a list of entities by calling their view""" - showtitle = kwargs.pop('showtitle', not 'vtitle' in self._cw.form) - if showtitle: - self.w(u'

                  %s

' % self.title) - super(SameETypeListView, self).call(**kwargs) - - def cell_call(self, row, col=0, **kwargs): - self.wview(self.item_vid, self.cw_rset, row=row, col=col, **kwargs) - - class SameETypeListItemView(EntityView): - __regid__ = 'sameetypelistitem' - - def cell_call(self, row, col, **kwargs): - self.wview('listitem', self.cw_rset, row=row, col=col, **kwargs) - - class CSVView(SimpleListView): - """:__regid__: *csv* - - This view displays each entity in a comma-separated list. It is NOT related - to the well-known text file format. - """ - __regid__ = 'csv' - redirect_vid = 'incontext' - separator = u', ' - - def call(self, subvid=None, **kwargs): - kwargs['vid'] = subvid - rset = self.cw_rset - for i in range(len(rset)): - self.cell_call(i, 0, **kwargs) - if i < rset.rowcount-1: - self.w(self.separator) - - # XXX to be documented views ################################################### - - class MetaDataView(EntityView): - """paragraph view of some metadata""" - __regid__ = 'metadata' - show_eid = True - - def cell_call(self, row, col): - _ = self._cw._ - entity = self.cw_rset.get_entity(row, col) - self.w(u'
                  ') - if self.show_eid: - self.w(u'%s #%s - ' % (entity.dc_type(), entity.eid)) - if entity.modification_date != entity.creation_date: - self.w(u'%s ' % _('latest update on')) - self.w(u'%s, ' - % self._cw.format_date(entity.modification_date)) - # entities from external source may not have a creation date (eg ldap) - if entity.creation_date: - self.w(u'%s ' % _('created on')) - self.w(u'%s' - % self._cw.format_date(entity.creation_date)) - if entity.creator: - if entity.creation_date: - self.w(u' %s ' % _('by')) - else: - self.w(u' %s ' % _('created_by')) - self.w(u'%s' % entity.creator.name()) - meta = entity.cw_metainformation() - if meta['source']['uri'] != 'system': - self.w(u' (%s' % _('cw_source')) - self.w(u' %s)' % meta['source']['uri']) - self.w(u'
                  ') - - -class TreeItemView(ListItemView): - __regid__ = 'treeitem' - - def cell_call(self, row, col): - self.wview('incontext', self.cw_rset, row=row, col=col) - - -class TextSearchResultView(EntityView): - """this view is used to display full-text search - - It tries to highlight part of data where the search word appears. - - XXX: finish me (fixed line width, fixed number of lines, CSS, etc.) - """ - __regid__ = 'tsearch' - - def cell_call(self, row, col, **kwargs): - entity = self.cw_rset.complete_entity(row, col) - self.w(entity.view('incontext')) - searched = self.cw_rset.searched_text() - if searched is None: - return - searched = searched.lower() - highlighted = '%s' % searched - for attr in entity.e_schema.indexable_attributes(): - try: - value = xml_escape(entity.printable_value(attr, format='text/plain').lower()) - except TransformError as ex: - continue - except Exception: - continue - if searched in value: - contexts = [] - for ctx in value.split(searched): - if len(ctx) > 30: - contexts.append(u'...' + ctx[-30:]) - else: - contexts.append(ctx) - value = u'\n' + highlighted.join(contexts) - self.w(value.replace('\n', '
')) - - class TooltipView(EntityView): - """An entity view used in a tooltip""" - __regid__ = 'tooltip' - def cell_call(self, row, col): - self.wview('oneline', self.cw_rset, row=row, col=col) - - class GroupByView(EntityView): - """grouped view of a result set. The `group_key` method returns the group - key of an entity (a string or tuple of strings). - - For each group, display a link to entities of this group by generating a URL - like / or //. - """ - __abstract__ = True - __select__ = EntityView.__select__ & match_kwargs('basepath') - entity_attribute = None - reversed = False - - def index_url(self, basepath, key, **kwargs): - if isinstance(key, (list, tuple)): - key = '/'.join(key) - return self._cw.build_url('%s/%s' % (basepath, key), - **kwargs) - - def index_link(self, basepath, key, items): - url = self.index_url(basepath, key) - if isinstance(key, (list, tuple)): - key = ' '.join(key) - return tags.a(key, href=url) - - def group_key(self, entity, **kwargs): - value = getattr(entity, self.entity_attribute) - if callable(value): - value = value() - return value - - def call(self, basepath, maxentries=None, **kwargs): - index = {} - for entity in self.cw_rset.entities(): - index.setdefault(self.group_key(entity, **kwargs), []).append(entity) - displayed = sorted(index) - if self.reversed: - displayed = reversed(displayed) - if maxentries is None: - needmore = False - else: - needmore = len(index) > maxentries - displayed = tuple(displayed)[:maxentries] - w = self.w - w(u'
                    ') - for key in displayed: - if key: - w(u'
                  • %s
                  • \n' % - self.index_link(basepath, key, index[key])) - if needmore: - url = self._cw.build_url('view', vid=self.__regid__, - rql=self.cw_rset.printable_rql()) - w( u'
                  • %s
                  • \n' % tags.a(u'[%s]' % self._cw._('see more'), - href=url)) - w(u'
                  \n') - - -class ArchiveView(GroupByView): - """archive view of a result set. Links to months are built using a basepath - parameters, eg using url like // - """ - __regid__ = 'cw.archive.by_date' - entity_attribute = 'creation_date' - reversed = True - - def group_key(self, entity, **kwargs): - value = super(ArchiveView, self).group_key(entity, **kwargs) - return '%04d' % value.year, '%02d' % value.month - - def index_link(self, basepath, key, items): - """represent a single month entry""" - year, month = key - label = u'%s %s [%s]' % (self._cw._(calendar.MONTHNAMES[int(month)-1]), - year, len(items)) - etypes = set(entity.cw_etype for entity in items) - vtitle = '%s %s' % (', '.join(display_name(self._cw, etype, 'plural') - for etype in etypes), - label) - title = self._cw._('archive for %(month)s/%(year)s') % { - 'month': month, 'year': year} - url = self.index_url(basepath, key, vtitle=vtitle) - return tags.a(label, href=url, title=title) - - -class AuthorView(GroupByView): - """author view of a result set. Links to month are built using a basepath - parameters, eg using url like / - """ - __regid__ = 'cw.archive.by_author' - entity_attribute = 'creator' - - def group_key(self, entity, **kwargs): - value = super(AuthorView, self).group_key(entity, **kwargs) - if value: - return (value.name(), value.login) - return (None, None) - - def index_link(self, basepath, key, items): - if key[0] is None: - return - label = u'%s [%s]' % (key[0], len(items)) - etypes = set(entity.cw_etype for entity in items) - vtitle = self._cw._('%(etype)s by %(author)s') % { - 'etype': ', '.join(display_name(self._cw, etype, 'plural') - for etype in etypes), - 'author': label} - url = self.index_url(basepath, key[1], vtitle=vtitle) - title = self._cw._('archive for %(author)s') % {'author': key[0]} - return tags.a(label, href=url, title=title) - - -# bw compat #################################################################### - -from logilab.common.deprecation import class_moved, class_deprecated - -from cubicweb.web.views import boxes, xmlrss, primary, tableview -PrimaryView = class_moved(primary.PrimaryView) -SideBoxView = class_moved(boxes.SideBoxView) -XmlView = class_moved(xmlrss.XMLView) -XmlItemView = class_moved(xmlrss.XMLItemView) -XmlRsetView = class_moved(xmlrss.XMLRsetView) -RssView = class_moved(xmlrss.RSSView) -RssItemView = class_moved(xmlrss.RSSItemView) -TableView = class_moved(tableview.TableView) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/bookmark.py --- a/web/views/bookmark.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,140 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Primary view for bookmarks + user's bookmarks box""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from logilab.mtconverter import xml_escape - -from cubicweb import Unauthorized -from cubicweb.predicates import is_instance, one_line_rset -from cubicweb.web import action, component, htmlwidgets, formwidgets as fw -from cubicweb.web.views import uicfg, primary -from cubicweb.web.views.ajaxcontroller import ajaxfunc - -_abaa = uicfg.actionbox_appearsin_addmenu -_abaa.tag_subject_of(('*', 'bookmarked_by', '*'), False) -_abaa.tag_object_of(('*', 'bookmarked_by', '*'), False) - -_afs = uicfg.autoform_section -_afs.tag_object_of(('*', 'bookmarked_by', 'CWUser'), 'main', 'metadata') -_afs.tag_attribute(('Bookmark', 'path'), 'main', 'attributes') -_afs.tag_attribute(('Bookmark', 'path'), 'muledit', 'attributes') - -_affk = uicfg.autoform_field_kwargs -_affk.tag_attribute(('Bookmark', 'path'), {'widget': fw.EditableURLWidget}) - - -class FollowAction(action.Action): - __regid__ = 'follow' - __select__ = one_line_rset() & is_instance('Bookmark') - - title = _('follow') - category = 'mainactions' - - def url(self): - return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).actual_url() - - -class BookmarkPrimaryView(primary.PrimaryView): - __select__ = is_instance('Bookmark') - - def cell_call(self, row, col): - """the primary view for bookmark entity""" - entity = self.cw_rset.complete_entity(row, col) - self.w(u' ') - self.w(u"") - self.w(u"%s : %s" % (self._cw._('Bookmark'), xml_escape(entity.title))) - self.w(u"") - self.w(u'

                  ' % ( - xml_escape(entity.actual_url()))) - self.w(u'') - self.w(u'

                  %s%s

                  ' % (self._cw._('Used by:'), ', '.join(xml_escape(u.name()) - for u in entity.bookmarked_by))) - self.w(u'
                  ') - - -class BookmarksBox(component.CtxComponent): - """display a box containing all user's bookmarks""" - __regid__ = 'bookmarks_box' - - title = _('bookmarks') - order = 40 - rql = ('Any B,T,P ORDERBY lower(T) ' - 'WHERE B is Bookmark,B title T, B path P, B bookmarked_by U, ' - 'U eid %(x)s') - - def init_rendering(self): - ueid = self._cw.user.eid - self.bookmarks_rset = self._cw.execute(self.rql, {'x': ueid}) - rschema = self._cw.vreg.schema.rschema('bookmarked_by') - eschema = self._cw.vreg.schema.eschema('Bookmark') - self.can_delete = rschema.has_perm(self._cw, 'delete', toeid=ueid) - self.can_edit = (eschema.has_perm(self._cw, 'add') and - rschema.has_perm(self._cw, 'add', toeid=ueid)) - if not self.bookmarks_rset and not self.can_edit: - raise component.EmptyComponent() - self.items = [] - - def render_body(self, w): - ueid = self._cw.user.eid - req = self._cw - if self.can_delete: - req.add_js('cubicweb.ajax.js') - for bookmark in self.bookmarks_rset.entities(): - label = self.link(bookmark.title, bookmark.action_url()) - if self.can_delete: - dlink = u'[-]' % ( - bookmark.eid, req._('delete this bookmark')) - label = '
                  %s %s
                  ' % (dlink, label) - self.append(label) - if self.can_edit: - menu = htmlwidgets.BoxMenu(req._('manage bookmarks')) - linkto = 'bookmarked_by:%s:subject' % ueid - # use a relative path so that we can move the instance without - # loosing bookmarks - path = req.relative_path() - # XXX if vtitle specified in params, extract it and use it as - # default value for bookmark's title - url = req.vreg['etypes'].etype_class('Bookmark').cw_create_url( - req, __linkto=linkto, path=path) - menu.append(self.link(req._('bookmark this page'), url)) - if self.bookmarks_rset: - if req.user.is_in_group('managers'): - bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, U eid %s' % ueid - erset = self.bookmarks_rset - else: - # we can't edit shared bookmarks we don't own - bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s' - erset = req.execute(bookmarksrql, {'x': ueid}, - build_descr=False) - bookmarksrql %= {'x': ueid} - if erset: - url = req.build_url(vid='muledit', rql=bookmarksrql) - menu.append(self.link(req._('edit bookmarks'), url)) - url = req.user.absolute_url(vid='xaddrelation', rtype='bookmarked_by', - target='subject') - menu.append(self.link(req._('pick existing bookmarks'), url)) - self.append(menu) - self.render_items(w) - -@ajaxfunc -def delete_bookmark(self, beid): - rql = 'DELETE B bookmarked_by U WHERE B eid %(b)s, U eid %(u)s' - self._cw.execute(rql, {'b': int(beid), 'u' : self._cw.user.eid}) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/boxes.py --- a/web/views/boxes.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,279 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Generic boxes for CubicWeb web client: - -* actions box -* search box - -Additional boxes (disabled by default): -* schema box -* possible views box -* startup views box -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from six import text_type, add_metaclass - -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import class_deprecated - -from cubicweb import Unauthorized -from cubicweb.predicates import (match_user_groups, match_kwargs, - non_final_entity, nonempty_rset, - match_context, contextual) -from cubicweb.utils import wrap_on_write -from cubicweb.view import EntityView -from cubicweb.schema import display_name -from cubicweb.web import component, box, htmlwidgets - -# XXX bw compat, some cubes import this class from here -BoxTemplate = box.BoxTemplate -BoxHtml = htmlwidgets.BoxHtml - -class EditBox(component.CtxComponent): - """ - box with all actions impacting the entity displayed: edit, copy, delete - change state, add related entities... 
- """ - __regid__ = 'edit_box' - - title = _('actions') - order = 2 - contextual = True - __select__ = component.CtxComponent.__select__ & non_final_entity() - - def init_rendering(self): - super(EditBox, self).init_rendering() - _ = self._cw._ - self._menus_in_order = [] - self._menus_by_id = {} - # build list of actions - actions = self._cw.vreg['actions'].possible_actions(self._cw, self.cw_rset, - **self.cw_extra_kwargs) - other_menu = self._get_menu('moreactions', _('more actions')) - for category, defaultmenu in (('mainactions', self), - ('moreactions', other_menu), - ('addrelated', None)): - for action in actions.get(category, ()): - if action.submenu: - menu = self._get_menu(action.submenu) - else: - menu = defaultmenu - action.fill_menu(self, menu) - # if we've nothing but actions in the other_menu, add them directly into the box - if not self.items and len(self._menus_by_id) == 1 and not other_menu.is_empty(): - self.items = other_menu.items - else: # ensure 'more actions' menu appears last - self._menus_in_order.remove(other_menu) - self._menus_in_order.append(other_menu) - for submenu in self._menus_in_order: - self.add_submenu(self, submenu) - if not self.items: - raise component.EmptyComponent() - - def render_title(self, w): - title = self._cw._(self.title) - if self.cw_rset: - etypes = self.cw_rset.column_types(0) - if len(etypes) == 1: - plural = self.cw_rset.rowcount > 1 and 'plural' or '' - etypelabel = display_name(self._cw, next(iter(etypes)), plural) - title = u'%s - %s' % (title, etypelabel.lower()) - w(title) - - def render_body(self, w): - self.render_items(w) - - def _get_menu(self, id, title=None, label_prefix=None): - try: - return self._menus_by_id[id] - except KeyError: - if title is None: - title = self._cw._(id) - self._menus_by_id[id] = menu = htmlwidgets.BoxMenu(title) - menu.label_prefix = label_prefix - self._menus_in_order.append(menu) - return menu - - def add_submenu(self, box, submenu, label_prefix=None): - appendanyway = getattr(submenu, 'append_anyway', False) - if len(submenu.items) == 1 and not appendanyway: - boxlink = submenu.items[0] - if submenu.label_prefix: - # XXX iirk - if hasattr(boxlink, 'label'): - boxlink.label = u'%s %s' % (submenu.label_prefix, boxlink.label) - else: - boxlink = u'%s %s' % (submenu.label_prefix, boxlink) - box.append(boxlink) - elif submenu.items: - box.append(submenu) - elif appendanyway: - box.append(xml_escape(submenu.label)) - - -class SearchBox(component.CtxComponent): - """display a box with a simple search form""" - __regid__ = 'search_box' - - title = _('search') - order = 0 - formdef = u"""
                  -
                  - - - - - -
                  -
                  """ - - def render_title(self, w): - w(u"""%s""" - % self._cw._(self.title)) - - def render_body(self, w): - if self._cw.form.pop('__fromsearchbox', None): - rql = self._cw.form.get('rql', '') - else: - rql = '' - tabidx1 = self._cw.next_tabindex() - tabidx2 = self._cw.next_tabindex() - w(self.formdef % {'action': self._cw.build_url('view'), - 'value': xml_escape(rql), - 'id': self.cw_extra_kwargs.get('domid', 'tsearch'), - 'tabindex1': tabidx1, - 'tabindex2': tabidx2}) - - -# boxes disabled by default ################################################### - -class PossibleViewsBox(component.CtxComponent): - """display a box containing links to all possible views""" - __regid__ = 'possible_views_box' - - contextual = True - title = _('possible views') - order = 10 - visible = False # disabled by default - - def init_rendering(self): - self.views = [v for v in self._cw.vreg['views'].possible_views(self._cw, - rset=self.cw_rset) - if v.category != 'startupview'] - if not self.views: - raise component.EmptyComponent() - self.items = [] - - def render_body(self, w): - for category, views in box.sort_by_category(self.views): - menu = htmlwidgets.BoxMenu(self._cw._(category), ident=category) - for view in views: - menu.append(self.action_link(view)) - self.append(menu) - self.render_items(w) - - -class StartupViewsBox(PossibleViewsBox): - """display a box containing links to all startup views""" - __regid__ = 'startup_views_box' - - contextual = False - title = _('startup views') - order = 70 - visible = False # disabled by default - - def init_rendering(self): - self.views = [v for v in self._cw.vreg['views'].possible_views(self._cw) - if v.category == 'startupview'] - if not self.views: - raise component.EmptyComponent() - self.items = [] - - -class RsetBox(component.CtxComponent): - """helper view class to display an rset in a sidebox""" - __select__ = nonempty_rset() & match_kwargs('title', 'vid') - __regid__ = 'rsetbox' - cw_property_defs = {} - context = 'incontext' - - @property - def domid(self): - return super(RsetBox, self).domid + text_type(abs(id(self))) + text_type(abs(id(self.cw_rset))) - - def render_title(self, w): - w(self.cw_extra_kwargs['title']) - - def render_body(self, w): - if 'dispctrl' in self.cw_extra_kwargs: - # XXX do not modify dispctrl! - self.cw_extra_kwargs['dispctrl'].setdefault('subvid', 'outofcontext') - self.cw_extra_kwargs['dispctrl'].setdefault('use_list_limit', 1) - self._cw.view(self.cw_extra_kwargs['vid'], self.cw_rset, w=w, - initargs=self.cw_extra_kwargs) - - # helper classes ############################################################## - -@add_metaclass(class_deprecated) -class SideBoxView(EntityView): - """helper view class to display some entities in a sidebox""" - __deprecation_warning__ = '[3.10] SideBoxView is deprecated, use RsetBox instead (%(cls)s)' - - __regid__ = 'sidebox' - - def call(self, title=u'', **kwargs): - """display a list of entities by calling their view""" - if 'dispctrl' in self.cw_extra_kwargs: - # XXX do not modify dispctrl! 
- self.cw_extra_kwargs['dispctrl'].setdefault('subvid', 'outofcontext') - self.cw_extra_kwargs['dispctrl'].setdefault('use_list_limit', 1) - if title: - self.cw_extra_kwargs['title'] = title - self.cw_extra_kwargs.setdefault('context', 'incontext') - box = self._cw.vreg['ctxcomponents'].select( - 'rsetbox', self._cw, rset=self.cw_rset, vid='autolimited', - **self.cw_extra_kwargs) - box.render(self.w) - - -class ContextualBoxLayout(component.Layout): - __select__ = match_context('incontext', 'left', 'right') & contextual() - # predefined class in cubicweb.css: contextualBox | contextFreeBox - cssclass = 'contextualBox' - - def render(self, w): - if self.init_rendering(): - view = self.cw_extra_kwargs['view'] - w(u'
                  ' % (self.cssclass, view.cssclass, - view.domid)) - with wrap_on_write(w, u'
                  ', - u'
                  ') as wow: - view.render_title(wow) - w(u'
                  ') - view.render_body(w) - # boxFooter div is a CSS place holder (for shadow for example) - w(u'
                  \n') - - -class ContextFreeBoxLayout(ContextualBoxLayout): - __select__ = match_context('incontext', 'left', 'right') & ~contextual() - cssclass = 'contextFreeBox' diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/calendar.py --- a/web/views/calendar.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,244 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""html calendar views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import copy -from datetime import timedelta - -from logilab.mtconverter import xml_escape -from logilab.common.date import todatetime - -from cubicweb.utils import json_dumps, make_uid -from cubicweb.predicates import adaptable -from cubicweb.view import EntityView, EntityAdapter - -# useful constants & functions ################################################ - -ONEDAY = timedelta(1) - -WEEKDAYS = (_("monday"), _("tuesday"), _("wednesday"), _("thursday"), - _("friday"), _("saturday"), _("sunday")) -MONTHNAMES = ( _('january'), _('february'), _('march'), _('april'), _('may'), - _('june'), _('july'), _('august'), _('september'), _('october'), - _('november'), _('december') - ) - - -class ICalendarableAdapter(EntityAdapter): - __needs_bw_compat__ = True - __regid__ = 'ICalendarable' - __abstract__ = True - - @property - def start(self): - """return start date""" - raise NotImplementedError - - @property - def stop(self): - """return stop date""" - raise NotImplementedError - - -# Calendar views ############################################################## - -try: - from vobject import iCalendar - - class iCalView(EntityView): - """A calendar view that generates a iCalendar file (RFC 2445) - - Does apply to ICalendarable compatible entities - """ - __select__ = adaptable('ICalendarable') - paginable = False - content_type = 'text/calendar' - title = _('iCalendar') - templatable = False - __regid__ = 'ical' - - def call(self): - ical = iCalendar() - for i in range(len(self.cw_rset.rows)): - task = self.cw_rset.complete_entity(i, 0) - event = ical.add('vevent') - event.add('summary').value = task.dc_title() - event.add('description').value = task.dc_description() - icalendarable = task.cw_adapt_to('ICalendarable') - if icalendarable.start: - event.add('dtstart').value = icalendarable.start - if icalendarable.stop: - event.add('dtend').value = icalendarable.stop - - buff = ical.serialize() - if not isinstance(buff, unicode): - buff = unicode(buff, self._cw.encoding) - self.w(buff) - -except ImportError: - pass - -class hCalView(EntityView): - """A calendar view that generates a hCalendar file - - Does apply to ICalendarable compatible entities - """ - __regid__ = 'hcal' - __select__ = adaptable('ICalendarable') - paginable = False - title = _('hCalendar') - 
#templatable = False - - def call(self): - self.w(u'
                  ') - for i in range(len(self.cw_rset.rows)): - task = self.cw_rset.complete_entity(i, 0) - self.w(u'
                  ') - self.w(u'

                  %s

                  ' % xml_escape(task.dc_title())) - self.w(u'
                  %s
                  ' - % task.dc_description(format='text/html')) - icalendarable = task.cw_adapt_to('ICalendarable') - if icalendarable.start: - self.w(u'%s' - % (icalendarable.start.isoformat(), - self._cw.format_date(icalendarable.start))) - if icalendarable.stop: - self.w(u'%s' - % (icalendarable.stop.isoformat(), - self._cw.format_date(icalendarable.stop))) - self.w(u'
                  ') - self.w(u'
                  ') - - -class CalendarItemView(EntityView): - __regid__ = 'calendaritem' - - def cell_call(self, row, col, dates=False): - task = self.cw_rset.complete_entity(row, 0) - task.view('oneline', w=self.w) - if dates: - icalendarable = task.cw_adapt_to('ICalendarable') - if icalendarable.start and icalendarable.stop: - self.w('
                  %s' % self._cw._('from %(date)s') - % {'date': self._cw.format_date(icalendarable.start)}) - self.w('
                  %s' % self._cw._('to %(date)s') - % {'date': self._cw.format_date(icalendarable.stop)}) - else: - self.w('
                  %s'%self._cw.format_date(icalendarable.start - or icalendarable.stop)) - - -class _TaskEntry(object): - def __init__(self, task, color, index=0): - self.task = task - self.color = color - self.index = index - self.length = 1 - icalendarable = task.cw_adapt_to('ICalendarable') - self.start = icalendarable.start - self.stop = icalendarable.stop - - def in_working_hours(self): - """predicate returning True is the task is in working hours""" - if todatetime(self.start).hour > 7 and todatetime(self.stop).hour < 20: - return True - return False - - def is_one_day_task(self): - return self.start and self.stop and self.start.isocalendar() == self.stop.isocalendar() - - -class CalendarView(EntityView): - __regid__ = 'calendar' - __select__ = adaptable('ICalendarable') - - paginable = False - title = _('calendar') - - fullcalendar_options = { - 'firstDay': 1, - 'firstHour': 8, - 'defaultView': 'month', - 'editable': True, - 'header': {'left': 'prev,next today', - 'center': 'title', - 'right': 'month,agendaWeek,agendaDay', - }, - } - - def call(self): - self._cw.add_css(('fullcalendar.css', 'cubicweb.calendar.css')) - self._cw.add_js(('jquery.ui.js', 'fullcalendar.min.js', 'jquery.qtip.min.js', 'fullcalendar.locale.js')) - self.calendar_id = 'cal' + make_uid('uid') - self.add_onload() - # write calendar div to load jquery fullcalendar object - self.w(u'
                  ' % self.calendar_id) - - def add_onload(self): - fullcalendar_options = self.fullcalendar_options.copy() - fullcalendar_options['events'] = self.get_events() - # i18n - # js callback to add a tooltip and to put html in event's title - js = """ - var options = $.fullCalendar.regional('%s', %s); - options.eventRender = function(event, $element) { - // add a tooltip for each event - var div = '
                  '+ event.description+ '
                  '; - $element.append(div); - // allow to have html tags in event's title - $element.find('span.fc-event-title').html($element.find('span.fc-event-title').text()); - }; - $("#%s").fullCalendar(options); - """ #" - self._cw.add_onload(js % (self._cw.lang, json_dumps(fullcalendar_options), self.calendar_id)) - - def get_events(self): - events = [] - for entity in self.cw_rset.entities(): - icalendarable = entity.cw_adapt_to('ICalendarable') - if not (icalendarable.start and icalendarable.stop): - continue - start_date = icalendarable.start or icalendarable.stop - event = {'eid': entity.eid, - 'title': entity.view('calendaritem'), - 'url': xml_escape(entity.absolute_url()), - 'className': 'calevent', - 'description': entity.view('tooltip'), - } - event['start'] = start_date.strftime('%Y-%m-%dT%H:%M') - event['allDay'] = True - if icalendarable.stop: - event['end'] = icalendarable.stop.strftime('%Y-%m-%dT%H:%M') - event['allDay'] = False - events.append(event) - return events - -class OneMonthCal(CalendarView): - __regid__ = 'onemonthcal' - - title = _('one month') - -class OneWeekCal(CalendarView): - __regid__ = 'oneweekcal' - - title = _('one week') - fullcalendar_options = CalendarView.fullcalendar_options.copy() - fullcalendar_options['defaultView'] = 'agendaWeek' diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/csvexport.py --- a/web/views/csvexport.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,107 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""csv export views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six import PY2 -from six.moves import range - -from cubicweb.schema import display_name -from cubicweb.predicates import any_rset, empty_rset -from cubicweb.uilib import UnicodeCSVWriter -from cubicweb.view import EntityView, AnyRsetView - -class CSVMixIn(object): - """mixin class for CSV views""" - templatable = False - content_type = "text/comma-separated-values" - binary = PY2 # python csv module is unicode aware in py3k - csv_params = {'dialect': 'excel', - 'quotechar': '"', - 'delimiter': ';', - 'lineterminator': '\n'} - - def set_request_content_type(self): - """overriden to set a .csv filename""" - self._cw.set_content_type(self.content_type, filename='cubicwebexport.csv') - - def csvwriter(self, **kwargs): - params = self.csv_params.copy() - params.update(kwargs) - return UnicodeCSVWriter(self.w, self._cw.encoding, **params) - - -class CSVRsetView(CSVMixIn, AnyRsetView): - """dumps raw result set in CSV""" - __regid__ = 'csvexport' - __select__ = any_rset() - title = _('csv export') - - def call(self): - writer = self.csvwriter() - writer.writerow(self.columns_labels()) - rset, descr = self.cw_rset, self.cw_rset.description - eschema = self._cw.vreg.schema.eschema - for rowindex, row in enumerate(rset): - csvrow = [] - for colindex, val in enumerate(row): - etype = descr[rowindex][colindex] - if val is not None and not eschema(etype).final: - # csvrow.append(val) # val is eid in that case - content = self._cw.view('textincontext', rset, - row=rowindex, col=colindex) - else: - content = self._cw.view('final', rset, - format='text/plain', - row=rowindex, col=colindex) - csvrow.append(content) - writer.writerow(csvrow) - - -class CSVEntityView(CSVMixIn, EntityView): - """dumps rset's entities (with full set of attributes) in CSV - - the generated CSV file will have a table per entity type found in the - resultset. ('table' here only means empty lines separation between table - contents) - """ - __regid__ = 'ecsvexport' - __select__ = EntityView.__select__ | empty_rset() - title = _('csv export (entities)') - - def call(self): - req = self._cw - rows_by_type = {} - writer = self.csvwriter() - rowdef_by_type = {} - for index in range(len(self.cw_rset)): - entity = self.cw_rset.complete_entity(index) - if entity.e_schema not in rows_by_type: - rowdef_by_type[entity.e_schema] = [rs for rs, at in entity.e_schema.attribute_definitions() - if at != 'Bytes'] - rows_by_type[entity.e_schema] = [[display_name(req, rschema.type) - for rschema in rowdef_by_type[entity.e_schema]]] - rows = rows_by_type[entity.e_schema] - rows.append([entity.printable_value(rs.type, format='text/plain') - for rs in rowdef_by_type[entity.e_schema]]) - for rows in rows_by_type.values(): - writer.writerows(rows) - # use two empty lines as separator - writer.writerows([[], []]) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/cwproperties.py --- a/web/views/cwproperties.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,442 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Specific views for CWProperty (eg site/user preferences""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from logilab.mtconverter import xml_escape - -from logilab.common.decorators import cached - -from cubicweb import UnknownProperty -from cubicweb.predicates import (one_line_rset, none_rset, is_instance, - match_user_groups, logged_user_in_rset) -from cubicweb.view import StartupView -from cubicweb.web import stdmsgs -from cubicweb.web.form import FormViewMixIn -from cubicweb.web.formfields import FIELDS, StringField -from cubicweb.web.formwidgets import (Select, TextInput, Button, SubmitButton, - FieldWidget) -from cubicweb.web.views import uicfg, primary, formrenderers, editcontroller -from cubicweb.web.views.ajaxcontroller import ajaxfunc - -uicfg.primaryview_section.tag_object_of(('*', 'for_user', '*'), 'hidden') - -# some string we want to be internationalizable for nicer display of property -# groups -_('navigation') -_('ui') -_('boxes') -_('components') -_('ctxcomponents') -_('navigation.combobox-limit') -_('navigation.page-size') -_('navigation.related-limit') -_('navigation.short-line-size') -_('ui.date-format') -_('ui.datetime-format') -_('ui.default-text-format') -_('ui.fckeditor') -_('ui.float-format') -_('ui.language') -_('ui.time-format') -_('open all') -_('ui.main-template') -_('ui.site-title') -_('ui.encoding') -_('category') - - -def make_togglable_link(nodeid, label): - """builds a HTML link that switches the visibility & remembers it""" - return u'%s' % ( - nodeid, label) - -def css_class(someclass): - return someclass and 'class="%s"' % someclass or '' - - -class CWPropertyPrimaryView(primary.PrimaryView): - __select__ = is_instance('CWProperty') - skip_none = False - - -class SystemCWPropertiesForm(FormViewMixIn, StartupView): - """site-wide properties edition form""" - __regid__ = 'systempropertiesform' - __select__ = none_rset() & match_user_groups('managers') - form_buttons = [SubmitButton()] - - title = _('site configuration') - category = 'startupview' - - def linkable(self): - return True - - def url(self): - """return the url associated with this view. We can omit rql here""" - return self._cw.build_url('view', vid=self.__regid__) - - def _cookie_name(self, somestr): - return str('%s_property_%s' % (self._cw.vreg.config.appid, somestr)) - - def _group_status(self, group, default=u'hidden'): - """return css class name 'hidden' (collapsed), or '' (open)""" - cookies = self._cw.get_cookie() - cookiename = self._cookie_name(group) - cookie = cookies.get(cookiename) - if cookie is None: - self._cw.set_cookie(cookiename, default, maxage=None) - status = default - else: - status = cookie.value - return status - - def call(self, **kwargs): - self._cw.add_js(('cubicweb.preferences.js', - 'cubicweb.edition.js', 'cubicweb.ajax.js')) - self._cw.add_css('cubicweb.preferences.css') - values = self.defined_keys - mainopts, groupedopts = self.group_properties() - # precompute all forms first to consume error message - mainforms, groupedforms = self.build_forms(mainopts, groupedopts) - _ = self._cw._ - self.w(u'

                  %s

                  \n' % _(self.title)) - for label, group, form in sorted((_(g), g, f) - for g, f in mainforms.items()): - self.wrap_main_form(group, label, form) - for label, group, objects in sorted((_(g), g, o) - for g, o in groupedforms.items()): - self.wrap_grouped_form(group, label, objects) - - @property - @cached - def cwprops_rset(self): - return self._cw.execute('Any P,K,V WHERE P is CWProperty, P pkey K, ' - 'P value V, NOT P for_user U') - - @property - def defined_keys(self): - values = {} - for i, entity in enumerate(self.cwprops_rset.entities()): - values[entity.pkey] = i - return values - - def group_properties(self): - mainopts, groupedopts = {}, {} - vreg = self._cw.vreg - # "self._regid__=='systempropertiesform'" to skip site wide properties on - # user's preference but not site's configuration - for key in vreg.user_property_keys(self.__regid__=='systempropertiesform'): - parts = key.split('.') - if parts[0] in vreg and len(parts) >= 3: - # appobject configuration - reg = parts[0] - propid = parts[-1] - oid = '.'.join(parts[1:-1]) - groupedopts.setdefault(reg, {}).setdefault(oid, []).append(key) - else: - mainopts.setdefault(parts[0], []).append(key) - return mainopts, groupedopts - - def build_forms(self, mainopts, groupedopts): - mainforms, groupedforms = {}, {} - for group, keys in mainopts.items(): - mainforms[group] = self.form(group, keys, False) - for group, objects in groupedopts.items(): - groupedforms[group] = {} - for oid, keys in objects.items(): - groupedforms[group][oid] = self.form(group + '_' + oid, keys, True) - return mainforms, groupedforms - - def entity_for_key(self, key): - values = self.defined_keys - if key in values: - entity = self.cwprops_rset.get_entity(values[key], 0) - else: - entity = self._cw.vreg['etypes'].etype_class('CWProperty')(self._cw) - entity.eid = next(self._cw.varmaker) - entity.cw_attr_cache['pkey'] = key - entity.cw_attr_cache['value'] = self._cw.vreg.property_value(key) - return entity - - def form(self, formid, keys, splitlabel=False): - form = self._cw.vreg['forms'].select( - 'composite', self._cw, domid=formid, action=self._cw.build_url(), - form_buttons=self.form_buttons, - onsubmit="return validatePrefsForm('%s')" % formid, - submitmsg=self._cw._('changes applied')) - path = self._cw.relative_path() - if '?' in path: - path, params = path.split('?', 1) - form.add_hidden('__redirectparams', params) - form.add_hidden('__redirectpath', path) - for key in keys: - self.form_row(form, key, splitlabel) - renderer = self._cw.vreg['formrenderers'].select('cwproperties', self._cw, - display_progress_div=False) - data = [] - form.render(w=data.append, renderer=renderer) - return u'\n'.join(data) - - def form_row(self, form, key, splitlabel): - entity = self.entity_for_key(key) - if splitlabel: - label = key.split('.')[-1] - else: - label = key - subform = self._cw.vreg['forms'].select('base', self._cw, entity=entity, - mainform=False) - subform.append_field(PropertyValueField(name='value', label=label, role='subject', - eidparam=True)) - subform.add_hidden('pkey', key, eidparam=True, role='subject') - form.add_subform(subform) - return subform - - def wrap_main_form(self, group, label, form): - status = css_class(self._group_status(group)) - self.w(u'
                  %s
                  \n' % - (make_togglable_link('fieldset_' + group, label))) - self.w(u'
                  ' % (group, status)) - self.w(u'
                  ') - self.w(form) - self.w(u'
                  ') - - def wrap_grouped_form(self, group, label, objects): - status = css_class(self._group_status(group)) - self.w(u'
                  %s
                  \n' % - (make_togglable_link('fieldset_' + group, label))) - self.w(u'
                  ' % (group, status)) - sorted_objects = sorted((self._cw.__('%s_%s' % (group, o)), o, f) - for o, f in objects.items()) - for label, oid, form in sorted_objects: - self.wrap_object_form(group, oid, label, form) - self.w(u'
                  ') - - def wrap_object_form(self, group, oid, label, form): - w = self.w - w(u'
                  ') - w(u'''') - docmsgid = '%s_%s_description' % (group, oid) - doc = self._cw._(docmsgid) - if doc != docmsgid: - w(u'
                  %s
                  ' % xml_escape(doc).capitalize()) - w(u'
                  ') - w(u'') - - -class CWPropertiesForm(SystemCWPropertiesForm): - """user's preferences properties edition form""" - __regid__ = 'propertiesform' - __select__ = ( - (none_rset() & match_user_groups('users','managers')) - | (one_line_rset() & match_user_groups('users') & logged_user_in_rset()) - | (one_line_rset() & match_user_groups('managers') & is_instance('CWUser')) - ) - - title = _('user preferences') - - @property - def user(self): - if self.cw_rset is None: - return self._cw.user - return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - - @property - @cached - def cwprops_rset(self): - return self._cw.execute('Any P,K,V WHERE P is CWProperty, P pkey K, P value V,' - 'P for_user U, U eid %(x)s', {'x': self.user.eid}) - - def form_row(self, form, key, splitlabel): - subform = super(CWPropertiesForm, self).form_row(form, key, splitlabel) - # if user is in the managers group and the property is being created, - # we have to set for_user explicitly - if not subform.edited_entity.has_eid() and self.user.matching_groups('managers'): - subform.add_hidden('for_user', self.user.eid, eidparam=True, role='subject') - return subform - -# cwproperty form objects ###################################################### - -class PlaceHolderWidget(FieldWidget): - - def render(self, form, field, renderer): - domid = field.dom_id(form) - # empty span as well else html validation fail (label is refering to - # this id) - return '
                  %s
                  ' % ( - domid, domid, form._cw._('select a key first')) - - -class NotEditableWidget(FieldWidget): - def __init__(self, value, msg=None): - self.value = value - self.msg = msg - - def render(self, form, field, renderer): - domid = field.dom_id(form) - value = '%s' % (domid, self.value) - if self.msg: - value += '
<div class="helper">%s</div>
                  ' % self.msg - return value - - -class PropertyKeyField(StringField): - """specific field for CWProperty.pkey to set the value widget according to - the selected key - """ - widget = Select - - def render(self, form, renderer): - wdg = self.get_widget(form) - # pylint: disable=E1101 - wdg.attrs['tabindex'] = form._cw.next_tabindex() - wdg.attrs['onchange'] = "javascript:setPropValueWidget('%s', %s)" % ( - form.edited_entity.eid, form._cw.next_tabindex()) - return wdg.render(form, self, renderer) - - def vocabulary(self, form): - entity = form.edited_entity - _ = form._cw._ - if entity.has_eid(): - return [(_(entity.pkey), entity.pkey)] - choices = entity._cw.vreg.user_property_keys() - return [(u'', u'')] + sorted(zip((_(v) for v in choices), choices)) - - -class PropertyValueField(StringField): - """specific field for CWProperty.value which will be different according to - the selected key type and vocabulary information - """ - widget = PlaceHolderWidget - - def render(self, form, renderer=None, tabindex=None): - wdg = self.get_widget(form) - if tabindex is not None: - wdg.attrs['tabindex'] = tabindex - return wdg.render(form, self, renderer) - - def form_init(self, form): - entity = form.edited_entity - if not (entity.has_eid() or 'pkey' in entity.cw_attr_cache): - # no key set yet, just include an empty div which will be filled - # on key selection - return - try: - pdef = form._cw.vreg.property_info(entity.pkey) - except UnknownProperty as ex: - form.warning('%s (you should probably delete that property ' - 'from the database)', ex) - msg = form._cw._('you should probably delete that property') - self.widget = NotEditableWidget(entity.printable_value('value'), - '%s (%s)' % (msg, ex)) - return - if entity.pkey.startswith('system.'): - msg = form._cw._('value associated to this key is not editable ' - 'manually') - self.widget = NotEditableWidget(entity.printable_value('value'), msg) - # XXX race condition when used from CWPropertyForm, should not rely on - # instance attributes - self.value = pdef['default'] - self.help = pdef['help'] - vocab = pdef['vocabulary'] - if vocab is not None: - if callable(vocab): - # list() just in case its a generator function - self.choices = list(vocab()) - else: - self.choices = vocab - wdg = Select() - elif pdef['type'] == 'String': # else we'll get a TextArea by default - wdg = TextInput() - else: - field = FIELDS[pdef['type']]() - wdg = field.widget - if pdef['type'] == 'Boolean': - self.choices = field.vocabulary(form) - self.widget = wdg - - -class CWPropertiesFormRenderer(formrenderers.FormRenderer): - """specific renderer for properties""" - __regid__ = 'cwproperties' - - def open_form(self, form, values): - err = '
                  ' - return super(CWPropertiesFormRenderer, self).open_form(form, values) + err - - def _render_fields(self, fields, w, form): - for field in fields: - w(u'
                  \n') - if self.display_label: - w(u'%s' % self.render_label(form, field)) - error = form.field_error(field) - if error: - w(u'%s' % error) - w(u'%s' % self.render_help(form, field)) - w(u'
                  ') - w(field.render(form, self)) - w(u'
                  ') - w(u'
                  ') - - def render_buttons(self, w, form): - w(u'
                  \n') - for button in form.form_buttons: - w(u'%s\n' % button.render(form)) - w(u'
                  ') - - -class CWPropertyIEditControlAdapter(editcontroller.IEditControlAdapter): - __select__ = is_instance('CWProperty') - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - return 'view', {} - - -@ajaxfunc(output_type='xhtml') -def prop_widget(self, propkey, varname, tabindex=None): - """specific method for CWProperty handling""" - entity = self._cw.vreg['etypes'].etype_class('CWProperty')(self._cw) - entity.eid = varname - entity.pkey = propkey - form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity) - form.build_context() - vfield = form.field_by_name('value', 'subject') - renderer = formrenderers.FormRenderer(self._cw) - return vfield.render(form, renderer, tabindex=tabindex) \ - + renderer.render_help(form, vfield) - -_afs = uicfg.autoform_section -_afs.tag_subject_of(('*', 'for_user', '*'), 'main', 'hidden') -_afs.tag_object_of(('*', 'for_user', '*'), 'main', 'hidden') -_aff = uicfg.autoform_field -_aff.tag_attribute(('CWProperty', 'pkey'), PropertyKeyField) -_aff.tag_attribute(('CWProperty', 'value'), PropertyValueField) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/cwsources.py --- a/web/views/cwsources.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,516 +0,0 @@ -# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Specific views for data sources and related entities (eg CWSource, -CWSourceHostConfig, CWSourceSchemaConfig). 
-""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import logging -from itertools import repeat - -from six.moves import range - -from logilab.mtconverter import xml_escape -from logilab.common.decorators import cachedproperty - -from cubicweb import Unauthorized, tags -from cubicweb.utils import make_uid -from cubicweb.predicates import (is_instance, score_entity, has_related_entities, - match_user_groups, match_kwargs, match_view, one_line_rset) -from cubicweb.view import EntityView, StartupView -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name -from cubicweb.web import Redirect, formwidgets as wdgs, facet, action -from cubicweb.web.views import add_etype_button -from cubicweb.web.views import (uicfg, tabs, actions, ibreadcrumbs, navigation, - tableview, pyviews) - - -_abaa = uicfg.actionbox_appearsin_addmenu -# there are explicit 'add' buttons for those -_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_schema', '*'), False) -_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_for_source', '*'), False) -_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_host_config_of', '*'), False) -_abaa.tag_object_of(('CWDataImport', 'cw_import_of', '*'), False) - -_afs = uicfg.autoform_section -_afs.tag_attribute(('CWSource', 'latest_retrieval'), 'main', 'hidden') -_afs.tag_attribute(('CWSource', 'in_synchronization'), 'main', 'hidden') -_afs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'main', 'hidden') - -_affk = uicfg.autoform_field_kwargs -_affk.tag_attribute(('CWSource', 'parser'), {'widget': wdgs.TextInput}) - -# source primary views ######################################################### - -_pvs = uicfg.primaryview_section -_pvs.tag_attribute(('CWSource', 'name'), 'hidden') -_pvs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'hidden') -_pvs.tag_object_of(('*', 'cw_host_config_of', 'CWSource'), 'hidden') - -_pvdc = uicfg.primaryview_display_ctrl -_pvdc.tag_attribute(('CWSource', 'type'), {'vid': 'attribute'})# disable reledit - -_rc = uicfg.reledit_ctrl -_rc.tag_attribute(('CWSource', 'config'), {'rvid': 'verbatimattr'}) -_rc.tag_attribute(('CWSourceHostConfig', 'config'), {'rvid': 'verbatimattr'}) -_rc.tag_attribute(('CWSourceSchemaConfig', 'options'), {'rvid': 'verbatimattr'}) - - -class CWSourcePrimaryView(tabs.TabbedPrimaryView): - __select__ = is_instance('CWSource') - tabs = [_('cwsource-main'), _('cwsource-mapping'), _('cwsource-imports')] - default_tab = 'cwsource-main' - - -class CWSourceMainTab(tabs.PrimaryTab): - __regid__ = 'cwsource-main' - __select__ = is_instance('CWSource') - - def render_entity_attributes(self, entity): - super(CWSourceMainTab, self).render_entity_attributes(entity) - self.w(add_etype_button(self._cw, 'CWSourceHostConfig', - __linkto='cw_host_config_of:%s:subject' % entity.eid, - __redirectpath=entity.rest_path())) - try: - hostconfig = self._cw.execute( - 'Any X, XC, XH WHERE X cw_host_config_of S, S eid %(s)s, ' - 'X config XC, X match_host XH', {'s': entity.eid}) - except Unauthorized: - pass - else: - if hostconfig: - self.w(u'

<h3>%s</h3>
                  ' % self._cw._('CWSourceHostConfig_plural')) - self._cw.view('table', hostconfig, w=self.w, - displaycols=list(range(2)), - cellvids={1: 'editable-final'}) - - -MAPPED_SOURCE_TYPES = set( ('datafeed',) ) - -class CWSourceMappingTab(EntityView): - __regid__ = 'cwsource-mapping' - __select__ = (is_instance('CWSource') - & match_user_groups('managers') - & score_entity(lambda x:x.type in MAPPED_SOURCE_TYPES)) - - def entity_call(self, entity): - _ = self._cw._ - self.w('

<h3>%s</h3>
                  ' % _('Entity and relation supported by this source')) - self.w(add_etype_button(self._cw, 'CWSourceSchemaConfig', - __linkto='cw_for_source:%s:subject' % entity.eid)) - self.w(u'
                  ') - rset = self._cw.execute( - 'Any X, SCH, XO ORDERBY ET WHERE X options XO, X cw_for_source S, S eid %(s)s, ' - 'X cw_schema SCH, SCH is ET', {'s': entity.eid}) - self.wview('table', rset, 'noresult') - checker = MappingChecker(entity) - checker.check() - if (checker.errors or checker.warnings or checker.infos): - self.w('

<h3>%s</h3>
                  ' % _('Detected problems')) - errors = zip(repeat(_('error')), checker.errors) - warnings = zip(repeat(_('warning')), checker.warnings) - infos = zip(repeat(_('warning')), checker.infos) - self.wview('pyvaltable', pyvalue=errors + warnings + infos) - - -class MappingChecker(object): - def __init__(self, cwsource): - self.cwsource = cwsource - self.errors = [] - self.warnings = [] - self.infos = [] - self.schema = cwsource._cw.vreg.schema - - def init(self): - # supported entity types - self.sentities = set() - # supported relations - self.srelations = {} - # avoid duplicated messages - self.seen = set() - # first get mapping as dict/sets - for schemacfg in self.cwsource.reverse_cw_for_source: - self.init_schemacfg(schemacfg) - - def init_schemacfg(self, schemacfg): - cwerschema = schemacfg.schema - if cwerschema.__regid__ == 'CWEType': - self.sentities.add(cwerschema.name) - elif cwerschema.__regid__ == 'CWRType': - assert not cwerschema.name in self.srelations - self.srelations[cwerschema.name] = None - else: # CWAttribute/CWRelation - self.srelations.setdefault(cwerschema.rtype.name, []).append( - (cwerschema.stype.name, cwerschema.otype.name) ) - self.sentities.add(cwerschema.stype.name) - self.sentities.add(cwerschema.otype.name) - - def check(self): - self.init() - error = self.errors.append - warning = self.warnings.append - info = self.infos.append - for etype in self.sentities: - eschema = self.schema[etype] - for rschema, ttypes, role in eschema.relation_definitions(): - if rschema in META_RTYPES: - continue - ttypes = [ttype for ttype in ttypes if ttype in self.sentities] - if not rschema in self.srelations: - for ttype in ttypes: - rdef = rschema.role_rdef(etype, ttype, role) - self.seen.add(rdef) - if rdef.role_cardinality(role) in '1+': - error(_('relation %(type)s with %(etype)s as %(role)s ' - 'and target type %(target)s is mandatory but ' - 'not supported') % - {'rtype': rschema, 'etype': etype, 'role': role, - 'target': ttype}) - elif ttype in self.sentities: - warning(_('%s could be supported') % rdef) - elif not ttypes: - warning(_('relation %(rtype)s with %(etype)s as %(role)s is ' - 'supported but no target type supported') % - {'rtype': rschema, 'role': role, 'etype': etype}) - for rtype, rdefs in self.srelations.items(): - if rdefs is None: - rschema = self.schema[rtype] - for subj, obj in rschema.rdefs: - if subj in self.sentities and obj in self.sentities: - break - else: - error(_('relation %s is supported but none of its definitions ' - 'matches supported entities') % rtype) - self.custom_check() - - def custom_check(self): - pass - - - -class CWSourceImportsTab(EntityView): - __regid__ = 'cwsource-imports' - __select__ = (is_instance('CWSource') - & has_related_entities('cw_import_of', 'object')) - - def entity_call(self, entity): - rset = self._cw.execute('Any X, XST, XET, XS ORDERBY XST DESC WHERE ' - 'X cw_import_of S, S eid %(s)s, X status XS, ' - 'X start_timestamp XST, X end_timestamp XET', - {'s': entity.eid}) - self._cw.view('cw.imports-table', rset, w=self.w) - - -class CWImportsTable(tableview.EntityTableView): - __regid__ = 'cw.imports-table' - __select__ = is_instance('CWDataImport') - columns = ['import', 'start_timestamp', 'end_timestamp'] - column_renderers = {'import': tableview.MainEntityColRenderer()} - layout_args = {'display_filter': 'top'} - - -class CWSourceSyncAction(action.Action): - __regid__ = 'cw.source-sync' - __select__ = (action.Action.__select__ & match_user_groups('managers') - & one_line_rset() & 
is_instance('CWSource') - & score_entity(lambda x: x.name != 'system')) - - title = _('synchronize') - category = 'mainactions' - order = 20 - - def url(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return entity.absolute_url(vid=self.__regid__) - - -class CWSourceSyncView(EntityView): - __regid__ = 'cw.source-sync' - __select__ = (match_user_groups('managers') - & one_line_rset() & is_instance('CWSource') - & score_entity(lambda x: x.name != 'system')) - - title = _('synchronize') - - def entity_call(self, entity): - self._cw.call_service('source-sync', source_eid=entity.eid) - msg = self._cw._('Source has been synchronized') - url = entity.absolute_url(tab='cwsource-imports', __message=msg) - raise Redirect(url) - - - - -# sources management view ###################################################### - -class ManageSourcesAction(actions.ManagersAction): - __regid__ = 'cwsource' - title = _('data sources') - category = 'manage' - order = 100 - - -class CWSourcesManagementView(StartupView): - __regid__ = 'cw.sources-management' - rql = ('Any S,ST,SP,SD,SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST, ' - 'S latest_retrieval SD, S parser SP') - title = _('data sources management') - - def call(self, **kwargs): - self.w('

<h1>%s</h1>
                  ' % self._cw._(self.title)) - self.w(add_etype_button(self._cw, 'CWSource')) - self.w(u'
                  ') - self.wview('cw.sources-table', self._cw.execute(self.rql)) - - -class CWSourcesTable(tableview.EntityTableView): - __regid__ = 'cw.sources-table' - __select__ = is_instance('CWSource') - columns = ['source', 'type', 'parser', 'latest_retrieval', 'latest_import'] - - class LatestImportColRenderer(tableview.EntityTableColRenderer): - def render_cell(self, w, rownum): - entity = self.entity(rownum) - rset = self._cw.execute('Any X,XS,XST ORDERBY XST DESC LIMIT 1 WHERE ' - 'X cw_import_of S, S eid %(s)s, X status XS, ' - 'X start_timestamp XST', {'s': entity.eid}) - if rset: - self._cw.view('incontext', rset, row=0, w=w) - else: - w(self.empty_cell_content) - - column_renderers = { - 'source': tableview.MainEntityColRenderer(), - 'latest_import': LatestImportColRenderer(header=_('latest import'), - sortable=False) - } - -# datafeed source import ####################################################### - -REVERSE_SEVERITIES = { - logging.DEBUG : _('DEBUG'), - logging.INFO : _('INFO'), - logging.WARNING : _('WARNING'), - logging.ERROR : _('ERROR'), - logging.FATAL : _('FATAL') -} - - -def log_to_table(req, rawdata): - data = [] - for msg_idx, msg in enumerate(rawdata.split('
                  ')): - record = msg.strip() - if not record: - continue - try: - severity, url, line, msg = record.split('\t', 3) - except ValueError: - req.warning('badly formated log %s' % record) - url = line = u'' - severity = logging.DEBUG - msg = record - data.append( (severity, url, line, msg) ) - return data - - -class LogTableLayout(tableview.TableLayout): - __select__ = match_view('cw.log.table') - needs_js = tableview.TableLayout.needs_js + ('cubicweb.log.js',) - needs_css = tableview.TableLayout.needs_css + ('cubicweb.log.css',) - columns_css = { - 0: 'logSeverity', - 1: 'logPath', - 2: 'logLine', - 3: 'logMsg', - } - - def render_table(self, w, actions, paginate): - default_level = self.view.cw_extra_kwargs['default_level'] - if default_level != 'Debug': - self._cw.add_onload('$("select.logFilter").val("%s").change();' - % self._cw.form.get('logLevel', default_level)) - w(u'\n
                  ') - w(u'' % self._cw._(u'Message threshold')) - w(u'') - w(u'
                  ') - super(LogTableLayout, self).render_table(w, actions, paginate) - - def table_attributes(self): - attrs = super(LogTableLayout, self).table_attributes() - attrs['id'] = 'table'+self.view.domid - return attrs - - def row_attributes(self, rownum): - attrs = super(LogTableLayout, self).row_attributes(rownum) - attrs['id'] = 'log_msg_%i' % rownum - severityname = REVERSE_SEVERITIES[int(self.view.pyvalue[rownum][0])] - attrs['class'] = 'log%s' % severityname.capitalize() - return attrs - - def cell_attributes(self, rownum, colnum, colid): - attrs = super(LogTableLayout, self).cell_attributes(rownum, colnum, colid) - attrs['class'] = self.columns_css[colnum] - return attrs - - -class LogTable(pyviews.PyValTableView): - __regid__ = 'cw.log.table' - headers = [_('severity'), _('url'), _('line'), _('message')] - - @cachedproperty - def domid(self): - return make_uid('logTable') - - class SeverityRenderer(pyviews.PyValTableColRenderer): - def render_cell(self, w, rownum): - severity = self.data[rownum][0] - w(u'''' - u' %(severity)s' % { - 'severity': self._cw._(REVERSE_SEVERITIES[int(severity)]), - 'title': self._cw._('permalink to this message'), - 'msg_id': 'log_msg_%i' % rownum, - }) - def sortvalue(self, rownum): - return int(self.data[rownum][0]) - - class URLRenderer(pyviews.PyValTableColRenderer): - def render_cell(self, w, rownum): - url = self.data[rownum][1] - if url and url.startswith('http'): - url = tags.a(url, href=url) - w(url or u' ') - - class LineRenderer(pyviews.PyValTableColRenderer): - def render_cell(self, w, rownum): - line = self.data[rownum][2] - w(line or u' ') - - class MessageRenderer(pyviews.PyValTableColRenderer): - snip_over = 7 - def render_cell(self, w, rownum): - msg = self.data[rownum][3] - lines = msg.splitlines() - if len(lines) <= self.snip_over: - w(u'
                  %s
                  ' % msg) - else: - # The make_uid argument has no specific meaning here. - div_snip_id = make_uid(u'log_snip_') - div_full_id = make_uid(u'log_full_') - divs_id = (div_snip_id, div_full_id) - snip = u'\n'.join((lines[0], lines[1], - u' ...', - u' %i more lines [double click to expand]' % (len(lines)-4), - u' ...', - lines[-2], lines[-1])) - divs = ( - (div_snip_id, snip, u'expand', "class='collapsed'"), - (div_full_id, msg, u'collapse', "class='hidden'") - ) - for div_id, content, button, h_class in divs: - text = self._cw._(button) - js = u"toggleVisibility('%s'); toggleVisibility('%s');" % divs_id - w(u'
                  ' % (div_id, h_class)) - w(u'
                  ' % (js, text))
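                    # [editor's note, not part of the original patch] Each long
                    # message is written twice below: a snippet div (class
                    # 'collapsed') and a full div (class 'hidden'); the JS string
                    # built above appears to toggle both ids on double click so
                    # that one replaces the other.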
                  -                    w(content)
                  -                    w(u'
                  ') - w(u'
                  ') - - column_renderers = {0: SeverityRenderer(), - 1: URLRenderer(sortable=False), - 2: LineRenderer(sortable=False), - 3: MessageRenderer(sortable=False), - } - - -class DataFeedSourceDataImport(EntityView): - __select__ = EntityView.__select__ & match_kwargs('rtype') - __regid__ = 'cw.formated_log' - - def cell_call(self, row, col, rtype, loglevel='Info', **kwargs): - if 'dispctrl' in self.cw_extra_kwargs: - loglevel = self.cw_extra_kwargs['dispctrl'].get('loglevel', loglevel) - entity = self.cw_rset.get_entity(row, col) - value = getattr(entity, rtype) - if value: - self._cw.view('cw.log.table', pyvalue=log_to_table(self._cw, value), - default_level=loglevel, w=self.w) - else: - self.w(self._cw._('no log to display')) - - -_pvs.tag_attribute(('CWDataImport', 'log'), 'relations') -_pvdc.tag_attribute(('CWDataImport', 'log'), {'vid': 'cw.formated_log'}) -_pvs.tag_subject_of(('CWDataImport', 'cw_import_of', '*'), 'hidden') # in breadcrumbs -_pvs.tag_object_of(('*', 'cw_import_of', 'CWSource'), 'hidden') # in dedicated tab - - -class CWDataImportIPrevNextAdapter(navigation.IPrevNextAdapter): - __select__ = is_instance('CWDataImport') - - def next_entity(self): - if self.entity.start_timestamp is not None: - # add NOT X eid %(e)s because > may not be enough - rset = self._cw.execute( - 'Any X,XSTS ORDERBY 2 LIMIT 1 WHERE X is CWDataImport, ' - 'X cw_import_of S, S eid %(s)s, NOT X eid %(e)s, ' - 'X start_timestamp XSTS, X start_timestamp > %(sts)s', - {'sts': self.entity.start_timestamp, - 'e': self.entity.eid, - 's': self.entity.cwsource.eid}) - if rset: - return rset.get_entity(0, 0) - - def previous_entity(self): - if self.entity.start_timestamp is not None: - # add NOT X eid %(e)s because < may not be enough - rset = self._cw.execute( - 'Any X,XSTS ORDERBY 2 DESC LIMIT 1 WHERE X is CWDataImport, ' - 'X cw_import_of S, S eid %(s)s, NOT X eid %(e)s, ' - 'X start_timestamp XSTS, X start_timestamp < %(sts)s', - {'sts': self.entity.start_timestamp, - 'e': self.entity.eid, - 's': self.entity.cwsource.eid}) - if rset: - return rset.get_entity(0, 0) - -class CWDataImportStatusFacet(facet.AttributeFacet): - __regid__ = 'datafeed.dataimport.status' - __select__ = is_instance('CWDataImport') - rtype = 'status' - - -# breadcrumbs configuration #################################################### - -class CWsourceConfigIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('CWSourceHostConfig', 'CWSourceSchemaConfig') - def parent_entity(self): - return self.entity.cwsource - -class CWDataImportIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('CWDataImport') - def parent_entity(self): - return self.entity.cw_import_of[0] diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/cwuser.py --- a/web/views/cwuser.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,258 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Specific views for users and groups""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from hashlib import sha1 # pylint: disable=E0611 - -from six import text_type -from six.moves import range - -from logilab.mtconverter import xml_escape - -from cubicweb import tags -from cubicweb.schema import display_name -from cubicweb.predicates import one_line_rset, is_instance, match_user_groups -from cubicweb.view import EntityView, StartupView -from cubicweb.web import action, formwidgets -from cubicweb.web.views import uicfg, tabs, tableview, actions, add_etype_button - -_pvs = uicfg.primaryview_section -_pvs.tag_attribute(('CWUser', 'login'), 'hidden') - -_affk = uicfg.autoform_field_kwargs -_affk.tag_subject_of(('CWUser', 'in_group', 'CWGroup'), - {'widget': formwidgets.InOutWidget}) - -class UserPreferencesEntityAction(action.Action): - __regid__ = 'prefs' - __select__ = (one_line_rset() & is_instance('CWUser') & - match_user_groups('owners', 'managers')) - - title = _('preferences') - category = 'mainactions' - - def url(self): - user = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return user.absolute_url(vid='propertiesform') - - -class FoafView(EntityView): - __regid__ = 'foaf' - __select__ = is_instance('CWUser') - - title = _('foaf') - templatable = False - content_type = 'text/xml' - - def call(self): - self.w(u''' - '''% self._cw.encoding) - for i in range(self.cw_rset.rowcount): - self.cell_call(i, 0) - self.w(u'\n') - - def entity_call(self, entity, **kwargs): - entity.complete() - # account - self.w(u'\n' % entity.absolute_url()) - self.w(u' %s\n' % entity.login) - self.w(u'\n') - # person - self.w(u'\n' % entity.absolute_url()) - self.w(u' \n' % entity.absolute_url()) - if entity.surname: - self.w(u'%s\n' - % xml_escape(entity.surname)) - if entity.firstname: - self.w(u'%s\n' - % xml_escape(entity.firstname)) - emailaddr = entity.cw_adapt_to('IEmailable').get_email() - if emailaddr: - self.w(u'%s\n' - % sha1(emailaddr.encode('utf-8')).hexdigest()) - self.w(u'\n') - - -# group views ################################################################## - -_pvs.tag_attribute(('CWGroup', 'name'), 'hidden') -_pvs.tag_subject_of(('CWGroup', 'read_permission', '*'), 'relations') -_pvs.tag_subject_of(('CWGroup', 'add_permission', '*'), 'relations') -_pvs.tag_subject_of(('CWGroup', 'delete_permission', '*'), 'relations') -_pvs.tag_subject_of(('CWGroup', 'update_permission', '*'), 'relations') -_pvs.tag_object_of(('CWUser', 'in_group', 'CWGroup'), 'hidden') -_pvs.tag_object_of(('*', 'require_group', 'CWGroup'), 'hidden') - - -class CWGroupPrimaryView(tabs.TabbedPrimaryView): - __select__ = is_instance('CWGroup') - tabs = [_('cwgroup-main'), _('cwgroup-permissions')] - default_tab = 'cwgroup-main' - - -class CWGroupMainTab(tabs.PrimaryTab): - __regid__ = 'cwgroup-main' - __select__ = tabs.PrimaryTab.__select__ & is_instance('CWGroup') - - def render_entity_attributes(self, entity): - rset = self._cw.execute( - 'Any U, FN, LN, CD, LL ORDERBY L WHERE U in_group G, ' - 'U login L, U firstname FN, U surname LN, U creation_date CD, ' - 'U last_login_time LL, G eid %(x)s', {'x': entity.eid}) - self.wview('cwgroup.users', rset, 'null') - -class CWGroupUsersTable(tableview.RsetTableView): - __regid__ = 'cwgroup.users' - __select__ = is_instance('CWUser') - headers = (_(u'user'), 
_(u'first name'), _(u'last name'), - _(u'creation date'), _(u'last login time')) - layout_args = {'display_filter': 'top'} - finalvid = 'editable-final' - - -class CWGroupPermTab(EntityView): - __regid__ = 'cwgroup-permissions' - __select__ = is_instance('CWGroup') - - def entity_call(self, entity): - self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css')) - access_types = ('read', 'delete', 'add', 'update') - w = self.w - objtype_access = {'CWEType': ('read', 'delete', 'add', 'update'), - 'CWRelation': ('add', 'delete')} - rql_cwetype = 'DISTINCT Any X WHERE X %s_permission CWG, X is CWEType, ' \ - 'CWG eid %%(e)s' - rql_cwrelation = 'DISTINCT Any RT WHERE X %s_permission CWG, X is CWRelation, ' \ - 'X relation_type RT, CWG eid %%(e)s' - self.render_objtype_access(entity, 'CWEType', objtype_access, rql_cwetype) - self.render_objtype_access(entity, 'CWRelation', objtype_access, rql_cwrelation) - - def render_objtype_access(self, entity, objtype, objtype_access, rql): - self.w(u'

<h4>%s</h4>
                  ' % self._cw._(objtype)) - for access_type in objtype_access[objtype]: - rset = self._cw.execute(rql % access_type, {'e': entity.eid}) - if rset: - self.w(u'
                  %s:
                  ' % self._cw.__(access_type + '_permission')) - self.w(u'
                  %s

                  ' % self._cw.view('csv', rset, 'null')) - - -class CWGroupInContextView(EntityView): - __regid__ = 'incontext' - __select__ = is_instance('CWGroup') - - def entity_call(self, entity, **kwargs): - entity.complete() - self.w(u'%s' % ( - entity.absolute_url(), xml_escape(entity.name), - entity.printable_value('name'))) - - -# user / groups management views ############################################### - -class ManageUsersAction(actions.ManagersAction): - __regid__ = 'cwuser' # see rewrite rule /cwuser - title = _('users and groups') - category = 'manage' - - -class UsersAndGroupsManagementView(tabs.TabsMixin, StartupView): - __regid__ = 'cw.users-and-groups-management' - __select__ = StartupView.__select__ & match_user_groups('managers') - title = _('Users and groups management') - tabs = [_('cw.users-management'), _('cw.groups-management'),] - default_tab = 'cw.users-management' - - def call(self, **kwargs): - """The default view representing the instance's management""" - self.w(u'

<h1>%s</h1>
                  ' % self._cw._(self.title)) - self.render_tabs(self.tabs, self.default_tab) - - -class CWUserManagementView(StartupView): - __regid__ = 'cw.users-management' - __select__ = StartupView.__select__ & match_user_groups('managers') - cache_max_age = 0 # disable caching - # XXX one could wish to display for instance only user's firstname/surname - # for non managers but filtering out NULL caused crash with an ldapuser - # source. The ldapuser source has been dropped and this code can be updated. - rql = ('Any U,US,F,S,U,UAA,UDS, L,UAA,USN,UDSN ORDERBY L WHERE U is CWUser, ' - 'U login L, U firstname F, U surname S, ' - 'U in_state US, US name USN, ' - 'U primary_email UA?, UA address UAA, ' - 'U cw_source UDS, US name UDSN') - - def call(self, **kwargs): - self.w(add_etype_button(self._cw, 'CWUser')) - self.w(u'
                  ') - self.wview('cw.users-table', self._cw.execute(self.rql)) - - -class CWUsersTable(tableview.EntityTableView): - __regid__ = 'cw.users-table' - __select__ = is_instance('CWUser') - columns = ['user', 'in_state', 'firstname', 'surname', - 'in_group', 'primary_email', 'cw_source'] - layout_args = {'display_filter': 'top'} - finalvid = 'editable-final' - - column_renderers = { - 'user': tableview.EntityTableColRenderer( - renderfunc=lambda w,x: w(tags.a(x.login, href=x.absolute_url())), - sortfunc=lambda x: x.login), - 'in_state': tableview.EntityTableColRenderer( - renderfunc=lambda w,x: w(x.cw_adapt_to('IWorkflowable').printable_state), - sortfunc=lambda x: x.cw_adapt_to('IWorkflowable').printable_state), - 'in_group': tableview.EntityTableColRenderer( - renderfunc=lambda w,x: x.view('reledit', rtype='in_group', role='subject', w=w)), - 'primary_email': tableview.RelatedEntityColRenderer( - getrelated=lambda x:x.primary_email and x.primary_email[0] or None), - 'cw_source': tableview.RelatedEntityColRenderer( - getrelated=lambda x: x.cw_source[0]), - } - - -class CWGroupsManagementView(StartupView): - __regid__ = 'cw.groups-management' - __select__ = StartupView.__select__ & match_user_groups('managers') - cache_max_age = 0 # disable caching - rql = ('Any G,GN ORDERBY GN WHERE G is CWGroup, G name GN, NOT G name "owners"') - - def call(self, **kwargs): - self.w(add_etype_button(self._cw, 'CWGroup')) - self.w(u'
                  ') - self.wview('cw.groups-table', self._cw.execute(self.rql)) - - -class CWGroupsTable(tableview.EntityTableView): - __regid__ = 'cw.groups-table' - __select__ = is_instance('CWGroup') - columns = ['group', 'nb_users'] - layout_args = {'display_filter': 'top'} - - column_renderers = { - 'group': tableview.MainEntityColRenderer(), - 'nb_users': tableview.EntityTableColRenderer( - header=_('num. users'), - renderfunc=lambda w,x: w(text_type(x.num_users())), - sortfunc=lambda x: x.num_users()), - } diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/debug.py --- a/web/views/debug.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,190 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""management and error screens""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from time import strftime, localtime - -from six import text_type - -from logilab.mtconverter import xml_escape - -from cubicweb.predicates import none_rset, match_user_groups -from cubicweb.view import StartupView -from cubicweb.web.views import actions, tabs - -def dict_to_html(w, dict): - # XHTML doesn't allow emtpy
                    nodes - if dict: - w(u'
                      ') - for key in sorted(dict): - w(u'
                    • %s: %s
                    • ' % ( - xml_escape(str(key)), xml_escape(repr(dict[key])))) - w(u'
                    ') - - -class SiteInfoAction(actions.ManagersAction): - __regid__ = 'siteinfo' - __select__ = match_user_groups('users','managers') - title = _('Site information') - category = 'manage' - order = 1000 - - -class SiteInfoView(tabs.TabsMixin, StartupView): - __regid__ = 'siteinfo' - title = _('Site information') - tabs = [_('info'), _('registry'), _('gc')] - default_tab = 'info' - - def call(self, **kwargs): - """The default view representing the instance's management""" - self.w(u'

<h1>%s</h1>
                    ' % self._cw._(self.title)) - self.render_tabs(self.tabs, self.default_tab) - - -class ProcessInformationView(StartupView): - """display various web server /repository information""" - __regid__ = 'info' - __select__ = none_rset() & match_user_groups('managers', 'users') - - title = _('server information') - cache_max_age = 0 - - def call(self, **kwargs): - req = self._cw - dtformat = req.property_value('ui.datetime-format') - _ = req._ - w = self.w - repo = req.cnx.repo - # generic instance information - w(u'

                    %s

                    ' % _('Instance')) - pyvalue = ((_('config type'), self._cw.vreg.config.name), - (_('config mode'), self._cw.vreg.config.mode), - (_('instance home'), self._cw.vreg.config.apphome)) - self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) - vcconf = repo.get_versions() - w(u'

                    %s

                    ' % _('versions configuration')) - missing = _('no version information') - pyvalue = [('CubicWeb', vcconf.get('cubicweb', missing))] - pyvalue += [(cube, vcconf.get(cube, missing)) - for cube in sorted(self._cw.vreg.config.cubes())] - self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) - # repository information - w(u'

                    %s

                    ' % _('Repository')) - w(u'

                    %s

                    ' % _('resources usage')) - stats = self._cw.call_service('repo_stats') - stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks']) - stats['threads'] = ', '.join(sorted(stats['threads'])) - for k in stats: - if k in ('extid_cache_size', 'type_source_cache_size'): - continue - if k.endswith('_cache_size'): - stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize']) - def format_stat(sname, sval): - return '%s %s' % (xml_escape(text_type(sval)), - sname.endswith('percent') and '%' or '') - pyvalue = [(sname, format_stat(sname, sval)) - for sname, sval in sorted(stats.items())] - self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) - # open repo sessions - if req.cnx.is_repo_in_memory and req.user.is_in_group('managers'): - w(u'

                    %s

                    ' % _('opened sessions')) - sessions = repo._sessions.values() - if sessions: - w(u'
                      ') - for session in sessions: - w(u'
                    • %s (%s: %s)
                      ' % ( - xml_escape(text_type(session)), - _('last usage'), - strftime(dtformat, localtime(session.timestamp)))) - dict_to_html(w, session.data) - w(u'
                    • ') - w(u'
                    ') - else: - w(u'

                    %s

                    ' % _('no repository sessions found')) - # web server information - w(u'

                    %s

                    ' % _('Web server')) - pyvalue = ((_('base url'), req.base_url()), - (_('data directory url'), req.datadir_url)) - self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) - from cubicweb.web.application import SESSION_MANAGER - if SESSION_MANAGER is not None and req.user.is_in_group('managers'): - sessions = SESSION_MANAGER.current_sessions() - w(u'

                    %s

                    ' % _('opened web sessions')) - if sessions: - w(u'
                      ') - for session in sessions: - last_usage_time = session.mtime - w(u'
                    • %s (%s: %s)
                      ' % ( - session.sessionid, - _('last usage'), - strftime(dtformat, localtime(last_usage_time)))) - dict_to_html(w, session.data) - w(u'
                    • ') - w(u'
                    ') - else: - w(u'

                    %s

                    ' % _('no web sessions found')) - - - -class RegistryView(StartupView): - """display vregistry content""" - __regid__ = 'registry' - __select__ = StartupView.__select__ & match_user_groups('managers') - title = _('registry') - cache_max_age = 0 - - def call(self, **kwargs): - self.w(u'

<h1>%s</h1>
                    ' % self._cw._("Registry's content")) - keys = sorted(self._cw.vreg) - url = xml_escape(self._cw.url()) - self.w(u'

<p>%s</p>
                    \n' % ' - '.join('%s' - % (url, key, key) for key in keys)) - for key in keys: - if key in ('boxes', 'contentnavigation'): # those are bw compat registries - continue - self.w(u'

<h2 id="%s">%s</h2>
                    ' % (key, key)) - if self._cw.vreg[key]: - values = sorted(self._cw.vreg[key].items()) - self.wview('pyvaltable', pyvalue=[(key, xml_escape(repr(val))) - for key, val in values]) - else: - self.w(u'

<p>Empty</p>
                    \n') - - -class GCView(StartupView): - """display garbage collector information""" - __regid__ = 'gc' - __select__ = StartupView.__select__ & match_user_groups('managers') - title = _('memory leak debugging') - cache_max_age = 0 - - def call(self, **kwargs): - stats = self._cw.call_service('repo_gc_stats') - self.w(u'

                    %s

                    ' % _('Garbage collection information')) - self.w(u'

                    %s

                    ' % self._cw._('Looked up classes')) - self.wview('pyvaltable', pyvalue=stats['lookupclasses']) - self.w(u'

                    %s

                    ' % self._cw._('Most referenced classes')) - self.wview('pyvaltable', pyvalue=stats['referenced']) - if stats['unreachable']: - self.w(u'

                    %s

                    ' % self._cw._('Unreachable objects')) - values = [xml_escape(val) for val in stats['unreachable']] - self.wview('pyvallist', pyvalue=values) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/dotgraphview.py --- a/web/views/dotgraphview.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""some basic stuff to build dot generated graph images""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import tempfile -import os -import codecs - -from logilab.mtconverter import xml_escape -from logilab.common.graph import GraphGenerator, DotBackend - -from cubicweb.view import EntityView -from cubicweb.utils import make_uid - -class DotGraphView(EntityView): - __abstract__ = True - backend_class = DotBackend - backend_kwargs = {'ratio': 'compress', 'size': '30,10'} - - def cell_call(self, row, col): - if 'MSIE 8' in self._cw.useragent(): - return - entity = self.cw_rset.get_entity(row, col) - visitor = self.build_visitor(entity) - prophdlr = self.build_dotpropshandler() - graphname = 'dotgraph%s' % str(entity.eid) - generator = GraphGenerator(self.backend_class(graphname, None, - **self.backend_kwargs)) - # image file - fd, tmpfile = tempfile.mkstemp('.svg') - os.close(fd) - generator.generate(visitor, prophdlr, tmpfile) - with codecs.open(tmpfile, 'rb', encoding='utf-8') as svgfile: - self.w(svgfile.read()) - - def build_visitor(self, entity): - raise NotImplementedError - - def build_dotpropshandler(self): - return DotPropsHandler(self._cw) - - -class DotPropsHandler(object): - def __init__(self, req): - self._ = req._ - - def node_properties(self, entity): - """return default DOT drawing options for a state or transition""" - return {'label': entity.dc_long_title(), - 'href': entity.absolute_url(), - 'fontname': 'Courier', 'fontsize': 10, 'shape':'box', - } - - def edge_properties(self, transition, fromstate, tostate): - return {'label': '', 'dir': 'forward', - 'color': 'black', 'style': 'filled'} diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/edit_attributes.pt --- a/web/views/edit_attributes.pt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ - - - - - -
                    attrname -
                    error message if any
                    -
                    widget (input, textarea, etc.)
                    -
                    format help if any
                    -
                    diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/editcontroller.py --- a/web/views/editcontroller.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,395 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""The edit controller, automatically handling entity form submitting""" - -__docformat__ = "restructuredtext en" - -from warnings import warn -from collections import defaultdict - -from datetime import datetime - -from six import text_type - -from logilab.common.deprecation import deprecated -from logilab.common.graph import ordered_nodes - -from rql.utils import rqlvar_maker - -from cubicweb import _, Binary, ValidationError -from cubicweb.view import EntityAdapter -from cubicweb.predicates import is_instance -from cubicweb.web import (INTERNAL_FIELD_VALUE, RequestError, NothingToEdit, - ProcessFormError) -from cubicweb.web.views import basecontrollers, autoform - - -class IEditControlAdapter(EntityAdapter): - __regid__ = 'IEditControl' - __select__ = is_instance('Any') - - def __init__(self, _cw, **kwargs): - if self.__class__ is not IEditControlAdapter: - warn('[3.14] IEditControlAdapter is deprecated, override EditController' - ' using match_edited_type or match_form_id selectors for example.', - DeprecationWarning) - super(IEditControlAdapter, self).__init__(_cw, **kwargs) - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - parent = self.entity.cw_adapt_to('IBreadCrumbs').parent_entity() - if parent is not None: - return parent.rest_path(), {} - return str(self.entity.e_schema).lower(), {} - - def pre_web_edit(self): - """callback called by the web editcontroller when an entity will be - created/modified, to let a chance to do some entity specific stuff. - - Do nothing by default. 
- """ - pass - - -def valerror_eid(eid): - try: - return int(eid) - except (ValueError, TypeError): - return eid - -class RqlQuery(object): - def __init__(self): - self.edited = [] - self.restrictions = [] - self.kwargs = {} - - def __repr__(self): - return ('Query ' % ( - self.edited, self.restrictions, self.kwargs)) - - def insert_query(self, etype): - if self.edited: - rql = 'INSERT %s X: %s' % (etype, ','.join(self.edited)) - else: - rql = 'INSERT %s X' % etype - if self.restrictions: - rql += ' WHERE %s' % ','.join(self.restrictions) - return rql - - def update_query(self, eid): - varmaker = rqlvar_maker() - var = next(varmaker) - while var in self.kwargs: - var = next(varmaker) - rql = 'SET %s WHERE X eid %%(%s)s' % (','.join(self.edited), var) - if self.restrictions: - rql += ', %s' % ','.join(self.restrictions) - self.kwargs[var] = eid - return rql - - def set_attribute(self, attr, value): - self.kwargs[attr] = value - self.edited.append('X %s %%(%s)s' % (attr, attr)) - - def set_inlined(self, relation, value): - self.kwargs[relation] = value - self.edited.append('X %s %s' % (relation, relation.upper())) - self.restrictions.append('%s eid %%(%s)s' % (relation.upper(), relation)) - - -class EditController(basecontrollers.ViewController): - __regid__ = 'edit' - - def publish(self, rset=None): - """edit / create / copy / delete entity / relations""" - for key in self._cw.form: - # There should be 0 or 1 action - if key.startswith('__action_'): - cbname = key[1:] - try: - callback = getattr(self, cbname) - except AttributeError: - raise RequestError(self._cw._('invalid action %r' % key)) - else: - return callback() - self._default_publish() - self.reset() - - def _ordered_formparams(self): - """ Return form parameters dictionaries for each edited entity. - - We ensure that entities can be created in this order accounting for - mandatory inlined relations. - """ - req = self._cw - graph = {} - get_rschema = self._cw.vreg.schema.rschema - # minparams = 2, because at least __type and eid are needed - values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2)) - for eid in req.edited_eids()) - # iterate over all the edited entities - for eid, values in values_by_eid.items(): - # add eid to the dependency graph - graph.setdefault(eid, set()) - # search entity's edited fields for mandatory inlined relation - for param in values['_cw_entity_fields'].split(','): - try: - rtype, role = param.split('-') - except ValueError: - # e.g. 
param='__type' - continue - rschema = get_rschema(rtype) - if rschema.inlined: - for target in rschema.targets(values['__type'], role): - rdef = rschema.role_rdef(values['__type'], target, role) - # if cardinality is 1 and if the target entity is being - # simultaneously edited, the current entity must be - # created before the target one - if rdef.cardinality[0 if role == 'subject' else 1] == '1': - # use .get since param may be unspecified (though it will usually lead - # to a validation error later) - target_eid = values.get(param) - if target_eid in values_by_eid: - # add dependency from the target entity to the - # current one - if role == 'object': - graph.setdefault(target_eid, set()).add(eid) - else: - graph.setdefault(eid, set()).add(target_eid) - break - for eid in reversed(ordered_nodes(graph)): - yield values_by_eid[eid] - - def _default_publish(self): - req = self._cw - self.errors = [] - self.relations_rql = [] - form = req.form - # so we're able to know the main entity from the repository side - if '__maineid' in form: - req.transaction_data['__maineid'] = form['__maineid'] - # no specific action, generic edition - self._to_create = req.data['eidmap'] = {} - # those two data variables are used to handle relation from/to entities - # which doesn't exist at time where the entity is edited and that - # deserves special treatment - req.data['pending_inlined'] = defaultdict(set) - req.data['pending_others'] = set() - try: - for formparams in self._ordered_formparams(): - eid = self.edit_entity(formparams) - except (RequestError, NothingToEdit) as ex: - if '__linkto' in req.form and 'eid' in req.form: - self.execute_linkto() - elif not ('__delete' in req.form or '__insert' in req.form): - raise ValidationError(None, {None: text_type(ex)}) - # all pending inlined relations to newly created entities have been - # treated now (pop to ensure there are no attempt to add new ones) - pending_inlined = req.data.pop('pending_inlined') - assert not pending_inlined, pending_inlined - # handle all other remaining relations now - for form_, field in req.data.pop('pending_others'): - self.handle_formfield(form_, field) - # then execute rql to set all relations - for querydef in self.relations_rql: - self._cw.execute(*querydef) - # XXX this processes *all* pending operations of *all* entities - if '__delete' in req.form: - todelete = req.list_form_param('__delete', req.form, pop=True) - if todelete: - autoform.delete_relations(self._cw, todelete) - self._cw.remove_pending_operations() - if self.errors: - errors = dict((f.name, text_type(ex)) for f, ex in self.errors) - raise ValidationError(valerror_eid(form.get('__maineid')), errors) - - def _insert_entity(self, etype, eid, rqlquery): - rql = rqlquery.insert_query(etype) - try: - entity = self._cw.execute(rql, rqlquery.kwargs).get_entity(0, 0) - neweid = entity.eid - except ValidationError as ex: - self._to_create[eid] = ex.entity - if self._cw.ajax_request: # XXX (syt) why? 
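                # [editor's note, not part of the original patch] Presumably the
                # ajax error path needs the form's temporary eid placeholder
                # rather than the server-side entity object, so that validation
                # errors can be mapped back onto the pending form fields.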
- ex.entity = eid - raise - self._to_create[eid] = neweid - return neweid - - def _update_entity(self, eid, rqlquery): - self._cw.execute(rqlquery.update_query(eid), rqlquery.kwargs) - - def edit_entity(self, formparams, multiple=False): - """edit / create / copy an entity and return its eid""" - req = self._cw - etype = formparams['__type'] - entity = req.vreg['etypes'].etype_class(etype)(req) - entity.eid = valerror_eid(formparams['eid']) - is_main_entity = req.form.get('__maineid') == formparams['eid'] - # let a chance to do some entity specific stuff - entity.cw_adapt_to('IEditControl').pre_web_edit() - # create a rql query from parameters - rqlquery = RqlQuery() - # process inlined relations at the same time as attributes - # this will generate less rql queries and might be useful in - # a few dark corners - if is_main_entity: - formid = req.form.get('__form_id', 'edition') - else: - # XXX inlined forms formid should be saved in a different formparams entry - # inbetween, use cubicweb standard formid for inlined forms - formid = 'edition' - form = req.vreg['forms'].select(formid, req, entity=entity) - eid = form.actual_eid(entity.eid) - editedfields = formparams['_cw_entity_fields'] - form.formvalues = {} # init fields value cache - for field in form.iter_modified_fields(editedfields, entity): - self.handle_formfield(form, field, rqlquery) - # if there are some inlined field which were waiting for this entity's - # creation, add relevant data to the rqlquery - for form_, field in req.data['pending_inlined'].pop(entity.eid, ()): - rqlquery.set_inlined(field.name, form_.edited_entity.eid) - if self.errors: - errors = dict((f.role_name(), text_type(ex)) for f, ex in self.errors) - raise ValidationError(valerror_eid(entity.eid), errors) - if eid is None: # creation or copy - entity.eid = eid = self._insert_entity(etype, formparams['eid'], rqlquery) - elif rqlquery.edited: # edition of an existant entity - self.check_concurrent_edition(formparams, eid) - self._update_entity(eid, rqlquery) - if is_main_entity: - self.notify_edited(entity) - if '__delete' in formparams: - # XXX deprecate? 
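            # [editor's note, not part of the original patch] This per-entity
            # '__delete' handling looks redundant with the global '__delete'
            # processing done in _default_publish() above, which is probably
            # why it is flagged for deprecation.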
- todelete = req.list_form_param('__delete', formparams, pop=True) - autoform.delete_relations(req, todelete) - if '__cloned_eid' in formparams: - entity.copy_relations(int(formparams['__cloned_eid'])) - if is_main_entity: # only execute linkto for the main entity - self.execute_linkto(entity.eid) - return eid - - def handle_formfield(self, form, field, rqlquery=None): - eschema = form.edited_entity.e_schema - try: - for field, value in field.process_posted(form): - if not ( - (field.role == 'subject' and field.name in eschema.subjrels) - or - (field.role == 'object' and field.name in eschema.objrels)): - continue - rschema = self._cw.vreg.schema.rschema(field.name) - if rschema.final: - rqlquery.set_attribute(field.name, value) - else: - if form.edited_entity.has_eid(): - origvalues = set(entity.eid for entity in form.edited_entity.related(field.name, field.role, entities=True)) - else: - origvalues = set() - if value is None or value == origvalues: - continue # not edited / not modified / to do later - if rschema.inlined and rqlquery is not None and field.role == 'subject': - self.handle_inlined_relation(form, field, value, origvalues, rqlquery) - elif form.edited_entity.has_eid(): - self.handle_relation(form, field, value, origvalues) - else: - form._cw.data['pending_others'].add( (form, field) ) - except ProcessFormError as exc: - self.errors.append((field, exc)) - - def handle_inlined_relation(self, form, field, values, origvalues, rqlquery): - """handle edition for the (rschema, x) relation of the given entity - """ - if values: - rqlquery.set_inlined(field.name, next(iter(values))) - elif form.edited_entity.has_eid(): - self.handle_relation(form, field, values, origvalues) - - def handle_relation(self, form, field, values, origvalues): - """handle edition for the (rschema, x) relation of the given entity - """ - etype = form.edited_entity.e_schema - rschema = self._cw.vreg.schema.rschema(field.name) - if field.role == 'subject': - desttype = rschema.objects(etype)[0] - card = rschema.rdef(etype, desttype).cardinality[0] - subjvar, objvar = 'X', 'Y' - else: - desttype = rschema.subjects(etype)[0] - card = rschema.rdef(desttype, etype).cardinality[1] - subjvar, objvar = 'Y', 'X' - eid = form.edited_entity.eid - if field.role == 'object' or not rschema.inlined or not values: - # this is not an inlined relation or no values specified, - # explicty remove relations - rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( - subjvar, rschema, objvar) - for reid in origvalues.difference(values): - self.relations_rql.append((rql, {'x': eid, 'y': reid})) - seteids = values.difference(origvalues) - if seteids: - rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( - subjvar, rschema, objvar) - for reid in seteids: - self.relations_rql.append((rql, {'x': eid, 'y': reid})) - - def delete_entities(self, eidtypes): - """delete entities from the repository""" - redirect_info = set() - eidtypes = tuple(eidtypes) - for eid, etype in eidtypes: - entity = self._cw.entity_from_eid(eid, etype) - path, params = entity.cw_adapt_to('IEditControl').after_deletion_path() - redirect_info.add( (path, tuple(params.items())) ) - entity.cw_delete() - if len(redirect_info) > 1: - # In the face of ambiguity, refuse the temptation to guess. 
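            # [editor's note, not part of the original patch] The deleted
            # entities suggested different redirect targets, so fall back to the
            # bare 'view' controller with no parameters (effectively the default
            # page) instead of picking one of them arbitrarily.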
- self._after_deletion_path = 'view', () - else: - self._after_deletion_path = next(iter(redirect_info)) - if len(eidtypes) > 1: - self._cw.set_message(self._cw._('entities deleted')) - else: - self._cw.set_message(self._cw._('entity deleted')) - - - def check_concurrent_edition(self, formparams, eid): - req = self._cw - try: - form_ts = datetime.utcfromtimestamp(float(formparams['__form_generation_time'])) - except KeyError: - # Backward and tests compatibility : if no timestamp consider edition OK - return - if req.execute("Any X WHERE X modification_date > %(fts)s, X eid %(eid)s", - {'eid': eid, 'fts': form_ts}): - # We only mark the message for translation but the actual - # translation will be handled by the Validation mechanism... - msg = _("Entity %(eid)s has changed since you started to edit it." - " Reload the page and reapply your changes.") - # ... this is why we pass the formats' dict as a third argument. - raise ValidationError(eid, {None: msg}, {'eid' : eid}) - - def _action_apply(self): - self._default_publish() - self.reset() - - def _action_delete(self): - self.delete_entities(self._cw.edited_eids(withtype=True)) - return self.reset() diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/editforms.py --- a/web/views/editforms.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,272 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
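Stepping back to check_concurrent_edition() just above: the guard only fires when the form posted a __form_generation_time value, which is compared against the entity's modification_date. A minimal sketch of that check, written as a standalone helper; req is assumed to be a CubicWeb request and the RQL string is the one used above.

.. sourcecode :: python

    from datetime import datetime

    def edited_concurrently(req, eid, formparams):
        """Sketch of the guard above: True if the entity changed after the
        form was generated (req is a CubicWeb request, eid an int)."""
        try:
            form_ts = datetime.utcfromtimestamp(
                float(formparams['__form_generation_time']))
        except KeyError:
            # no timestamp posted: accept the edit, as the code above does
            return False
        return bool(req.execute(
            "Any X WHERE X modification_date > %(fts)s, X eid %(eid)s",
            {'eid': eid, 'fts': form_ts}))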
-"""Set of HTML automatic forms to create, delete, copy or edit a single entity -or a list of entities of the same type -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from copy import copy - -from six.moves import range - -from logilab.mtconverter import xml_escape -from logilab.common.decorators import cached -from logilab.common.registry import yes -from logilab.common.deprecation import class_moved - -from cubicweb import tags -from cubicweb.predicates import (match_kwargs, one_line_rset, non_final_entity, - specified_etype_implements, is_instance) -from cubicweb.view import EntityView -from cubicweb.schema import display_name -from cubicweb.web import stdmsgs, eid_param, \ - formfields as ff, formwidgets as fw -from cubicweb.web.form import FormViewMixIn, FieldNotFound -from cubicweb.web.views import uicfg, forms, reledit - -_pvdc = uicfg.primaryview_display_ctrl - - -class DeleteConfForm(forms.CompositeForm): - __regid__ = 'deleteconf' - # XXX non_final_entity does not implement eclass_selector - __select__ = is_instance('Any') - - domid = 'deleteconf' - copy_nav_params = True - form_buttons = [fw.Button(stdmsgs.BUTTON_DELETE, cwaction='delete'), - fw.Button(stdmsgs.BUTTON_CANCEL, cwaction='cancel')] - - def __init__(self, *args, **kwargs): - super(DeleteConfForm, self).__init__(*args, **kwargs) - done = set() - for entity in self.cw_rset.entities(): - if entity.eid in done: - continue - done.add(entity.eid) - subform = self._cw.vreg['forms'].select('base', self._cw, - entity=entity, - mainform=False) - self.add_subform(subform) - - -class DeleteConfFormView(FormViewMixIn, EntityView): - """form used to confirm deletion of some entities""" - __regid__ = 'deleteconf' - title = _('delete') - # don't use navigation, all entities asked to be deleted should be displayed - # else we will only delete the displayed page - paginable = False - - def call(self, onsubmit=None): - """ask for confirmation before real deletion""" - req, w = self._cw, self.w - _ = req._ - w(u'\n' - % _('this action is not reversible!')) - # XXX above message should have style of a warning - w(u'

                    %s

                    \n' % _('Do you want to delete the following element(s)?')) - form = self._cw.vreg['forms'].select(self.__regid__, req, - rset=self.cw_rset, - onsubmit=onsubmit) - w(u'
<ul>\n') - for entity in self.cw_rset.entities(): - # don't use outofcontext view or any other that may contain inline - # edition form - w(u'<li>%s</li>' % tags.a(entity.view('textoutofcontext'), - href=entity.absolute_url())) - w(u'</ul>
                    \n') - form.render(w=self.w) - - -class EditionFormView(FormViewMixIn, EntityView): - """display primary entity edition form""" - __regid__ = 'edition' - # add yes() so it takes precedence over deprecated views in baseforms, - # though not baseforms based customized view - __select__ = one_line_rset() & non_final_entity() & yes() - form_id = 'edition' - - title = _('modification') - - def cell_call(self, row, col, **kwargs): - entity = self.cw_rset.complete_entity(row, col) - self.render_form(entity) - - def render_form(self, entity): - """fetch and render the form""" - self.form_title(entity) - form = self._cw.vreg['forms'].select(self.form_id, self._cw, - entity=entity, - submitmsg=self.submited_message()) - self.init_form(form, entity) - form.render(w=self.w) - - def init_form(self, form, entity): - """customize your form before rendering here""" - pass - - def form_title(self, entity): - """the form view title""" - ptitle = self._cw._(self.title) - self.w(u'
<div class="formTitle"><span>%s %s</span></div>
                    ' % ( - entity.dc_type(), ptitle and '(%s)' % ptitle)) - - def submited_message(self): - """return the message that will be displayed on successful edition""" - return self._cw._('entity edited') - - -class CreationFormView(EditionFormView): - """display primary entity creation form""" - __regid__ = 'creation' - __select__ = specified_etype_implements('Any') & yes() - - title = _('creation') - - def call(self, **kwargs): - """creation view for an entity""" - # at this point we know etype is a valid entity type, thanks to our - # selector - etype = kwargs.pop('etype', self._cw.form.get('etype')) - entity = self._cw.vreg['etypes'].etype_class(etype)(self._cw) - entity.eid = next(self._cw.varmaker) - self.render_form(entity) - - def form_title(self, entity): - """the form view title""" - if '__linkto' in self._cw.form: - if isinstance(self._cw.form['__linkto'], list): - # XXX which one should be considered (case: add a ticket to a - # version in jpl) - rtype, linkto_eid, role = self._cw.form['__linkto'][0].split(':') - else: - rtype, linkto_eid, role = self._cw.form['__linkto'].split(':') - linkto_rset = self._cw.eid_rset(linkto_eid) - linkto_type = linkto_rset.description[0][0] - if role == 'subject': - title = self._cw.__('creating %s (%s %s %s %%(linkto)s)' % ( - entity.e_schema, entity.e_schema, rtype, linkto_type)) - else: - title = self._cw.__('creating %s (%s %%(linkto)s %s %s)' % ( - entity.e_schema, linkto_type, rtype, entity.e_schema)) - msg = title % {'linkto' : self._cw.view('incontext', linkto_rset)} - self.w(u'
<div class="formTitle"><span>%s</span></div>
                    ' % msg) - else: - super(CreationFormView, self).form_title(entity) - - def url(self): - """return the url associated with this view""" - req = self._cw - return req.vreg["etypes"].etype_class(req.form['etype']).cw_create_url( - req) - - def submited_message(self): - """return the message that will be displayed on successful edition""" - return self._cw._('entity created') - - -class CopyFormView(EditionFormView): - """display primary entity creation form initialized with values from another - entity - """ - __regid__ = 'copy' - - title = _('copy') - warning_message = _('Please note that this is only a shallow copy') - - def render_form(self, entity): - """fetch and render the form""" - # make a copy of entity to avoid altering the entity in the - # request's cache. - entity.complete() - self.newentity = copy(entity) - self.copying = entity - self.newentity.eid = next(self._cw.varmaker) - self.w(u'\n' - % self._cw._(self.warning_message)) - super(CopyFormView, self).render_form(self.newentity) - del self.newentity - - def init_form(self, form, entity): - """customize your form before rendering here""" - super(CopyFormView, self).init_form(form, entity) - if entity.eid == self.newentity.eid: - form.add_hidden(eid_param('__cloned_eid', entity.eid), - self.copying.eid) - for rschema, role in form.editable_attributes(): - if not rschema.final: - # ensure relation cache is filed - rset = self.copying.related(rschema, role) - self.newentity.cw_set_relation_cache(rschema, role, rset) - - def submited_message(self): - """return the message that will be displayed on successful edition""" - return self._cw._('entity copied') - - -class TableEditForm(forms.CompositeForm): - __regid__ = 'muledit' - domid = 'entityForm' - onsubmit = "return validateForm('%s', null);" % domid - form_buttons = [fw.SubmitButton(_('validate modifications on selected items')), - fw.ResetButton(_('revert changes'))] - - def __init__(self, req, rset, **kwargs): - kwargs.setdefault('__redirectrql', rset.printable_rql()) - super(TableEditForm, self).__init__(req, rset=rset, **kwargs) - for row in range(len(self.cw_rset)): - form = self._cw.vreg['forms'].select('edition', self._cw, - rset=self.cw_rset, row=row, - formtype='muledit', - copy_nav_params=False, - mainform=False) - # XXX rely on the EntityCompositeFormRenderer to put the eid input - form.remove_field(form.field_by_name('eid')) - self.add_subform(form) - - -class TableEditFormView(FormViewMixIn, EntityView): - __regid__ = 'muledit' - __select__ = EntityView.__select__ & yes() - title = _('multiple edit') - - def call(self, **kwargs): - """a view to edit multiple entities of the same type the first column - should be the eid - """ - # XXX overriding formvid (eg __form_id) necessary to make work edition: - # the edit controller try to select the form with no rset but - # entity=entity, and use this form to edit the entity. So we want - # edition form there but specifying formvid may have other undesired - # side effect. 
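Before the remark above continues below, here is a hedged sketch of the form selection it refers to: on POST the edit controller picks the form from __form_id with only entity=entity (no rset), which is why formvid='edition' is forced here. req and entity are assumed to be in scope; the two lines mirror edit_entity() earlier in this patch.

.. sourcecode :: python

    def select_edit_form(req, entity):
        """Sketch: how the edit controller picks the form for a posted entity."""
        formid = req.form.get('__form_id', 'edition')
        return req.vreg['forms'].select(formid, req, entity=entity)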
Maybe we should provide another variable optionally - # telling which form the edit controller should select (eg difffers - # between html generation / post handling form) - form = self._cw.vreg['forms'].select(self.__regid__, self._cw, - rset=self.cw_rset, - copy_nav_params=True, - formvid='edition') - form.render(w=self.w) - - -# click and edit handling ('reledit') ########################################## - -ClickAndEditFormView = class_moved(reledit.ClickAndEditFormView) -AutoClickAndEditFormView = class_moved(reledit.AutoClickAndEditFormView) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/editviews.py --- a/web/views/editviews.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,128 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Some views used to help to the edition process""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from logilab.common.decorators import cached -from logilab.mtconverter import xml_escape - -from cubicweb.view import EntityView, StartupView -from cubicweb.predicates import (one_line_rset, non_final_entity, - match_search_state) -from cubicweb.web import httpcache -from cubicweb.web.views import baseviews, linksearch_select_url - - -class SearchForAssociationView(EntityView): - """view called by the edition view when the user asks to search for - something to link to the edited eid - """ - __regid__ = 'search-associate' - __select__ = (one_line_rset() & match_search_state('linksearch') - & non_final_entity()) - - title = _('search for association') - - def cell_call(self, row, col): - rset, vid, divid, paginate = self.filter_box_context_info() - self.cw_rset = rset - self.w(u'
<div id="%s">' % divid) - self.paginate() - self.wview(vid, rset, 'noresult') - self.w(u'</div>
                    ') - - @cached - def filter_box_context_info(self): - entity = self.cw_rset.get_entity(0, 0) - role, eid, rtype, etype = self._cw.search_state[1] - assert entity.eid == int(eid) - # the default behaviour is to fetch all unrelated entities and display - # them. Use fetch_order and not fetch_unrelated_order as sort method - # since the latter is mainly there to select relevant items in the combo - # box, it doesn't give interesting result in this context - rql, args = entity.cw_unrelated_rql(rtype, etype, role, - ordermethod='fetch_order', - vocabconstraints=False) - rset = self._cw.execute(rql, args) - return rset, 'list', "search-associate-content", True - - -class OutOfContextSearch(EntityView): - __regid__ = 'outofcontext-search' - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - erset = entity.as_rset() - if self._cw.match_search_state(erset): - self.w(u'%s [...]' % ( - xml_escape(linksearch_select_url(self._cw, erset)), - self._cw._('select this entity'), - xml_escape(entity.view('textoutofcontext')), - xml_escape(entity.absolute_url(vid='primary')), - self._cw._('view detail for this entity'))) - else: - entity.view('outofcontext', w=self.w) - - -class ComboboxView(EntityView): - """the view used in combobox (unrelated entities) - - THIS IS A TEXT VIEW. DO NOT HTML_ESCAPE - """ - __regid__ = 'combobox' - title = None - - def cell_call(self, row, col, **kwargs): - """the combo-box view for an entity: same as text out of context view - by default - """ - self.wview('textoutofcontext', self.cw_rset, row=row, col=col) - - -class EditableFinalView(baseviews.FinalView): - """same as FinalView but enables inplace-edition when possible""" - __regid__ = 'editable-final' - - def cell_call(self, row, col, props=None): - entity, rtype = self.cw_rset.related_entity(row, col) - if entity is not None: - self.w(entity.view('reledit', rtype=rtype)) - else: - super(EditableFinalView, self).cell_call(row, col, props) - -try: - from cubicweb.web import captcha -except ImportError: - # PIL not installed - pass -else: - class CaptchaView(StartupView): - __regid__ = 'captcha' - - http_cache_manager = httpcache.NoHTTPCacheManager - binary = True - templatable = False - content_type = 'image/jpg' - - def call(self): - text, data = captcha.captcha(self._cw.vreg.config['captcha-font-file'], - self._cw.vreg.config['captcha-font-size']) - key = self._cw.form.get('captchakey', 'captcha') - self._cw.session.data[key] = text - self.w(data.read()) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/emailaddress.py --- a/web/views/emailaddress.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,146 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
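Back in filter_box_context_info() above (editviews.py), the linksearch flow builds an "unrelated entities" RQL query and executes it to obtain the candidates to display. A hedged sketch, where the relation type 'concerns', the target type 'Project' and the role are placeholders; the keyword arguments are the ones used above.

.. sourcecode :: python

    def unrelated_candidates(req, entity):
        """Sketch of the linksearch lookup above; 'concerns' and 'Project'
        are placeholder relation/target types."""
        rql, args = entity.cw_unrelated_rql('concerns', 'Project', 'subject',
                                            ordermethod='fetch_order',
                                            vocabconstraints=False)
        return req.execute(rql, args)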
-"""Specific views for email addresses entities""" - -__docformat__ = "restructuredtext en" - -from logilab.mtconverter import xml_escape - -from cubicweb.schema import display_name -from cubicweb.predicates import is_instance -from cubicweb import Unauthorized -from cubicweb.web.views import uicfg, baseviews, primary, ibreadcrumbs - -_pvs = uicfg.primaryview_section -_pvs.tag_subject_of(('*', 'use_email', '*'), 'attributes') -_pvs.tag_subject_of(('*', 'primary_email', '*'), 'hidden') - -class EmailAddressPrimaryView(primary.PrimaryView): - __select__ = is_instance('EmailAddress') - - def cell_call(self, row, col, skipeids=None): - self.skipeids = skipeids - super(EmailAddressPrimaryView, self).cell_call(row, col) - - def render_entity_attributes(self, entity): - self.w(u'

                    ') - entity.view('oneline', w=self.w) - if entity.prefered: - self.w(u' (%s)' % entity.prefered.view('oneline')) - self.w(u'

                    ') - try: - persons = entity.reverse_primary_email - except Unauthorized: - persons = [] - if persons: - emailof = persons[0] - self.field(display_name(self._cw, 'primary_email', 'object'), emailof.view('oneline')) - pemaileid = emailof.eid - else: - pemaileid = None - try: - emailof = 'use_email' in self._cw.vreg.schema and entity.reverse_use_email or () - emailof = [e for e in emailof if not e.eid == pemaileid] - except Unauthorized: - emailof = [] - if emailof: - emailofstr = ', '.join(e.view('oneline') for e in emailof) - self.field(display_name(self._cw, 'use_email', 'object'), emailofstr) - - def render_entity_relations(self, entity): - for i, email in enumerate(entity.related_emails(self.skipeids)): - self.w(u'
<div class="%s">' % (i%2 and 'even' or 'odd')) - email.view('oneline', w=self.w, contexteid=entity.eid) - self.w(u'</div>
                    ') - - -class EmailAddressShortPrimaryView(EmailAddressPrimaryView): - __select__ = is_instance('EmailAddress') - __regid__ = 'shortprimary' - title = None # hidden view - - def render_entity_attributes(self, entity): - self.w(u'
                    ') - entity.view('oneline', w=self.w) - self.w(u'
                    ') - - -class EmailAddressOneLineView(baseviews.OneLineView): - __select__ = is_instance('EmailAddress') - - def entity_call(self, entity, **kwargs): - if entity.reverse_primary_email: - self.w(u'') - if entity.alias: - self.w(u'%s <' % xml_escape(entity.alias)) - self.w('%s' % (xml_escape(entity.absolute_url()), - xml_escape(entity.display_address()))) - if entity.alias: - self.w(u'>\n') - if entity.reverse_primary_email: - self.w(u'') - - -class EmailAddressMailToView(baseviews.OneLineView): - """A one line view that builds a user clickable URL for an email with - 'mailto:'""" - - __regid__ = 'mailto' - __select__ = is_instance('EmailAddress') - - def entity_call(self, entity, **kwargs): - if entity.reverse_primary_email: - self.w(u'') - if entity.alias: - alias = entity.alias - elif entity.reverse_use_email: - alias = entity.reverse_use_email[0].dc_title() - else: - alias = None - if alias: - mailto = "mailto:%s <%s>" % (alias, entity.display_address()) - else: - mailto = "mailto:%s" % entity.display_address() - self.w(u'%s' % (xml_escape(mailto), - xml_escape(entity.display_address()))) - if entity.reverse_primary_email: - self.w(u'') - - -class EmailAddressInContextView(baseviews.InContextView): - __select__ = is_instance('EmailAddress') - - def cell_call(self, row, col, **kwargs): - if self._cw.vreg.config['mangle-emails']: - self.wview('oneline', self.cw_rset, row=row, col=col, **kwargs) - else: - self.wview('mailto', self.cw_rset, row=row, col=col, **kwargs) - - -class EmailAddressTextView(baseviews.TextView): - __select__ = is_instance('EmailAddress') - - def cell_call(self, row, col, **kwargs): - self.w(self.cw_rset.get_entity(row, col).display_address()) - - -class EmailAddressIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('EmailAddress') - - def parent_entity(self): - return self.entity.email_of diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/embedding.py --- a/web/views/embedding.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Objects interacting together to provides the external page embeding -functionality. 
-""" - -from logilab.common.deprecation import class_moved, moved - -try: - from cubes.embed.views import * - - IEmbedableAdapter = class_moved(IEmbedableAdapter, message='[3.17] IEmbedableAdapter moved to cubes.embed.views') - ExternalTemplate = class_moved(ExternalTemplate, message='[3.17] IEmbedableAdapter moved to cubes.embed.views') - EmbedController = class_moved(EmbedController, message='[3.17] IEmbedableAdapter moved to cubes.embed.views') - entity_has_embedable_url = moved('cubes.embed.views', 'entity_has_embedable_url') - EmbedAction = class_moved(EmbedAction, message='[3.17] EmbedAction moved to cubes.embed.views') - replace_href = class_moved(replace_href, message='[3.17] replace_href moved to cubes.embed.views') - embed_external_page = moved('cubes.embed.views', 'embed_external_page') - absolutize_links = class_moved(absolutize_links, message='[3.17] absolutize_links moved to cubes.embed.views') - prefix_links = moved('cubes.embed.views', 'prefix_links') -except ImportError: - from cubicweb.web import LOGGER - LOGGER.warning('[3.17] embedding extracted to cube embed that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/error.py --- a/web/views/error.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,40 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Set of HTML errors views. Error view are generally implemented -as startup views and are used for standard error pages (404, 500, etc.) -""" - -__docformat__ = "restructuredtext en" - -from cubicweb.view import StartupView - -class FourOhFour(StartupView): - __regid__ = '404' - - def call(self): - _ = self._cw._ - self.w(u"

<h1>%s</h1>
                    " % _('this resource does not exist')) - - -class ErrorOccured(StartupView): - __regid__ = '500' - - def call(self): - _ = self._cw._ - self.w(u"

<h1>%s</h1>
                    " % - _('an error occurred, the request cannot be fulfilled')) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/facets.py --- a/web/views/facets.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,435 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""the facets box and some basic facets""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from logilab.mtconverter import xml_escape -from logilab.common.decorators import cachedproperty -from logilab.common.registry import objectify_predicate, yes - -from cubicweb import tags -from cubicweb.predicates import (non_final_entity, multi_lines_rset, - match_context_prop, relation_possible) -from cubicweb.utils import json_dumps -from cubicweb.uilib import css_em_num_value -from cubicweb.view import AnyRsetView -from cubicweb.web import component, facet as facetbase -from cubicweb.web.views.ajaxcontroller import ajaxfunc - -def facets(req, rset, context, mainvar=None, **kwargs): - """return the base rql and a list of widgets for facets applying to the - given rset/context (cached version of :func:`_facet`) - - :param req: A :class:`~cubicweb.req.RequestSessionBase` object - :param rset: A :class:`~cubicweb.rset.ResultSet` - :param context: A string that match the ``__regid__`` of a ``FacetFilter`` - :param mainvar: A string that match a select var from the rset - """ - try: - cache = req.__rset_facets - except AttributeError: - cache = req.__rset_facets = {} - try: - return cache[(rset, context, mainvar)] - except KeyError: - facets = _facets(req, rset, context, mainvar, **kwargs) - cache[(rset, context, mainvar)] = facets - return facets - -def _facets(req, rset, context, mainvar, **kwargs): - """return the base rql and a list of widgets for facets applying to the - given rset/context - - :param req: A :class:`~cubicweb.req.RequestSessionBase` object - :param rset: A :class:`~cubicweb.rset.ResultSet` - :param context: A string that match the ``__regid__`` of a ``FacetFilter`` - :param mainvar: A string that match a select var from the rset - """ - ### initialisation - # XXX done by selectors, though maybe necessary when rset has been hijacked - # (e.g. 
contextview_selector matched) - origqlst = rset.syntax_tree() - # union not yet supported - if len(origqlst.children) != 1: - req.debug('facette disabled on union request %s', origqlst) - return None, () - rqlst = origqlst.copy() - select = rqlst.children[0] - filtered_variable, baserql = facetbase.init_facets(rset, select, mainvar) - ### Selection - possible_facets = req.vreg['facets'].poss_visible_objects( - req, rset=rset, rqlst=origqlst, select=select, - context=context, filtered_variable=filtered_variable, **kwargs) - wdgs = [(facet, facet.get_widget()) for facet in possible_facets] - return baserql, [wdg for facet, wdg in wdgs if wdg is not None] - - -@objectify_predicate -def contextview_selector(cls, req, rset=None, row=None, col=None, view=None, - **kwargs): - if view: - try: - getcontext = getattr(view, 'filter_box_context_info') - except AttributeError: - return 0 - rset = getcontext()[0] - if rset is None or rset.rowcount < 2: - return 0 - wdgs = facets(req, rset, cls.__regid__, view=view)[1] - return len(wdgs) - return 0 - -@objectify_predicate -def has_facets(cls, req, rset=None, **kwargs): - if rset is None or rset.rowcount < 2: - return 0 - wdgs = facets(req, rset, cls.__regid__, **kwargs)[1] - return len(wdgs) - - -def filter_hiddens(w, baserql, wdgs, **kwargs): - kwargs['facets'] = ','.join(wdg.facet.__regid__ for wdg in wdgs) - kwargs['baserql'] = baserql - for key, val in kwargs.items(): - w(u'' % ( - key, xml_escape(val))) - - -class FacetFilterMixIn(object): - """Mixin Class to generate Facet Filter Form - - To generate the form, you need to explicitly call the following method: - - .. automethod:: generate_form - - The most useful function to override is: - - .. automethod:: layout_widgets - """ - - needs_js = ['cubicweb.ajax.js', 'cubicweb.facets.js'] - needs_css = ['cubicweb.facets.css'] - - def generate_form(self, w, rset, divid, vid, vidargs=None, mainvar=None, - paginate=False, cssclass='', hiddens=None, **kwargs): - """display a form to filter some view's content - - :param w: Write function - - :param rset: ResultSet to be filtered - - :param divid: Dom ID of the div where the rendering of the view is done. - :type divid: string - - :param vid: ID of the view display in the div - :type vid: string - - :param paginate: Is the view paginated? - :type paginate: boolean - - :param cssclass: Additional css classes to put on the form. - :type cssclass: string - - :param hiddens: other hidden parametters to include in the forms. - :type hiddens: dict from extra keyword argument - """ - # XXX Facet.context property hijacks an otherwise well-behaved - # vocabulary with its own notions - # Hence we whack here to avoid a clash - kwargs.pop('context', None) - baserql, wdgs = facets(self._cw, rset, context=self.__regid__, - mainvar=mainvar, **kwargs) - assert wdgs - self._cw.add_js(self.needs_js) - self._cw.add_css(self.needs_css) - self._cw.html_headers.define_var('facetLoadingMsg', - self._cw._('facet-loading-msg')) - if vidargs is not None: - warn("[3.14] vidargs is deprecated. Maybe you're using some TableView?", - DeprecationWarning, stacklevel=2) - else: - vidargs = {} - vidargs = dict((k, v) for k, v in vidargs.items() if v) - facetargs = xml_escape(json_dumps([divid, vid, paginate, vidargs])) - w(u'
                    ' % (divid, cssclass, facetargs)) - w(u'
                    ') - if hiddens is None: - hiddens = {} - if mainvar: - hiddens['mainvar'] = mainvar - filter_hiddens(w, baserql, wdgs, **hiddens) - self.layout_widgets(w, self.sorted_widgets(wdgs)) - - # is supposed to submit the form only if there is a single - # input:text field. However most browsers will submit the form - # on anyway if there is an input:submit field. - # - # see: http://www.w3.org/MarkUp/html-spec/html-spec_8.html#SEC8.2 - # - # Firefox 7.0.1 does not submit form on if there is more than a - # input:text field and not input:submit but does it if there is an - # input:submit. - # - # IE 6 or Firefox 2 behave the same way. - w(u'') - # - w(u'
                    \n') - w(u'
                    \n') - - def sorted_widgets(self, wdgs): - """sort widgets: by default sort by widget height, then according to - widget.order (the original widgets order) - """ - return sorted(wdgs, key=lambda x: 99 * (not x.facet.start_unfolded) or x.height ) - - def layout_widgets(self, w, wdgs): - """layout widgets: by default simply render each of them - (i.e. succession of
                    ) - """ - for wdg in wdgs: - wdg.render(w=w) - - -class FilterBox(FacetFilterMixIn, component.CtxComponent): - """filter results of a query""" - __regid__ = 'facet.filterbox' - __select__ = ((non_final_entity() & has_facets()) - | contextview_selector()) # can't use has_facets because of - # contextview mecanism - context = 'left' # XXX doesn't support 'incontext', only 'left' or 'right' - title = _('facet.filters') - visible = True # functionality provided by the search box by default - order = 1 - - bk_linkbox_template = u'
                    %s
                    ' - - def render_body(self, w, **kwargs): - req = self._cw - rset, vid, divid, paginate = self._get_context() - assert len(rset) > 1 - if vid is None: - vid = req.form.get('vid') - if self.bk_linkbox_template and req.vreg.schema['Bookmark'].has_perm(req, 'add'): - w(self.bookmark_link(rset)) - w(self.focus_link(rset)) - hiddens = {} - for param in ('subvid', 'vtitle'): - if param in req.form: - hiddens[param] = req.form[param] - self.generate_form(w, rset, divid, vid, paginate=paginate, - hiddens=hiddens, **self.cw_extra_kwargs) - - def _get_context(self): - view = self.cw_extra_kwargs.get('view') - context = getattr(view, 'filter_box_context_info', lambda: None)() - if context: - rset, vid, divid, paginate = context - else: - rset = self.cw_rset - vid, divid = None, 'pageContent' - paginate = view and view.paginable - return rset, vid, divid, paginate - - def bookmark_link(self, rset): - req = self._cw - bk_path = u'rql=%s' % req.url_quote(rset.printable_rql()) - if req.form.get('vid'): - bk_path += u'&vid=%s' % req.url_quote(req.form['vid']) - bk_path = u'view?' + bk_path - bk_title = req._('my custom search') - linkto = u'bookmarked_by:%s:subject' % req.user.eid - bkcls = req.vreg['etypes'].etype_class('Bookmark') - bk_add_url = bkcls.cw_create_url(req, path=bk_path, title=bk_title, - __linkto=linkto) - bk_base_url = bkcls.cw_create_url(req, title=bk_title, __linkto=linkto) - bk_link = u'%s' % ( - xml_escape(bk_base_url), xml_escape(bk_add_url), - req._('bookmark this search')) - return self.bk_linkbox_template % bk_link - - def focus_link(self, rset): - return self.bk_linkbox_template % tags.a(self._cw._('focus on this selection'), - href=self._cw.url(), id='focusLink') - -class FilterTable(FacetFilterMixIn, AnyRsetView): - __regid__ = 'facet.filtertable' - __select__ = has_facets() - average_perfacet_uncomputable_overhead = .3 - - def call(self, vid, divid, vidargs=None, cssclass=''): - hiddens = self.cw_extra_kwargs.setdefault('hiddens', {}) - hiddens['fromformfilter'] = '1' - self.generate_form(self.w, self.cw_rset, divid, vid, vidargs=vidargs, - cssclass=cssclass, **self.cw_extra_kwargs) - - @cachedproperty - def per_facet_height_overhead(self): - return (css_em_num_value(self._cw.vreg, 'facet_MarginBottom', .2) + - css_em_num_value(self._cw.vreg, 'facet_Padding', .2) + - self.average_perfacet_uncomputable_overhead) - - def layout_widgets(self, w, wdgs): - """layout widgets: put them in a table where each column should have - sum(wdg.height) < wdg_stack_size. - """ - w(u'
                    \n') - widget_queue = [] - queue_height = 0 - wdg_stack_size = facetbase._DEFAULT_FACET_GROUP_HEIGHT - for wdg in wdgs: - height = wdg.height + self.per_facet_height_overhead - if queue_height + height <= wdg_stack_size: - widget_queue.append(wdg) - queue_height += height - continue - w(u'
                    ') - for queued in widget_queue: - queued.render(w=w) - w(u'
                    ') - widget_queue = [wdg] - queue_height = height - if widget_queue: - w(u'
                    ') - for queued in widget_queue: - queued.render(w=w) - w(u'
                    ') - w(u'
                    \n') - -# python-ajax remote functions used by facet widgets ######################### - -@ajaxfunc(output_type='json') -def filter_build_rql(self, names, values): - form = self._rebuild_posted_form(names, values) - self._cw.form = form - builder = facetbase.FilterRQLBuilder(self._cw) - return builder.build_rql() - -@ajaxfunc(output_type='json') -def filter_select_content(self, facetids, rql, mainvar): - # Union unsupported yet - select = self._cw.vreg.parse(self._cw, rql).children[0] - filtered_variable = facetbase.get_filtered_variable(select, mainvar) - facetbase.prepare_select(select, filtered_variable) - update_map = {} - for fid in facetids: - fobj = facetbase.get_facet(self._cw, fid, select, filtered_variable) - update_map[fid] = fobj.possible_values() - return update_map - - - -# facets ###################################################################### - -class CWSourceFacet(facetbase.RelationFacet): - __regid__ = 'cw_source-facet' - rtype = 'cw_source' - target_attr = 'name' - -class CreatedByFacet(facetbase.RelationFacet): - __regid__ = 'created_by-facet' - rtype = 'created_by' - target_attr = 'login' - -class InGroupFacet(facetbase.RelationFacet): - __regid__ = 'in_group-facet' - rtype = 'in_group' - target_attr = 'name' - -class InStateFacet(facetbase.RelationAttributeFacet): - __regid__ = 'in_state-facet' - rtype = 'in_state' - target_attr = 'name' - - -# inherit from RelationFacet to benefit from its possible_values implementation -class ETypeFacet(facetbase.RelationFacet): - __regid__ = 'etype-facet' - __select__ = yes() - order = 1 - rtype = 'is' - target_attr = 'name' - - @property - def title(self): - return self._cw._('entity type') - - def vocabulary(self): - """return vocabulary for this facet, eg a list of 2-uple (label, value) - """ - etypes = self.cw_rset.column_types(0) - return sorted((self._cw._(etype), etype) for etype in etypes) - - def add_rql_restrictions(self): - """add restriction for this facet into the rql syntax tree""" - value = self._cw.form.get(self.__regid__) - if not value: - return - self.select.add_type_restriction(self.filtered_variable, value) - - def possible_values(self): - """return a list of possible values (as string since it's used to - compare to a form value in javascript) for this facet - """ - select = self.select - select.save_state() - try: - facetbase.cleanup_select(select, self.filtered_variable) - etype_var = facetbase.prepare_vocabulary_select( - select, self.filtered_variable, self.rtype, self.role) - attrvar = select.make_variable() - select.add_selected(attrvar) - select.add_relation(etype_var, 'name', attrvar) - return [etype for _, etype in self.rqlexec(select.as_string())] - finally: - select.recover() - - -class HasTextFacet(facetbase.AbstractFacet): - __select__ = relation_possible('has_text', 'subject') & match_context_prop() - __regid__ = 'has_text-facet' - rtype = 'has_text' - role = 'subject' - order = 0 - - @property - def wdgclass(self): - return facetbase.FacetStringWidget - - @property - def title(self): - return self._cw._('has_text') - - def get_widget(self): - """return the widget instance to use to display this facet - - default implentation expects a .vocabulary method on the facet and - return a combobox displaying this vocabulary - """ - return self.wdgclass(self) - - def add_rql_restrictions(self): - """add restriction for this facet into the rql syntax tree""" - value = self._cw.form.get(self.__regid__) - if not value: - return - 
self.select.add_constant_restriction(self.filtered_variable, 'has_text', value, 'String') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/formrenderers.py --- a/web/views/formrenderers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,546 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -Renderers ---------- - -.. Note:: - Form renderers are responsible to layout a form to HTML. - -Here are the base renderers available: - -.. autoclass:: cubicweb.web.views.formrenderers.FormRenderer -.. autoclass:: cubicweb.web.views.formrenderers.HTableFormRenderer -.. autoclass:: cubicweb.web.views.formrenderers.EntityCompositeFormRenderer -.. autoclass:: cubicweb.web.views.formrenderers.EntityFormRenderer -.. autoclass:: cubicweb.web.views.formrenderers.EntityInlinedFormRenderer - -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from six import text_type - -from logilab.mtconverter import xml_escape -from logilab.common.registry import yes - -from cubicweb import tags, uilib -from cubicweb.appobject import AppObject -from cubicweb.predicates import is_instance -from cubicweb.utils import json_dumps, support_args -from cubicweb.web import eid_param, formwidgets as fwdgs - - -def checkbox(name, value, attrs='', checked=None): - if checked is None: - checked = value - checked = checked and 'checked="checked"' or '' - return u'' % ( - name, value, checked, attrs) - -def field_label(form, field): - if callable(field.label): - return field.label(form, field) - # XXX with 3.6 we can now properly rely on 'if field.role is not None' and - # stop having a tuple for label - if isinstance(field.label, tuple): # i.e. 
needs contextual translation - return form._cw.pgettext(*field.label) - return form._cw._(field.label) - - - -class FormRenderer(AppObject): - """This is the 'default' renderer, displaying fields in a two columns table: - - +--------------+--------------+ - | field1 label | field1 input | - +--------------+--------------+ - | field2 label | field2 input | - +--------------+--------------+ - - +---------+ - | buttons | - +---------+ - """ - __registry__ = 'formrenderers' - __regid__ = 'default' - - _options = ('display_label', 'display_help', - 'display_progress_div', 'table_class', 'button_bar_class', - # add entity since it may be given to select the renderer - 'entity') - display_label = True - display_help = True - display_progress_div = True - table_class = u'attributeForm' - button_bar_class = u'formButtonBar' - - def __init__(self, req=None, rset=None, row=None, col=None, **kwargs): - super(FormRenderer, self).__init__(req, rset=rset, row=row, col=col) - if self._set_options(kwargs): - raise ValueError('unconsumed arguments %s' % kwargs) - - def _set_options(self, kwargs): - for key in self._options: - try: - setattr(self, key, kwargs.pop(key)) - except KeyError: - continue - return kwargs - - # renderer interface ###################################################### - - def render(self, w, form, values): - self._set_options(values) - form.add_media() - data = [] - _w = data.append - _w(self.open_form(form, values)) - self.render_content(_w, form, values) - _w(self.close_form(form, values)) - errormsg = self.error_message(form) - if errormsg: - data.insert(0, errormsg) - # NOTE: we call unicode because `tag` objects may be found within data - # e.g. from the cwtags library - w(''.join(text_type(x) for x in data)) - - def render_content(self, w, form, values): - if self.display_progress_div: - w(u'
<div id="progress">%s</div>
                    ' % self._cw._('validating...')) - w(u'\n
                    \n') - self.render_fields(w, form, values) - self.render_buttons(w, form) - w(u'\n
                    \n') - - def render_label(self, form, field): - if field.label is None: - return u'' - label = field_label(form, field) - attrs = {'for': field.dom_id(form)} - if field.required: - attrs['class'] = 'required' - return tags.label(label, **attrs) - - def render_help(self, form, field): - help = [] - descr = field.help - if callable(descr): - descr = descr(form, field) - if descr: - help.append('
<div class="helper">%s</div>
                    ' % self._cw._(descr)) - example = field.example_format(self._cw) - if example: - help.append('
<div class="helper">(%s: %s)</div>
                    ' - % (self._cw._('sample format'), example)) - return u' '.join(help) - - # specific methods (mostly to ease overriding) ############################# - - def error_message(self, form): - """return formatted error message - - This method should be called once inlined field errors has been consumed - """ - req = self._cw - errex = form.form_valerror - # get extra errors - if errex is not None: - errormsg = req._('please correct the following errors:') - errors = form.remaining_errors() - if errors: - if len(errors) > 1: - templstr = u'
<li>%s</li>\n' - else: - templstr = u' %s\n' - for field, err in errors: - if field is None: - errormsg += templstr % err - else: - errormsg += templstr % '%s: %s' % (req._(field), err) - if len(errors) > 1: - errormsg = '<ul>%s</ul>
                    ' % errormsg - return u'
<div class="errorMessage">%s</div>
                    ' % errormsg - return u'' - - def open_form(self, form, values, **attrs): - if form.needs_multipart: - enctype = u'multipart/form-data' - else: - enctype = u'application/x-www-form-urlencoded' - attrs.setdefault('enctype', enctype) - attrs.setdefault('method', 'post') - attrs.setdefault('action', form.form_action() or '#') - if form.domid: - attrs.setdefault('id', form.domid) - if form.onsubmit: - attrs.setdefault('onsubmit', form.onsubmit) - if form.cssstyle: - attrs.setdefault('style', form.cssstyle) - if form.cssclass: - attrs.setdefault('class', form.cssclass) - if form.cwtarget: - attrs.setdefault('target', form.cwtarget) - if not form.autocomplete: - attrs.setdefault('autocomplete', 'off') - return '
                    ' % uilib.sgml_attributes(attrs) - - def close_form(self, form, values): - """seems dumb but important for consistency w/ close form, and necessary - for form renderers overriding open_form to use something else or more than - and - """ - out = u'
                    ' - if form.cwtarget: - attrs = {'name': form.cwtarget, 'id': form.cwtarget, - 'width': '0px', 'height': '0px', - 'src': 'javascript: void(0);'} - out = (u'\n' % uilib.sgml_attributes(attrs)) + out - return out - - def render_fields(self, w, form, values): - fields = self._render_hidden_fields(w, form) - if fields: - self._render_fields(fields, w, form) - self.render_child_forms(w, form, values) - - def render_child_forms(self, w, form, values): - # render - for childform in getattr(form, 'forms', []): - self.render_fields(w, childform, values) - - def _render_hidden_fields(self, w, form): - fields = form.fields[:] - for field in form.fields: - if not field.is_visible(): - w(field.render(form, self)) - w(u'\n') - fields.remove(field) - return fields - - def _render_fields(self, fields, w, form): - byfieldset = {} - for field in fields: - byfieldset.setdefault(field.fieldset, []).append(field) - if form.fieldsets_in_order: - fieldsets = form.fieldsets_in_order - else: - fieldsets = byfieldset - for fieldset in list(fieldsets): - try: - fields = byfieldset.pop(fieldset) - except KeyError: - self.warning('no such fieldset: %s (%s)', fieldset, form) - continue - w(u'
                    \n') - if fieldset: - w(u'%s' % self._cw.__(fieldset)) - w(u'\n' % self.table_class) - for field in fields: - w(u'\n' % (field.name, field.role)) - if self.display_label and field.label is not None: - w(u'\n' % self.render_label(form, field)) - w(u'\n') - w(field.render(form, self)) - w(u'\n') - if error: - self.render_error(w, error) - if self.display_help: - w(self.render_help(form, field)) - w(u'\n') - w(u'
                    %s
                    \n') - if byfieldset: - self.warning('unused fieldsets: %s', ', '.join(byfieldset)) - - def render_buttons(self, w, form): - if not form.form_buttons: - return - w(u'\n\n' % self.button_bar_class) - for button in form.form_buttons: - w(u'\n' % button.render(form)) - w(u'
                    %s
                    ') - - def render_error(self, w, err): - """return validation error for widget's field, if any""" - w(u'%s' % err) - - - -class BaseFormRenderer(FormRenderer): - """use form_renderer_id = 'base' if you want base FormRenderer layout even - when selected for an entity - """ - __regid__ = 'base' - - - -class HTableFormRenderer(FormRenderer): - """The 'htable' form renderer display fields horizontally in a table: - - +--------------+--------------+---------+ - | field1 label | field2 label | | - +--------------+--------------+---------+ - | field1 input | field2 input | buttons | - +--------------+--------------+---------+ - """ - __regid__ = 'htable' - - display_help = False - def _render_fields(self, fields, w, form): - w(u'') - w(u'') - for field in fields: - if self.display_label: - w(u'' % self.render_label(form, field)) - if self.display_help: - w(self.render_help(form, field)) - # empty slot for buttons - w(u'') - w(u'') - w(u'') - for field in fields: - error = form.field_error(field) - if error: - w(u'') - w(u'') - w(u'') - w(u'
                    %s 
                    ') - self.render_error(w, error) - else: - w(u'') - w(field.render(form, self)) - w(u'') - for button in form.form_buttons: - w(button.render(form)) - w(u'
                    ') - - def render_buttons(self, w, form): - pass - - -class OneRowTableFormRenderer(FormRenderer): - """The 'htable' form renderer display fields horizontally in a table: - - +--------------+--------------+--------------+--------------+---------+ - | field1 label | field1 input | field2 label | field2 input | buttons | - +--------------+--------------+--------------+--------------+---------+ - """ - __regid__ = 'onerowtable' - - display_help = False - def _render_fields(self, fields, w, form): - w(u'') - w(u'') - for field in fields: - if self.display_label: - w(u'' % self.render_label(form, field)) - if self.display_help: - w(self.render_help(form, field)) - error = form.field_error(field) - if error: - w(u'') - w(u'') - w(u'') - w(u'
                    %s') - self.render_error(w, error) - else: - w(u'') - w(field.render(form, self)) - w(u'') - for button in form.form_buttons: - w(button.render(form)) - w(u'
                    ') - - def render_buttons(self, w, form): - pass - - -class EntityCompositeFormRenderer(FormRenderer): - """This is a specific renderer for the multiple entities edition form - ('muledit'). - - Each entity form will be displayed in row off a table, with a check box for - each entities to indicate which ones are edited. Those checkboxes should be - automatically updated when something is edited. - """ - __regid__ = 'composite' - - _main_display_fields = None - - def render_fields(self, w, form, values): - if form.parent_form is None: - w(u'') - # get fields from the first subform with something to display (we - # may have subforms with nothing editable that will simply be - # skipped later) - for subform in form.forms: - subfields = [field for field in subform.fields - if field.is_visible()] - if subfields: - break - if subfields: - # main form, display table headers - w(u'') - w(u'' % - tags.input(type='checkbox', - title=self._cw._('toggle check boxes'), - onclick="setCheckboxesState('eid', null, this.checked)")) - for field in subfields: - w(u'' % field_label(form, field)) - w(u'') - super(EntityCompositeFormRenderer, self).render_fields(w, form, values) - if form.parent_form is None: - w(u'
                    %s%s
                    ') - if self._main_display_fields: - super(EntityCompositeFormRenderer, self)._render_fields( - self._main_display_fields, w, form) - - def _render_fields(self, fields, w, form): - if form.parent_form is not None: - entity = form.edited_entity - values = form.form_previous_values - qeid = eid_param('eid', entity.eid) - cbsetstate = "setCheckboxesState('eid', %s, 'checked')" % \ - xml_escape(json_dumps(entity.eid)) - w(u'
              %s') - self.render_error(w, error) - else: - w(u'') - if isinstance(field.widget, (fwdgs.Select, fwdgs.CheckBox, - fwdgs.Radio)): - field.widget.attrs['onchange'] = cbsetstate - elif isinstance(field.widget, fwdgs.Input): - field.widget.attrs['onkeypress'] = cbsetstate - # XXX else - w(u'
              %s
              ' % field.render(form, self)) - w(u'
              - - - -
              - %s - - %s - %s -
              """ % tuple(button.render(form) for button in form.form_buttons)) - else: - super(EntityFormRenderer, self).render_buttons(w, form) - - -class EntityInlinedFormRenderer(EntityFormRenderer): - """This is a specific renderer for entity's form inlined into another - entity's form. - """ - __regid__ = 'inline' - fieldset_css_class = 'subentity' - - def render_title(self, w, form, values): - w(u'
              ') - w(u'%(title)s ' - '#%(counter)s ' % values) - if values['removejs']: - values['removemsg'] = self._cw._('remove-inlined-entity-form') - w(u'[%(removemsg)s]' - % values) - w(u'
              ') - - def render(self, w, form, values): - form.add_media() - self.open_form(w, form, values) - self.render_title(w, form, values) - # XXX that stinks - # cleanup values - for key in ('title', 'removejs', 'removemsg'): - values.pop(key, None) - self.render_fields(w, form, values) - self.close_form(w, form, values) - - def open_form(self, w, form, values): - try: - w(u'
              ' % values) - except KeyError: - w(u'
              ' % values) - else: - w(u'
              %s
              ' % ( - values['divid'], self._cw._('click on the box to cancel the deletion'))) - w(u'
              ') - - def close_form(self, w, form, values): - w(u'
              ') - - def render_fields(self, w, form, values): - w(u'
              ' % values) - fields = self._render_hidden_fields(w, form) - w(u'
              ') - w(u'
              ' % self.fieldset_css_class) - if fields: - self._render_fields(fields, w, form) - self.render_child_forms(w, form, values) - w(u'
              ') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/forms.py --- a/web/views/forms.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,483 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -Base form classes ------------------ - -.. Note: - - Form is the glue that bind a context to a set of fields, and is rendered - using a form renderer. No display is actually done here, though you'll find - some attributes of form that are used to control the rendering process. - -Besides the automagic form we'll see later, there are roughly two main -form classes in |cubicweb|: - -.. autoclass:: cubicweb.web.views.forms.FieldsForm -.. autoclass:: cubicweb.web.views.forms.EntityFieldsForm - -As you have probably guessed, choosing between them is easy. Simply ask you the -question 'I am editing an entity or not?'. If the answer is yes, use -:class:`EntityFieldsForm`, else use :class:`FieldsForm`. - -Actually there exists a third form class: - -.. autoclass:: cubicweb.web.views.forms.CompositeForm - -but you'll use this one rarely. -""" - -__docformat__ = "restructuredtext en" - - -import time -import inspect - -from six import text_type - -from logilab.common import dictattr, tempattr -from logilab.common.decorators import iclassmethod, cached -from logilab.common.textutils import splitstrip - -from cubicweb import ValidationError, neg_role -from cubicweb.predicates import non_final_entity, match_kwargs, one_line_rset -from cubicweb.web import RequestError, ProcessFormError -from cubicweb.web import form -from cubicweb.web.views import uicfg -from cubicweb.web.formfields import guess_field - - -class FieldsForm(form.Form): - """This is the base class for fields based forms. 
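The module docstring above boils the choice down to "am I editing an entity or not?". A hedged illustration of that rule of thumb; the regids mirror the ones used in this module and in the automatic entity forms, and req/entity are assumed to be available.

.. sourcecode :: python

    def pick_form(req, entity=None):
        """Sketch of the rule of thumb above: entity edition uses an
        EntityFieldsForm based form, anything else a plain FieldsForm."""
        if entity is None:
            return req.vreg['forms'].select('base', req)
        return req.vreg['forms'].select('edition', req, entity=entity)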
- - **Attributes** - - The following attributes may be either set on subclasses or given on - form selection to customize the generated form: - - :attr:`needs_js` - sequence of javascript files that should be added to handle this form - (through :meth:`~cubicweb.web.request.Request.add_js`) - - :attr:`needs_css` - sequence of css files that should be added to handle this form (through - :meth:`~cubicweb.web.request.Request.add_css`) - - :attr:`domid` - value for the "id" attribute of the tag - - :attr:`action` - value for the "action" attribute of the tag - - :attr:`onsubmit` - value for the "onsubmit" attribute of the tag - - :attr:`cssclass` - value for the "class" attribute of the tag - - :attr:`cssstyle` - value for the "style" attribute of the tag - - :attr:`cwtarget` - value for the "target" attribute of the tag - - :attr:`redirect_path` - relative to redirect to after submitting the form - - :attr:`copy_nav_params` - flag telling if navigation parameters should be copied back in hidden - inputs - - :attr:`form_buttons` - sequence of form control (:class:`~cubicweb.web.formwidgets.Button` - widgets instances) - - :attr:`form_renderer_id` - identifier of the form renderer to use to render the form - - :attr:`fieldsets_in_order` - sequence of fieldset names , to control order - - :attr:`autocomplete` - set to False to add 'autocomplete=off' in the form open tag - - **Generic methods** - - .. automethod:: cubicweb.web.form.Form.field_by_name(name, role=None) - .. automethod:: cubicweb.web.form.Form.fields_by_name(name, role=None) - - **Form construction methods** - - .. automethod:: cubicweb.web.form.Form.remove_field(field) - .. automethod:: cubicweb.web.form.Form.append_field(field) - .. automethod:: cubicweb.web.form.Form.insert_field_before(field, name, role=None) - .. automethod:: cubicweb.web.form.Form.insert_field_after(field, name, role=None) - .. automethod:: cubicweb.web.form.Form.add_hidden(name, value=None, **kwargs) - - **Form rendering methods** - - .. automethod:: cubicweb.web.views.forms.FieldsForm.render - - **Form posting methods** - - Once a form is posted, you can retrieve the form on the controller side and - use the following methods to ease processing. For "simple" forms, this - should looks like : - - .. sourcecode :: python - - form = self._cw.vreg['forms'].select('myformid', self._cw) - posted = form.process_posted() - # do something with the returned dictionary - - Notice that form related to entity edition should usually use the - `edit` controller which will handle all the logic for you. - - .. automethod:: cubicweb.web.views.forms.FieldsForm.process_posted - .. 
automethod:: cubicweb.web.views.forms.FieldsForm.iter_modified_fields - """ - __regid__ = 'base' - - - # attributes overrideable by subclasses or through __init__ - needs_js = ('cubicweb.ajax.js', 'cubicweb.edition.js',) - needs_css = ('cubicweb.form.css',) - action = None - cssclass = None - cssstyle = None - cwtarget = None - redirect_path = None - form_buttons = None - form_renderer_id = 'default' - fieldsets_in_order = None - autocomplete = True - - @property - def needs_multipart(self): - """true if the form needs enctype=multipart/form-data""" - return any(field.needs_multipart for field in self.fields) - - def _get_onsubmit(self): - try: - return self._onsubmit - except AttributeError: - return "return freezeFormButtons('%(domid)s');" % dictattr(self) - def _set_onsubmit(self, value): - self._onsubmit = value - onsubmit = property(_get_onsubmit, _set_onsubmit) - - def add_media(self): - """adds media (CSS & JS) required by this widget""" - if self.needs_js: - self._cw.add_js(self.needs_js) - if self.needs_css: - self._cw.add_css(self.needs_css) - - def render(self, formvalues=None, renderer=None, **kwargs): - """Render this form, using the `renderer` given as argument or the - default according to :attr:`form_renderer_id`. The rendered form is - returned as a unicode string. - - `formvalues` is an optional dictionary containing values that will be - considered as field's value. - - Extra keyword arguments will be given to renderer's :meth:`render` method. - """ - w = kwargs.pop('w', None) - self.build_context(formvalues) - if renderer is None: - renderer = self.default_renderer() - renderer.render(w, self, kwargs) - - def default_renderer(self): - return self._cw.vreg['formrenderers'].select( - self.form_renderer_id, self._cw, - rset=self.cw_rset, row=self.cw_row, col=self.cw_col or 0) - - formvalues = None - def build_context(self, formvalues=None): - """build form context values (the .context attribute which is a - dictionary with field instance as key associated to a dictionary - containing field 'name' (qualified), 'id', 'value' (for display, always - a string). - """ - if self.formvalues is not None: - return # already built - self.formvalues = formvalues or {} - # use a copy in case fields are modified while context is built (eg - # __linkto handling for instance) - for field in self.fields[:]: - for field in field.actual_fields(self): - field.form_init(self) - # store used field in an hidden input for later usage by a controller - fields = set() - eidfields = set() - for field in self.fields: - if field.eidparam: - eidfields.add(field.role_name()) - elif field.name not in self.control_fields: - fields.add(field.role_name()) - if fields: - self.add_hidden('_cw_fields', u','.join(fields)) - if eidfields: - self.add_hidden('_cw_entity_fields', u','.join(eidfields), - eidparam=True) - - _default_form_action_path = 'edit' - def form_action(self): - action = self.action - if action is None: - return self._cw.build_url(self._default_form_action_path) - return action - - # controller form processing methods ####################################### - - def iter_modified_fields(self, editedfields=None, entity=None): - """return a generator on field that has been modified by the posted - form. 
- """ - if editedfields is None: - try: - editedfields = self._cw.form['_cw_fields'] - except KeyError: - raise RequestError(self._cw._('no edited fields specified')) - entityform = entity and len(inspect.getargspec(self.field_by_name)) == 4 # XXX - for editedfield in splitstrip(editedfields): - try: - name, role = editedfield.split('-') - except Exception: - name = editedfield - role = None - if entityform: - field = self.field_by_name(name, role, eschema=entity.e_schema) - else: - field = self.field_by_name(name, role) - if field.has_been_modified(self): - yield field - - def process_posted(self): - """use this method to process the content posted by a simple form. it - will return a dictionary with field names as key and typed value as - associated value. - """ - with tempattr(self, 'formvalues', {}): # init fields value cache - errors = [] - processed = {} - for field in self.iter_modified_fields(): - try: - for field, value in field.process_posted(self): - processed[field.role_name()] = value - except ProcessFormError as exc: - errors.append((field, exc)) - if errors: - errors = dict((f.role_name(), text_type(ex)) for f, ex in errors) - raise ValidationError(None, errors) - return processed - - -class EntityFieldsForm(FieldsForm): - """This class is designed for forms used to edit some entities. It should - handle for you all the underlying stuff necessary to properly work with the - generic :class:`~cubicweb.web.views.editcontroller.EditController`. - """ - - __regid__ = 'base' - __select__ = (match_kwargs('entity') - | (one_line_rset() & non_final_entity())) - domid = 'entityForm' - uicfg_aff = uicfg.autoform_field - uicfg_affk = uicfg.autoform_field_kwargs - - @iclassmethod - def field_by_name(cls_or_self, name, role=None, eschema=None): - """return field with the given name and role. If field is not explicitly - defined for the form but `eclass` is specified, guess_field will be - called. - """ - try: - return super(EntityFieldsForm, cls_or_self).field_by_name(name, role) - except form.FieldNotFound: - if eschema is None or role is None or not name in eschema.schema: - raise - rschema = eschema.schema.rschema(name) - # XXX use a sample target type. Document this. 
- tschemas = rschema.targets(eschema, role) - fieldcls = cls_or_self.uicfg_aff.etype_get( - eschema, rschema, role, tschemas[0]) - kwargs = cls_or_self.uicfg_affk.etype_get( - eschema, rschema, role, tschemas[0]) - if kwargs is None: - kwargs = {} - if fieldcls: - if not isinstance(fieldcls, type): - return fieldcls # already and instance - return fieldcls(name=name, role=role, eidparam=True, **kwargs) - if isinstance(cls_or_self, type): - req = None - else: - req = cls_or_self._cw - field = guess_field(eschema, rschema, role, req=req, eidparam=True, **kwargs) - if field is None: - raise - return field - - def __init__(self, _cw, rset=None, row=None, col=None, **kwargs): - try: - self.edited_entity = kwargs.pop('entity') - except KeyError: - self.edited_entity = rset.complete_entity(row or 0, col or 0) - msg = kwargs.pop('submitmsg', None) - super(EntityFieldsForm, self).__init__(_cw, rset, row, col, **kwargs) - self.uicfg_aff = self._cw.vreg['uicfg'].select( - 'autoform_field', self._cw, entity=self.edited_entity) - self.uicfg_affk = self._cw.vreg['uicfg'].select( - 'autoform_field_kwargs', self._cw, entity=self.edited_entity) - self.add_hidden('__type', self.edited_entity.cw_etype, eidparam=True) - - self.add_hidden('eid', self.edited_entity.eid) - self.add_generation_time() - # mainform default to true in parent, hence default to True - if kwargs.get('mainform', True) or kwargs.get('mainentity', False): - self.add_hidden(u'__maineid', self.edited_entity.eid) - # If we need to directly attach the new object to another one - if '__linkto' in self._cw.form: - if msg: - msg = '%s %s' % (msg, self._cw._('and linked')) - else: - msg = self._cw._('entity linked') - if msg: - msgid = self._cw.set_redirect_message(msg) - self.add_hidden('_cwmsgid', msgid) - - def add_generation_time(self): - # use %f to prevent (unlikely) display in exponential format - self.add_hidden('__form_generation_time', '%.6f' % time.time(), - eidparam=True) - - def add_linkto_hidden(self): - """add the __linkto hidden field used to directly attach the new object - to an existing other one when the relation between those two is not - already present in the form. - - Warning: this method must be called only when all form fields are setup - """ - for (rtype, role), eids in self.linked_to.items(): - # if the relation is already setup by a form field, do not add it - # in a __linkto hidden to avoid setting it twice in the controller - try: - self.field_by_name(rtype, role) - except form.FieldNotFound: - for eid in eids: - self.add_hidden('__linkto', '%s:%s:%s' % (rtype, eid, role)) - - def render(self, *args, **kwargs): - self.add_linkto_hidden() - return super(EntityFieldsForm, self).render(*args, **kwargs) - - @property - @cached - def linked_to(self): - linked_to = {} - # case where this is an embeded creation form - try: - eid = int(self.cw_extra_kwargs['peid']) - except (KeyError, ValueError): - # When parent is being created, its eid is not numeric (e.g. 'A') - # hence ValueError. 
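# (no usable parent eid in that case, so no implicit link can be derived from
#  the embedding context; only explicit __linkto form parameters, handled
#  below, are taken into account)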
- pass - else: - ltrtype = self.cw_extra_kwargs['rtype'] - ltrole = neg_role(self.cw_extra_kwargs['role']) - linked_to[(ltrtype, ltrole)] = [eid] - # now consider __linkto if the current form is the main form - try: - self.field_by_name('__maineid') - except form.FieldNotFound: - return linked_to - for linkto in self._cw.list_form_param('__linkto'): - ltrtype, eid, ltrole = linkto.split(':') - linked_to.setdefault((ltrtype, ltrole), []).append(int(eid)) - return linked_to - - def session_key(self): - """return the key that may be used to store / retreive data about a - previous post which failed because of a validation error - """ - if self.force_session_key is not None: - return self.force_session_key - # XXX if this is a json request, suppose we should redirect to the - # entity primary view - if self._cw.ajax_request and self.edited_entity.has_eid(): - return '%s#%s' % (self.edited_entity.absolute_url(), self.domid) - # XXX we should not consider some url parameters that may lead to - # different url after a validation error - return '%s#%s' % (self._cw.url(), self.domid) - - def default_renderer(self): - return self._cw.vreg['formrenderers'].select( - self.form_renderer_id, self._cw, rset=self.cw_rset, row=self.cw_row, - col=self.cw_col, entity=self.edited_entity) - - def should_display_add_new_relation_link(self, rschema, existant, card): - return False - - # controller side method (eg POST reception handling) - - def actual_eid(self, eid): - # should be either an int (existant entity) or a variable (to be - # created entity) - assert eid or eid == 0, repr(eid) # 0 is a valid eid - try: - return int(eid) - except ValueError: - try: - return self._cw.data['eidmap'][eid] - except KeyError: - self._cw.data['eidmap'][eid] = None - return None - - def editable_relations(self): - return () - - -class CompositeFormMixIn(object): - __regid__ = 'composite' - form_renderer_id = __regid__ - - def __init__(self, *args, **kwargs): - super(CompositeFormMixIn, self).__init__(*args, **kwargs) - self.forms = [] - - def add_subform(self, subform): - """mark given form as a subform and append it""" - subform.parent_form = self - self.forms.append(subform) - - def build_context(self, formvalues=None): - super(CompositeFormMixIn, self).build_context(formvalues) - for form in self.forms: - form.build_context(formvalues) - - -class CompositeForm(CompositeFormMixIn, FieldsForm): - """Form composed of sub-forms. Typical usage is edition of multiple entities - at once. - """ - -class CompositeEntityForm(CompositeFormMixIn, EntityFieldsForm): - pass # XXX why is this class necessary? diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/ibreadcrumbs.py --- a/web/views/ibreadcrumbs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,203 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""breadcrumbs components definition for CubicWeb web client""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from six import text_type - -from logilab.mtconverter import xml_escape - -from cubicweb import tags, uilib -from cubicweb.entity import Entity -from cubicweb.predicates import (is_instance, one_line_rset, adaptable, - one_etype_rset, multi_lines_rset, any_rset, - match_form_params) -from cubicweb.view import EntityView, EntityAdapter -from cubicweb.web.views import basecomponents -# don't use AnyEntity since this may cause bug with isinstance() due to reloading - - - -class IBreadCrumbsAdapter(EntityAdapter): - """adapters for entities which can be"located" on some path to display in - the web ui - """ - __regid__ = 'IBreadCrumbs' - __select__ = is_instance('Any', accept_none=False) - - def parent_entity(self): - itree = self.entity.cw_adapt_to('ITree') - if itree is not None: - return itree.parent() - return None - - def breadcrumbs(self, view=None, recurs=None): - """return a list containing some: - - * tuple (url, label) - * entity - * simple label string - - defining path from a root to the current view - - the main view is given as argument so breadcrumbs may vary according to - displayed view (may be None). When recursing on a parent entity, the - `recurs` argument should be a set of already traversed nodes (infinite - loop safety belt). - """ - parent = self.parent_entity() - if parent is not None: - if recurs: - _recurs = recurs - else: - _recurs = set() - if _recurs and parent.eid in _recurs: - self.error('cycle in breadcrumbs for entity %s' % self.entity) - return [] - _recurs.add(parent.eid) - adapter = parent.cw_adapt_to('IBreadCrumbs') - path = adapter.breadcrumbs(view, _recurs) + [self.entity] - else: - path = [self.entity] - if not recurs: - if view is None: - if 'vtitle' in self._cw.form: - # embeding for instance - path.append( self._cw.form['vtitle'] ) - elif view.__regid__ != 'primary' and hasattr(view, 'title'): - path.append( self._cw._(view.title) ) - return path - - -class BreadCrumbEntityVComponent(basecomponents.HeaderComponent): - __regid__ = 'breadcrumbs' - __select__ = (basecomponents.HeaderComponent.__select__ - & one_line_rset() & adaptable('IBreadCrumbs')) - order = basecomponents.ApplicationName.order + 1 - context = basecomponents.ApplicationName.context - separator = u' > ' - link_template = u'%s' - first_separator = True - - # XXX support kwargs for compat with other components which gets the view as - # argument - def render(self, w, **kwargs): - try: - entity = self.cw_extra_kwargs['entity'] - except KeyError: - entity = self.cw_rset.get_entity(0, 0) - adapter = entity.cw_adapt_to('IBreadCrumbs') - view = self.cw_extra_kwargs.get('view') - path = adapter.breadcrumbs(view) - if path: - self.open_breadcrumbs(w) - self.render_breadcrumbs(w, entity, path) - self.close_breadcrumbs(w) - - def open_breadcrumbs(self, w): - w(u'') - if self.first_separator: - w(self.separator) - - def close_breadcrumbs(self, w): - w(u'') - - def render_breadcrumbs(self, w, contextentity, path): - root = path.pop(0) - if isinstance(root, Entity): - w(self.link_template % (self._cw.build_url(root.__regid__), - root.dc_type('plural'))) - w(self.separator) - self.wpath_part(w, root, contextentity, not path) - for i, parent in enumerate(path): - w(self.separator) - w(u"\n") - self.wpath_part(w, parent, 
contextentity, i == len(path) - 1) - - def wpath_part(self, w, part, contextentity, last=False): # XXX deprecates last argument? - if isinstance(part, Entity): - w(part.view('breadcrumbs')) - elif isinstance(part, tuple): - url, title = part - textsize = self._cw.property_value('navigation.short-line-size') - w(self.link_template % ( - xml_escape(url), xml_escape(uilib.cut(title, textsize)))) - else: - textsize = self._cw.property_value('navigation.short-line-size') - w(xml_escape(uilib.cut(text_type(part), textsize))) - - -class BreadCrumbETypeVComponent(BreadCrumbEntityVComponent): - __select__ = (basecomponents.HeaderComponent.__select__ - & multi_lines_rset() & one_etype_rset() - & adaptable('IBreadCrumbs')) - - def render_breadcrumbs(self, w, contextentity, path): - # XXX hack: only display etype name or first non entity path part - root = path.pop(0) - if isinstance(root, Entity): - w(u'%s' % (self._cw.build_url(root.__regid__), - root.dc_type('plural'))) - else: - self.wpath_part(w, root, contextentity, not path) - - -class BreadCrumbAnyRSetVComponent(BreadCrumbEntityVComponent): - __select__ = basecomponents.HeaderComponent.__select__ & any_rset() - - # XXX support kwargs for compat with other components which gets the view as - # argument - def render(self, w, **kwargs): - self.open_breadcrumbs(w) - w(self._cw._('search')) - self.close_breadcrumbs(w) - - -class BreadCrumbLinkToVComponent(BreadCrumbEntityVComponent): - __select__ = basecomponents.HeaderComponent.__select__ & match_form_params('__linkto') - - def render(self, w, **kwargs): - eid = self._cw.list_form_param('__linkto')[0].split(':')[1] - entity = self._cw.entity_from_eid(eid) - ecmp = self._cw.vreg[self.__registry__].select( - self.__regid__, self._cw, entity=entity, **kwargs) - ecmp.render(w, **kwargs) - - -class BreadCrumbView(EntityView): - __regid__ = 'breadcrumbs' - - def cell_call(self, row, col, **kwargs): - entity = self.cw_rset.get_entity(row, col) - desc = uilib.cut(entity.dc_description(), 50) - # NOTE remember camember: tags.a autoescapes - self.w(tags.a(entity.view('breadcrumbtext'), - href=entity.absolute_url(), title=desc)) - - -class BreadCrumbTextView(EntityView): - __regid__ = 'breadcrumbtext' - - def cell_call(self, row, col, **kwargs): - entity = self.cw_rset.get_entity(row, col) - textsize = self._cw.property_value('navigation.short-line-size') - self.w(uilib.cut(entity.dc_title(), textsize)) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/idownloadable.py --- a/web/views/idownloadable.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,203 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
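# Hedged illustration, not part of this changeset: how the IBreadCrumbs adapter
# defined in web/views/ibreadcrumbs.py above is typically specialised. The
# 'Ticket' entity type and its 'project' relation are assumptions made for the
# example; the adapter API (parent_entity / breadcrumbs) is the one shown in
# that file.
from cubicweb.predicates import is_instance
from cubicweb.web.views.ibreadcrumbs import IBreadCrumbsAdapter

class TicketIBreadCrumbsAdapter(IBreadCrumbsAdapter):
    __select__ = is_instance('Ticket')

    def parent_entity(self):
        # climb to the ticket's project instead of relying on the ITree adapter
        projects = self.entity.related('project', entities=True)
        return projects[0] if projects else None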
-""" -Specific views for entities adapting to IDownloadable -===================================================== -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves import range - -from logilab.mtconverter import BINARY_ENCODINGS, TransformError, xml_escape -from logilab.common.deprecation import class_renamed, deprecated - -from cubicweb import tags -from cubicweb.view import EntityView -from cubicweb.predicates import (one_line_rset, is_instance, match_context_prop, - adaptable, has_mimetype) -from cubicweb.mttransforms import ENGINE -from cubicweb.web import component, httpcache -from cubicweb.web.views import primary, baseviews - - -class DownloadBox(component.EntityCtxComponent): - """add download box""" - __regid__ = 'download_box' # no download box for images - __select__ = (component.EntityCtxComponent.__select__ & - adaptable('IDownloadable') & ~has_mimetype('image/')) - - order = 10 - title = _('download') - - def init_rendering(self): - self.items = [self.entity] - - def render_body(self, w): - for item in self.items: - idownloadable = item.cw_adapt_to('IDownloadable') - w(u'%s %s' - % (xml_escape(idownloadable.download_url()), - self._cw.uiprops['DOWNLOAD_ICON'], - self._cw._('download icon'), - xml_escape(idownloadable.download_file_name()))) - - -class DownloadView(EntityView): - """download view - - this view is replacing the deprecated 'download' controller and allow - downloading of entities providing the necessary interface - """ - __regid__ = 'download' - __select__ = one_line_rset() & adaptable('IDownloadable') - - templatable = False - content_type = 'application/octet-stream' - binary = True - http_cache_manager = httpcache.EntityHTTPCacheManager - add_to_breadcrumbs = False - - def set_request_content_type(self): - """overriden to set the correct filetype and filename""" - entity = self.cw_rset.complete_entity(self.cw_row or 0, self.cw_col or 0) - adapter = entity.cw_adapt_to('IDownloadable') - encoding = adapter.download_encoding() - if encoding in BINARY_ENCODINGS: - contenttype = 'application/%s' % encoding - encoding = None - else: - contenttype = adapter.download_content_type() - self._cw.set_content_type(contenttype or self.content_type, - filename=adapter.download_file_name(), - encoding=encoding, - disposition='attachment') - - def call(self): - entity = self.cw_rset.complete_entity(self.cw_row or 0, self.cw_col or 0) - adapter = entity.cw_adapt_to('IDownloadable') - self.w(adapter.download_data()) - - def last_modified(self): - return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).modification_date - - -class DownloadLinkView(EntityView): - """view displaying a link to download the file""" - __regid__ = 'downloadlink' - __select__ = adaptable('IDownloadable') - title = None # should not be listed in possible views - - - def cell_call(self, row, col, title=None, **kwargs): - entity = self.cw_rset.get_entity(row, col) - url = xml_escape(entity.cw_adapt_to('IDownloadable').download_url()) - self.w(u'%s' % (url, xml_escape(title or entity.dc_title()))) - - -class IDownloadablePrimaryView(primary.PrimaryView): - __select__ = adaptable('IDownloadable') - - def render_entity_attributes(self, entity): - self.w(u'
              ') - adapter = entity.cw_adapt_to('IDownloadable') - contenttype = adapter.download_content_type() - if contenttype.startswith('image/'): - self._cw.add_js('cubicweb.image.js') - self.wview('image', entity.cw_rset, row=entity.cw_row, col=entity.cw_col, - link=True, klass='contentimage') - super(IDownloadablePrimaryView, self).render_entity_attributes(entity) - elif contenttype.endswith('html'): - self.wview('downloadlink', entity.cw_rset, title=self._cw._('download'), row=entity.cw_row) - self.wview('ehtml', entity.cw_rset, row=entity.cw_row, col=entity.cw_col, - height='600px', width='100%') - else: - super(IDownloadablePrimaryView, self).render_entity_attributes(entity) - self.wview('downloadlink', entity.cw_rset, title=self._cw._('download'), row=entity.cw_row) - self.render_data(entity, contenttype, 'text/html') - self.w(u'
              ') - - def render_data(self, entity, sourcemt, targetmt): - adapter = entity.cw_adapt_to('IDownloadable') - if ENGINE.find_path(sourcemt, targetmt): - try: - self.w(entity._cw_mtc_transform(adapter.download_data(), sourcemt, - targetmt, adapter.download_encoding())) - except Exception as ex: - self.exception('while rendering data for %s', entity) - msg = self._cw._("can't display data, unexpected error: %s") \ - % xml_escape(unicode(ex)) - self.w('
              %s
              ' % msg) - return True - return False - - -class IDownloadableOneLineView(baseviews.OneLineView): - __select__ = adaptable('IDownloadable') - - def cell_call(self, row, col, title=None, **kwargs): - """the oneline view is a link to download the file""" - entity = self.cw_rset.get_entity(row, col) - url = xml_escape(entity.absolute_url()) - adapter = entity.cw_adapt_to('IDownloadable') - name = xml_escape(title or entity.dc_title()) - durl = xml_escape(adapter.download_url()) - self.w(u'%s [%s]' % - (url, name, durl, self._cw._('download'))) - - -class AbstractEmbeddedView(EntityView): - __abstract__ = True - - _embedding_tag = None - - def call(self, **kwargs): - rset = self.cw_rset - for i in range(len(rset)): - self.w(u'
              ') - self.wview(self.__regid__, rset, row=i, col=0, **kwargs) - self.w(u'
              ') - - def cell_call(self, row, col, link=False, **kwargs): - entity = self.cw_rset.get_entity(row, col) - adapter = entity.cw_adapt_to('IDownloadable') - tag = self._embedding_tag(src=adapter.download_url(), # pylint: disable=E1102 - alt=(self._cw._('download %s') % adapter.download_file_name()), - **kwargs) - if link: - self.w(u'%s' % (adapter.download_url(), tag)) - else: - self.w(tag) - - -class ImageView(AbstractEmbeddedView): - """image embedded view""" - __regid__ = 'image' - __select__ = has_mimetype('image/') - - title = _('image') - _embedding_tag = tags.img - - -class EHTMLView(AbstractEmbeddedView): - """html embedded view""" - __regid__ = 'ehtml' - __select__ = has_mimetype('text/html') - - title = _('embedded html') - _embedding_tag = tags.iframe diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/igeocodable.py --- a/web/views/igeocodable.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Specific views for entities implementing IGeocodable""" - -try: - from cubes.geocoding.views import (IGeocodableAdapter, - GeocodingJsonView, - GoogleMapBubbleView, - GoogleMapsView, - GoogeMapsLegend) - - from logilab.common.deprecation import class_moved - - msg = '[3.17] cubicweb.web.views.igeocodable moved to cubes.geocoding.views' - IGeocodableAdapter = class_moved(IGeocodableAdapter, message=msg) - GeocodingJsonView = class_moved(GeocodingJsonView, message=msg) - GoogleMapBubbleView = class_moved(GoogleMapBubbleView, message=msg) - GoogleMapsView = class_moved(GoogleMapsView, message=msg) - GoogeMapsLegend = class_moved(GoogeMapsLegend, message=msg) -except ImportError: - from cubicweb.web import LOGGER - LOGGER.warning('[3.17] igeocoding extracted to cube geocoding that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/isioc.py --- a/web/views/isioc.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Specific views for SIOC (Semantically-Interlinked Online Communities) - -http://sioc-project.org -""" - -from logilab.common.deprecation import class_moved - -try: - from cubes.sioc.views import * - - ISIOCItemAdapter = class_moved(ISIOCItemAdapter, message='[3.17] ISIOCItemAdapter moved to cubes.isioc.views') - ISIOCContainerAdapter = class_moved(ISIOCContainerAdapter, message='[3.17] ISIOCContainerAdapter moved to cubes.isioc.views') - SIOCView = class_moved(SIOCView, message='[3.17] SIOCView moved to cubes.is.view') - SIOCContainerView = class_moved(SIOCContainerView, message='[3.17] SIOCContainerView moved to cubes.is.view') - SIOCItemView = class_moved(SIOCItemView, message='[3.17] SIOCItemView moved to cubes.is.view') -except ImportError: - from cubicweb.web import LOGGER - LOGGER.warning('[3.17] isioc extracted to cube sioc that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/json.py --- a/web/views/json.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,150 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""json export views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from cubicweb.uilib import rest_traceback - -from cubicweb.utils import json_dumps -from cubicweb.predicates import ExpectedValuePredicate, any_rset, empty_rset -from cubicweb.view import EntityView, AnyRsetView -from cubicweb.web.application import anonymized_request -from cubicweb.web.views import basecontrollers, management - - -class JsonpController(basecontrollers.ViewController): - """The jsonp controller is the same as a ViewController but : - - - anonymize request (avoid CSRF attacks) - - if ``vid`` parameter is passed, make sure it's sensible (i.e. either - "jsonexport" or "ejsonexport") - - if ``callback`` request parameter is passed, it's used as json padding - - - Response's content-type will either be ``application/javascript`` or - ``application/json`` depending on ``callback`` parameter presence or not. 
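    A rough standalone sketch of that padding (the helper name and sample
    values are illustrative assumptions; only the
    ``callback + b'(' + data + b')'`` wrapping and the content-type switch
    mirror ``_get_json_data``):

    .. sourcecode:: python

        import json

        def jsonp_wrap(rows, callback=None):
            data = json.dumps(rows).encode('ascii')
            if callback is None:
                return data  # served as application/json
            # served as application/javascript when a callback is given
            return callback.encode('ascii') + b'(' + data + b')'

        assert jsonp_wrap([[42, "admin"]], "onData") == b'onData([[42, "admin"]])'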
- """ - __regid__ = 'jsonp' - - def publish(self, rset=None): - if 'vid' in self._cw.form: - vid = self._cw.form['vid'] - if vid not in ('jsonexport', 'ejsonexport'): - self.warning("vid %s can't be used with jsonp controller, " - "falling back to jsonexport", vid) - self._cw.form['vid'] = 'jsonexport' - else: # if no vid is specified, use jsonexport - self._cw.form['vid'] = 'jsonexport' - if self._cw.vreg.config['anonymize-jsonp-queries']: - with anonymized_request(self._cw): - return self._get_json_data(rset) - else: - return self._get_json_data(rset) - - def _get_json_data(self, rset): - json_data = super(JsonpController, self).publish(rset) - if 'callback' in self._cw.form: # jsonp - json_padding = self._cw.form['callback'].encode('ascii') - # use ``application/javascript`` if ``callback`` parameter is - # provided, keep ``application/json`` otherwise - self._cw.set_content_type('application/javascript') - json_data = json_padding + b'(' + json_data + b')' - return json_data - - -class JsonMixIn(object): - """mixin class for json views - - Handles the following optional request parameters: - - - ``_indent`` : must be an integer. If found, it is used to pretty print - json output - """ - templatable = False - content_type = 'application/json' - binary = True - - def wdata(self, data): - if '_indent' in self._cw.form: - indent = int(self._cw.form['_indent']) - else: - indent = None - # python's json.dumps escapes non-ascii characters - self.w(json_dumps(data, indent=indent).encode('ascii')) - - -class JsonRsetView(JsonMixIn, AnyRsetView): - """dumps raw result set in JSON format""" - __regid__ = 'jsonexport' - __select__ = any_rset() # means rset might be empty or have any shape - title = _('json-export-view') - - def call(self): - # XXX mimic w3c recommandations to serialize SPARQL results in json? - # http://www.w3.org/TR/rdf-sparql-json-res/ - self.wdata(self.cw_rset.rows) - - -class JsonEntityView(JsonMixIn, EntityView): - """dumps rset entities in JSON - - The following additional metadata is added to each row : - - - ``cw_etype`` : entity type - - ``cw_source`` : source url - """ - __regid__ = 'ejsonexport' - __select__ = EntityView.__select__ | empty_rset() - title = _('json-entities-export-view') - - def call(self): - entities = [] - for entity in self.cw_rset.entities(): - serializer = entity.cw_adapt_to('ISerializable') - entities.append(serializer.serialize()) - self.wdata(entities) - - -class _requested_vid(ExpectedValuePredicate): - """predicate that checks vid parameter value - - It differs from ``match_view`` in that it doesn't expect a ``view`` - parameter to be given to ``select`` but will rather check - ``req.form['vid']`` to match expected vid. - """ - def __call__(self, cls, req, rset=None, **kwargs): - return req.form.get('vid') in self.expected - - -class JsonErrorView(JsonMixIn, management.ErrorView): - """custom error view selected when client asks for a json view - - The returned json object will contain err / traceback informations. - """ - __select__ = (management.ErrorView.__select__ & - _requested_vid('jsonexport', 'ejsonexport')) - - def call(self): - errmsg, exclass, excinfo = self._excinfo() - self.wdata({ - 'errmsg': errmsg, - 'exclass': exclass, - 'traceback': rest_traceback(excinfo, errmsg), - }) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/magicsearch.py --- a/web/views/magicsearch.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,660 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""a query processor to handle quick search shortcuts for cubicweb -""" - -__docformat__ = "restructuredtext en" - -import re -from logging import getLogger - -from six import text_type - -from yams.interfaces import IVocabularyConstraint - -from rql import RQLSyntaxError, BadRQLQuery, parse -from rql.utils import rqlvar_maker -from rql.nodes import Relation - -from cubicweb import Unauthorized -from cubicweb.view import Component -from cubicweb.web.views.ajaxcontroller import ajaxfunc - -LOGGER = getLogger('cubicweb.magicsearch') - -def _get_approriate_translation(translations_found, eschema): - """return the first (should be the only one) possible translation according - to the given entity type - """ - # get the list of all attributes / relations for this kind of entity - existing_relations = set(eschema.subject_relations()) - consistent_translations = translations_found & existing_relations - if len(consistent_translations) == 0: - return None - return consistent_translations.pop() - - -def translate_rql_tree(rqlst, translations, schema): - """Try to translate each relation in the RQL syntax tree - - :type rqlst: `rql.stmts.Statement` - :param rqlst: the RQL syntax tree - - :type translations: dict - :param translations: the reverted l10n dict - - :type schema: `cubicweb.schema.Schema` - :param schema: the instance's schema - """ - # var_types is used as a map : var_name / var_type - vartypes = {} - # ambiguous_nodes is used as a map : relation_node / (var_name, available_translations) - ambiguous_nodes = {} - # For each relation node, check if it's a localized relation name - # If it's a localized name, then use the original relation name, else - # keep the existing relation name - for relation in rqlst.get_nodes(Relation): - rtype = relation.r_type - lhs, rhs = relation.get_variable_parts() - if rtype == 'is': - try: - etype = translations[rhs.value] - rhs.value = etype - except KeyError: - # If no translation found, leave the entity type as is - etype = rhs.value - # Memorize variable's type - vartypes[lhs.name] = etype - else: - try: - translation_set = translations[rtype] - except KeyError: - pass # If no translation found, leave the relation type as is - else: - # Only one possible translation, no ambiguity - if len(translation_set) == 1: - relation.r_type = next(iter(translations[rtype])) - # More than 1 possible translation => resolve it later - else: - ambiguous_nodes[relation] = (lhs.name, translation_set) - if ambiguous_nodes: - resolve_ambiguities(vartypes, ambiguous_nodes, schema) - - -def resolve_ambiguities(var_types, ambiguous_nodes, schema): - """Tries to resolve remaining ambiguities for translation - /!\ An ambiguity is when two different string can be localized with - the same string - A simple example: - - 'name' in a company context will be 
localized as 'nom' in French - - but ... 'surname' will also be localized as 'nom' - - :type var_types: dict - :param var_types: a map : var_name / var_type - - :type ambiguous_nodes: dict - :param ambiguous_nodes: a map : relation_node / (var_name, available_translations) - - :type schema: `cubicweb.schema.Schema` - :param schema: the instance's schema - """ - # Now, try to resolve ambiguous translations - for relation, (var_name, translations_found) in ambiguous_nodes.items(): - try: - vartype = var_types[var_name] - except KeyError: - continue - # Get schema for this entity type - eschema = schema.eschema(vartype) - rtype = _get_approriate_translation(translations_found, eschema) - if rtype is None: - continue - relation.r_type = rtype - - - -QUOTED_SRE = re.compile(r'(.*?)(["\'])(.+?)\2') - -TRANSLATION_MAPS = {} -def trmap(config, schema, lang): - try: - return TRANSLATION_MAPS[lang] - except KeyError: - assert lang in config.translations, '%s %s' % (lang, config.translations) - tr, ctxtr = config.translations[lang] - langmap = {} - for etype in schema.entities(): - etype = str(etype) - langmap[tr(etype).capitalize()] = etype - langmap[etype.capitalize()] = etype - for rtype in schema.relations(): - rtype = str(rtype) - langmap.setdefault(tr(rtype).lower(), set()).add(rtype) - langmap.setdefault(rtype, set()).add(rtype) - TRANSLATION_MAPS[lang] = langmap - return langmap - - -class BaseQueryProcessor(Component): - __abstract__ = True - __regid__ = 'magicsearch_processor' - # set something if you want explicit component search facility for the - # component - name = None - - def process_query(self, uquery): - args = self.preprocess_query(uquery) - try: - return self._cw.execute(*args) - finally: - # rollback necessary to avoid leaving the connection in a bad state - self._cw.cnx.rollback() - - def preprocess_query(self, uquery): - raise NotImplementedError() - - - - -class DoNotPreprocess(BaseQueryProcessor): - """this one returns the raw query and should be placed in first position - of the chain - """ - name = 'rql' - priority = 0 - def preprocess_query(self, uquery): - return uquery, - - -class QueryTranslator(BaseQueryProcessor): - """ parses through rql and translates into schema language entity names - and attributes - """ - priority = 2 - def preprocess_query(self, uquery): - rqlst = parse(uquery, print_errors=False) - schema = self._cw.vreg.schema - # rql syntax tree will be modified in place if necessary - translate_rql_tree(rqlst, trmap(self._cw.vreg.config, schema, self._cw.lang), - schema) - return rqlst.as_string(), - - -class QSPreProcessor(BaseQueryProcessor): - """Quick search preprocessor - - preprocessing query in shortcut form to their RQL form - """ - priority = 4 - - def preprocess_query(self, uquery): - """try to get rql from a unicode query string""" - args = None - try: - # Process as if there was a quoted part - args = self._quoted_words_query(uquery) - ## No quoted part - except BadRQLQuery: - words = uquery.split() - if len(words) == 1: - args = self._one_word_query(*words) - elif len(words) == 2: - args = self._two_words_query(*words) - elif len(words) == 3: - args = self._three_words_query(*words) - else: - raise - return args - - def _get_entity_type(self, word): - """check if the given word is matching an entity type, return it if - it's the case or raise BadRQLQuery if not - """ - etype = word.capitalize() - try: - return trmap(self._cw.vreg.config, self._cw.vreg.schema, self._cw.lang)[etype] - except KeyError: - raise BadRQLQuery('%s is not a valid 
entity name' % etype) - - def _get_attribute_name(self, word, eschema): - """check if the given word is matching an attribute of the given entity type, - return it normalized if found or return it untransformed else - """ - """Returns the attributes's name as stored in the DB""" - # Need to convert from unicode to string (could be whatever) - rtype = word.lower() - # Find the entity name as stored in the DB - translations = trmap(self._cw.vreg.config, self._cw.vreg.schema, self._cw.lang) - try: - translations = translations[rtype] - except KeyError: - raise BadRQLQuery('%s is not a valid attribute for %s entity type' - % (word, eschema)) - rtype = _get_approriate_translation(translations, eschema) - if rtype is None: - raise BadRQLQuery('%s is not a valid attribute for %s entity type' - % (word, eschema)) - return rtype - - def _one_word_query(self, word): - """Specific process for one word query (case (1) of preprocess_rql) - """ - # if this is an integer, then directly go to eid - try: - eid = int(word) - return 'Any X WHERE X eid %(x)s', {'x': eid}, 'x' - except ValueError: - etype = self._get_entity_type(word) - return '%s %s' % (etype, etype[0]), - - def _complete_rql(self, searchstr, etype, rtype=None, var=None, searchattr=None): - searchop = '' - if '%' in searchstr: - if rtype: - possible_etypes = self._cw.vreg.schema.rschema(rtype).objects(etype) - else: - possible_etypes = [self._cw.vreg.schema.eschema(etype)] - if searchattr or len(possible_etypes) == 1: - searchattr = searchattr or possible_etypes[0].main_attribute() - searchop = 'LIKE ' - searchattr = searchattr or 'has_text' - if var is None: - var = etype[0] - return '%s %s %s%%(text)s' % (var, searchattr, searchop) - - def _two_words_query(self, word1, word2): - """Specific process for two words query (case (2) of preprocess_rql) - """ - etype = self._get_entity_type(word1) - # this is a valid RQL query : ("Person X", or "Person TMP1") - if len(word2) == 1 and word2.isupper(): - return '%s %s' % (etype, word2), - # else, suppose it's a shortcut like : Person Smith - restriction = self._complete_rql(word2, etype) - if ' has_text ' in restriction: - rql = '%s %s ORDERBY FTIRANK(%s) DESC WHERE %s' % ( - etype, etype[0], etype[0], restriction) - else: - rql = '%s %s WHERE %s' % ( - etype, etype[0], restriction) - return rql, {'text': word2} - - def _three_words_query(self, word1, word2, word3): - """Specific process for three words query (case (3) of preprocess_rql) - """ - etype = self._get_entity_type(word1) - eschema = self._cw.vreg.schema.eschema(etype) - rtype = self._get_attribute_name(word2, eschema) - # expand shortcut if rtype is a non final relation - if not self._cw.vreg.schema.rschema(rtype).final: - return self._expand_shortcut(etype, rtype, word3) - if '%' in word3: - searchop = 'LIKE ' - else: - searchop = '' - rql = '%s %s WHERE %s' % (etype, etype[0], - self._complete_rql(word3, etype, searchattr=rtype)) - return rql, {'text': word3} - - def _expand_shortcut(self, etype, rtype, searchstr): - """Expands shortcut queries on a non final relation to use has_text or - the main attribute (according to possible entity type) if '%' is used in the - search word - - Transforms : 'person worksat IBM' into - 'Personne P WHERE P worksAt C, C has_text "IBM"' - """ - # check out all possilbe entity types for the relation represented - # by 'rtype' - mainvar = etype[0] - searchvar = mainvar + '1' - restriction = self._complete_rql(searchstr, etype, rtype=rtype, - var=searchvar) - if ' has_text ' in restriction: - rql = ('%s %s 
ORDERBY FTIRANK(%s) DESC ' - 'WHERE %s %s %s, %s' % (etype, mainvar, searchvar, - mainvar, rtype, searchvar, # P worksAt C - restriction)) - else: - rql = ('%s %s WHERE %s %s %s, %s' % (etype, mainvar, - mainvar, rtype, searchvar, # P worksAt C - restriction)) - return rql, {'text': searchstr} - - - def _quoted_words_query(self, ori_rql): - """Specific process when there's a "quoted" part - """ - m = QUOTED_SRE.match(ori_rql) - # if there's no quoted part, then no special pre-processing to do - if m is None: - raise BadRQLQuery("unable to handle request %r" % ori_rql) - left_words = m.group(1).split() - quoted_part = m.group(3) - # Case (1) : Company "My own company" - if len(left_words) == 1: - try: - word1 = left_words[0] - return self._two_words_query(word1, quoted_part) - except BadRQLQuery as error: - raise BadRQLQuery("unable to handle request %r" % ori_rql) - # Case (2) : Company name "My own company"; - elif len(left_words) == 2: - word1, word2 = left_words - return self._three_words_query(word1, word2, quoted_part) - # return ori_rql - raise BadRQLQuery("unable to handle request %r" % ori_rql) - - - -class FullTextTranslator(BaseQueryProcessor): - priority = 10 - name = 'text' - - def preprocess_query(self, uquery): - """suppose it's a plain text query""" - return 'Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s', {'text': uquery} - - - -class MagicSearchComponent(Component): - __regid__ = 'magicsearch' - def __init__(self, req, rset=None): - super(MagicSearchComponent, self).__init__(req, rset=rset) - processors = [] - self.by_name = {} - for processorcls in self._cw.vreg['components']['magicsearch_processor']: - # instantiation needed - processor = processorcls(self._cw) - processors.append(processor) - if processor.name is not None: - assert not processor.name in self.by_name - self.by_name[processor.name.lower()] = processor - self.processors = sorted(processors, key=lambda x: x.priority) - - def process_query(self, uquery): - assert isinstance(uquery, text_type) - try: - procname, query = uquery.split(':', 1) - proc = self.by_name[procname.strip().lower()] - uquery = query.strip() - except Exception: - # use processor chain - unauthorized = None - for proc in self.processors: - try: - return proc.process_query(uquery) - # FIXME : we don't want to catch any exception type here ! - except (RQLSyntaxError, BadRQLQuery): - pass - except Unauthorized as ex: - unauthorized = ex - continue - except Exception as ex: - LOGGER.debug('%s: %s', ex.__class__.__name__, ex) - continue - if unauthorized: - raise unauthorized - else: - # explicitly specified processor: don't try to catch the exception - return proc.process_query(uquery) - raise BadRQLQuery(self._cw._('sorry, the server is unable to handle this query')) - - - -## RQL suggestions builder #################################################### -class RQLSuggestionsBuilder(Component): - """main entry point is `build_suggestions()` which takes - an incomplete RQL query and returns a list of suggestions to complete - the query. - - This component is enabled by default and is used to provide autocompletion - in the RQL search bar. If you don't want this feature in your application, - just unregister it or make it unselectable. - - .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.build_suggestions - .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.etypes_suggestion_set - .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.possible_etypes - .. 
automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.possible_relations - .. automethod:: cubicweb.web.views.magicsearch.RQLSuggestionsBuilder.vocabulary - """ - __regid__ = 'rql.suggestions' - - #: maximum number of results to fetch when suggesting attribute values - attr_value_limit = 20 - - def build_suggestions(self, user_rql): - """return a list of suggestions to complete `user_rql` - - :param user_rql: an incomplete RQL query - """ - req = self._cw - try: - if 'WHERE' not in user_rql: # don't try to complete if there's no restriction - return [] - variables, restrictions = [part.strip() for part in user_rql.split('WHERE', 1)] - if ',' in restrictions: - restrictions, incomplete_part = restrictions.rsplit(',', 1) - user_rql = '%s WHERE %s' % (variables, restrictions) - else: - restrictions, incomplete_part = '', restrictions - user_rql = variables - select = parse(user_rql, print_errors=False).children[0] - req.vreg.rqlhelper.annotate(select) - req.vreg.solutions(req, select, {}) - if restrictions: - return ['%s, %s' % (user_rql, suggestion) - for suggestion in self.rql_build_suggestions(select, incomplete_part)] - else: - return ['%s WHERE %s' % (user_rql, suggestion) - for suggestion in self.rql_build_suggestions(select, incomplete_part)] - except Exception as exc: # we never want to crash - self.debug('failed to build suggestions: %s', exc) - return [] - - ## actual completion entry points ######################################### - def rql_build_suggestions(self, select, incomplete_part): - """ - :param select: the annotated select node (rql syntax tree) - :param incomplete_part: the part of the rql query that needs - to be completed, (e.g. ``X is Pr``, ``X re``) - """ - chunks = incomplete_part.split(None, 2) - if not chunks: # nothing to complete - return [] - if len(chunks) == 1: # `incomplete` looks like "MYVAR" - return self._complete_rqlvar(select, *chunks) - elif len(chunks) == 2: # `incomplete` looks like "MYVAR some_rel" - return self._complete_rqlvar_and_rtype(select, *chunks) - elif len(chunks) == 3: # `incomplete` looks like "MYVAR some_rel something" - return self._complete_relation_object(select, *chunks) - else: # would be anything else, hard to decide what to do here - return [] - - # _complete_* methods are considered private, at least while the API - # isn't stabilized. - def _complete_rqlvar(self, select, rql_var): - """return suggestions for "variable only" incomplete_part - - as in : - - - Any X WHERE X - - Any X WHERE X is Project, Y - - etc. - """ - return ['%s %s %s' % (rql_var, rtype, dest_var) - for rtype, dest_var in self.possible_relations(select, rql_var)] - - def _complete_rqlvar_and_rtype(self, select, rql_var, user_rtype): - """return suggestions for "variable + rtype" incomplete_part - - as in : - - - Any X WHERE X is - - Any X WHERE X is Person, X firstn - - etc. - """ - # special case `user_type` == 'is', return every possible type. - if user_rtype == 'is': - return self._complete_is_relation(select, rql_var) - else: - return ['%s %s %s' % (rql_var, rtype, dest_var) - for rtype, dest_var in self.possible_relations(select, rql_var) - if rtype.startswith(user_rtype)] - - def _complete_relation_object(self, select, rql_var, user_rtype, user_value): - """return suggestions for "variable + rtype + some_incomplete_value" - - as in : - - - Any X WHERE X is Per - - Any X WHERE X is Person, X firstname " - - Any X WHERE X is Person, X firstname "Pa - - etc. - """ - # special case `user_type` == 'is', return every possible type. 
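# e.g. an incomplete 'X is Per' is completed to 'X is Person' (and to any
# other available entity type whose name starts with 'Per')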
- if user_rtype == 'is': - return self._complete_is_relation(select, rql_var, user_value) - elif user_value: - if user_value[0] in ('"', "'"): - # if finished string, don't suggest anything - if len(user_value) > 1 and user_value[-1] == user_value[0]: - return [] - user_value = user_value[1:] - return ['%s %s "%s"' % (rql_var, user_rtype, value) - for value in self.vocabulary(select, rql_var, - user_rtype, user_value)] - return [] - - def _complete_is_relation(self, select, rql_var, prefix=''): - """return every possible types for rql_var - - :param prefix: if specified, will only return entity types starting - with the specified value. - """ - return ['%s is %s' % (rql_var, etype) - for etype in self.possible_etypes(select, rql_var, prefix)] - - def etypes_suggestion_set(self): - """returns the list of possible entity types to suggest - - The default is to return any non-final entity type available - in the schema. - - Can be overridden for instance if an application decides - to restrict this list to a meaningful set of business etypes. - """ - schema = self._cw.vreg.schema - return set(eschema.type for eschema in schema.entities() if not eschema.final) - - def possible_etypes(self, select, rql_var, prefix=''): - """return all possible etypes for `rql_var` - - The returned list will always be a subset of meth:`etypes_suggestion_set` - - :param select: the annotated select node (rql syntax tree) - :param rql_var: the variable name for which we want to know possible types - :param prefix: if specified, will only return etypes starting with it - """ - available_etypes = self.etypes_suggestion_set() - possible_etypes = set() - for sol in select.solutions: - if rql_var in sol and sol[rql_var] in available_etypes: - possible_etypes.add(sol[rql_var]) - if not possible_etypes: - # `Any X WHERE X is Person, Y is` - # -> won't have a solution, need to give all etypes - possible_etypes = available_etypes - return sorted(etype for etype in possible_etypes if etype.startswith(prefix)) - - def possible_relations(self, select, rql_var, include_meta=False): - """returns a list of couple (rtype, dest_var) for each possible - relations with `rql_var` as subject. - - ``dest_var`` will be picked among availabel variables if types match, - otherwise a new one will be created. - """ - schema = self._cw.vreg.schema - relations = set() - untyped_dest_var = next(rqlvar_maker(defined=select.defined_vars)) - # for each solution - # 1. find each possible relation - # 2. for each relation: - # 2.1. if the relation is meta, skip it - # 2.2. for each possible destination type, pick up possible - # variables for this type or use a new one - for sol in select.solutions: - etype = sol[rql_var] - sol_by_types = {} - for varname, var_etype in sol.items(): - # don't push subject var to avoid "X relation X" suggestion - if varname != rql_var: - sol_by_types.setdefault(var_etype, []).append(varname) - for rschema in schema[etype].subject_relations(): - if include_meta or not rschema.meta: - for dest in rschema.objects(etype): - for varname in sol_by_types.get(dest.type, (untyped_dest_var,)): - suggestion = (rschema.type, varname) - if suggestion not in relations: - relations.add(suggestion) - return sorted(relations) - - def vocabulary(self, select, rql_var, user_rtype, rtype_incomplete_value): - """return acceptable vocabulary for `rql_var` + `user_rtype` in `select` - - Vocabulary is either found from schema (Yams) definition or - directly from database. 
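        For a final relation without a vocabulary constraint, the database
        lookup built below amounts to something like ``DISTINCT Any V LIMIT 20
        WHERE X is Person, X firstname V, X firstname LIKE "Pa%"`` (the limit
        comes from :attr:`attr_value_limit`, the LIKE clause from the already
        typed prefix).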
- """ - schema = self._cw.vreg.schema - vocab = [] - for sol in select.solutions: - # for each solution : - # - If a vocabulary constraint exists on `rql_var+user_rtype`, use it - # to define possible values - # - Otherwise, query the database to fetch available values from - # database (limiting results to `self.attr_value_limit`) - try: - eschema = schema.eschema(sol[rql_var]) - rdef = eschema.rdef(user_rtype) - except KeyError: # unknown relation - continue - cstr = rdef.constraint_by_interface(IVocabularyConstraint) - if cstr is not None: - # a vocabulary is found, use it - vocab += [value for value in cstr.vocabulary() - if value.startswith(rtype_incomplete_value)] - elif rdef.final: - # no vocab, query database to find possible value - vocab_rql = 'DISTINCT Any V LIMIT %s WHERE X is %s, X %s V' % ( - self.attr_value_limit, eschema.type, user_rtype) - vocab_kwargs = {} - if rtype_incomplete_value: - vocab_rql += ', X %s LIKE %%(value)s' % user_rtype - vocab_kwargs['value'] = u'%s%%' % rtype_incomplete_value - vocab += [value for value, in - self._cw.execute(vocab_rql, vocab_kwargs)] - return sorted(set(vocab)) - - - -@ajaxfunc(output_type='json') -def rql_suggest(self): - rql_builder = self._cw.vreg['components'].select_or_none('rql.suggestions', self._cw) - if rql_builder: - return rql_builder.build_suggestions(self._cw.form['term']) - return [] diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/management.py --- a/web/views/management.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,200 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""security management and error screens""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - - -from logilab.mtconverter import xml_escape -from logilab.common.registry import yes - -from cubicweb.predicates import none_rset, match_user_groups, authenticated_user -from cubicweb.view import AnyRsetView, StartupView, EntityView, View -from cubicweb.uilib import html_traceback, rest_traceback, exc_message -from cubicweb.web import formwidgets as wdgs -from cubicweb.web.formfields import guess_field -from cubicweb.web.views.schema import SecurityViewMixIn - -from yams.buildobjs import EntityType - -SUBMIT_MSGID = _('Submit bug report') -MAIL_SUBMIT_MSGID = _('Submit bug report by mail') - -class SecurityManagementView(SecurityViewMixIn, EntityView): - """display security information for a given entity""" - __regid__ = 'security' - __select__ = EntityView.__select__ & authenticated_user() - - title = _('security') - - def call(self): - self.w(u'
              %s
              ' % self._cw._('validating...')) - super(SecurityManagementView, self).call() - - def entity_call(self, entity): - self._cw.add_js('cubicweb.edition.js') - self._cw.add_css('cubicweb.acl.css') - w = self.w - _ = self._cw._ - w(u'

              %s %s

              ' - % (entity.dc_type().capitalize(), - xml_escape(entity.absolute_url()), - xml_escape(entity.dc_title()))) - # first show permissions defined by the schema - self.w('

              %s

              ' % _('Schema\'s permissions definitions')) - self.permissions_table(entity.e_schema) - self.w('

              %s

              ' % _('Manage security')) - # ownership information - if self._cw.vreg.schema.rschema('owned_by').has_perm(self._cw, 'add', - fromeid=entity.eid): - self.owned_by_edit_form(entity) - else: - self.owned_by_information(entity) - - def owned_by_edit_form(self, entity): - self.w('

              %s

              ' % self._cw._('Ownership')) - msg = self._cw._('ownerships have been changed') - form = self._cw.vreg['forms'].select('base', self._cw, entity=entity, - form_renderer_id='onerowtable', submitmsg=msg, - form_buttons=[wdgs.SubmitButton()], - domid='ownership%s' % entity.eid, - __redirectvid='security', - __redirectpath=entity.rest_path()) - field = guess_field(entity.e_schema, - self._cw.vreg.schema['owned_by'], - req=self._cw) - form.append_field(field) - form.render(w=self.w, display_progress_div=False) - - def owned_by_information(self, entity): - ownersrset = entity.related('owned_by') - if ownersrset: - self.w('

              %s

              ' % self._cw._('Ownership')) - self.w(u'
              ') - self.w(self._cw._('this entity is currently owned by') + ' ') - self.wview('csv', entity.related('owned_by'), 'null') - self.w(u'
              ') - # else we don't know if this is because entity has no owner or becayse - # user as no access to owner users entities - - -class ErrorView(AnyRsetView): - """default view when no result has been found""" - __select__ = yes() - __regid__ = 'error' - - def page_title(self): - """returns a title according to the result set - used for the - title in the HTML header - """ - return self._cw._('an error occurred') - - def _excinfo(self): - req = self._cw - ex = req.data.get('ex') - excinfo = req.data.get('excinfo') - if 'errmsg' in req.data: - errmsg = req.data['errmsg'] - exclass = None - else: - errmsg = exc_message(ex, req.encoding) - exclass = ex.__class__.__name__ - return errmsg, exclass, excinfo - - def call(self): - req = self._cw.reset_headers() - w = self.w - title = self._cw._('an error occurred') - w(u'

              %s

              ' % title) - ex, exclass, excinfo = self._excinfo() - if excinfo is not None and self._cw.vreg.config['print-traceback']: - if exclass is None: - w(u'
              %s
              ' - % xml_escape(ex).replace("\n","
              ")) - else: - w(u'
              %s: %s
              ' - % (exclass, xml_escape(ex).replace("\n","
              "))) - w(u'
              ') - w(u'
              %s
              ' % html_traceback(excinfo, ex, '')) - else: - w(u'
              %s
              ' % (xml_escape(ex).replace("\n","
              "))) - # if excinfo is not None, it's probably not a bug - if excinfo is None: - return - vcconf = self._cw.cnx.repo.get_versions() - w(u"
              ") - eversion = vcconf.get('cubicweb', self._cw._('no version information')) - # NOTE: tuple wrapping needed since eversion is itself a tuple - w(u"CubicWeb version: %s
              \n" % (eversion,)) - cversions = [] - for cube in self._cw.vreg.config.cubes(): - cubeversion = vcconf.get(cube, self._cw._('no version information')) - w(u"Cube %s version: %s
              \n" % (cube, cubeversion)) - cversions.append((cube, cubeversion)) - w(u"
              ") - # creates a bug submission link if submit-mail is set - if self._cw.vreg.config['submit-mail']: - form = self._cw.vreg['forms'].select('base', self._cw, rset=None, - mainform=False) - binfo = text_error_description(ex, excinfo, req, eversion, cversions) - form.add_hidden('description', binfo, - # we must use a text area to keep line breaks - widget=wdgs.TextArea({'class': 'hidden'})) - # add a signature so one can't send arbitrary text - form.add_hidden('__signature', req.vreg.config.sign_text(binfo)) - form.add_hidden('__bugreporting', '1') - form.form_buttons = [wdgs.SubmitButton(MAIL_SUBMIT_MSGID)] - form.action = req.build_url('reportbug') - form.render(w=w) - - -def text_error_description(ex, excinfo, req, eversion, cubes): - binfo = rest_traceback(excinfo, xml_escape(ex)) - binfo += u'\n\n:URL: %s\n' % req.url() - if not '__bugreporting' in req.form: - binfo += u'\n:form params:\n' - binfo += u'\n'.join(u' * %s = %s' % (k, v) for k, v in req.form.items()) - binfo += u'\n\n:CubicWeb version: %s\n' % (eversion,) - for pkg, pkgversion in cubes: - binfo += u":Cube %s version: %s\n" % (pkg, pkgversion) - binfo += '\n' - return binfo - - -class CwStats(View): - """A textual stats output for monitoring tools such as munin """ - - __regid__ = 'processinfo' - content_type = 'text/plain' - templatable = False - __select__ = none_rset() & match_user_groups('users', 'managers') - - def call(self): - stats = self._cw.call_service('repo_stats') - stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks']) - stats['threads'] = ', '.join(sorted(stats['threads'])) - for k in stats: - if k in ('extid_cache_size', 'type_source_cache_size'): - continue - if k.endswith('_cache_size'): - stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize']) - results = [] - for element in stats: - results.append(u'%s %s' % (element, stats[element])) - self.w(u'\n'.join(results)) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/massmailing.py --- a/web/views/massmailing.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,40 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Mass mailing handling: send mail to entities adaptable to IEmailable""" - -try: - from cubes.massmailing.views import (SendEmailAction, - recipient_vocabulary, - MassMailingForm, - MassMailingFormRenderer, - MassMailingFormView, - SendMailController) - - - from logilab.common.deprecation import class_moved, moved - - msg = '[3.17] cubicweb.web.views.massmailing moved to cubes.massmailing.views' - SendEmailAction = class_moved(SendEmailAction, message=msg) - recipient_vocabulary = moved('cubes.massmailing.views', 'recipient_vocabulary') - MassMailingForm = class_moved(MassMailingForm, message=msg) - MassMailingFormRenderer = class_moved(MassMailingFormRenderer, message=msg) - MassMailingFormView = class_moved(MassMailingFormView, message=msg) - SendMailController = class_moved(SendMailController, message=msg) -except ImportError: - from cubicweb.web import LOGGER - LOGGER.warning('[3.17] massmailing extracted to cube massmailing that was not found. try installing it.') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/navigation.py --- a/web/views/navigation.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,415 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""This module provides some generic components to navigate in the web -application. - -Pagination ----------- - -Several implementations for large result set pagination are provided: - -.. autoclass:: PageNavigation -.. autoclass:: PageNavigationSelect -.. autoclass:: SortedNavigation - -Pagination will appear when needed according to the `page-size` ui property. - -This module monkey-patch the :func:`paginate` function to the base :class:`View` -class, so that you can ask pagination explicitly on every result-set based views. - -.. autofunction:: paginate - - -Previous / next navigation --------------------------- - -An adapter and its related component for the somewhat usal "previous / next" -navigation are provided. - - .. autoclass:: IPrevNextAdapter - .. autoclass:: NextPrevNavigationComponent -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from datetime import datetime - -from six import text_type - -from rql.nodes import VariableRef, Constant - -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import deprecated - -from cubicweb.predicates import paginated_rset, sorted_rset, adaptable -from cubicweb.uilib import cut -from cubicweb.view import EntityAdapter -from cubicweb.web.component import EmptyComponent, EntityCtxComponent, NavigationComponent - - -class PageNavigation(NavigationComponent): - """The default pagination component: display link to pages where each pages - is identified by the item number of its first and last elements. 
- """ - def call(self): - """displays a resultset by page""" - params = dict(self._cw.form) - self.clean_params(params) - basepath = self._cw.relative_path(includeparams=False) - self.w(u'') - - def index_display(self, start, stop): - return u'%s - %s' % (start+1, stop+1) - - def iter_page_links(self, basepath, params): - rset = self.cw_rset - page_size = self.page_size - start = 0 - while start < rset.rowcount: - stop = min(start + page_size - 1, rset.rowcount - 1) - yield self.page_link(basepath, params, start, stop, - self.index_display(start, stop)) - start = stop + 1 - - -class PageNavigationSelect(PageNavigation): - """This pagination component displays a result-set by page as - :class:`PageNavigation` but in a ') - for option in self.iter_page_links(basepath, params): - w(option) - w(u'') - w(u'  %s' % self.next_link(basepath, params)) - w(u'
              ') - - -class SortedNavigation(NavigationComponent): - """This pagination component will be selected by default if there are less - than 4 pages and if the result set is sorted. - - Displayed links to navigate accross pages of a result set are done according - to the first variable on which the sort is done, and looks like: - - [ana - cro] | [cro - ghe] | ... | [tim - zou] - - You may want to override this component to customize display in some cases. - - .. automethod:: sort_on - .. automethod:: display_func - .. automethod:: format_link_content - .. automethod:: write_links - - Below an example from the tracker cube: - - .. sourcecode:: python - - class TicketsNavigation(navigation.SortedNavigation): - __select__ = (navigation.SortedNavigation.__select__ - & ~paginated_rset(4) & is_instance('Ticket')) - def sort_on(self): - col, attrname = super(TicketsNavigation, self).sort_on() - if col == 6: - # sort on state, we don't want that - return None, None - return col, attrname - - The idea is that in trackers'ticket tables, result set is first ordered on - ticket's state while this doesn't make any sense in the navigation. So we - override :meth:`sort_on` so that if we detect such sorting, we disable the - feature to go back to item number in the pagination. - - Also notice the `~paginated_rset(4)` in the selector so that if there are - more than 4 pages to display, :class:`PageNavigationSelect` will still be - selected. - """ - __select__ = paginated_rset() & sorted_rset() - - # number of considered chars to build page links - nb_chars = 5 - - def call(self): - # attrname = the name of attribute according to which the sort - # is done if any - col, attrname = self.sort_on() - index_display = self.display_func(self.cw_rset, col, attrname) - basepath = self._cw.relative_path(includeparams=False) - params = dict(self._cw.form) - self.clean_params(params) - blocklist = [] - start = 0 - total = self.cw_rset.rowcount - while start < total: - stop = min(start + self.page_size - 1, total - 1) - cell = self.format_link_content(index_display(start), index_display(stop)) - blocklist.append(self.page_link(basepath, params, start, stop, cell)) - start = stop + 1 - self.write_links(basepath, params, blocklist) - - def display_func(self, rset, col, attrname): - """Return a function that will be called with a row number as argument - and should return a string to use as link for it. - """ - if attrname is not None: - def index_display(row): - if not rset[row][col]: # outer join - return u'' - entity = rset.get_entity(row, col) - return entity.printable_value(attrname, format='text/plain') - elif col is None: # smart links disabled. - def index_display(row): - return text_type(row) - elif self._cw.vreg.schema.eschema(rset.description[0][col]).final: - def index_display(row): - return text_type(rset[row][col]) - else: - def index_display(row): - return rset.get_entity(row, col).view('text') - return index_display - - def sort_on(self): - """Return entity column number / attr name to use for nice display by - inspecting the rset'syntax tree. 
- """ - rschema = self._cw.vreg.schema.rschema - for sorterm in self.cw_rset.syntax_tree().children[0].orderby: - if isinstance(sorterm.term, Constant): - col = sorterm.term.value - 1 - return col, None - var = sorterm.term.get_nodes(VariableRef)[0].variable - col = None - for ref in var.references(): - rel = ref.relation() - if rel is None: - continue - attrname = rel.r_type - if attrname in ('is', 'has_text'): - continue - if not rschema(attrname).final: - col = var.selected_index() - attrname = None - if col is None: - # final relation or not selected non final relation - if var is rel.children[0]: - relvar = rel.children[1].children[0].get_nodes(VariableRef)[0] - else: - relvar = rel.children[0].variable - col = relvar.selected_index() - if col is not None: - break - else: - # no relation but maybe usable anyway if selected - col = var.selected_index() - attrname = None - if col is not None: - # if column type is date[time], set proper 'nb_chars' - if var.stinfo['possibletypes'] & frozenset(('TZDatetime', 'Datetime', - 'Date')): - self.nb_chars = len(self._cw.format_date(datetime.today())) - return col, attrname - # nothing usable found, use the first column - return 0, None - - def format_link_content(self, startstr, stopstr): - """Return text for a page link, where `startstr` and `stopstr` are the - text for the lower/upper boundaries of the page. - - By default text are stripped down to :attr:`nb_chars` characters. - """ - text = u'%s - %s' % (startstr.lower()[:self.nb_chars], - stopstr.lower()[:self.nb_chars]) - return xml_escape(text) - - def write_links(self, basepath, params, blocklist): - """Return HTML for the whole navigation: `blocklist` is a list of HTML - snippets for each page, `basepath` and `params` will be necessary to - build previous/next links. 
- """ - self.w(u'') - - -def do_paginate(view, rset=None, w=None, show_all_option=True, page_size=None): - """write pages index in w stream (default to view.w) and then limit the - result set (default to view.rset) to the currently displayed page if we're - not explicitly told to display everything (by setting __force_display in - req.form) - """ - req = view._cw - if rset is None: - rset = view.cw_rset - if w is None: - w = view.w - nav = req.vreg['components'].select_or_none( - 'navigation', req, rset=rset, page_size=page_size, view=view) - if nav: - if w is None: - w = view.w - if req.form.get('__force_display'): - # allow to come back to the paginated view - params = dict(req.form) - basepath = req.relative_path(includeparams=False) - del params['__force_display'] - url = nav.page_url(basepath, params) - w(u'\n' - % (xml_escape(url), req._('back to pagination (%s results)') - % nav.page_size)) - else: - # get boundaries before component rendering - start, stop = nav.page_boundaries() - nav.render(w=w) - params = dict(req.form) - nav.clean_params(params) - # make a link to see them all - if show_all_option: - basepath = req.relative_path(includeparams=False) - params['__force_display'] = 1 - params['__fromnavigation'] = 1 - url = nav.page_url(basepath, params) - w(u'\n' - % (xml_escape(url), req._('show %s results') % len(rset))) - rset.limit(offset=start, limit=stop-start, inplace=True) - - -def paginate(view, show_all_option=True, w=None, page_size=None, rset=None): - """paginate results if the view is paginable - """ - if view.paginable: - do_paginate(view, rset, w, show_all_option, page_size) - -# monkey patch base View class to add a .paginate([...]) -# method to be called to write pages index in the view and then limit the result -# set to the current page -from cubicweb.view import View -View.do_paginate = do_paginate -View.paginate = paginate -View.handle_pagination = False - - - -class IPrevNextAdapter(EntityAdapter): - """Interface for entities which can be linked to a previous and/or next - entity - - .. automethod:: next_entity - .. automethod:: previous_entity - """ - __needs_bw_compat__ = True - __regid__ = 'IPrevNext' - __abstract__ = True - - def next_entity(self): - """return the 'next' entity""" - raise NotImplementedError - - def previous_entity(self): - """return the 'previous' entity""" - raise NotImplementedError - - -class NextPrevNavigationComponent(EntityCtxComponent): - """Entities adaptable to the 'IPrevNext' should have this component - automatically displayed. You may want to override this component to have a - different look and feel. - """ - - __regid__ = 'prevnext' - # register msg not generated since no entity implements IPrevNext in cubicweb - # itself - help = _('ctxcomponents_prevnext_description') - __select__ = EntityCtxComponent.__select__ & adaptable('IPrevNext') - context = 'navbottom' - order = 10 - - @property - def prev_icon(self): - return '%s' % ( - xml_escape(self._cw.data_url('go_prev.png')), self._cw._('previous page')) - - @property - def next_icon(self): - return '%s' % ( - xml_escape(self._cw.data_url('go_next.png')), self._cw._('next page')) - - def init_rendering(self): - adapter = self.entity.cw_adapt_to('IPrevNext') - self.previous = adapter.previous_entity() - self.next = adapter.next_entity() - if not (self.previous or self.next): - raise EmptyComponent() - - def render_body(self, w): - w(u'
              ') - self.prevnext(w) - w(u'
              ') - w(u'
              ') - - def prevnext(self, w): - if self.previous: - self.prevnext_entity(w, self.previous, 'prev') - if self.next: - self.prevnext_entity(w, self.next, 'next') - - def prevnext_entity(self, w, entity, type): - textsize = self._cw.property_value('navigation.short-line-size') - content = xml_escape(cut(entity.dc_title(), textsize)) - if type == 'prev': - title = self._cw._('i18nprevnext_previous') - icon = self.prev_icon - cssclass = u'previousEntity left' - content = icon + '  ' + content - else: - title = self._cw._('i18nprevnext_next') - icon = self.next_icon - cssclass = u'nextEntity right' - content = content + '  ' + icon - self.prevnext_div(w, type, cssclass, entity.absolute_url(), - title, content) - - def prevnext_div(self, w, type, cssclass, url, title, content): - w(u'
              ' % cssclass) - w(u'%s' % (xml_escape(url), - xml_escape(title), - content)) - w(u'
              ') - self._cw.html_headers.add_raw('' % ( - type, xml_escape(url))) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/owl.py --- a/web/views/owl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,233 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""produces some Ontology Web Language schema and views - -""" -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves import range - -from logilab.mtconverter import TransformError, xml_escape - -from cubicweb.view import StartupView, EntityView -from cubicweb.predicates import none_rset, match_view -from cubicweb.web.action import Action -from cubicweb.web.views import schema - -OWL_CARD_MAP = {'1': '', - '?': '1', - '+': '1', - '*': '' - } - -OWL_TYPE_MAP = {'String': 'xsd:string', - 'Bytes': 'xsd:byte', - 'Password': 'xsd:byte', - - 'Boolean': 'xsd:boolean', - 'Int': 'xsd:int', - 'BigInt': 'xsd:int', - 'Float': 'xsd:float', - 'Decimal' : 'xsd:decimal', - - 'Date':'xsd:date', - 'Datetime': 'xsd:dateTime', - 'TZDatetime': 'xsd:dateTime', - 'Time': 'xsd:time', - 'TZTime': 'xsd:time', - 'Interval': 'xsd:duration' - } - -OWL_OPENING_ROOT = u''' - - -]> - - - - - %(appid)s Cubicweb OWL Ontology - - ''' - -OWL_CLOSING_ROOT = u'' - - -class OWLView(StartupView): - """This view export in owl format schema database. 
It is the TBOX""" - __regid__ = 'owl' - title = _('owl') - templatable = False - content_type = 'application/xml' # 'text/xml' - - def call(self, writeprefix=True): - skipmeta = int(self._cw.form.get('skipmeta', True)) - if writeprefix: - self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name}) - self.visit_schema(skiptypes=skipmeta and schema.SKIP_TYPES or ()) - if writeprefix: - self.w(OWL_CLOSING_ROOT) - - def should_display_rschema(self, eschema, rschema, role): - return not rschema in self.skiptypes and ( - rschema.may_have_permission('read', self._cw, eschema, role)) - - def visit_schema(self, skiptypes): - """get a layout for a whole schema""" - self.skiptypes = skiptypes - entities = sorted(eschema for eschema in self._cw.vreg.schema.entities() - if not eschema.final or eschema in skiptypes) - self.w(u'') - for eschema in entities: - self.visit_entityschema(eschema) - self.w(u'') - self.visit_property_schema(eschema) - self.w(u'') - self.visit_property_object_schema(eschema) - - def visit_entityschema(self, eschema): - """get a layout for an entity OWL schema""" - self.w(u''% eschema) - self.w(u'') - for rschema, targetschemas, role in eschema.relation_definitions(): - if not self.should_display_rschema(eschema, rschema, role): - continue - for oeschema in targetschemas: - card = rschema.role_rdef(eschema, oeschema, role).role_cardinality(role) - cardtag = OWL_CARD_MAP[card] - if cardtag: - self.w(u''' - - - %s - -''' % (rschema, cardtag)) - - self.w(u'') - for rschema, aschema in eschema.attribute_definitions(): - if not self.should_display_rschema(eschema, rschema, 'subject'): - continue - self.w(u''' - - - - -''' % rschema) - self.w(u'') - - def visit_property_schema(self, eschema): - """get a layout for property entity OWL schema""" - for rschema, targetschemas, role in eschema.relation_definitions(): - if not self.should_display_rschema(eschema, rschema, role): - continue - for oeschema in targetschemas: - self.w(u''' - - -''' % (rschema, eschema, oeschema.type)) - - def visit_property_object_schema(self, eschema): - for rschema, aschema in eschema.attribute_definitions(): - if not self.should_display_rschema(eschema, rschema, 'subject'): - continue - self.w(u''' - - -''' % (rschema, eschema, OWL_TYPE_MAP[aschema.type])) - - -class OWLABOXView(EntityView): - '''This view represents a part of the ABOX for a given entity.''' - __regid__ = 'owlabox' - title = _('owlabox') - templatable = False - content_type = 'application/xml' # 'text/xml' - - def call(self): - self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name}) - for i in range(self.cw_rset.rowcount): - self.cell_call(i, 0) - self.w(OWL_CLOSING_ROOT) - - def cell_call(self, row, col): - self.wview('owlaboxitem', self.cw_rset, row=row, col=col) - - -class OWLABOXItemView(EntityView): - '''This view represents a part of the ABOX for a given entity.''' - __regid__ = 'owlaboxitem' - templatable = False - content_type = 'application/xml' # 'text/xml' - - def cell_call(self, row, col): - entity = self.cw_rset.complete_entity(row, col) - eschema = entity.e_schema - self.w(u'<%s rdf:ID="%s">' % (eschema, entity.eid)) - self.w(u'') - for rschema, aschema in eschema.attribute_definitions(): - if rschema.meta: - continue - rdef = rschema.rdef(eschema, aschema) - if not rdef.may_have_permission('read', self._cw): - continue - aname = rschema.type - if aname == 'eid': - continue - try: - attr = entity.printable_value(aname, format='text/plain') - if attr: - self.w(u'<%s>%s' % (aname, xml_escape(attr), aname)) - except 
TransformError: - pass - self.w(u'') - for rschema, targetschemas, role in eschema.relation_definitions(): - if rschema.meta: - continue - for tschema in targetschemas: - rdef = rschema.role_rdef(eschema, tschema, role) - if rdef.may_have_permission('read', self._cw): - break - else: - # no read perms to any relation of this type. Skip. - continue - if role == 'object': - attr = 'reverse_%s' % rschema.type - else: - attr = rschema.type - for x in getattr(entity, attr): - self.w(u'<%s>%s %s' % (attr, x.__regid__, x.eid, attr)) - self.w(u''% eschema) - - -class DownloadOWLSchemaAction(Action): - __regid__ = 'download_as_owl' - __select__ = none_rset() & match_view('schema') - - category = 'mainactions' - title = _('download schema as owl') - - def url(self): - return self._cw.build_url('view', vid='owl') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/plots.py --- a/web/views/plots.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,228 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
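
(Illustrative note on the OWL views removed just above, before this plots.py hunk:
the TBOX export is a startup view and DownloadOWLSchemaAction merely links to it,
while the ABOX views work on a result set. Building such links from application
code might look like the sketch below, where ``req`` stands for a CubicWeb request,
e.g. ``self._cw`` in a view, and the RQL query is purely illustrative.)

.. sourcecode:: python

    tbox_url = req.build_url('view', vid='owl')              # whole schema (TBOX)
    abox_url = req.build_url('view', vid='owlabox',
                             rql='Any X WHERE X is CWUser')   # entity instances (ABOX)
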
-"""basic plot views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six import add_metaclass -from six.moves import range - -from logilab.common.date import datetime2ticks -from logilab.common.deprecation import class_deprecated -from logilab.common.registry import objectify_predicate -from logilab.mtconverter import xml_escape - -from cubicweb.utils import UStringIO, json_dumps -from cubicweb.predicates import multi_columns_rset -from cubicweb.web.views import baseviews - -@objectify_predicate -def all_columns_are_numbers(cls, req, rset=None, *args, **kwargs): - """accept result set with at least one line and two columns of result - all columns after second must be of numerical types""" - for etype in rset.description[0]: - if etype not in ('Int', 'BigInt', 'Float'): - return 0 - return 1 - -@objectify_predicate -def second_column_is_number(cls, req, rset=None, *args, **kwargs): - etype = rset.description[0][1] - if etype not in ('Int', 'BigInt', 'Float'): - return 0 - return 1 - -@objectify_predicate -def columns_are_date_then_numbers(cls, req, rset=None, *args, **kwargs): - etypes = rset.description[0] - if etypes[0] not in ('Date', 'Datetime', 'TZDatetime'): - return 0 - for etype in etypes[1:]: - if etype not in ('Int', 'BigInt', 'Float'): - return 0 - return 1 - - -def filterout_nulls(abscissa, plot): - filtered = [] - for x, y in zip(abscissa, plot): - if x is None or y is None: - continue - filtered.append( (x, y) ) - return sorted(filtered) - -class PlotWidget(object): - # XXX refactor with cubicweb.web.views.htmlwidgets.HtmlWidget - def _initialize_stream(self, w=None): - if w: - self.w = w - else: - self._stream = UStringIO() - self.w = self._stream.write - - def render(self, *args, **kwargs): - w = kwargs.pop('w', None) - self._initialize_stream(w) - self._render(*args, **kwargs) - if w is None: - return self._stream.getvalue() - - def _render(self, *args, **kwargs): - raise NotImplementedError - - -@add_metaclass(class_deprecated) -class FlotPlotWidget(PlotWidget): - """PlotRenderer widget using Flot""" - __deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead' - onload = u""" -var fig = jQuery('#%(figid)s'); -if (fig.attr('cubicweb:type') != 'prepared-plot') { - %(plotdefs)s - jQuery.plot(jQuery('#%(figid)s'), [%(plotdata)s], - {points: {show: true}, - lines: {show: true}, - grid: {hoverable: true}, - /*yaxis : {tickFormatter : suffixFormatter},*/ - xaxis: {mode: %(mode)s}}); - jQuery('#%(figid)s').data({mode: %(mode)s, dateformat: %(dateformat)s}); - jQuery('#%(figid)s').bind('plothover', onPlotHover); - fig.attr('cubicweb:type','prepared-plot'); -} -""" - - def __init__(self, labels, plots, timemode=False): - self.labels = labels - self.plots = plots # list of list of couples - self.timemode = timemode - - def dump_plot(self, plot): - if self.timemode: - plot = [(datetime2ticks(x), y) for x, y in plot] - return json_dumps(plot) - - def _render(self, req, width=500, height=400): - if req.ie_browser(): - req.add_js('excanvas.js') - req.add_js(('jquery.flot.js', 'cubicweb.flot.js')) - figid = u'figure%s' % next(req.varmaker) - plotdefs = [] - plotdata = [] - self.w(u'
              ' % - (figid, width, height)) - for idx, (label, plot) in enumerate(zip(self.labels, self.plots)): - plotid = '%s_%s' % (figid, idx) - plotdefs.append('var %s = %s;' % (plotid, self.dump_plot(plot))) - # XXX ugly but required in order to not crash my demo - plotdata.append("{label: '%s', data: %s}" % (label.replace(u'&', u''), plotid)) - fmt = req.property_value('ui.date-format') # XXX datetime-format - # XXX TODO make plot options customizable - req.html_headers.add_onload(self.onload % - {'plotdefs': '\n'.join(plotdefs), - 'figid': figid, - 'plotdata': ','.join(plotdata), - 'mode': self.timemode and "'time'" or 'null', - 'dateformat': '"%s"' % fmt}) - - -@add_metaclass(class_deprecated) -class PlotView(baseviews.AnyRsetView): - __deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead' - __regid__ = 'plot' - title = _('generic plot') - __select__ = multi_columns_rset() & all_columns_are_numbers() - timemode = False - paginable = False - - def call(self, width=500, height=400): - # prepare data - rqlst = self.cw_rset.syntax_tree() - # XXX try to make it work with unions - varnames = [var.name for var in rqlst.children[0].get_selected_variables()][1:] - abscissa = [row[0] for row in self.cw_rset] - plots = [] - nbcols = len(self.cw_rset.rows[0]) - for col in range(1, nbcols): - data = [row[col] for row in self.cw_rset] - plots.append(filterout_nulls(abscissa, data)) - plotwidget = FlotPlotWidget(varnames, plots, timemode=self.timemode) - plotwidget.render(self._cw, width, height, w=self.w) - - -class TimeSeriePlotView(PlotView): - __select__ = multi_columns_rset() & columns_are_date_then_numbers() - timemode = True - - -try: - from GChartWrapper import Pie, Pie3D -except ImportError: - pass -else: - - class PieChartWidget(PlotWidget): - def __init__(self, labels, values, pieclass=Pie, title=None): - self.labels = labels - self.values = values - self.pieclass = pieclass - self.title = title - - def _render(self, width=None, height=None): - piechart = self.pieclass(self.values) - piechart.label(*self.labels) - if width is not None: - height = height or width - piechart.size(width, height) - if self.title: - piechart.title(self.title) - self.w(u'' % xml_escape(piechart.url)) - - class PieChartView(baseviews.AnyRsetView): - __regid__ = 'piechart' - pieclass = Pie - paginable = False - - __select__ = multi_columns_rset() & second_column_is_number() - - def _guess_vid(self, row): - etype = self.cw_rset.description[row][0] - if self._cw.vreg.schema.eschema(etype).final: - return 'final' - return 'textincontext' - - def call(self, title=None, width=None, height=None): - labels = [] - values = [] - for rowidx, (_, value) in enumerate(self.cw_rset): - if value is not None: - vid = self._guess_vid(rowidx) - label = '%s: %s' % (self._cw.view(vid, self.cw_rset, row=rowidx, col=0), - value) - labels.append(label.encode(self._cw.encoding)) - values.append(value) - pie = PieChartWidget(labels, values, pieclass=self.pieclass, - title=title) - if width is not None: - height = height or width - pie.render(width, height, w=self.w) - - - class PieChart3DView(PieChartView): - __regid__ = 'piechart3D' - pieclass = Pie3D diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/primary.py --- a/web/views/primary.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,444 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -Public API of the PrimaryView class -```````````````````````````````````` -.. autoclass:: cubicweb.web.views.primary.PrimaryView - -Views that may be used to display an entity's attribute or relation -``````````````````````````````````````````````````````````````````` - -Yoy may easily the display of an attribute or relation by simply configuring the -view using one of `primaryview_display_ctrl` or `reledit_ctrl` to use one of the -views describled below. For instance: - -.. sourcecode:: python - - primaryview_display_ctrl.tag_attribute(('Foo', 'bar'), {'vid': 'attribute'}) - - -.. autoclass:: AttributeView -.. autoclass:: URLAttributeView -.. autoclass:: VerbatimAttributeView -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from logilab.common.deprecation import deprecated -from logilab.mtconverter import xml_escape - -from cubicweb import Unauthorized, NoSelectableObject -from cubicweb.utils import support_args -from cubicweb.predicates import match_kwargs, match_context -from cubicweb.view import EntityView -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name -from cubicweb.web import component -from cubicweb.web.views import uicfg - - -class PrimaryView(EntityView): - """ - The basic layout of a primary view is as in the :ref:`primary_view_layout` - section. This layout is actually drawn by the `render_entity` method. - - The methods you may want to modify while customizing a ``PrimaryView`` - are: - - .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_title - .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_attributes - .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_relations - .. automethod:: cubicweb.web.views.primary.PrimaryView.render_side_boxes - - The placement of relations in the relations section or in side boxes - can be controlled through the :ref:`primary_view_configuration` mechanism. - - .. automethod:: cubicweb.web.views.primary.PrimaryView.content_navigation_components - - Also, please note that by setting the following attributes in your - subclass, you can already customize some of the rendering: - - :attr:`show_attr_label` - Renders the attribute label next to the attribute value if set to `True`. - Otherwise, does only display the attribute value. - - :attr:`show_rel_label` - Renders the relation label next to the relation value if set to `True`. - Otherwise, does only display the relation value. - - :attr:`main_related_section` - Renders the relations of the entity if set to `True`. - - A good practice is for you to identify the content of your entity type for - which the default rendering does not answer your need so that you can focus - on the specific method (from the list above) that needs to be modified. 
We - do not advise you to overwrite ``render_entity`` unless you want a - completely different layout. - """ - - __regid__ = 'primary' - title = _('primary') - show_attr_label = True - show_rel_label = True - rsection = None - display_ctrl = None - main_related_section = True - - def html_headers(self): - """return a list of html headers (eg something to be inserted between - and of the returned page - - by default primary views are indexed - """ - return [] - - def entity_call(self, entity, **kwargs): - entity.complete() - uicfg_reg = self._cw.vreg['uicfg'] - if self.rsection is None: - self.rsection = uicfg_reg.select('primaryview_section', - self._cw, entity=entity) - if self.display_ctrl is None: - self.display_ctrl = uicfg_reg.select('primaryview_display_ctrl', - self._cw, entity=entity) - self.render_entity(entity) - - def render_entity(self, entity): - self.render_entity_toolbox(entity) - self.render_entity_title(entity) - # entity's attributes and relations, excluding meta data - # if the entity isn't meta itself - if self.is_primary(): - boxes = self._prepare_side_boxes(entity) - else: - boxes = None - if boxes or hasattr(self, 'render_side_related'): - self.w(u'
              ') - - self.w(u'
              ') - self.content_navigation_components('navcontenttop') - self.render_entity_attributes(entity) - if self.main_related_section: - self.render_entity_relations(entity) - self.content_navigation_components('navcontentbottom') - self.w(u'
              ') - # side boxes - if boxes or hasattr(self, 'render_side_related'): - self.w(u'
              ') - self.w(u'
              ') - self.render_side_boxes(boxes) - self.w(u'
              ') - self.w(u'
              ') - - def content_navigation_components(self, context): - """This method is applicable only for entity type implementing the - interface `IPrevNext`. This interface is for entities which can be - linked to a previous and/or next entity. This method will render the - navigation links between entities of this type, either at the top or at - the bottom of the page given the context (navcontent{top|bottom}). - """ - self.w(u'
              ' % context) - for comp in self._cw.vreg['ctxcomponents'].poss_visible_objects( - self._cw, rset=self.cw_rset, view=self, context=context): - # XXX bw compat code - try: - comp.render(w=self.w, row=self.cw_row, view=self) - except TypeError: - comp.render(w=self.w) - self.w(u'
              ') - - def render_entity_title(self, entity): - """Renders the entity title, by default using entity's - :meth:`dc_title()` method. - """ - title = xml_escape(entity.dc_title()) - if title: - if self.is_primary(): - self.w(u'

              %s

              ' % title) - else: - atitle = self._cw._('follow this link for more information on this %s') % entity.dc_type() - self.w(u'

              %s

              ' - % (entity.absolute_url(), atitle, title)) - - def render_entity_toolbox(self, entity): - self.content_navigation_components('ctxtoolbar') - - def render_entity_attributes(self, entity): - """Renders all attributes and relations in the 'attributes' section. - """ - display_attributes = [] - for rschema, _, role, dispctrl in self._section_def(entity, 'attributes'): - vid = dispctrl.get('vid', 'reledit') - if rschema.final or vid == 'reledit' or dispctrl.get('rtypevid'): - value = entity.view(vid, rtype=rschema.type, role=role, - initargs={'dispctrl': dispctrl}) - else: - rset = self._relation_rset(entity, rschema, role, dispctrl) - if rset: - value = self._cw.view(vid, rset) - else: - value = None - if value is not None and value != '': - display_attributes.append( (rschema, role, dispctrl, value) ) - if display_attributes: - self.w(u'') - for rschema, role, dispctrl, value in display_attributes: - label = self._rel_label(entity, rschema, role, dispctrl) - self.render_attribute(label, value, table=True) - self.w(u'
              ') - - def render_attribute(self, label, value, table=False): - self.field(label, value, tr=False, table=table) - - def render_entity_relations(self, entity): - """Renders all relations in the 'relations' section.""" - defaultlimit = self._cw.property_value('navigation.related-limit') - for rschema, tschemas, role, dispctrl in self._section_def(entity, 'relations'): - if rschema.final or dispctrl.get('rtypevid'): - vid = dispctrl.get('vid', 'reledit') - try: - rview = self._cw.vreg['views'].select( - vid, self._cw, rset=entity.cw_rset, row=entity.cw_row, - col=entity.cw_col, dispctrl=dispctrl, - rtype=rschema, role=role) - except NoSelectableObject: - continue - value = rview.render(row=entity.cw_row, col=entity.cw_col, - rtype=rschema.type, role=role) - else: - vid = dispctrl.get('vid', 'autolimited') - limit = dispctrl.get('limit', defaultlimit) if vid == 'autolimited' else None - if limit is not None: - limit += 1 # need one more so the view can check if there is more than the limit - rset = self._relation_rset(entity, rschema, role, dispctrl, limit=limit) - if not rset: - continue - try: - rview = self._cw.vreg['views'].select( - vid, self._cw, rset=rset, dispctrl=dispctrl) - except NoSelectableObject: - continue - value = rview.render() - label = self._rel_label(entity, rschema, role, dispctrl) - self.render_relation(label, value) - - def render_relation(self, label, value): - self.w(u'
              ') - if label: - self.w(u'

              %s

              ' % label) - self.w(value) - self.w(u'
              ') - - def render_side_boxes(self, boxes): - """Renders side boxes on the right side of the content. This will - generate a box for each relation in the 'sidebox' section, as well as - explicit box appobjects selectable in this context. - """ - for box in boxes: - try: - box.render(w=self.w, row=self.cw_row) - except TypeError: - box.render(w=self.w) - - def _prepare_side_boxes(self, entity): - sideboxes = [] - boxesreg = self._cw.vreg['ctxcomponents'] - defaultlimit = self._cw.property_value('navigation.related-limit') - for rschema, tschemas, role, dispctrl in self._section_def(entity, 'sideboxes'): - vid = dispctrl.get('vid', 'autolimited') - limit = defaultlimit if vid == 'autolimited' else None - rset = self._relation_rset(entity, rschema, role, dispctrl, limit=limit) - if not rset: - continue - label = self._rel_label(entity, rschema, role, dispctrl) - box = boxesreg.select('rsetbox', self._cw, rset=rset, - vid=vid, title=label, dispctrl=dispctrl, - context='incontext') - sideboxes.append(box) - sideboxes += boxesreg.poss_visible_objects( - self._cw, rset=self.cw_rset, view=self, - context='incontext') - # XXX since we've two sorted list, it may be worth using bisect - def get_order(x): - if 'order' in x.cw_property_defs: - return x.cw_propval('order') - # default to 9999 so view boxes occurs after component boxes - return x.cw_extra_kwargs.get('dispctrl', {}).get('order', 9999) - return sorted(sideboxes, key=get_order) - - def _section_def(self, entity, where): - rdefs = [] - eschema = entity.e_schema - for rschema, tschemas, role in eschema.relation_definitions(True): - if rschema in VIRTUAL_RTYPES: - continue - matchtschemas = [] - for tschema in tschemas: - section = self.rsection.etype_get(eschema, rschema, role, - tschema) - if section == where: - matchtschemas.append(tschema) - if matchtschemas: - dispctrl = self.display_ctrl.etype_get(eschema, rschema, role, '*') - rdefs.append( (rschema, matchtschemas, role, dispctrl) ) - return sorted(rdefs, key=lambda x: x[-1]['order']) - - def _relation_rset(self, entity, rschema, role, dispctrl, limit=None): - try: - rset = entity.related(rschema.type, role, limit=limit) - except Unauthorized: - return - if 'filter' in dispctrl: - rset = dispctrl['filter'](rset) - return rset - - def _rel_label(self, entity, rschema, role, dispctrl): - if rschema.final: - showlabel = dispctrl.get('showlabel', self.show_attr_label) - else: - showlabel = dispctrl.get('showlabel', self.show_rel_label) - if showlabel: - if dispctrl.get('label'): - label = self._cw._(dispctrl['label']) - else: - label = display_name(self._cw, rschema.type, role, - context=entity.cw_etype) - return label - return u'' - - -class RelatedView(EntityView): - """Display a rset, usually containing entities linked to another entity - being displayed. - - It will try to display nicely according to the number of items in the result - set. 
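
(Illustrative sketch, not from the original module: this 'autolimited' view is
usually selected through a display-control rule; ``Project`` and ``uses`` are
hypothetical names, and the dispctrl keys shown are the ones read above.)

.. sourcecode:: python

    from cubicweb.web.views.uicfg import primaryview_display_ctrl

    primaryview_display_ctrl.tag_subject_of(
        ('Project', 'uses', '*'),
        {'vid': 'autolimited', 'limit': 5, 'subvid': 'incontext'})
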
- - XXX include me in the doc - """ - __regid__ = 'autolimited' - - def call(self, **kwargs): - if 'dispctrl' in self.cw_extra_kwargs: - if 'limit' in self.cw_extra_kwargs['dispctrl']: - limit = self.cw_extra_kwargs['dispctrl']['limit'] - else: - limit = self._cw.property_value('navigation.related-limit') - list_limit = self.cw_extra_kwargs['dispctrl'].get('use_list_limit', 5) - subvid = self.cw_extra_kwargs['dispctrl'].get('subvid', 'incontext') - else: - limit = list_limit = None - subvid = 'incontext' - if limit is None or self.cw_rset.rowcount <= limit: - if self.cw_rset.rowcount == 1: - self.wview(subvid, self.cw_rset, row=0) - elif list_limit is None or 1 < self.cw_rset.rowcount <= list_limit: - self.wview('csv', self.cw_rset, subvid=subvid) - else: - self.w(u'
              ') - self.wview('simplelist', self.cw_rset, subvid=subvid) - self.w(u'
              ') - # else show links to display related entities - else: - rql = self.cw_rset.printable_rql() - rset = self.cw_rset.limit(limit) # remove extra entity - if list_limit is None: - self.wview('csv', rset, subvid=subvid) - self.w(u'[%s]' % ( - xml_escape(self._cw.build_url(rql=rql, vid=subvid)), - self._cw._('see them all'))) - else: - self.w(u'
              ') - self.wview('simplelist', rset, subvid=subvid) - self.w(u'[%s]' % ( - xml_escape(self._cw.build_url(rql=rql, vid=subvid)), - self._cw._('see them all'))) - self.w(u'
              ') - - -class AttributeView(EntityView): - """:__regid__: *attribute* - - This view is generally used to disable the *reledit* feature. It works on - both relations and attributes. - """ - __regid__ = 'attribute' - __select__ = EntityView.__select__ & match_kwargs('rtype') - - def entity_call(self, entity, rtype, role='subject', **kwargs): - if self._cw.vreg.schema.rschema(rtype).final: - self.w(entity.printable_value(rtype)) - else: - dispctrl = uicfg.primaryview_display_ctrl.etype_get( - entity.e_schema, rtype, role, '*') - rset = entity.related(rtype, role) - if rset: - self.wview('autolimited', rset, initargs={'dispctrl': dispctrl}) - - -class URLAttributeView(EntityView): - """:__regid__: *urlattr* - - This view will wrap an attribute value (hence expect a string) into an '' - HTML tag to display a clickable link. - """ - __regid__ = 'urlattr' - __select__ = EntityView.__select__ & match_kwargs('rtype') - - def entity_call(self, entity, rtype, **kwargs): - url = entity.printable_value(rtype) - if url: - self.w(u'%s' % (url, url)) - - -class VerbatimAttributeView(EntityView): - """:__regid__: *verbatimattr* - - This view will wrap an attribute value into an '
<pre>' HTML tag to display
-    arbitrary text where EOL will be respected. It usually makes sense for
-    attributes whose value is a multi-line string where new lines matter.
              -    """
              -    __regid__ = 'verbatimattr'
              -    __select__ = EntityView.__select__ & match_kwargs('rtype')
              -
              -    def entity_call(self, entity, rtype, **kwargs):
              -        value = entity.printable_value(rtype)
              -        if value:
-            self.w(u'<pre>%s</pre>
              ' % value) - - - - - -class ToolbarLayout(component.Layout): - # XXX include me in the doc - __select__ = match_context('ctxtoolbar') - - def render(self, w): - if self.init_rendering(): - self.cw_extra_kwargs['view'].render_body(w) - - -## default primary ui configuration ########################################### - -_pvs = uicfg.primaryview_section -for rtype in META_RTYPES: - _pvs.tag_subject_of(('*', rtype, '*'), 'hidden') - _pvs.tag_object_of(('*', rtype, '*'), 'hidden') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/pyviews.py --- a/web/views/pyviews.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,127 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Basic views for python values (eg without any result set) -""" -__docformat__ = "restructuredtext en" - -from six import text_type -from six.moves import range - -from cubicweb.view import View -from cubicweb.predicates import match_kwargs -from cubicweb.web.views import tableview - - -class PyValTableColRenderer(tableview.AbstractColumnRenderer): - """Default column renderer for :class:`PyValTableView`.""" - def bind(self, view, colid): - super(PyValTableColRenderer, self).bind(view, colid) - self.header = view.headers[colid] if view.headers else None - self.data = view.pyvalue - - def render_header(self, w): - if self.header: - w(self._cw._(self.header)) - else: - w(self.empty_cell_content) - - def render_cell(self, w, rownum): - w(text_type(self.data[rownum][self.colid])) - - -class PyValTableView(tableview.TableMixIn, View): - """This table view is designed to be used a list of list of unicode values - given as a mandatory `pyvalue` argument. Take care, content is NOT - xml-escaped. - - It's configured through the following selection arguments. - - If `headers` is specified, it is expected to be a list of headers to be - inserted as first row (in ). - - `header_column_idx` may be used to specify a column index or a set of column - indiced where values should be inserted inside tag instead of . - - `cssclass` is the CSS class used on the tag, and default to - 'listing' (so that the table will look similar to those generated by the - table view). 
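
(Illustrative sketch, not from the original module: a hypothetical call site from
another view's call() method; the rows and headers are made-up values, and the
view is selected through the ``pyvalue`` keyword argument.)

.. sourcecode:: python

    rows = [[u'cubicweb', u'3.22'], [u'yams', u'0.42']]
    self.wview('pyvaltable', pyvalue=rows, headers=[u'project', u'version'])
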
- """ - __regid__ = 'pyvaltable' - __select__ = match_kwargs('pyvalue') - default_column_renderer_class = PyValTableColRenderer - paginable = False # not supported - headers = None - cssclass = None - domid = None - - def __init__(self, req, pyvalue, headers=None, cssclass=None, - header_column_idx=None, **kwargs): - super(PyValTableView, self).__init__(req, **kwargs) - self.pyvalue = pyvalue - if headers is not None: - self.headers = headers - elif self.headers: # headers set on a class attribute, translate - self.headers = [self._cw._(header) for header in self.headers] - if cssclass is not None: - self.cssclass = cssclass - self.header_column_idx = header_column_idx - - @property - def layout_args(self): - args = {} - if self.cssclass: - args['cssclass'] = self.cssclass - if self.header_column_idx is not None: - args['header_column_idx'] = self.header_column_idx - return args - - # layout callbacks ######################################################### - - @property - def table_size(self): - """return the number of rows (header excluded) to be displayed""" - return len(self.pyvalue) - - @property - def has_headers(self): - return self.headers - - def build_column_renderers(self): - return [self.column_renderer(colid) - for colid in range(len(self.pyvalue[0]))] - - def facets_form(self, mainvar=None): - return None # not supported - - def table_actions(self): - return [] # not supported - - -class PyValListView(View): - """display a list of values into an html list. - - Take care, content is NOT xml-escaped. - """ - __regid__ = 'pyvallist' - __select__ = match_kwargs('pyvalue') - - def call(self, pyvalue): - self.w(u'
<ul>\n')
-        for line in pyvalue:
-            self.w(u'<li>%s</li>\n' % line)
-        self.w(u'</ul>
              \n') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/rdf.py --- a/web/views/rdf.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,111 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""base xml and rss views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves import range - -from yams import xy - -from cubicweb.schema import VIRTUAL_RTYPES -from cubicweb.view import EntityView -from cubicweb.web.views.xmlrss import SERIALIZERS - -try: - import rdflib -except ImportError: - rdflib = None - -if rdflib is not None: - RDF = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#') - CW = rdflib.Namespace('http://ns.cubicweb.org/cubicweb/0.0/') - from rdflib import Literal, URIRef, Namespace - - def urijoin(item): - base, ext = item - return URIRef(Namespace(base)[ext]) - - SKIP_RTYPES = VIRTUAL_RTYPES | set(['cwuri', 'is', 'is_instance_of']) - - class RDFView(EntityView): - """rdf view for entities""" - __regid__ = 'rdf' - title = _('rdf export') - templatable = False - binary = True - format = 'xml' - content_type = 'text/xml' # +rdf - - def call(self): - graph = rdflib.Graph() - graph.bind('cw', CW) - for prefix, xmlns in xy.XY.prefixes.items(): - graph.bind(prefix, rdflib.Namespace(xmlns)) - for i in range(self.cw_rset.rowcount): - entity = self.cw_rset.complete_entity(i, 0) - self.entity2graph(graph, entity) - self.w(graph.serialize(format=self.format)) - - def entity_call(self, entity): - self.call() - - def entity2graph(self, graph, entity): - cwuri = URIRef(entity.cwuri) - add = graph.add - add( (cwuri, RDF.type, CW[entity.e_schema.type]) ) - try: - for item in xy.xeq(entity.e_schema.type): - add( (cwuri, RDF.type, urijoin(item)) ) - except xy.UnsupportedVocabulary: - pass - for rschema, eschemas, role in entity.e_schema.relation_definitions('relation'): - rtype = rschema.type - if rtype in SKIP_RTYPES or rtype.endswith('_permission'): - continue - for eschema in eschemas: - if eschema.final: - try: - value = entity.cw_attr_cache[rtype] - except KeyError: - continue # assuming rtype is Bytes - if value is not None: - add( (cwuri, CW[rtype], Literal(value)) ) - try: - for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)): - add( (cwuri, urijoin(item[1]), Literal(value)) ) - except xy.UnsupportedVocabulary: - pass - else: - for related in entity.related(rtype, role, entities=True, safe=True): - if role == 'subject': - add( (cwuri, CW[rtype], URIRef(related.cwuri)) ) - try: - for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)): - add( (cwuri, urijoin(item[1]), URIRef(related.cwuri)) ) - except xy.UnsupportedVocabulary: - pass - else: - add( (URIRef(related.cwuri), CW[rtype], cwuri) ) - - - class RDFN3View(RDFView): - __regid__ = 'n3rdf' - 
format = 'n3' - content_type = 'text/n3' diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/reledit.py --- a/web/views/reledit.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,404 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""edit entity attributes/relations from any view, without going to the entity -form -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import copy -from warnings import warn - -from logilab.mtconverter import xml_escape -from logilab.common.deprecation import deprecated, class_renamed -from logilab.common.decorators import cached - -from cubicweb import neg_role -from cubicweb.schema import display_name -from cubicweb.utils import json, json_dumps -from cubicweb.predicates import non_final_entity, match_kwargs -from cubicweb.view import EntityView -from cubicweb.web import stdmsgs -from cubicweb.web.views import uicfg -from cubicweb.web.form import FieldNotFound -from cubicweb.web.formwidgets import Button, SubmitButton -from cubicweb.web.views.ajaxcontroller import ajaxfunc - -class _DummyForm(object): - __slots__ = ('event_args',) - def form_render(self, **_args): - return u'' - def render(self, *_args, **_kwargs): - return u'' - def append_field(self, *args): - pass - def add_hidden(self, *args): - pass - -class AutoClickAndEditFormView(EntityView): - __regid__ = 'reledit' - __select__ = non_final_entity() & match_kwargs('rtype') - - # ui side continuations - _onclick = (u"cw.reledit.loadInlineEditionForm('%(formid)s', %(eid)s, '%(rtype)s', '%(role)s', " - "'%(divid)s', %(reload)s, '%(vid)s', '%(action)s');") - _cancelclick = "cw.reledit.cleanupAfterCancel('%s')" - - # ui side actions/buttons - _addzone = u'%(msg)s' - _addmsg = _('click to add a value') - _addlogo = 'plus.png' - _deletezone = u'%(msg)s' - _deletemsg = _('click to delete this value') - _deletelogo = 'cancel.png' - _editzone = u'%(msg)s' - _editzonemsg = _('click to edit this field') - _editlogo = 'pen_icon.png' - - # renderer - _form_renderer_id = 'base' - - def entity_call(self, entity, rtype=None, role='subject', - reload=False, # controls reloading the whole page after change - # boolean, eid (to redirect), or - # function taking the subject entity & returning a boolean or an eid - rvid=None, # vid to be applied to other side of rtype (non final relations only) - default_value=None, - formid='base', - action=None - ): - """display field to edit entity's `rtype` relation on click""" - assert rtype - self._cw.add_css('cubicweb.form.css') - self._cw.add_js(('cubicweb.reledit.js', 'cubicweb.edition.js', 'cubicweb.ajax.js')) - self.entity = entity - rschema = self._cw.vreg.schema[rtype] - rctrl = self._cw.vreg['uicfg'].select('reledit', self._cw, entity=entity) - self._rules = 
rctrl.etype_get(self.entity.e_schema.type, rschema.type, role, '*') - reload = self._compute_reload(rschema, role, reload) - divid = self._build_divid(rtype, role, self.entity.eid) - if rschema.final: - self._handle_attribute(rschema, role, divid, reload, action) - else: - if self._is_composite(): - self._handle_composite(rschema, role, divid, reload, formid, action) - else: - self._handle_relation(rschema, role, divid, reload, formid, action) - - def _handle_attribute(self, rschema, role, divid, reload, action): - rvid = self._rules.get('rvid', None) - if rvid is not None: - value = self._cw.view(rvid, entity=self.entity, - rtype=rschema.type, role=role) - else: - value = self.entity.printable_value(rschema.type) - if not self._should_edit_attribute(rschema): - self.w(value) - return - form, renderer = self._build_form(self.entity, rschema, role, divid, - 'base', reload, action) - value = value or self._compute_default_value(rschema, role) - self.view_form(divid, value, form, renderer) - - def _compute_formid_value(self, rschema, role, rvid, formid): - related_rset = self.entity.related(rschema.type, role) - if related_rset: - value = self._cw.view(rvid, related_rset) - else: - value = self._compute_default_value(rschema, role) - if not self._should_edit_relation(rschema, role): - return None, value - return formid, value - - def _handle_relation(self, rschema, role, divid, reload, formid, action): - rvid = self._rules.get('rvid', 'autolimited') - formid, value = self._compute_formid_value(rschema, role, rvid, formid) - if formid is None: - return self.w(value) - form, renderer = self._build_form(self.entity, rschema, role, divid, formid, - reload, action, dict(vid=rvid)) - self.view_form(divid, value, form, renderer) - - def _handle_composite(self, rschema, role, divid, reload, formid, action): - # this is for attribute-like composites (1 target type, 1 related entity at most, for now) - entity = self.entity - related_rset = entity.related(rschema.type, role) - add_related = self._may_add_related(related_rset, rschema, role) - edit_related = self._may_edit_related_entity(related_rset, rschema, role) - delete_related = edit_related and self._may_delete_related(related_rset, rschema, role) - rvid = self._rules.get('rvid', 'autolimited') - formid, value = self._compute_formid_value(rschema, role, rvid, formid) - if formid is None or not (edit_related or add_related): - # till we learn to handle cases where not (edit_related or add_related) - self.w(value) - return - form, renderer = self._build_form(entity, rschema, role, divid, formid, - reload, action, dict(vid=rvid)) - self.view_form(divid, value, form, renderer, - edit_related, add_related, delete_related) - - @cached - def _compute_ttypes(self, rschema, role): - dual_role = neg_role(role) - return getattr(rschema, '%ss' % dual_role)() - - def _compute_reload(self, rschema, role, reload): - ctrl_reload = self._rules.get('reload', reload) - if callable(ctrl_reload): - ctrl_reload = ctrl_reload(self.entity) - if isinstance(ctrl_reload, int) and ctrl_reload > 1: # not True/False - ctrl_reload = self._cw.build_url(ctrl_reload) - return ctrl_reload - - def _compute_default_value(self, rschema, role): - default = self._rules.get('novalue_label') - if default is None: - if self._rules.get('novalue_include_rtype'): - default = self._cw._('<%s not specified>') % display_name( - self._cw, rschema.type, role) - else: - default = self._cw._('') - else: - default = self._cw._(default) - return xml_escape(default) - - def _is_composite(self): - 
return self._rules.get('edit_target') == 'related' - - def _may_add_related(self, related_rset, rschema, role): - """ ok for attribute-like composite entities """ - ttypes = self._compute_ttypes(rschema, role) - if len(ttypes) > 1: # many etypes: learn how to do it - return False - rdef = rschema.role_rdef(self.entity.e_schema, ttypes[0], role) - card = rdef.role_cardinality(role) - if related_rset or card not in '?1': - return False - if role == 'subject': - kwargs = {'fromeid': self.entity.eid} - else: - kwargs = {'toeid': self.entity.eid} - return rdef.has_perm(self._cw, 'add', **kwargs) - - def _may_edit_related_entity(self, related_rset, rschema, role): - """ controls the edition of the related entity """ - ttypes = self._compute_ttypes(rschema, role) - if len(ttypes) > 1 or len(related_rset.rows) != 1: - return False - if self.entity.e_schema.rdef(rschema, role).role_cardinality(role) not in '?1': - return False - return related_rset.get_entity(0, 0).cw_has_perm('update') - - def _may_delete_related(self, related_rset, rschema, role): - # we assume may_edit_related, only 1 related entity - if not related_rset: - return False - rentity = related_rset.get_entity(0, 0) - entity = self.entity - if role == 'subject': - kwargs = {'fromeid': entity.eid, 'toeid': rentity.eid} - cardinality = rschema.rdefs[(entity.cw_etype, rentity.cw_etype)].cardinality[0] - else: - kwargs = {'fromeid': rentity.eid, 'toeid': entity.eid} - cardinality = rschema.rdefs[(rentity.cw_etype, entity.cw_etype)].cardinality[1] - if cardinality in '1+': - return False - # NOTE: should be sufficient given a well built schema/security - return rschema.has_perm(self._cw, 'delete', **kwargs) - - def _build_zone(self, zonedef, msg, logo): - return zonedef % {'msg': xml_escape(self._cw._(msg)), - 'logo': xml_escape(self._cw.data_url(logo))} - - def _build_edit_zone(self): - return self._build_zone(self._editzone, self._editzonemsg, self._editlogo) - - def _build_delete_zone(self): - return self._build_zone(self._deletezone, self._deletemsg, self._deletelogo) - - def _build_add_zone(self): - return self._build_zone(self._addzone, self._addmsg, self._addlogo) - - def _build_divid(self, rtype, role, entity_eid): - """ builds an id for the root div of a reledit widget """ - return '%s-%s-%s' % (rtype, role, entity_eid) - - def _build_args(self, entity, rtype, role, formid, reload, action, - extradata=None): - divid = self._build_divid(rtype, role, entity.eid) - event_args = {'divid' : divid, 'eid' : entity.eid, 'rtype' : rtype, 'formid': formid, - 'reload' : json_dumps(reload), 'action': action, - 'role' : role, 'vid' : u''} - if extradata: - event_args.update(extradata) - return event_args - - def _prepare_form(self, entity, rschema, role, action): - assert action in ('edit_rtype', 'edit_related', 'add', 'delete'), action - if action == 'edit_rtype': - return False, entity - label = True - if action in ('edit_related', 'delete'): - edit_entity = entity.related(rschema, role).get_entity(0, 0) - elif action == 'add': - add_etype = self._compute_ttypes(rschema, role)[0] - _new_entity = self._cw.vreg['etypes'].etype_class(add_etype)(self._cw) - _new_entity.eid = next(self._cw.varmaker) - edit_entity = _new_entity - # XXX see forms.py ~ 276 and entities.linked_to method - # is there another way? 
- self._cw.form['__linkto'] = '%s:%s:%s' % (rschema, entity.eid, neg_role(role)) - assert edit_entity - return label, edit_entity - - def _build_renderer(self, related_entity, display_label): - return self._cw.vreg['formrenderers'].select( - self._form_renderer_id, self._cw, entity=related_entity, - display_label=display_label, - table_class='attributeForm' if display_label else '', - display_help=False, button_bar_class='buttonbar', - display_progress_div=False) - - def _build_form(self, entity, rschema, role, divid, formid, reload, action, - extradata=None, **formargs): - rtype = rschema.type - event_args = self._build_args(entity, rtype, role, formid, reload, action, extradata) - if not action: - form = _DummyForm() - form.event_args = event_args - return form, None - label, edit_entity = self._prepare_form(entity, rschema, role, action) - cancelclick = self._cancelclick % divid - form = self._cw.vreg['forms'].select( - formid, self._cw, rset=edit_entity.as_rset(), entity=edit_entity, - domid='%s-form' % divid, formtype='inlined', - action=self._cw.build_url('validateform', __onsuccess='window.parent.cw.reledit.onSuccess'), - cwtarget='eformframe', cssclass='releditForm', - **formargs) - # pass reledit arguments - for pname, pvalue in event_args.items(): - form.add_hidden('__reledit|' + pname, pvalue) - # handle buttons - if form.form_buttons: # edition, delete - form_buttons = [] - for button in form.form_buttons: - if not button.label.endswith('apply'): - if button.label.endswith('cancel'): - button = copy.deepcopy(button) - button.cwaction = None - button.onclick = cancelclick - form_buttons.append(button) - form.form_buttons = form_buttons - else: # base - form.form_buttons = [SubmitButton(), - Button(stdmsgs.BUTTON_CANCEL, onclick=cancelclick)] - form.event_args = event_args - if formid == 'base': - field = form.field_by_name(rtype, role, entity.e_schema) - form.append_field(field) - return form, self._build_renderer(edit_entity, label) - - def _should_edit_attribute(self, rschema): - entity = self.entity - rdef = entity.e_schema.rdef(rschema) - # check permissions - if not entity.cw_has_perm('update'): - return False - rdef = entity.e_schema.rdef(rschema) - return rdef.has_perm(self._cw, 'update', eid=entity.eid) - - def _should_edit_relation(self, rschema, role): - eeid = self.entity.eid - perm_args = {'fromeid': eeid} if role == 'subject' else {'toeid': eeid} - return rschema.has_perm(self._cw, 'add', **perm_args) - - def _open_form_wrapper(self, divid, value, form, renderer, - _edit_related, _add_related, _delete_related): - w = self.w - w(u'
              ' % - {'id': divid, 'css': 'releditField', - 'out': "jQuery('#%s').addClass('invisible')" % divid, - 'over': "jQuery('#%s').removeClass('invisible')" % divid}) - w(u'
              ' % divid) - w(value) - w(u'
              ') - form.render(w=w, renderer=renderer) - w(u'') - self.w(u'
              ') - - def view_form(self, divid, value, form=None, renderer=None, - edit_related=False, add_related=False, delete_related=False): - self._open_form_wrapper(divid, value, form, renderer, - edit_related, add_related, delete_related) - args = form.event_args.copy() - self._edit_action(divid, args, edit_related, add_related, delete_related) - self._add_action(divid, args, edit_related, add_related, delete_related) - self._del_action(divid, args, edit_related, add_related, delete_related) - self._close_form_wrapper() - - -ClickAndEditFormView = class_renamed('ClickAndEditFormView', AutoClickAndEditFormView) - - -@ajaxfunc(output_type='xhtml') -def reledit_form(self): - req = self._cw - args = dict((x, req.form[x]) - for x in ('formid', 'rtype', 'role', 'reload', 'action')) - rset = req.eid_rset(int(self._cw.form['eid'])) - try: - args['reload'] = json.loads(args['reload']) - except ValueError: # not true/false, an absolute url - assert args['reload'].startswith('http') - view = req.vreg['views'].select('reledit', req, rset=rset, rtype=args['rtype']) - return self._call_view(view, **args) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/schema.py --- a/web/views/schema.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,712 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
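For reference, the reledit machinery deleted above keys every widget on a DOM id derived from the relation type, the role and the entity eid, and the reledit_form ajax function rebuilds the same view from those arguments. A minimal sketch of the id scheme, assuming nothing beyond what the hunk shows; the rtype, role and eid values are invented.

    # Sketch of the div-id scheme of AutoClickAndEditFormView._build_divid above;
    # the rtype, role and eid values are invented for illustration.
    def build_divid(rtype, role, entity_eid):
        """build the id of the root div of a reledit widget"""
        return '%s-%s-%s' % (rtype, role, entity_eid)

    divid = build_divid('title', 'subject', 1234)   # -> 'title-subject-1234'
    # cw.reledit.loadInlineEditionForm() posts formid/eid/rtype/role/divid/reload/vid/action
    # back to the server, where the reledit_form ajax function re-selects the 'reledit' view.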
-"""Specific views for schema related entities""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from itertools import cycle - -import tempfile -import os, os.path as osp -import codecs - -from six import text_type - -from logilab.common.graph import GraphGenerator, DotBackend -from logilab.common.ureports import Section, Table -from logilab.common.registry import yes -from logilab.mtconverter import xml_escape -from yams import BASE_TYPES, schema2dot as s2d -from yams.buildobjs import DEFAULT_ATTRPERMS - -from cubicweb.predicates import (is_instance, match_user_groups, match_kwargs, - has_related_entities, authenticated_user) -from cubicweb.schema import (META_RTYPES, SCHEMA_TYPES, SYSTEM_RTYPES, - WORKFLOW_TYPES, INTERNAL_TYPES) -from cubicweb.utils import make_uid -from cubicweb.view import EntityView, StartupView -from cubicweb import tags, uilib -from cubicweb.web import action, facet, schemaviewer -from cubicweb.web.views import uicfg, primary, baseviews, tabs, tableview, ibreadcrumbs - -ALWAYS_SKIP_TYPES = BASE_TYPES | SCHEMA_TYPES -SKIP_TYPES = (ALWAYS_SKIP_TYPES | META_RTYPES | SYSTEM_RTYPES | WORKFLOW_TYPES - | INTERNAL_TYPES) -SKIP_TYPES.update(set(('CWUser', 'CWGroup', 'EmailAddress', 'Bookmark'))) - -def skip_types(req): - if int(req.form.get('skipmeta', True)): - return SKIP_TYPES - return ALWAYS_SKIP_TYPES - -_pvs = uicfg.primaryview_section -_pvdc = uicfg.primaryview_display_ctrl - -for _action in ('read', 'add', 'update', 'delete'): - _pvs.tag_subject_of(('*', '%s_permission' % _action, '*'), 'hidden') - _pvs.tag_object_of(('*', '%s_permission' % _action, '*'), 'hidden') - -for _etype in ('CWEType', 'CWRType', 'CWAttribute', 'CWRelation'): - _pvdc.tag_attribute((_etype, 'description'), {'showlabel': False}) - -_pvs.tag_attribute(('CWEType', 'name'), 'hidden') -_pvs.tag_attribute(('CWEType', 'final'), 'hidden') -_pvs.tag_object_of(('*', 'workflow_of', 'CWEType'), 'hidden') -_pvs.tag_subject_of(('CWEType', 'default_workflow', '*'), 'hidden') -_pvs.tag_object_of(('*', 'specializes', 'CWEType'), 'hidden') -_pvs.tag_subject_of(('CWEType', 'specializes', '*'), 'hidden') -_pvs.tag_object_of(('*', 'from_entity', 'CWEType'), 'hidden') -_pvs.tag_object_of(('*', 'to_entity', 'CWEType'), 'hidden') - -_pvs.tag_attribute(('CWRType', 'name'), 'hidden') -_pvs.tag_attribute(('CWRType', 'final'), 'hidden') -_pvs.tag_object_of(('*', 'relation_type', 'CWRType'), 'hidden') - -_pvs.tag_subject_of(('CWAttribute', 'constrained_by', '*'), 'hidden') -_pvs.tag_subject_of(('CWRelation', 'constrained_by', '*'), 'hidden') - - -class SecurityViewMixIn(object): - """mixin providing methods to display security information for a entity, - relation or relation definition schema - """ - cssclass = "listing schemaInfo" - - def permissions_table(self, erschema, permissions=None): - self._cw.add_css('cubicweb.acl.css') - w = self.w - _ = self._cw._ - w(u'
              ' % self.cssclass) - w(u'' % ( - _("permission"), _('granted to groups'), _('rql expressions'))) - for action in erschema.ACTIONS: - w(u'\n') - w(u'
              %s%s%s
              %s' % _(action)) - if permissions is None: - groups = erschema.get_groups(action) - rqlexprs = sorted(e.expression for e in erschema.get_rqlexprs(action)) - else: - groups = permissions[action][0] - rqlexprs = permissions[action][1] - # XXX get group entity and call it's incontext view - groups = [u'%s' % ( - group, self._cw.build_url('cwgroup/%s' % group), label) - for label, group in sorted((_(g), g) for g in groups)] - w(u'
              '.join(groups)) - w(u'
              ') - w(u'
              '.join(rqlexprs)) - w(u'
              ') - - def grouped_permissions_table(self, rschema): - # group relation definitions with identical permissions - perms = {} - for rdef in rschema.rdefs.values(): - rdef_perms = [] - for action in rdef.ACTIONS: - groups = sorted(rdef.get_groups(action)) - exprs = sorted(e.expression for e in rdef.get_rqlexprs(action)) - rdef_perms.append( (action, (tuple(groups), tuple(exprs))) ) - rdef_perms = tuple(rdef_perms) - if rdef_perms in perms: - perms[rdef_perms].append( (rdef.subject, rdef.object) ) - else: - perms[rdef_perms] = [(rdef.subject, rdef.object)] - # set layout permissions in a table for each group of relation - # definition - w = self.w - _ = self._cw._ - w(u'
              ') - tmpl = u'%s %s %s' - for perm, rdefs in perms.items(): - w(u'
              %s
              ' % u', '.join( - tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs)) - # accessing rdef from previous loop by design: only used to get - # ACTIONS - self.permissions_table(rdef, dict(perm)) - w(u'
              ') - - -# global schema view ########################################################### - -class SchemaView(tabs.TabsMixin, StartupView): - """display schema information (graphically, listing tables...) in tabs""" - __regid__ = 'schema' - title = _('data model schema') - tabs = [_('schema-diagram'), _('schema-entity-types'), - _('schema-relation-types')] - default_tab = 'schema-diagram' - - def call(self): - self.w(u'

              %s

              ' % self._cw._(self.title)) - self.render_tabs(self.tabs, self.default_tab) - - -class SchemaImageTab(StartupView): - __regid__ = 'schema-diagram' - - def call(self): - _ = self._cw._ - self.w(self._cw._( - u'
              This schema of the data model excludes the ' - 'meta-data, but you can also display a complete ' - 'schema with meta-data.
              ') - % xml_escape(self._cw.build_url('view', vid='schemagraph', skipmeta=0))) - self.w(u'' % - (self._cw.build_url('view', vid='owl'), - self._cw._(u'Download schema as OWL'))) - self.wview('schemagraph') - -class SchemaETypeTab(StartupView): - __regid__ = 'schema-entity-types' - - def call(self): - self.wview('table', self._cw.execute( - 'Any X ORDERBY N WHERE X is CWEType, X name N, X final FALSE')) - - -class SchemaRTypeTab(StartupView): - __regid__ = 'schema-relation-types' - - def call(self): - self.wview('table', self._cw.execute( - 'Any X ORDERBY N WHERE X is CWRType, X name N, X final FALSE')) - -# CWEType ###################################################################### - -# register msgid generated in entity relations tables -_('i18ncard_1'), _('i18ncard_?'), _('i18ncard_+'), _('i18ncard_*') - -class CWETypePrimaryView(tabs.TabbedPrimaryView): - __select__ = is_instance('CWEType') - tabs = [_('cwetype-description'), _('cwetype-box'), _('cwetype-workflow'), - _('cwetype-views'), _('cwetype-permissions')] - default_tab = 'cwetype-description' - - -class CWETypeDescriptionTab(tabs.PrimaryTab): - __regid__ = 'cwetype-description' - __select__ = tabs.PrimaryTab.__select__ & is_instance('CWEType') - - def render_entity_attributes(self, entity): - super(CWETypeDescriptionTab, self).render_entity_attributes(entity) - _ = self._cw._ - # inheritance - if entity.specializes: - self.w(u'
              %s' % _('Parent class:')) - self.wview('csv', entity.related('specializes', 'subject')) - self.w(u'
              ') - if entity.reverse_specializes: - self.w(u'
              %s' % _('Sub-classes:')) - self.wview('csv', entity.related('specializes', 'object')) - self.w(u'
              ') - # entity schema image - self.wview('schemagraph', etype=entity.name) - # entity schema attributes - self.w(u'

              %s

              ' % _('CWAttribute_plural')) - rset = self._cw.execute( - 'Any A,ON,D,C,A,DE,A, IDX,FTI,I18N,R,O,RN,S ORDERBY AA ' - 'WHERE A is CWAttribute, A from_entity S, S eid %(x)s, ' - 'A ordernum AA, A defaultval D, A description DE, A cardinality C, ' - 'A fulltextindexed FTI, A internationalizable I18N, A indexed IDX, ' - 'A relation_type R, R name RN, A to_entity O, O name ON', - {'x': entity.eid}) - self.wview('table', rset, 'null', - cellvids={0: 'rdef-name-cell', - 2: 'etype-attr-defaultval-cell', - 3: 'etype-attr-cardinality-cell', - 4: 'rdef-constraints-cell', - 6: 'rdef-options-cell'}, - headers=(_(u'name'), _(u'type'), - _(u'default value'), _(u'required'), - _(u'constraints'), _(u'description'), _('options'))) - # entity schema relations - self.w(u'

              %s

              ' % _('CWRelation_plural')) - cellvids = {0: 'rdef-name-cell', - 2: 'etype-rel-cardinality-cell', - 3: 'rdef-constraints-cell', - 4: 'rdef-options-cell'} - headers= [_(u'name'), _(u'object type'), _(u'cardinality'), - _(u'constraints'), _(u'options')] - rset = self._cw.execute( - 'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN ' - 'WHERE A is CWRelation, A from_entity S, S eid %(x)s, ' - 'A composite K, A cardinality C, ' - 'A relation_type R, R name RN, A to_entity TT, TT name TTN', - {'x': entity.eid}) - if rset: - self.w(u'
              %s %s
              ' % (entity.name, _('is subject of:'))) - self.wview('table', rset, cellvids=cellvids, headers=headers) - rset = self._cw.execute( - 'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN ' - 'WHERE A is CWRelation, A to_entity O, O eid %(x)s, ' - 'A composite K, A cardinality C, ' - 'A relation_type R, R name RN, A from_entity TT, TT name TTN', - {'x': entity.eid}) - if rset: - cellvids[0] = 'rdef-object-name-cell' - headers[1] = _(u'subject type') - self.w(u'
              %s %s
              ' % (entity.name, _('is object of:'))) - self.wview('table', rset, cellvids=cellvids, headers=headers) - - -class CWETypeAttributeCardinalityCell(baseviews.FinalView): - __regid__ = 'etype-attr-cardinality-cell' - - def cell_call(self, row, col): - if self.cw_rset.rows[row][col][0] == '1': - self.w(self._cw._(u'yes')) - else: - self.w(self._cw._(u'no')) - - -class CWETypeAttributeDefaultValCell(baseviews.FinalView): - __regid__ = 'etype-attr-defaultval-cell' - - def cell_call(self, row, col): - defaultval = self.cw_rset.rows[row][col] - if defaultval is not None: - self.w(text_type(self.cw_rset.rows[row][col].unzpickle())) - -class CWETypeRelationCardinalityCell(baseviews.FinalView): - __regid__ = 'etype-rel-cardinality-cell' - - def cell_call(self, row, col): - self.w(self._cw._(self.cw_rset.rows[row][col])) - - -class CWETypeBoxTab(EntityView): - __regid__ = 'cwetype-box' - __select__ = is_instance('CWEType') - - def cell_call(self, row, col): - viewer = schemaviewer.SchemaViewer(self._cw) - entity = self.cw_rset.get_entity(row, col) - eschema = self._cw.vreg.schema.eschema(entity.name) - layout = viewer.visit_entityschema(eschema) - self.w(uilib.ureport_as_html(layout)) - self.w(u'
              ') - - -class CWETypePermTab(SecurityViewMixIn, EntityView): - __regid__ = 'cwetype-permissions' - __select__ = is_instance('CWEType') & authenticated_user() - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - eschema = self._cw.vreg.schema.eschema(entity.name) - self.w(u'

              %s

              ' % self._cw._('This entity type permissions:')) - self.permissions_table(eschema) - self.w(u'
              ') - self.w(u'

              %s

              ' % self._cw._('Attributes permissions:')) - for attr, etype in eschema.attribute_definitions(): - if attr not in META_RTYPES: - rdef = eschema.rdef(attr) - attrtype = str(rdef.rtype) - self.w(u'

              %s (%s)

              ' - % (attrtype, self._cw._(attrtype))) - self.permissions_table(rdef) - self.w(u'
              ') - - -class CWETypeWorkflowTab(EntityView): - __regid__ = 'cwetype-workflow' - __select__ = (is_instance('CWEType') - & has_related_entities('workflow_of', 'object')) - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - if entity.default_workflow: - wf = entity.default_workflow[0] - if len(entity.reverse_workflow_of) > 1: - self.w(u'

              %s (%s)

              ' - % (wf.name, self._cw._('default_workflow'))) - self.display_workflow(wf) - defaultwfeid = wf.eid - else: - self.w(u'
              %s
              ' - % self._cw._('There is no default workflow')) - defaultwfeid = None - for altwf in entity.reverse_workflow_of: - if altwf.eid == defaultwfeid: - continue - self.w(u'

              %s

              ' % altwf.name) - self.display_workflow(altwf) - - def display_workflow(self, wf): - self.w(wf.view('wfgraph')) - self.w('%s' % ( - wf.absolute_url(), self._cw._('more info about this workflow'))) - - -class CWETypeViewsTab(EntityView): - """possible views for this entity type""" - __regid__ = 'cwetype-views' - __select__ = EntityView.__select__ & is_instance('CWEType') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - _ = self._cw._ - self.w('
              %s
              ' % _('Non exhaustive list of views that may ' - 'apply to entities of this type')) - views = [(view.content_type, view.__regid__, _(view.title)) - for view in self.possible_views(entity.name)] - self.wview('pyvaltable', pyvalue=sorted(views), - headers=(_(u'content type'), _(u'view identifier'), - _(u'view title'))) - - def possible_views(self, etype): - rset = self._cw.etype_rset(etype) - return [v for v in self._cw.vreg['views'].possible_views(self._cw, rset) - if v.category != 'startupview'] - - -class CWETypeOneLineView(baseviews.OneLineView): - __select__ = is_instance('CWEType') - - def cell_call(self, row, col, **kwargs): - entity = self.cw_rset.get_entity(row, col) - if entity.final: - self.w(u'') - super(CWETypeOneLineView, self).cell_call(row, col, **kwargs) - if entity.final: - self.w(u'') - - -# CWRType ###################################################################### - -class CWRTypePrimaryView(tabs.TabbedPrimaryView): - __select__ = is_instance('CWRType') - tabs = [_('cwrtype-description'), _('cwrtype-permissions')] - default_tab = 'cwrtype-description' - - -class CWRTypeDescriptionTab(tabs.PrimaryTab): - __regid__ = 'cwrtype-description' - __select__ = is_instance('CWRType') - - def render_entity_attributes(self, entity): - super(CWRTypeDescriptionTab, self).render_entity_attributes(entity) - _ = self._cw._ - if not entity.final: - self.wview('schemagraph', rtype=entity.name) - rset = self._cw.execute('Any R,C,R,R, RT WHERE ' - 'R relation_type RT, RT eid %(x)s, ' - 'R cardinality C', {'x': entity.eid}) - self.wview('table', rset, 'null', - headers=(_(u'relation'), _(u'cardinality'), _(u'constraints'), - _(u'options')), - cellvids={2: 'rdef-constraints-cell', - 3: 'rdef-options-cell'}) - - -class CWRTypePermTab(SecurityViewMixIn, EntityView): - __regid__ = 'cwrtype-permissions' - __select__ = is_instance('CWRType') & authenticated_user() - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - rschema = self._cw.vreg.schema.rschema(entity.name) - self.grouped_permissions_table(rschema) - - -# CWAttribute / CWRelation ##################################################### - -class RDEFPrimaryView(tabs.TabbedPrimaryView): - __select__ = is_instance('CWRelation', 'CWAttribute') - tabs = [_('rdef-description'), _('rdef-permissions')] - default_tab = 'rdef-description' - - -class RDEFDescriptionTab(tabs.PrimaryTab): - __regid__ = 'rdef-description' - __select__ = is_instance('CWRelation', 'CWAttribute') - - def render_entity_attributes(self, entity): - super(RDEFDescriptionTab, self).render_entity_attributes(entity) - rdef = entity.yams_schema() - if rdef.constraints: - self.w(u'

              %s

              ' % self._cw._('constrained_by')) - self.w(entity.view('rdef-constraints-cell')) - - -class RDEFPermTab(SecurityViewMixIn, EntityView): - __regid__ = 'rdef-permissions' - __select__ = is_instance('CWRelation', 'CWAttribute') & authenticated_user() - - def cell_call(self, row, col): - self.permissions_table(self.cw_rset.get_entity(row, col).yams_schema()) - - -class RDEFNameView(tableview.CellView): - """display relation name and its translation only in a cell view, link to - relation definition's primary view (for use in entity type relations table - for instance) - """ - __regid__ = 'rdef-name-cell' - __select__ = is_instance('CWRelation', 'CWAttribute') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - rtype = entity.relation_type[0].name - # XXX use context entity + pgettext - self.w(u'%s (%s)' % ( - entity.absolute_url(), rtype, self._cw._(rtype))) - -class RDEFObjectNameView(tableview.CellView): - """same as RDEFNameView but when the context is the object entity - """ - __regid__ = 'rdef-object-name-cell' - __select__ = is_instance('CWRelation', 'CWAttribute') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - rtype = entity.relation_type[0].name - # XXX use context entity + pgettext - self.w(u'%s (%s)' % ( - entity.absolute_url(), rtype, self._cw.__(rtype + '_object'))) - -class RDEFConstraintsCell(EntityView): - __regid__ = 'rdef-constraints-cell' - __select__ = is_instance('CWAttribute', 'CWRelation') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - rschema = self._cw.vreg.schema.rschema(entity.rtype.name) - rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)] - constraints = [xml_escape(text_type(c)) for c in getattr(rdef, 'constraints')] - self.w(u'
              '.join(constraints)) - -class CWAttributeOptionsCell(EntityView): - __regid__ = 'rdef-options-cell' - __select__ = is_instance('CWAttribute') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - options = [] - if entity.indexed: - options.append(self._cw._('indexed')) - if entity.fulltextindexed: - options.append(self._cw._('fulltextindexed')) - if entity.internationalizable: - options.append(self._cw._('internationalizable')) - self.w(u','.join(options)) - -class CWRelationOptionsCell(EntityView): - __regid__ = 'rdef-options-cell' - __select__ = is_instance('CWRelation',) - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - rtype = entity.rtype - options = [] - if rtype.symmetric: - options.append(self._cw._('symmetric')) - if rtype.inlined: - options.append(self._cw._('inlined')) - if rtype.fulltext_container: - options.append('%s=%s' % (self._cw._('fulltext_container'), - self._cw._(rtype.fulltext_container))) - if entity.composite: - options.append('%s=%s' % (self._cw._('composite'), - self._cw._(entity.composite))) - self.w(u','.join(options)) - - -# schema images ############################################################### - -class RestrictedSchemaVisitorMixIn(object): - def __init__(self, req, *args, **kwargs): - self._cw = req - super(RestrictedSchemaVisitorMixIn, self).__init__(*args, **kwargs) - - def should_display_schema(self, rschema): - return (super(RestrictedSchemaVisitorMixIn, self).should_display_schema(rschema) - and rschema.may_have_permission('read', self._cw)) - - def should_display_attr(self, eschema, rschema): - return (super(RestrictedSchemaVisitorMixIn, self).should_display_attr(eschema, rschema) - and eschema.rdef(rschema).may_have_permission('read', self._cw)) - - -class FullSchemaVisitor(RestrictedSchemaVisitorMixIn, s2d.FullSchemaVisitor): - pass - -class OneHopESchemaVisitor(RestrictedSchemaVisitorMixIn, - s2d.OneHopESchemaVisitor): - pass - -class OneHopRSchemaVisitor(RestrictedSchemaVisitorMixIn, - s2d.OneHopRSchemaVisitor): - pass - -class CWSchemaDotPropsHandler(s2d.SchemaDotPropsHandler): - def __init__(self, visitor, cw): - self.visitor = visitor - self.cw = cw - self._cycle = iter(cycle(('#ff7700', '#000000', '#ebbc69', '#888888'))) - self.nextcolor = lambda: next(self._cycle) - - self.colors = {} - - def node_properties(self, eschema): - """return DOT drawing options for an entity schema include href""" - label = ['{',eschema.type,'|'] - label.append(r'\l'.join('%s (%s)' % (rel.type, eschema.rdef(rel.type).object) - for rel in eschema.ordered_relations() - if rel.final and self.visitor.should_display_attr(eschema, rel))) - label.append(r'\l}') # trailing \l ensure alignement of the last one - return {'label' : ''.join(label), 'shape' : "record", - 'fontname' : "Courier", 'style' : "filled", - 'href': self.cw.build_url('cwetype/%s' % eschema.type), - 'fontsize': '10px' - } - - def edge_properties(self, rschema, subjnode, objnode): - """return default DOT drawing options for a relation schema""" - # Inheritance relation (i.e 'specializes'). 
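For reference, CWSchemaDotPropsHandler.node_properties above renders each entity type as a graphviz "record" node whose label lists the final attributes separated by \l (left-aligned line breaks). A small sketch of that label format; the entity type and attribute list are invented for illustration.

    # Sketch of the DOT 'record' label built by node_properties above; the entity
    # type and attributes are invented for illustration.
    attrs = [('title', 'String'), ('content', 'String'), ('creation_date', 'TZDatetime')]
    label = '{BlogEntry|' + r'\l'.join('%s (%s)' % attr for attr in attrs) + r'\l}'
    # -> '{BlogEntry|title (String)\lcontent (String)\lcreation_date (TZDatetime)\l}'
    node_props = {'label': label, 'shape': 'record', 'fontname': 'Courier', 'fontsize': '10px'}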
- if rschema is None: - kwargs = {'label': 'Parent class', - 'color' : 'grey', 'style' : 'filled', - 'arrowhead': 'empty', - 'fontsize': '10px'} - # symmetric rels are handled differently, let yams decide what's best - elif rschema.symmetric: - kwargs = {'label': rschema.type, - 'color': '#887788', 'style': 'dashed', - 'dir': 'both', 'arrowhead': 'normal', 'arrowtail': 'normal', - 'fontsize': '10px', - 'href': self.cw.build_url('cwrtype/%s' % rschema.type)} - else: - kwargs = {'label': rschema.type, - 'color' : 'black', 'style' : 'filled', 'fontsize': '10px', - 'href': self.cw.build_url('cwrtype/%s' % rschema.type)} - rdef = rschema.rdef(subjnode, objnode) - composite = rdef.composite - if rdef.composite == 'subject': - kwargs['arrowhead'] = 'none' - kwargs['arrowtail'] = 'diamond' - elif rdef.composite == 'object': - kwargs['arrowhead'] = 'diamond' - kwargs['arrowtail'] = 'none' - else: - kwargs['arrowhead'] = 'open' - kwargs['arrowtail'] = 'none' - # UML like cardinalities notation, omitting 1..1 - if rdef.cardinality[1] != '1': - kwargs['taillabel'] = s2d.CARD_MAP[rdef.cardinality[1]] - if rdef.cardinality[0] != '1': - kwargs['headlabel'] = s2d.CARD_MAP[rdef.cardinality[0]] - try: - kwargs['color'] = self.colors[rschema] - except KeyError: - kwargs['color'] = self.nextcolor() - self.colors[rschema] = kwargs['color'] - kwargs['fontcolor'] = kwargs['color'] - # dot label decoration is just awful (1 line underlining the label - # + 1 line going to the closest edge spline point) - kwargs['decorate'] = 'false' - #kwargs['labelfloat'] = 'true' - return kwargs - - -class SchemaGraphView(StartupView): - __regid__ = 'schemagraph' - - def call(self, etype=None, rtype=None, alt=''): - if 'MSIE 8' in self._cw.useragent(): - return - schema = self._cw.vreg.schema - if etype: - assert rtype is None - visitor = OneHopESchemaVisitor(self._cw, schema.eschema(etype), - skiptypes=skip_types(self._cw)) - alt = self._cw._('graphical representation of the %(etype)s ' - 'entity type from %(appid)s data model') - elif rtype: - visitor = OneHopRSchemaVisitor(self._cw, schema.rschema(rtype), - skiptypes=skip_types(self._cw)) - alt = self._cw._('graphical representation of the %(rtype)s ' - 'relation type from %(appid)s data model') - else: - visitor = FullSchemaVisitor(self._cw, schema, - skiptypes=skip_types(self._cw)) - alt = self._cw._('graphical representation of %(appid)s data model') - alt %= {'rtype': rtype, 'etype': etype, - 'appid': self._cw.vreg.config.appid} - prophdlr = CWSchemaDotPropsHandler(visitor, self._cw) - generator = GraphGenerator(DotBackend('schema', 'BT', - ratio='compress',size=None, - renderer='dot', - additionnal_param={ - 'overlap':'false', - 'splines':'true', - 'sep':'0.2', - })) - # svg image file - fd, tmpfile = tempfile.mkstemp('.svg') - try: - os.close(fd) - generator.generate(visitor, prophdlr, tmpfile) - with codecs.open(tmpfile, 'rb', encoding='utf-8') as svgfile: - self.w(svgfile.read()) - finally: - os.unlink(tmpfile) - -# breadcrumbs ################################################################## - -class CWRelationIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('CWRelation') - def parent_entity(self): - return self.entity.rtype - -class CWAttributeIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('CWAttribute') - def parent_entity(self): - return self.entity.stype - -class CWConstraintIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('CWConstraint') - def 
parent_entity(self): - if self.entity.reverse_constrained_by: - return self.entity.reverse_constrained_by[0] - -class RQLExpressionIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('RQLExpression') - def parent_entity(self): - return self.entity.expression_of - - -# misc: facets, actions ######################################################## - -class CWFinalFacet(facet.AttributeFacet): - __regid__ = 'cwfinal-facet' - __select__ = facet.AttributeFacet.__select__ & is_instance('CWEType', 'CWRType') - rtype = 'final' - - -class ViewSchemaAction(action.Action): - __regid__ = 'schema' - __select__ = yes() - - title = _('data model schema') - order = 30 - category = 'manage' - - def url(self): - return self._cw.build_url(self.__regid__) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/searchrestriction.py --- a/web/views/searchrestriction.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,27 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""contains utility functions and some visual component to restrict results of -a search - -""" -__docformat__ = "restructuredtext en" - -from logilab.common.deprecation import moved - -insert_attr_select_relation = moved('cubicweb.web.facet', - 'insert_attr_select_relation') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/sessions.py --- a/web/views/sessions.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,180 +0,0 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
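For reference, the sessions.py hunk below derives its cleanup schedule from two configuration options, 'http-session-time' and 'cleanup-anonymous-session-time' (defaulting to five minutes), and never scans sessions more than once every five minutes. A small sketch of that arithmetic; the configuration values are invented for illustration.

    # Sketch of the cleanup-interval computation in AbstractSessionManager.__init__ below;
    # the configuration values are invented for illustration.
    session_time = 3600                                 # 'http-session-time', in seconds
    cleanup_anon_session_time = min(session_time, 300)  # 'cleanup-anonymous-session-time'
    interval = cleanup_anon_session_time / 2.
    clean_sessions_interval = max(5 * 60, interval)     # check at most once every 5 minutes
    assert clean_sessions_interval == 300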
-"""web session: by default the session is actually the db connection """ -__docformat__ = "restructuredtext en" - -from time import time -from logging import getLogger - -from logilab.common.registry import RegistrableObject, yes - -from cubicweb import RepositoryError, Unauthorized, set_log_methods -from cubicweb.web import InvalidSession - -from cubicweb.web.views import authentication - - -class AbstractSessionManager(RegistrableObject): - """manage session data associated to a session identifier""" - __abstract__ = True - __select__ = yes() - __registry__ = 'sessions' - __regid__ = 'sessionmanager' - - def __init__(self, repo): - vreg = repo.vreg - self.session_time = vreg.config['http-session-time'] or None - self.authmanager = authentication.RepositoryAuthenticationManager(repo) - interval = (self.session_time or 0) / 2. - if vreg.config.anonymous_user()[0] is not None: - self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60 - assert self.cleanup_anon_session_time > 0 - if self.session_time is not None: - self.cleanup_anon_session_time = min(self.session_time, - self.cleanup_anon_session_time) - interval = self.cleanup_anon_session_time / 2. - # we don't want to check session more than once every 5 minutes - self.clean_sessions_interval = max(5 * 60, interval) - - def clean_sessions(self): - """cleanup sessions which has not been unused since a given amount of - time. Return the number of sessions which have been closed. - """ - self.debug('cleaning http sessions') - session_time = self.session_time - closed, total = 0, 0 - for session in self.current_sessions(): - total += 1 - last_usage_time = session.mtime - no_use_time = (time() - last_usage_time) - if session.anonymous_session: - if no_use_time >= self.cleanup_anon_session_time: - self.close_session(session) - closed += 1 - elif session_time is not None and no_use_time >= session_time: - self.close_session(session) - closed += 1 - return closed, total - closed - - def current_sessions(self): - """return currently open sessions""" - raise NotImplementedError() - - def get_session(self, req, sessionid): - """return existing session for the given session identifier""" - raise NotImplementedError() - - def open_session(self, req): - """open and return a new session for the given request. - - raise :exc:`cubicweb.AuthenticationError` if authentication failed - (no authentication info found or wrong user/password) - """ - raise NotImplementedError() - - def close_session(self, session): - """close session on logout or on invalid session detected (expired out, - corrupted...) 
- """ - raise NotImplementedError() - - -set_log_methods(AbstractSessionManager, getLogger('cubicweb.sessionmanager')) - - -class InMemoryRepositorySessionManager(AbstractSessionManager): - """manage session data associated to a session identifier""" - - def __init__(self, *args, **kwargs): - super(InMemoryRepositorySessionManager, self).__init__(*args, **kwargs) - # XXX require a RepositoryAuthenticationManager which violates - # authenticate interface by returning a session instead of a user - #assert isinstance(self.authmanager, RepositoryAuthenticationManager) - self._sessions = {} - - # dump_data / restore_data to avoid loosing open sessions on registry - # reloading - def dump_data(self): - return self._sessions - def restore_data(self, data): - self._sessions = data - - def current_sessions(self): - return self._sessions.values() - - def get_session(self, req, sessionid): - """return existing session for the given session identifier""" - if sessionid not in self._sessions: - raise InvalidSession() - session = self._sessions[sessionid] - try: - user = self.authmanager.validate_session(req, session) - except InvalidSession: - self.close_session(session) - raise - if session.closed: - self.close_session(session) - raise InvalidSession() - return session - - def open_session(self, req): - """open and return a new session for the given request. The session is - also bound to the request. - - raise :exc:`cubicweb.AuthenticationError` if authentication failed - (no authentication info found or wrong user/password) - """ - session, login = self.authmanager.authenticate(req) - self._sessions[session.sessionid] = session - session.mtime = time() - return session - - def postlogin(self, req, session): - """postlogin: the user have been related to a session - - Both req and session are passed to this function because actually - linking the request to the session is not yet done and not the - responsability of this object. - """ - # Update last connection date - # XXX: this should be in a post login hook in the repository, but there - # we can't differentiate actual login of automatic session - # reopening. Is it actually a problem? - if 'last_login_time' in req.vreg.schema: - self._update_last_login_time(session) - req.set_message(req._('welcome %s!') % session.user.login) - - def _update_last_login_time(self, session): - # XXX should properly detect missing permission / non writeable source - # and avoid "except (RepositoryError, Unauthorized)" below - try: - with session.new_cnx() as cnx: - cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', - {'x' : session.user.eid}) - cnx.commit() - except (RepositoryError, Unauthorized): - pass - - def close_session(self, session): - """close session on logout or on invalid session detected (expired out, - corrupted...) - """ - self.info('closing http session %s' % session.sessionid) - self._sessions.pop(session.sessionid, None) - if not session.closed: - session.repo.close(session.sessionid) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/sparql.py --- a/web/views/sparql.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,145 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""SPARQL integration""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves import range - -from yams import xy -from rql import TypeResolverException - -from lxml import etree -from lxml.builder import E - -from cubicweb.view import StartupView, AnyRsetView -from cubicweb.web import Redirect, form, formfields, formwidgets as fwdgs -from cubicweb.web.views import forms -try: - from cubicweb.spa2rql import Sparql2rqlTranslator, UnsupportedQuery -except ImportError: - # fyzz not available (only a recommends) - Sparql2rqlTranslator = None - -class SparqlForm(forms.FieldsForm): - __regid__ = 'sparql' - sparql = formfields.StringField(help=_('type here a sparql query')) - resultvid = formfields.StringField(choices=((_('table'), 'table'), - (_('sparql xml'), 'sparqlxml')), - widget=fwdgs.Radio, - value='table') - form_buttons = [fwdgs.SubmitButton()] - @property - def action(self): - return self._cw.url() - - -class SparqlFormView(form.FormViewMixIn, StartupView): - __regid__ = 'sparql' - def call(self): - form = self._cw.vreg['forms'].select('sparql', self._cw) - form.render(w=self.w) - sparql = self._cw.form.get('sparql') - vid = self._cw.form.get('resultvid', 'table') - if sparql: - try: - qinfo = Sparql2rqlTranslator(self._cw.vreg.schema).translate(sparql) - except TypeResolverException as exc: - self.w(self._cw._('can not resolve entity types:') + u' ' + unicode(exc)) - except UnsupportedQuery: - self.w(self._cw._('we are not yet ready to handle this query')) - except xy.UnsupportedVocabulary as exc: - self.w(self._cw._('unknown vocabulary:') + u' ' + unicode(exc)) - else: - rql, args = qinfo.finalize() - if vid == 'sparqlxml': - url = self._cw.build_url('view', rql=rql % args, vid=vid) - raise Redirect(url) - rset = self._cw.execute(rql, args) - self.wview(vid, rset, 'null') - - -## sparql resultset views ##################################################### - -YAMS_XMLSCHEMA_MAPPING = { - 'String': 'string', - - 'Boolean': 'boolean', - 'Int': 'integer', - 'BigInt': 'integer', - 'Float': 'float', - - 'Datetime': 'dateTime', - 'TZDatetime': 'dateTime', - 'Date': 'date', - 'Time': 'time', - 'TZTime': 'time', - - # XXX the following types don't have direct mapping - 'Decimal': 'string', - 'Interval': 'duration', - 'Bytes': 'base64Binary', - 'Password': 'string', - } - -def xmlschema(yamstype): - return 'http://www.w3.org/2001/XMLSchema#%s' % YAMS_XMLSCHEMA_MAPPING[yamstype] - -class SparqlResultXmlView(AnyRsetView): - """The spec can be found here: http://www.w3.org/TR/rdf-sparql-XMLres/ - """ - __regid__ = 'sparqlxml' - content_type = 'application/sparql-results+xml' - templatable = False - - def call(self): - # XXX handle UNION - rqlst = self.cw_rset.syntax_tree().children[0] - varnames = [var.name for var in rqlst.selection] - results = E.results() - for rowidx in range(len(self.cw_rset)): - result = E.result() - for colidx, varname in 
enumerate(varnames): - result.append(self.cell_binding(rowidx, colidx, varname)) - results.append(result) - sparql = E.sparql(E.head(*(E.variable(name=name) for name in varnames)), - results) - self.w(u'\n') - self.w(etree.tostring(sparql, encoding=unicode, pretty_print=True)) - - def cell_binding(self, row, col, varname): - celltype = self.cw_rset.description[row][col] - if self._cw.vreg.schema.eschema(celltype).final: - cellcontent = self._cw.view('cell', self.cw_rset, row=row, col=col) - return E.binding(E.literal(cellcontent, - datatype=xmlschema(celltype)), - name=varname) - else: - entity = self.cw_rset.get_entity(row, col) - return E.binding(E.uri(entity.absolute_url()), name=varname) - - def set_request_content_type(self): - """overriden to set the correct filetype and filename""" - self._cw.set_content_type(self.content_type, - filename='sparql.xml', - encoding=self._cw.encoding) - -def registration_callback(vreg): - if Sparql2rqlTranslator is not None: - vreg.register_all(globals().values(), __name__) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/startup.py --- a/web/views/startup.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,174 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""This module contains the default index page and management view. - -.. autoclass:: IndexView -.. autoclass:: ManageView -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from logilab.common.textutils import unormalize -from logilab.common.deprecation import deprecated -from logilab.mtconverter import xml_escape - -from cubicweb.view import StartupView -from cubicweb.predicates import match_user_groups, is_instance -from cubicweb.schema import display_name -from cubicweb.web import httpcache -from cubicweb.web.views import uicfg - -class ManageView(StartupView): - """:__regid__: *manage* - - The manage view, display some information about what's contained by your - site and provides access to administration stuff such as user and groups - management. - - Regarding the section displaying link to entity type, notice by default it - won't display entity types which are related to another one using a - mandatory (cardinality == 1) composite relation. - - You can still configure that behaviour manually using the - `indexview_etype_section` as explained in :mod:`cubicweb.web.uicfg`. 
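For reference, the sparqlxml view deleted above types each literal binding by translating the yams attribute type into an XML Schema datatype through YAMS_XMLSCHEMA_MAPPING. A trimmed-down sketch of that lookup, reproducing only a few entries of the mapping.

    # Sketch of the yams -> XML Schema datatype lookup used by SparqlResultXmlView above;
    # only a handful of the mapping entries are reproduced here.
    YAMS_XMLSCHEMA_MAPPING = {'String': 'string', 'Int': 'integer', 'Float': 'float',
                              'Date': 'date', 'Datetime': 'dateTime'}

    def xmlschema(yamstype):
        return 'http://www.w3.org/2001/XMLSchema#%s' % YAMS_XMLSCHEMA_MAPPING[yamstype]

    assert xmlschema('Int') == 'http://www.w3.org/2001/XMLSchema#integer'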
- """ - __regid__ = 'manage' - title = _('manage') - http_cache_manager = httpcache.EtagHTTPCacheManager - add_etype_links = () - skip_startup_views = set( ('index', 'manage', 'schema', 'owl', - 'systempropertiesform', 'propertiesform', - 'loggedout', 'login', - 'cw.users-and-groups-management', 'cw.groups-management', - 'cw.users-management', 'cw.sources-management', - 'siteinfo', 'info', 'registry', 'gc', - 'tree') ) - - def call(self, **kwargs): - """The default view representing the instance's management""" - self._cw.add_css('cubicweb.manageview.css') - self.w(u'

              %s

              ' % self._cw.property_value('ui.site-title')) - self.entities() - self.manage_actions() - self.startup_views() - - def manage_actions(self): - allactions = self._cw.vreg['actions'].possible_actions(self._cw) - if allactions.get('manage'): - self.w(u'
               
              ') - self.w(u'

              %s

              \n' % self._cw._('Manage')) - self.w(u'
                ') - for action in allactions['manage']: - self.w(u'
              • %s
              • ' % ( - action.url(), self._cw._(action.title))) - self.w(u'
              ') - - def startup_views(self): - views = [v for v in self._cw.vreg['views'].possible_views(self._cw, None) - if v.category == 'startupview' - and v.__regid__ not in self.skip_startup_views] - if not views: - return - self.w(u'
               
              ') - self.w(u'

              %s

              \n' % self._cw._('Startup views')) - self.w(u'
                ') - for v in sorted(views, key=lambda x: self._cw._(x.title)): - self.w('
              • %s
              • ' % ( - xml_escape(v.url()), xml_escape(self._cw._(v.title).capitalize()))) - self.w(u'
              ') - - def entities(self): - schema = self._cw.vreg.schema - eschemas = [eschema for eschema in schema.entities() - if uicfg.indexview_etype_section.get(eschema) == 'application'] - if eschemas: - self.w(u'
               
              ') - self.w(u'

              %s

              \n' % self._cw._('Browse by entity type')) - self.w(u'') - self.entity_types_table(eschemas) - self.w(u'
              ') - - def entity_types_table(self, eschemas): - infos = sorted(self.entity_types(eschemas), - key=lambda t: unormalize(t[0])) - q, r = divmod(len(infos), 2) - if r: - infos.append( (None, ' ', ' ') ) - infos = zip(infos[:q+r], infos[q+r:]) - for (_, etypelink, addlink), (_, etypelink2, addlink2) in infos: - self.w(u'\n') - self.w(u'%s%s\n' % (addlink, etypelink)) - self.w(u'%s%s\n' % (addlink2, etypelink2)) - self.w(u'\n') - - def entity_types(self, eschemas): - """return an iterator on formatted links to get a list of entities of - each entity types - """ - req = self._cw - for eschema in eschemas: - if eschema.final or not eschema.may_have_permission('read', req): - continue - etype = eschema.type - nb = req.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] - if nb > 1: - label = display_name(req, etype, 'plural') - else: - label = display_name(req, etype) - nb = req.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] - url = self._cw.build_url(etype) - etypelink = u' %s (%d)' % ( - xml_escape(url), label, nb) - if eschema.has_perm(req, 'add'): - yield (label, etypelink, self.add_entity_link(etype)) - else: - yield (label, etypelink, u'') - - def create_links(self): - self.w(u'') - - def add_entity_link(self, etype): - """creates a [+] link for adding an entity""" - url = self._cw.vreg["etypes"].etype_class(etype).cw_create_url(self._cw) - return u'[+]' % ( - xml_escape(url), self._cw.__('New %s' % etype)) - - - -class IndexView(ManageView): - """:__regid__: *index* - - The default index view, that you'll get when accessing your site's root url. - It's by default indentical to the - :class:`~cubicweb.web.views.startup.ManageView`, but you'll usually want to - customize this one. - """ - __regid__ = 'index' - title = _('view_index') - - @deprecated('[3.11] display_folders method is deprecated, backport it if needed') - def display_folders(self): - return 'Folder' in self._cw.vreg.schema and self._cw.execute('Any COUNT(X) WHERE X is Folder')[0][0] diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/staticcontrollers.py --- a/web/views/staticcontrollers.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,272 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Set of static resources controllers for : - -- /data/... -- /static/... -- /fckeditor/... 
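For reference, ManageView.entity_types_table above lays the entity-type links out in two columns by splitting the sorted list in half and padding it when the count is odd. A small sketch of that split; the labels are invented for illustration.

    # Sketch of the two-column split done by ManageView.entity_types_table above;
    # the labels are invented for illustration.
    infos = ['Blog', 'BlogEntry', 'Comment', 'Tag', 'File']
    q, r = divmod(len(infos), 2)
    if r:
        infos.append(None)                      # pad the shorter column
    rows = list(zip(infos[:q + r], infos[q + r:]))
    # -> [('Blog', 'Tag'), ('BlogEntry', 'File'), ('Comment', None)]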
-""" - -import os -import os.path as osp -import hashlib -import mimetypes -import threading -import tempfile -from time import mktime -from datetime import datetime, timedelta -from logging import getLogger - -from cubicweb import Forbidden -from cubicweb.web import NotFound, Redirect -from cubicweb.web.http_headers import generateDateTime -from cubicweb.web.controller import Controller -from cubicweb.web.views.urlrewrite import URLRewriter - - - -class StaticFileController(Controller): - """an abtract class to serve static file - - Make sure to add your subclass to the STATIC_CONTROLLERS list""" - __abstract__ = True - directory_listing_allowed = False - - def max_age(self, path): - """max cache TTL""" - return 60*60*24*7 - - def static_file(self, path): - """Return full content of a static file. - - XXX iterable content would be better - """ - debugmode = self._cw.vreg.config.debugmode - if osp.isdir(path): - if self.directory_listing_allowed: - return u'' - raise Forbidden(path) - if not osp.isfile(path): - raise NotFound() - if not debugmode: - # XXX: Don't provide additional resource information to error responses - # - # the HTTP RFC recommends not going further than 1 year ahead - expires = datetime.now() + timedelta(seconds=self.max_age(path)) - self._cw.set_header('Expires', generateDateTime(mktime(expires.timetuple()))) - self._cw.set_header('Cache-Control', 'max-age=%s' % self.max_age(path)) - - # XXX system call to os.stats could be cached once and for all in - # production mode (where static files are not expected to change) - # - # Note that: we do a osp.isdir + osp.isfile before and a potential - # os.read after. Improving this specific call will not help - # - # Real production environment should use dedicated static file serving. - self._cw.set_header('last-modified', generateDateTime(os.stat(path).st_mtime)) - if self._cw.is_client_cache_valid(): - return '' - # XXX elif uri.startswith('/https/'): uri = uri[6:] - mimetype, encoding = mimetypes.guess_type(path) - if mimetype is None: - mimetype = 'application/octet-stream' - self._cw.set_content_type(mimetype, osp.basename(path), encoding) - with open(path, 'rb') as resource: - return resource.read() - - @property - def relpath(self): - """path of a requested file relative to the controller""" - path = self._cw.form.get('static_relative_path') - if path is None: - path = self._cw.relative_path(includeparams=True) - return path - - -class ConcatFilesHandler(object): - """Emulating the behavior of modconcat - - this serve multiple file as a single one. - """ - - def __init__(self, config): - self._resources = {} - self.config = config - self.logger = getLogger('cubicweb.web') - self.lock = threading.Lock() - - def _resource(self, path): - """get the resouce""" - try: - return self._resources[path] - except KeyError: - self._resources[path] = self.config.locate_resource(path) - return self._resources[path] - - def _up_to_date(self, filepath, paths): - """ - The concat-file is considered up-to-date if it exists. 
- In debug mode, an additional check is performed to make sure that - concat-file is more recent than all concatenated files - """ - if not osp.isfile(filepath): - return False - if self.config.debugmode: - concat_lastmod = os.stat(filepath).st_mtime - for path in paths: - dirpath, rid = self._resource(path) - if rid is None: - raise NotFound(path) - path = osp.join(dirpath, rid) - if os.stat(path).st_mtime > concat_lastmod: - return False - return True - - def build_filepath(self, paths): - """return the filepath that will be used to cache concatenation of `paths` - """ - _, ext = osp.splitext(paths[0]) - fname = 'cache_concat_' + hashlib.md5((';'.join(paths)).encode('ascii')).hexdigest() + ext - return osp.join(self.config.appdatahome, 'uicache', fname) - - def concat_cached_filepath(self, paths): - filepath = self.build_filepath(paths) - if not self._up_to_date(filepath, paths): - with self.lock: - if self._up_to_date(filepath, paths): - # first check could have raced with some other thread - # updating the file - return filepath - fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(filepath)) - try: - f = os.fdopen(fd, 'wb') - for path in paths: - dirpath, rid = self._resource(path) - if rid is None: - # In production mode log an error, do not return a 404 - # XXX the erroneous content is cached anyway - self.logger.error('concatenated data url error: %r file ' - 'does not exist', path) - if self.config.debugmode: - raise NotFound(path) - else: - with open(osp.join(dirpath, rid), 'rb') as source: - for line in source: - f.write(line) - f.write(b'\n') - f.close() - except: - os.remove(tmpfile) - raise - else: - os.rename(tmpfile, filepath) - return filepath - - -class DataController(StaticFileController): - """Controller in charge of serving static files in /data/ - - Handles mod_concat-like URLs. - """ - - __regid__ = 'data' - - def __init__(self, *args, **kwargs): - super(DataController, self).__init__(*args, **kwargs) - config = self._cw.vreg.config - self.base_datapath = config.data_relpath() - self.data_modconcat_basepath = '%s??' % self.base_datapath - self.concat_files_registry = ConcatFilesHandler(config) - - def publish(self, rset=None): - config = self._cw.vreg.config - # includeparams=True for modconcat-like urls - relpath = self.relpath - if relpath.startswith(self.data_modconcat_basepath): - paths = relpath[len(self.data_modconcat_basepath):].split(',') - filepath = self.concat_files_registry.concat_cached_filepath(paths) - else: - if not relpath.startswith(self.base_datapath): - # /data/foo, redirect to /data/{hash}/foo - prefix = 'data/' - relpath = relpath[len(prefix):] - raise Redirect(self._cw.data_url(relpath), 302) - # skip leading '/data/{hash}/' and url params - prefix = self.base_datapath - relpath = relpath[len(prefix):] - relpath = relpath.split('?', 1)[0] - dirpath, rid = config.locate_resource(relpath) - if dirpath is None: - raise NotFound() - filepath = osp.join(dirpath, rid) - return self.static_file(filepath) - - -class FCKEditorController(StaticFileController): - """Controller in charge of serving FCKEditor related file - - The motivational for a dedicated controller have been lost. 
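# Illustrative sketch, not part of this patch: how ConcatFilesHandler maps a
# mod_concat-like URL such as /data/<hash>/??cubicweb.js,jquery.js to a single
# cache file, mirroring build_filepath() above.  `appdatahome` is a
# hypothetical instance directory used only for the example.
import hashlib
import os.path as osp

def concat_cache_path(appdatahome, paths):
    # keep the extension of the first requested file ('.js' or '.css')
    _, ext = osp.splitext(paths[0])
    fname = 'cache_concat_' + hashlib.md5((';'.join(paths)).encode('ascii')).hexdigest() + ext
    return osp.join(appdatahome, 'uicache', fname)

# concat_cache_path('/var/lib/myapp', ['cubicweb.js', 'jquery.js'])
# -> '/var/lib/myapp/uicache/cache_concat_<md5 of "cubicweb.js;jquery.js">.js'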
- """ - - __regid__ = 'fckeditor' - - def publish(self, rset=None): - config = self._cw.vreg.config - if self._cw.https: - uiprops = config.https_uiprops - else: - uiprops = config.uiprops - relpath = self.relpath - if relpath.startswith('fckeditor/'): - relpath = relpath[len('fckeditor/'):] - relpath = relpath.split('?', 1)[0] - return self.static_file(osp.join(uiprops['FCKEDITOR_PATH'], relpath)) - - -class StaticDirectoryController(StaticFileController): - """Controller in charge of serving static file in /static/ - """ - __regid__ = 'static' - - def publish(self, rset=None): - staticdir = self._cw.vreg.config.static_directory - relpath = self.relpath[len(self.__regid__) + 1:] - return self.static_file(osp.join(staticdir, relpath)) - -STATIC_CONTROLLERS = [DataController, FCKEditorController, - StaticDirectoryController] - -class StaticControlerRewriter(URLRewriter): - """a quick and dirty rewritter in charge of server static file. - - This is a work around the flatness of url handling in cubicweb.""" - - __regid__ = 'static' - - priority = 10 - - def rewrite(self, req, uri): - for ctrl in STATIC_CONTROLLERS: - if uri.startswith('/%s/' % ctrl.__regid__): - break - else: - self.debug("not a static file uri: %s", uri) - raise KeyError(uri) - relpath = self._cw.relative_path(includeparams=False) - self._cw.form['static_relative_path'] = self._cw.relative_path(includeparams=True) - return ctrl.__regid__, None diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/tableview.py --- a/web/views/tableview.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1338 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""This module contains table views, with the following features that may be -provided (depending on the used implementation): - -* facets filtering -* pagination -* actions menu -* properly sortable content -* odd/row/hover line styles - -The three main implementation are described below. Each implementation is -suitable for a particular case, but they each attempt to display tables that -looks similar. - -.. autoclass:: cubicweb.web.views.tableview.RsetTableView - :members: - -.. autoclass:: cubicweb.web.views.tableview.EntityTableView - :members: - -.. autoclass:: cubicweb.web.views.pyviews.PyValTableView - :members: - -All those classes are rendered using a *layout*: - -.. autoclass:: cubicweb.web.views.tableview.TableLayout - :members: - -There is by default only one table layout, using the 'table_layout' identifier, -that is referenced by table views -:attr:`cubicweb.web.views.tableview.TableMixIn.layout_id`. 
If you want to -customize the look and feel of your table, you can either replace the default -one by yours, having multiple variants with proper selectors, or change the -`layout_id` identifier of your table to use your table specific implementation. - -Notice you can gives options to the layout using a `layout_args` dictionary on -your class. - -If you still can't find a view that suit your needs, you should take a look at the -class below that is the common abstract base class for the three views defined -above and implement your own class. - -.. autoclass:: cubicweb.web.views.tableview.TableMixIn - :members: -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn -from copy import copy -from types import MethodType - -from six import string_types, add_metaclass, create_bound_method -from six.moves import range - -from logilab.mtconverter import xml_escape -from logilab.common.decorators import cachedproperty -from logilab.common.deprecation import class_deprecated -from logilab.common.registry import yes - -from cubicweb import NoSelectableObject, tags -from cubicweb.predicates import nonempty_rset, match_kwargs, objectify_predicate -from cubicweb.schema import display_name -from cubicweb.utils import make_uid, js_dumps, JSString, UStringIO -from cubicweb.uilib import toggle_action, limitsize, htmlescape, sgml_attributes, domid -from cubicweb.view import EntityView, AnyRsetView -from cubicweb.web import jsonize, component -from cubicweb.web.htmlwidgets import (TableWidget, TableColumn, MenuWidget, - PopupBoxMenu) - - -@objectify_predicate -def unreloadable_table(cls, req, rset=None, - displaycols=None, headers=None, cellvids=None, - paginate=False, displayactions=False, displayfilter=False, - **kwargs): - # one may wish to specify one of headers/displaycols/cellvids as long as he - # doesn't want pagination nor actions nor facets - if not kwargs and (displaycols or headers or cellvids) and not ( - displayfilter or displayactions or paginate): - return 1 - return 0 - - -class TableLayout(component.Component): - """The default layout for table. When `render` is called, this will use - the API described on :class:`TableMixIn` to feed the generated table. - - This layout behaviour may be customized using the following attributes / - selection arguments: - - * `cssclass`, a string that should be used as HTML class attribute. Default - to "listing". - - * `needs_css`, the CSS files that should be used together with this - table. Default to ('cubicweb.tablesorter.css', 'cubicweb.tableview.css'). - - * `needs_js`, the Javascript files that should be used together with this - table. Default to ('jquery.tablesorter.js',) - - * `display_filter`, tells if the facets filter should be displayed when - possible. Allowed values are: - - `None`, don't display it - - 'top', display it above the table - - 'bottom', display it below the table - - * `display_actions`, tells if a menu for available actions should be - displayed when possible (see two following options). Allowed values are: - - `None`, don't display it - - 'top', display it above the table - - 'bottom', display it below the table - - * `hide_filter`, when true (the default), facets filter will be hidden by - default, with an action in the actions menu allowing to show / hide it. - - * `show_all_option`, when true, a *show all results* link will be displayed - below the navigation component. 
- - * `add_view_actions`, when true, actions returned by view.table_actions() - will be included in the actions menu. - - * `header_column_idx`, if not `None`, should be a colum index or a set of - column index where tags should be generated instead of - """ #'# make emacs happier - __regid__ = 'table_layout' - cssclass = "listing" - needs_css = ('cubicweb.tableview.css',) - needs_js = () - display_filter = None # None / 'top' / 'bottom' - display_actions = 'top' # None / 'top' / 'bottom' - hide_filter = True - show_all_option = True # make navcomp generate a 'show all' results link - add_view_actions = False - header_column_idx = None - enable_sorting = True - sortvalue_limit = 10 - tablesorter_settings = { - 'textExtraction': JSString('cw.sortValueExtraction'), - 'selectorHeaders': "thead tr:first th[class='sortable']", # only plug on the first row - } - - def _setup_tablesorter(self, divid): - self._cw.add_css('cubicweb.tablesorter.css') - self._cw.add_js('jquery.tablesorter.js') - self._cw.add_onload('''$(document).ready(function() { - $("#%s table").tablesorter(%s); -});''' % (divid, js_dumps(self.tablesorter_settings))) - - def __init__(self, req, view, **kwargs): - super(TableLayout, self).__init__(req, **kwargs) - for key, val in list(self.cw_extra_kwargs.items()): - if hasattr(self.__class__, key) and not key[0] == '_': - setattr(self, key, val) - self.cw_extra_kwargs.pop(key) - self.view = view - if self.header_column_idx is None: - self.header_column_idx = frozenset() - elif isinstance(self.header_column_idx, int): - self.header_column_idx = frozenset( (self.header_column_idx,) ) - - @cachedproperty - def initial_load(self): - """We detect a bit heuristically if we are built for the first time or - from subsequent calls by the form filter or by the pagination hooks. 
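# Illustrative sketch, not part of this patch: a table view can tune the
# TableLayout options documented above through the `layout_args` dictionary
# mentioned in the module docstring.  The 'compacttable' identifier is
# hypothetical; the option names are the layout attributes listed above.
from cubicweb.web.views.tableview import RsetTableView

class CompactTableView(RsetTableView):
    __regid__ = 'compacttable'
    layout_args = {
        'display_filter': 'bottom',  # facets filter below the table
        'display_actions': None,     # no actions menu
        'hide_filter': False,        # filter form visible by default
    }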
- """ - form = self._cw.form - return 'fromformfilter' not in form and '__fromnavigation' not in form - - def render(self, w, **kwargs): - assert self.display_filter in (None, 'top', 'bottom'), self.display_filter - if self.needs_css: - self._cw.add_css(self.needs_css) - if self.needs_js: - self._cw.add_js(self.needs_js) - if self.enable_sorting: - self._setup_tablesorter(self.view.domid) - # Notice facets form must be rendered **outside** the main div as it - # shouldn't be rendered on ajax call subsequent to facet restriction - # (hence the 'fromformfilter' parameter added by the form - generate_form = self.initial_load - if self.display_filter and generate_form: - facetsform = self.view.facets_form() - else: - facetsform = None - if facetsform and self.display_filter == 'top': - cssclass = u'hidden' if self.hide_filter else u'' - facetsform.render(w, vid=self.view.__regid__, cssclass=cssclass, - divid=self.view.domid) - actions = [] - if self.display_actions: - if self.add_view_actions: - actions = self.view.table_actions() - if self.display_filter and self.hide_filter and (facetsform or not generate_form): - actions += self.show_hide_filter_actions(not generate_form) - self.render_table(w, actions, self.view.paginable) - if facetsform and self.display_filter == 'bottom': - cssclass = u'hidden' if self.hide_filter else u'' - facetsform.render(w, vid=self.view.__regid__, cssclass=cssclass, - divid=self.view.domid) - - def render_table_headers(self, w, colrenderers): - w(u'') - for colrenderer in colrenderers: - if colrenderer.sortable: - w(u'') - else: - w(u'') - colrenderer.render_header(w) - w(u'') - w(u'\n') - - def render_table_body(self, w, colrenderers): - w(u'') - for rownum in range(self.view.table_size): - self.render_row(w, rownum, colrenderers) - w(u'') - - def render_table(self, w, actions, paginate): - view = self.view - divid = view.domid - if divid is not None: - w(u'
' % divid)
-        else:
-            assert not (actions or paginate)
-        nav_html = UStringIO()
-        if paginate:
-            view.paginate(w=nav_html.write, show_all_option=self.show_all_option)
-        w(nav_html.getvalue())
-        if actions and self.display_actions == 'top':
-            self.render_actions(w, actions)
-        colrenderers = view.build_column_renderers()
-        attrs = self.table_attributes()
-        w(u'' % sgml_attributes(attrs))
-        if self.view.has_headers:
-            self.render_table_headers(w, colrenderers)
-        self.render_table_body(w, colrenderers)
-        w(u'')
-        if actions and self.display_actions == 'bottom':
-            self.render_actions(w, actions)
-        w(nav_html.getvalue())
-        if divid is not None:
-            w(u'
              ') - - def table_attributes(self): - return {'class': self.cssclass} - - def render_row(self, w, rownum, renderers): - attrs = self.row_attributes(rownum) - w(u'' % sgml_attributes(attrs)) - for colnum, renderer in enumerate(renderers): - self.render_cell(w, rownum, colnum, renderer) - w(u'\n') - - def row_attributes(self, rownum): - return {'class': 'odd' if (rownum%2==1) else 'even', - 'onmouseover': '$(this).addClass("highlighted");', - 'onmouseout': '$(this).removeClass("highlighted")'} - - def render_cell(self, w, rownum, colnum, renderer): - attrs = self.cell_attributes(rownum, colnum, renderer) - if colnum in self.header_column_idx: - tag = u'th' - else: - tag = u'td' - w(u'<%s %s>' % (tag, sgml_attributes(attrs))) - renderer.render_cell(w, rownum) - w(u'' % tag) - - def cell_attributes(self, rownum, _colnum, renderer): - attrs = renderer.attributes.copy() - if renderer.sortable: - sortvalue = renderer.sortvalue(rownum) - if isinstance(sortvalue, string_types): - sortvalue = sortvalue[:self.sortvalue_limit] - if sortvalue is not None: - attrs[u'cubicweb:sortvalue'] = js_dumps(sortvalue) - return attrs - - def render_actions(self, w, actions): - box = MenuWidget('', '', _class='tableActionsBox', islist=False) - label = tags.span(self._cw._('action menu')) - menu = PopupBoxMenu(label, isitem=False, link_class='actionsBox', - ident='%sActions' % self.view.domid) - box.append(menu) - for action in actions: - menu.append(action) - box.render(w=w) - w(u'
              ') - - def show_hide_filter_actions(self, currentlydisplayed=False): - divid = self.view.domid - showhide = u';'.join(toggle_action('%s%s' % (divid, what))[11:] - for what in ('Form', 'Show', 'Hide', 'Actions')) - showhide = 'javascript:' + showhide - self._cw.add_onload(u'''\ -$(document).ready(function() { - if ($('#%(id)sForm[class=\"hidden\"]').length) { - $('#%(id)sHide').attr('class', 'hidden'); - } else { - $('#%(id)sShow').attr('class', 'hidden'); - } -});''' % {'id': divid}) - showlabel = self._cw._('show filter form') - hidelabel = self._cw._('hide filter form') - return [component.Link(showhide, showlabel, id='%sShow' % divid), - component.Link(showhide, hidelabel, id='%sHide' % divid)] - - -class AbstractColumnRenderer(object): - """Abstract base class for column renderer. Interface of a column renderer follows: - - .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.bind - .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.render_header - .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.render_cell - .. automethod:: cubicweb.web.views.tableview.AbstractColumnRenderer.sortvalue - - Attributes on this base class are: - - :attr: `header`, the column header. If None, default to `_(colid)` - :attr: `addcount`, if True, add the table size in parenthezis beside the header - :attr: `trheader`, should the header be translated - :attr: `escapeheader`, should the header be xml_escaped - :attr: `sortable`, tell if the column is sortable - :attr: `view`, the table view - :attr: `_cw`, the request object - :attr: `colid`, the column identifier - :attr: `attributes`, dictionary of attributes to put on the HTML tag when - the cell is rendered - """ #'# make emacs - attributes = {} - empty_cell_content = u' ' - - def __init__(self, header=None, addcount=False, trheader=True, - escapeheader=True, sortable=True): - self.header = header - self.trheader = trheader - self.escapeheader = escapeheader - self.addcount = addcount - self.sortable = sortable - self.view = None - self._cw = None - self.colid = None - - def __str__(self): - return '<%s.%s (column %s) at 0x%x>' % (self.view.__class__.__name__, - self.__class__.__name__, - self.colid, id(self)) - - def bind(self, view, colid): - """Bind the column renderer to its view. This is where `_cw`, `view`, - `colid` are set and the method to override if you want to add more - view/request depending attributes on your column render. - """ - self.view = view - self._cw = view._cw - self.colid = colid - - def copy(self): - assert self.view is None - return copy(self) - - def default_header(self): - """Return header for this column if one has not been specified.""" - return self._cw._(self.colid) - - def render_header(self, w): - """Write label for the specified column by calling w().""" - header = self.header - if header is None: - header = self.default_header() - elif self.trheader and header: - header = self._cw._(header) - if self.addcount: - header = '%s (%s)' % (header, self.view.table_size) - if header: - if self.escapeheader: - header = xml_escape(header) - else: - header = self.empty_cell_content - if self.sortable: - header = tags.span( - header, escapecontent=False, - title=self._cw._('Click to sort on this column')) - w(header) - - def render_cell(self, w, rownum): - """Write value for the specified cell by calling w(). 
- - :param `rownum`: the row number in the table - """ - raise NotImplementedError() - - def sortvalue(self, _rownum): - """Return typed value to be used for sorting on the specified column. - - :param `rownum`: the row number in the table - """ - return None - - -class TableMixIn(component.LayoutableMixIn): - """Abstract mix-in class for layout based tables. - - This default implementation's call method simply delegate to - meth:`layout_render` that will select the renderer whose identifier is given - by the :attr:`layout_id` attribute. - - Then it provides some default implementation for various parts of the API - used by that layout. - - Abstract method you will have to override is: - - .. automethod:: build_column_renderers - - You may also want to overridde: - - .. autoattribute:: cubicweb.web.views.tableview.TableMixIn.table_size - - The :attr:`has_headers` boolean attribute tells if the table has some - headers to be displayed. Default to `True`. - """ - __abstract__ = True - # table layout to use - layout_id = 'table_layout' - # true if the table has some headers - has_headers = True - # dictionary {colid : column renderer} - column_renderers = {} - # default renderer class to use when no renderer specified for the column - default_column_renderer_class = None - # default layout handles inner pagination - handle_pagination = True - - def call(self, **kwargs): - self._cw.add_js('cubicweb.ajax.js') # for pagination - self.layout_render(self.w) - - def column_renderer(self, colid, *args, **kwargs): - """Return a column renderer for column of the given id.""" - try: - crenderer = self.column_renderers[colid].copy() - except KeyError: - crenderer = self.default_column_renderer_class(*args, **kwargs) - crenderer.bind(self, colid) - return crenderer - - # layout callbacks ######################################################### - - def facets_form(self, **kwargs):# XXX extracted from jqplot cube - return self._cw.vreg['views'].select_or_none( - 'facet.filtertable', self._cw, rset=self.cw_rset, view=self, - **kwargs) - - @cachedproperty - def domid(self): - return self._cw.form.get('divid') or domid('%s-%s' % (self.__regid__, make_uid())) - - @property - def table_size(self): - """Return the number of rows (header excluded) to be displayed. - - By default return the number of rows in the view's result set. If your - table isn't reult set based, override this method. - """ - return self.cw_rset.rowcount - - def build_column_renderers(self): - """Return a list of column renderers, one for each column to be - rendered. Prototype of a column renderer is described below: - - .. autoclass:: cubicweb.web.views.tableview.AbstractColumnRenderer - """ - raise NotImplementedError() - - def table_actions(self): - """Return a list of actions (:class:`~cubicweb.web.component.Link`) that - match the view's result set, and return those in the 'mainactions' - category. 
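# Illustrative sketch, not part of this patch: a minimal renderer implementing
# the AbstractColumnRenderer interface described above.  It assumes an
# rset-based table (RsetTableView-style) where the column id is the column
# index in the result set and the column holds date values; both assumptions
# are specific to this example.
from cubicweb.web.views.tableview import AbstractColumnRenderer

class DateColRenderer(AbstractColumnRenderer):
    """render a date column as ISO dates, sortable on the raw value"""

    def render_cell(self, w, rownum):
        value = self.view.cw_rset[rownum][self.colid]
        if value is None:
            w(self.empty_cell_content)
        else:
            w(value.strftime('%Y-%m-%d'))

    def sortvalue(self, rownum):
        value = self.view.cw_rset[rownum][self.colid]
        return value.isoformat() if value is not None else u''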
- """ - req = self._cw - actions = [] - actionsbycat = req.vreg['actions'].possible_actions(req, self.cw_rset) - for action in actionsbycat.get('mainactions', ()): - for action in action.actual_actions(): - actions.append(component.Link(action.url(), req._(action.title), - klass=action.html_class()) ) - return actions - - # interaction with navigation component #################################### - - def page_navigation_url(self, navcomp, _path, params): - params['divid'] = self.domid - params['vid'] = self.__regid__ - return navcomp.ajax_page_url(**params) - - -class RsetTableColRenderer(AbstractColumnRenderer): - """Default renderer for :class:`RsetTableView`.""" - - def __init__(self, cellvid, **kwargs): - super(RsetTableColRenderer, self).__init__(**kwargs) - self.cellvid = cellvid - - def bind(self, view, colid): - super(RsetTableColRenderer, self).bind(view, colid) - self.cw_rset = view.cw_rset - def render_cell(self, w, rownum): - self._cw.view(self.cellvid, self.cw_rset, 'empty-cell', - row=rownum, col=self.colid, w=w) - - # limit value's length as much as possible (e.g. by returning the 10 first - # characters of a string) - def sortvalue(self, rownum): - colid = self.colid - val = self.cw_rset[rownum][colid] - if val is None: - return u'' - etype = self.cw_rset.description[rownum][colid] - if etype is None: - return u'' - if self._cw.vreg.schema.eschema(etype).final: - entity, rtype = self.cw_rset.related_entity(rownum, colid) - if entity is None: - return val # remove_html_tags() ? - return entity.sortvalue(rtype) - entity = self.cw_rset.get_entity(rownum, colid) - return entity.sortvalue() - - -class RsetTableView(TableMixIn, AnyRsetView): - """This table view accepts any non-empty rset. It uses introspection on the - result set to compute column names and the proper way to display the cells. - - It is highly configurable and accepts a wealth of options, but take care to - check what you're trying to achieve wouldn't be a job for the - :class:`EntityTableView`. Basically the question is: does this view should - be tied to the result set query's shape or no? If yes, than you're fine. If - no, you should take a look at the other table implementation. - - The following class attributes may be used to control the table: - - * `finalvid`, a view identifier that should be called on final entities - (e.g. attribute values). Default to 'final'. - - * `nonfinalvid`, a view identifier that should be called on - entities. Default to 'incontext'. - - * `displaycols`, if not `None`, should be a list of rset's columns to be - displayed. - - * `headers`, if not `None`, should be a list of headers for the table's - columns. `None` values in the list will be replaced by computed column - names. - - * `cellvids`, if not `None`, should be a dictionary with table column index - as key and a view identifier as value, telling the view that should be - used in the given column. - - Notice `displaycols`, `headers` and `cellvids` may be specified at selection - time but then the table won't have pagination and shouldn't be configured to - display the facets filter nor actions (as they wouldn't behave as expected). - - This table class use the :class:`RsetTableColRenderer` as default column - renderer. - - .. 
autoclass:: RsetTableColRenderer - """ #'# make emacs happier - __regid__ = 'table' - # selector trick for bw compath with the former :class:TableView - __select__ = AnyRsetView.__select__ & (~match_kwargs( - 'title', 'subvid', 'displayfilter', 'headers', 'displaycols', - 'displayactions', 'actions', 'divid', 'cellvids', 'cellattrs', - 'mainindex', 'paginate', 'page_size', mode='any') - | unreloadable_table()) - title = _('table') - # additional configuration parameters - finalvid = 'final' - nonfinalvid = 'incontext' - displaycols = None - headers = None - cellvids = None - default_column_renderer_class = RsetTableColRenderer - - def linkable(self): - # specific subclasses of this view usually don't want to be linkable - # since they depends on a particular shape (being linkable meaning view - # may be listed in possible views - return self.__regid__ == 'table' - - def call(self, headers=None, displaycols=None, cellvids=None, - paginate=None, **kwargs): - if self.headers: - self.headers = [h and self._cw._(h) for h in self.headers] - if (headers or displaycols or cellvids or paginate): - if headers is not None: - self.headers = headers - if displaycols is not None: - self.displaycols = displaycols - if cellvids is not None: - self.cellvids = cellvids - if paginate is not None: - self.paginable = paginate - if kwargs: - # old table view arguments that we can safely ignore thanks to - # selectors - if len(kwargs) > 1: - msg = '[3.14] %s arguments are deprecated' % ', '.join(kwargs) - else: - msg = '[3.14] %s argument is deprecated' % ', '.join(kwargs) - warn(msg, DeprecationWarning, stacklevel=2) - super(RsetTableView, self).call(**kwargs) - - def main_var_index(self): - """returns the index of the first non-attribute variable among the RQL - selected variables - """ - eschema = self._cw.vreg.schema.eschema - for i, etype in enumerate(self.cw_rset.description[0]): - if not eschema(etype).final: - return i - return None - - # layout callbacks ######################################################### - - @property - def table_size(self): - """return the number of rows (header excluded) to be displayed""" - return self.cw_rset.rowcount - - def build_column_renderers(self): - headers = self.headers - # compute displayed columns - if self.displaycols is None: - if headers is not None: - displaycols = list(range(len(headers))) - else: - rqlst = self.cw_rset.syntax_tree() - displaycols = list(range(len(rqlst.children[0].selection))) - else: - displaycols = self.displaycols - # compute table headers - main_var_index = self.main_var_index() - computed_titles = self.columns_labels(main_var_index) - # compute build renderers - cellvids = self.cellvids - renderers = [] - for colnum, colid in enumerate(displaycols): - addcount = False - # compute column header - title = None - if headers is not None: - title = headers[colnum] - if title is None: - title = computed_titles[colid] - if colid == main_var_index: - addcount = True - # compute cell vid for the column - if cellvids is not None and colnum in cellvids: - cellvid = cellvids[colnum] - else: - coltype = self.cw_rset.description[0][colid] - if coltype is not None and self._cw.vreg.schema.eschema(coltype).final: - cellvid = self.finalvid - else: - cellvid = self.nonfinalvid - # get renderer - renderer = self.column_renderer(colid, header=title, trheader=False, - addcount=addcount, cellvid=cellvid) - renderers.append(renderer) - return renderers - - -class EntityTableColRenderer(AbstractColumnRenderer): - """Default column renderer for 
:class:`EntityTableView`. - - You may use the :meth:`entity` method to retrieve the main entity for a - given row number. - - .. automethod:: cubicweb.web.views.tableview.EntityTableColRenderer.entity - .. automethod:: cubicweb.web.views.tableview.EntityTableColRenderer.render_entity - .. automethod:: cubicweb.web.views.tableview.EntityTableColRenderer.entity_sortvalue - """ - def __init__(self, renderfunc=None, sortfunc=None, sortable=None, **kwargs): - if renderfunc is None: - renderfunc = self.render_entity - # if renderfunc nor sortfunc nor sortable specified, column will be - # sortable using the default implementation. - if sortable is None: - sortable = True - # no sortfunc given but asked to be sortable: use the default sort - # method. Sub-class may set `entity_sortvalue` to None if they don't - # support sorting. - if sortfunc is None and sortable: - sortfunc = self.entity_sortvalue - # at this point `sortable` may still be unspecified while `sortfunc` is - # sure to be set to someting else than None if the column is sortable. - sortable = sortfunc is not None - super(EntityTableColRenderer, self).__init__(sortable=sortable, **kwargs) - self.renderfunc = renderfunc - self.sortfunc = sortfunc - - def copy(self): - assert self.view is None - # copy of attribute referencing a method doesn't work with python < 2.7 - renderfunc = self.__dict__.pop('renderfunc') - sortfunc = self.__dict__.pop('sortfunc') - try: - acopy = copy(self) - for aname, member in[('renderfunc', renderfunc), - ('sortfunc', sortfunc)]: - if isinstance(member, MethodType): - member = create_bound_method(member.__func__, acopy) - setattr(acopy, aname, member) - return acopy - finally: - self.renderfunc = renderfunc - self.sortfunc = sortfunc - - def render_cell(self, w, rownum): - entity = self.entity(rownum) - if entity is None: - w(self.empty_cell_content) - else: - self.renderfunc(w, entity) - - def sortvalue(self, rownum): - entity = self.entity(rownum) - if entity is None: - return None - else: - return self.sortfunc(entity) - - def entity(self, rownum): - """Convenience method returning the table's main entity.""" - return self.view.entity(rownum) - - def render_entity(self, w, entity): - """Sort value if `renderfunc` nor `sortfunc` specified at - initialization. - - This default implementation consider column id is an entity attribute - and print its value. - """ - w(entity.printable_value(self.colid)) - - def entity_sortvalue(self, entity): - """Cell rendering implementation if `renderfunc` nor `sortfunc` - specified at initialization. - - This default implementation consider column id is an entity attribute - and return its sort value by calling `entity.sortvalue(colid)`. - """ - return entity.sortvalue(self.colid) - - -class MainEntityColRenderer(EntityTableColRenderer): - """Renderer to be used for the column displaying the 'main entity' of a - :class:`EntityTableView`. - - By default display it using the 'incontext' view. You may specify another - view identifier using the `vid` argument. - - If header not specified, it would be built using entity types in the main - column. 
- """ - def __init__(self, vid='incontext', addcount=True, **kwargs): - super(MainEntityColRenderer, self).__init__(addcount=addcount, **kwargs) - self.vid = vid - - def default_header(self): - view = self.view - if len(view.cw_rset) > 1: - suffix = '_plural' - else: - suffix = '' - return u', '.join(self._cw.__(et + suffix) - for et in view.cw_rset.column_types(view.cw_col or 0)) - - def render_entity(self, w, entity): - entity.view(self.vid, w=w) - - def entity_sortvalue(self, entity): - return entity.sortvalue() - - -class RelatedEntityColRenderer(MainEntityColRenderer): - """Renderer to be used for column displaying an entity related the 'main - entity' of a :class:`EntityTableView`. - - By default display it using the 'incontext' view. You may specify another - view identifier using the `vid` argument. - - If header not specified, it would be built by translating the column id. - """ - def __init__(self, getrelated, addcount=False, **kwargs): - super(RelatedEntityColRenderer, self).__init__(addcount=addcount, **kwargs) - self.getrelated = getrelated - - def entity(self, rownum): - entity = super(RelatedEntityColRenderer, self).entity(rownum) - return self.getrelated(entity) - - def default_header(self): - return self._cw._(self.colid) - - -class RelationColRenderer(EntityTableColRenderer): - """Renderer to be used for column displaying a list of entities related the - 'main entity' of a :class:`EntityTableView`. By default, the main entity is - considered as the subject of the relation but you may specify otherwise - using the `role` argument. - - By default display the related rset using the 'csv' view, using - 'outofcontext' sub-view for each entity. You may specify another view - identifier using respectivly the `vid` and `subvid` arguments. - - If you specify a 'rtype view', such as 'reledit', you should add a - is_rtype_view=True parameter. - - If header not specified, it would be built by translating the column id, - properly considering role. - """ - def __init__(self, role='subject', vid='csv', subvid=None, - fallbackvid='empty-cell', is_rtype_view=False, **kwargs): - super(RelationColRenderer, self).__init__(**kwargs) - self.role = role - self.vid = vid - if subvid is None and vid in ('csv', 'list'): - subvid = 'outofcontext' - self.subvid = subvid - self.fallbackvid = fallbackvid - self.is_rtype_view = is_rtype_view - - def render_entity(self, w, entity): - kwargs = {'w': w} - if self.is_rtype_view: - rset = None - kwargs['entity'] = entity - kwargs['rtype'] = self.colid - kwargs['role'] = self.role - else: - rset = entity.related(self.colid, self.role) - if self.subvid is not None: - kwargs['subvid'] = self.subvid - self._cw.view(self.vid, rset, self.fallbackvid, **kwargs) - - def default_header(self): - return display_name(self._cw, self.colid, self.role) - - entity_sortvalue = None # column not sortable by default - - -class EntityTableView(TableMixIn, EntityView): - """This abstract table view is designed to be used with an - :class:`is_instance()` or :class:`adaptable` predicate, hence doesn't depend - the result set shape as the :class:`RsetTableView` does. - - It will display columns that should be defined using the `columns` class - attribute containing a list of column ids. By default, each column is - renderered by :class:`EntityTableColRenderer` which consider that the column - id is an attribute of the table's main entity (ie the one for which the view - is selected). 
- - You may wish to specify :class:`MainEntityColRenderer` or - :class:`RelatedEntityColRenderer` renderer for a column in the - :attr:`column_renderers` dictionary. - - .. autoclass:: cubicweb.web.views.tableview.EntityTableColRenderer - .. autoclass:: cubicweb.web.views.tableview.MainEntityColRenderer - .. autoclass:: cubicweb.web.views.tableview.RelatedEntityColRenderer - .. autoclass:: cubicweb.web.views.tableview.RelationColRenderer - """ - __abstract__ = True - default_column_renderer_class = EntityTableColRenderer - columns = None # to be defined in concret class - - def call(self, columns=None, **kwargs): - if columns is not None: - self.columns = columns - self.layout_render(self.w) - - @property - def table_size(self): - return self.cw_rset.rowcount - - def build_column_renderers(self): - return [self.column_renderer(colid) for colid in self.columns] - - def entity(self, rownum): - """Return the table's main entity""" - return self.cw_rset.get_entity(rownum, self.cw_col or 0) - - -class EmptyCellView(AnyRsetView): - __regid__ = 'empty-cell' - __select__ = yes() - def call(self, **kwargs): - self.w(u' ') - cell_call = call - - -################################################################################ -# DEPRECATED tables ############################################################ -################################################################################ - - -@add_metaclass(class_deprecated) -class TableView(AnyRsetView): - """The table view accepts any non-empty rset. It uses introspection on the - result set to compute column names and the proper way to display the cells. - - It is however highly configurable and accepts a wealth of options. - """ - __deprecation_warning__ = '[3.14] %(cls)s is deprecated' - __regid__ = 'table' - title = _('table') - finalview = 'final' - - table_widget_class = TableWidget - table_column_class = TableColumn - - tablesorter_settings = { - 'textExtraction': JSString('cw.sortValueExtraction'), - 'selectorHeaders': 'thead tr:first th', # only plug on the first row - } - handle_pagination = True - - def form_filter(self, divid, displaycols, displayactions, displayfilter, - paginate, hidden=True): - try: - filterform = self._cw.vreg['views'].select( - 'facet.filtertable', self._cw, rset=self.cw_rset) - except NoSelectableObject: - return () - vidargs = {'paginate': paginate, - 'displaycols': displaycols, - 'displayactions': displayactions, - 'displayfilter': displayfilter} - cssclass = hidden and 'hidden' or '' - filterform.render(self.w, vid=self.__regid__, divid=divid, - vidargs=vidargs, cssclass=cssclass) - return self.show_hide_actions(divid, not hidden) - - def main_var_index(self): - """Returns the index of the first non final variable of the rset. - - Used to select the main etype to help generate accurate column headers. - XXX explain the concept - - May return None if none is found. - """ - eschema = self._cw.vreg.schema.eschema - for i, etype in enumerate(self.cw_rset.description[0]): - try: - if not eschema(etype).final: - return i - except KeyError: # XXX possible? 
- continue - return None - - def displaycols(self, displaycols, headers): - if displaycols is None: - if 'displaycols' in self._cw.form: - displaycols = [int(idx) for idx in self._cw.form['displaycols']] - elif headers is not None: - displaycols = list(range(len(headers))) - else: - displaycols = list(range(len(self.cw_rset.syntax_tree().children[0].selection))) - return displaycols - - def _setup_tablesorter(self, divid): - req = self._cw - req.add_js('jquery.tablesorter.js') - req.add_onload('''$(document).ready(function() { - $("#%s table.listing").tablesorter(%s); -});''' % (divid, js_dumps(self.tablesorter_settings))) - req.add_css(('cubicweb.tablesorter.css', 'cubicweb.tableview.css')) - - @cachedproperty - def initial_load(self): - """We detect a bit heuristically if we are built for the first time or - from subsequent calls by the form filter or by the pagination - hooks. - - """ - form = self._cw.form - return 'fromformfilter' not in form and '__start' not in form - - def call(self, title=None, subvid=None, displayfilter=None, headers=None, - displaycols=None, displayactions=None, actions=(), divid=None, - cellvids=None, cellattrs=None, mainindex=None, - paginate=False, page_size=None): - """Produces a table displaying a composite query - - :param title: title added before table - :param subvid: cell view - :param displayfilter: filter that selects rows to display - :param headers: columns' titles - :param displaycols: indexes of columns to display (first column is 0) - :param displayactions: if True, display action menu - """ - req = self._cw - divid = divid or req.form.get('divid') or 'rs%s' % make_uid(id(self.cw_rset)) - self._setup_tablesorter(divid) - # compute label first since the filter form may remove some necessary - # information from the rql syntax tree - if mainindex is None: - mainindex = self.main_var_index() - computed_labels = self.columns_labels(mainindex) - if not subvid and 'subvid' in req.form: - subvid = req.form.pop('subvid') - actions = list(actions) - if mainindex is None: - displayfilter, displayactions = False, False - else: - if displayfilter is None and req.form.get('displayfilter'): - displayfilter = True - if displayactions is None and req.form.get('displayactions'): - displayactions = True - displaycols = self.displaycols(displaycols, headers) - if self.initial_load: - self.w(u'
')
-            if not title and 'title' in req.form:
-                title = req.form['title']
-            if title:
-                self.w(u'%s\n' % title)
-            if displayfilter:
-                actions += self.form_filter(divid, displaycols, displayfilter,
-                                            displayactions, paginate)
-        elif displayfilter:
-            actions += self.show_hide_actions(divid, True)
-        self.w(u'' % divid)
-        if displayactions:
-            actionsbycat = self._cw.vreg['actions'].possible_actions(req, self.cw_rset)
-            for action in actionsbycat.get('mainactions', ()):
-                for action in action.actual_actions():
-                    actions.append( (action.url(), req._(action.title),
-                                     action.html_class(), None) )
-        # render actions menu
-        if actions:
-            self.render_actions(divid, actions)
-        # render table
-        if paginate:
-            self.divid = divid # XXX iirk (see usage in page_navigation_url)
-            self.paginate(page_size=page_size, show_all_option=False)
-        table = self.table_widget_class(self)
-        for column in self.get_columns(computed_labels, displaycols, headers,
-                                       subvid, cellvids, cellattrs, mainindex):
-            table.append_column(column)
-        table.render(self.w)
-        self.w(u'\n')
-        if self.initial_load:
-            self.w(u'
              \n') - - def page_navigation_url(self, navcomp, path, params): - """Build a URL to the current view using the attributes - - :param navcomp: a NavigationComponent to call a URL method on. - :param path: expected to be json here? - :param params: params to give to build_url method - - this is called by :class:`cubiweb.web.component.NavigationComponent` - """ - if hasattr(self, 'divid'): - # XXX this assert a single call - params['divid'] = self.divid - params['vid'] = self.__regid__ - return navcomp.ajax_page_url(**params) - - def show_hide_actions(self, divid, currentlydisplayed=False): - showhide = u';'.join(toggle_action('%s%s' % (divid, what))[11:] - for what in ('Form', 'Show', 'Hide', 'Actions')) - showhide = 'javascript:' + showhide - showlabel = self._cw._('show filter form') - hidelabel = self._cw._('hide filter form') - if currentlydisplayed: - return [(showhide, showlabel, 'hidden', '%sShow' % divid), - (showhide, hidelabel, None, '%sHide' % divid)] - return [(showhide, showlabel, None, '%sShow' % divid), - (showhide, hidelabel, 'hidden', '%sHide' % divid)] - - def render_actions(self, divid, actions): - box = MenuWidget('', 'tableActionsBox', _class='', islist=False) - label = tags.img(src=self._cw.uiprops['PUCE_DOWN'], - alt=xml_escape(self._cw._('action(s) on this selection'))) - menu = PopupBoxMenu(label, isitem=False, link_class='actionsBox', - ident='%sActions' % divid) - box.append(menu) - for url, label, klass, ident in actions: - menu.append(component.Link(url, label, klass=klass, id=ident)) - box.render(w=self.w) - self.w(u'
              ') - - def get_columns(self, computed_labels, displaycols, headers, subvid, - cellvids, cellattrs, mainindex): - """build columns description from various parameters - - : computed_labels: columns headers computed from rset to be used if there is no headers entry - : displaycols: see :meth:`call` - : headers: explicitly define columns headers - : subvid: see :meth:`call` - : cellvids: see :meth:`call` - : cellattrs: see :meth:`call` - : mainindex: see :meth:`call` - - return a list of columns description to be used by - :class:`~cubicweb.web.htmlwidgets.TableWidget` - """ - columns = [] - eschema = self._cw.vreg.schema.eschema - for colindex, label in enumerate(computed_labels): - if colindex not in displaycols: - continue - # compute column header - if headers is not None: - _label = headers[displaycols.index(colindex)] - if _label is not None: - label = _label - if colindex == mainindex and label is not None: - label += ' (%s)' % self.cw_rset.rowcount - column = self.table_column_class(label, colindex) - coltype = self.cw_rset.description[0][colindex] - # compute column cell view (if coltype is None, it's a left outer - # join, use the default non final subvid) - if cellvids and colindex in cellvids: - column.append_renderer(cellvids[colindex], colindex) - elif coltype is not None and eschema(coltype).final: - column.append_renderer(self.finalview, colindex) - else: - column.append_renderer(subvid or 'incontext', colindex) - if cellattrs and colindex in cellattrs: - for name, value in cellattrs[colindex].items(): - column.add_attr(name, value) - # add column - columns.append(column) - return columns - - - def render_cell(self, cellvid, row, col, w): - self._cw.view('cell', self.cw_rset, row=row, col=col, cellvid=cellvid, w=w) - - def get_rows(self): - return self.cw_rset - - @htmlescape - @jsonize - @limitsize(10) - def sortvalue(self, row, col): - # XXX it might be interesting to try to limit value's - # length as much as possible (e.g. by returning the 10 - # first characters of a string) - val = self.cw_rset[row][col] - if val is None: - return u'' - etype = self.cw_rset.description[row][col] - if etype is None: - return u'' - if self._cw.vreg.schema.eschema(etype).final: - entity, rtype = self.cw_rset.related_entity(row, col) - if entity is None: - return val # remove_html_tags() ? - return entity.sortvalue(rtype) - entity = self.cw_rset.get_entity(row, col) - return entity.sortvalue() - - -class EditableTableView(TableView): - __regid__ = 'editable-table' - finalview = 'editable-final' - title = _('editable-table') - - -@add_metaclass(class_deprecated) -class CellView(EntityView): - __deprecation_warning__ = '[3.14] %(cls)s is deprecated' - __regid__ = 'cell' - __select__ = nonempty_rset() - - def cell_call(self, row, col, cellvid=None): - """ - :param row, col: indexes locating the cell value in view's result set - :param cellvid: cell view (defaults to 'outofcontext') - """ - etype, val = self.cw_rset.description[row][col], self.cw_rset[row][col] - if etype is None or not self._cw.vreg.schema.eschema(etype).final: - if val is None: - # This is usually caused by a left outer join and in that case, - # regular views will most certainly fail if they don't have - # a real eid - # XXX if cellvid is e.g. reledit, we may wanna call it anyway - self.w(u' ') - else: - self.wview(cellvid or 'outofcontext', self.cw_rset, row=row, col=col) - else: - # XXX why do we need a fallback view here? 
- self.wview(cellvid or 'final', self.cw_rset, 'null', row=row, col=col) - - -class InitialTableView(TableView): - """same display as table view but consider two rql queries : - - * the default query (ie `rql` form parameter), which is only used to select - this view and to build the filter form. This query should have the same - structure as the actual without actual restriction (but link to - restriction variables) and usually with a limit for efficiency (limit set - to 2 is advised) - - * the actual query (`actualrql` form parameter) whose results will be - displayed with default restrictions set - """ - __regid__ = 'initialtable' - __select__ = nonempty_rset() - # should not be displayed in possible view since it expects some specific - # parameters - title = None - - def call(self, title=None, subvid=None, headers=None, divid=None, - paginate=False, displaycols=None, displayactions=None, - mainindex=None): - """Dumps a table displaying a composite query""" - try: - actrql = self._cw.form['actualrql'] - except KeyError: - actrql = self.cw_rset.printable_rql() - else: - self._cw.ensure_ro_rql(actrql) - displaycols = self.displaycols(displaycols, headers) - if displayactions is None and 'displayactions' in self._cw.form: - displayactions = True - if divid is None and 'divid' in self._cw.form: - divid = self._cw.form['divid'] - self.w(u'
')
-        if not title and 'title' in self._cw.form:
-            # pop title so it's not displayed by the table view as well
-            title = self._cw.form.pop('title')
-        if title:
-            self.w(u'%s\n' % title)
-        if mainindex is None:
-            mainindex = self.main_var_index()
-        if mainindex is not None:
-            actions = self.form_filter(divid, displaycols, displayactions,
-                                       displayfilter=True, paginate=paginate,
-                                       hidden=True)
-        else:
-            actions = ()
-        if not subvid and 'subvid' in self._cw.form:
-            subvid = self._cw.form.pop('subvid')
-        self._cw.view('table', self._cw.execute(actrql),
-                      'noresult', w=self.w, displayfilter=False, subvid=subvid,
-                      displayactions=displayactions, displaycols=displaycols,
-                      actions=actions, headers=headers, divid=divid)
-        self.w(u'
              \n') - - -class EditableInitialTableTableView(InitialTableView): - __regid__ = 'editable-initialtable' - finalview = 'editable-final' - - -@add_metaclass(class_deprecated) -class EntityAttributesTableView(EntityView): - """This table displays entity attributes in a table and allow to set a - specific method to help building cell content for each attribute as well as - column header. - - Table will render entity cell by using the appropriate build_COLNAME_cell - methods if defined otherwise cell content will be entity.COLNAME. - - Table will render column header using the method header_for_COLNAME if - defined otherwise COLNAME will be used. - """ - __deprecation_warning__ = '[3.14] %(cls)s is deprecated' - __abstract__ = True - columns = () - table_css = "listing" - css_files = () - - def call(self, columns=None): - if self.css_files: - self._cw.add_css(self.css_files) - _ = self._cw._ - self.columns = columns or self.columns - sample = self.cw_rset.get_entity(0, 0) - self.w(u'' % self.table_css) - self.table_header(sample) - self.w(u'') - for row in range(self.cw_rset.rowcount): - self.cell_call(row=row, col=0) - self.w(u'') - self.w(u'
              ') - - def cell_call(self, row, col): - _ = self._cw._ - entity = self.cw_rset.get_entity(row, col) - entity.complete() - infos = {} - for col in self.columns: - meth = getattr(self, 'build_%s_cell' % col, None) - # find the build method or try to find matching attribute - if meth: - content = meth(entity) - else: - content = entity.printable_value(col) - infos[col] = content - self.w(u"""""") - line = u''.join(u'%%(%s)s' % col for col in self.columns) - self.w(line % infos) - self.w(u'\n') - - def table_header(self, sample): - """builds the table's header""" - self.w(u'') - for column in self.columns: - meth = getattr(self, 'header_for_%s' % column, None) - if meth: - colname = meth(sample) - else: - colname = self._cw._(column) - self.w(u'%s' % xml_escape(colname)) - self.w(u'\n') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/tabs.py --- a/web/views/tabs.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,249 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""base classes to handle tabbed views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six import string_types - -from logilab.common.deprecation import class_renamed -from logilab.mtconverter import xml_escape - -from cubicweb import NoSelectableObject, role -from cubicweb import tags, uilib, utils -from cubicweb.predicates import partial_has_related_entities -from cubicweb.view import EntityView -from cubicweb.web.views import primary - -class LazyViewMixin(object): - """provides two convenience methods for the tab machinery. - - Can also be used to lazy-load arbitrary views. - """ - - def _prepare_bindings(self, vid, reloadable): - self._cw.add_onload(u""" - jQuery('#lazy-%(vid)s').bind('%(event)s', function(event) { - loadNow('#lazy-%(vid)s', '#%(vid)s-hole', %(reloadable)s); - });""" % {'event': 'load_%s' % vid, 'vid': vid, - 'reloadable' : str(reloadable).lower()}) - - def lazyview(self, vid, rql=None, eid=None, rset=None, tabid=None, - reloadable=False, show_spinbox=True, w=None): - """a lazy version of wview""" - w = w or self.w - self._cw.add_js('cubicweb.ajax.js') - # the form is copied into urlparams to please the inner views - # that might want to take params from it - # beware of already present rql or eid elements - # to be safe of collision a proper argument passing protocol - # (with namespaces) should be used instead of the current - # ad-hockery - urlparams = self._cw.form.copy() - urlparams.pop('rql', None) - urlparams.pop('eid', None) - urlparams.update({'vid' : vid, 'fname' : 'view'}) - if rql: - urlparams['rql'] = rql - elif eid: - urlparams['eid'] = eid - elif rset: - urlparams['rql'] = rset.printable_rql() - if tabid is None: - tabid = uilib.domid(vid) - w(u'
              ' % ( - tabid, xml_escape(self._cw.build_url('ajax', **urlparams)))) - if show_spinbox: - # Don't use ``alt`` since image is a *visual* helper for ajax - w(u'' - % (xml_escape(self._cw.data_url('loading.gif')), tabid)) - else: - w(u'
              ' % tabid) - w(u'' - % (xml_escape(self._cw._('Link:')), - tabid, - xml_escape(self._cw.build_url(**urlparams)), - xml_escape(self._cw._(tabid)))) - w(u'
              ') - self._prepare_bindings(tabid, reloadable) - - def forceview(self, vid): - """trigger an event that will force immediate loading of the view on dom - readyness - """ - self._cw.add_onload(uilib.js.triggerLoad(vid)) - - -class TabsMixin(LazyViewMixin): - """a tab mixin to easily get jQuery based, lazy, ajax tabs""" - lazy = True - - @property - def cookie_name(self): - return str('%s_active_tab' % self._cw.vreg.config.appid) - - def active_tab(self, default): - if 'tab' in self._cw.form: - return self._cw.form['tab'] - cookies = self._cw.get_cookie() - cookiename = self.cookie_name - activetab = cookies.get(cookiename) - if activetab is None: - domid = uilib.domid(default) - self._cw.set_cookie(cookiename, domid) - return domid - return activetab.value - - def prune_tabs(self, tabs, default_tab): - selected_tabs = [] - may_be_active_tab = self.active_tab(default_tab) - active_tab = uilib.domid(default_tab) - viewsvreg = self._cw.vreg['views'] - for tab in tabs: - if isinstance(tab, string_types): - tabid, tabkwargs = tab, {} - else: - tabid, tabkwargs = tab - tabkwargs = tabkwargs.copy() - tabkwargs.setdefault('rset', self.cw_rset) - vid = tabkwargs.get('vid', tabid) - domid = uilib.domid(tabid) - try: - viewsvreg.select(vid, self._cw, tabid=domid, **tabkwargs) - except NoSelectableObject: - continue - selected_tabs.append((tabid, domid, tabkwargs)) - if domid == may_be_active_tab: - active_tab = domid - return selected_tabs, active_tab - - def render_tabs(self, tabs, default, entity=None): - # delegate to the default tab if there is more than one entity - # in the result set (tabs are pretty useless there) - if entity and len(self.cw_rset) > 1: - entity.view(default, w=self.w) - return - self._cw.add_css('jquery.ui.css') - self._cw.add_js(('jquery.ui.js', 'cubicweb.ajax.js', 'jquery.cookie.js')) - # prune tabs : not all are to be shown - tabs, active_tab = self.prune_tabs(tabs, default) - # build the html structure - w = self.w - uid = entity and entity.eid or utils.make_uid('tab') - w(u'
              ' % uid) - w(u'') - for tabid, domid, tabkwargs in tabs: - w(u'
              ' % domid) - if self.lazy: - tabkwargs.setdefault('tabid', domid) - tabkwargs.setdefault('vid', tabid) - self.lazyview(**tabkwargs) - else: - self._cw.view(tabid, w=self.w, **tabkwargs) - w(u'
              ') - w(u'
              ') - # call the setTab() JS function *after* each tab is generated - # because the callback binding needs to be done before - # XXX make work history: true - if self.lazy: - self._cw.add_onload(u""" - jQuery('#entity-tabs-%(uid)s').tabs( - { active: %(tabindex)s, - activate: function(event, ui) { - setTab(ui.newPanel.attr('id'), '%(cookiename)s'); - } - }); - setTab('%(domid)s', '%(cookiename)s'); -""" % {'tabindex' : active_tab_idx, - 'domid' : active_tab, - 'uid' : uid, - 'cookiename' : self.cookie_name}) - else: - self._cw.add_onload( - u"jQuery('#entity-tabs-%(uid)s').tabs({active: %(tabindex)s});" - % {'tabindex': active_tab_idx, 'uid': uid}) - - -class EntityRelationView(EntityView): - """view displaying entity related stuff. - Such a view _must_ provide the rtype, target and vid attributes : - - Example : - - class ProjectScreenshotsView(EntityRelationView): - '''display project's screenshots''' - __regid__ = title = _('projectscreenshots') - __select__ = EntityRelationView.__select__ & is_instance('Project') - rtype = 'screenshot' - role = 'subject' - vid = 'gallery' - - in this example, entities related to project entity by the 'screenshot' - relation (where the project is subject of the relation) will be displayed - using the 'gallery' view. - """ - __select__ = EntityView.__select__ & partial_has_related_entities() - vid = 'list' - # to be defined in concrete classes - rtype = title = None - - def cell_call(self, row, col): - rset = self.cw_rset.get_entity(row, col).related(self.rtype, role(self)) - self.w(u'
              ') - if self.title: - self.w(tags.h1(self._cw._(self.title))) - self.wview(self.vid, rset, 'noresult') - self.w(u'
              ') - - -class TabbedPrimaryView(TabsMixin, primary.PrimaryView): - __abstract__ = True # don't register - - tabs = [_('main_tab')] - default_tab = 'main_tab' - - def render_entity(self, entity): - self.render_entity_toolbox(entity) - self.w(u'
              ') - self.render_entity_title(entity) - self.render_tabs(self.tabs, self.default_tab, entity) - -TabedPrimaryView = class_renamed('TabedPrimaryView', TabbedPrimaryView) - -class PrimaryTab(primary.PrimaryView): - __regid__ = 'main_tab' - title = None # should not appear in possible views - - def is_primary(self): - return True - - def render_entity_title(self, entity): - pass - def render_entity_toolbox(self, entity): - pass diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/timeline.py --- a/web/views/timeline.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -try: - from cubes.timeline.views import ( - TimelineJsonView, - TimelineViewMixIn, - TimelineView, - StaticTimelineView) - -except ImportError: - pass -else: - from logilab.common.deprecation import class_moved - - TimelineJsonView = class_moved(TimelineJsonView, 'TimelineJsonView') - TimelineViewMixIn = class_moved(TimelineViewMixIn, 'TimelineViewMixIn') - TimelineView = class_moved(TimelineView, 'TimelineView') - StaticTimelineView = class_moved(StaticTimelineView, 'StaticTimelineView') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/timetable.py --- a/web/views/timetable.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,221 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
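For readers of the tab machinery removed just above (web/views/tabs.py), here is a minimal sketch of how ``TabbedPrimaryView``, ``PrimaryTab`` and ``EntityRelationView`` are typically combined in a cube; the ``Project``/``screenshot`` schema and the ``gallery`` view are hypothetical, borrowed from the docstring example, and are not part of this changeset:

.. sourcecode:: python

    from cubicweb import _
    from cubicweb.predicates import is_instance
    from cubicweb.web.views.tabs import (TabbedPrimaryView,
                                         EntityRelationView)

    class ProjectPrimaryView(TabbedPrimaryView):
        # the primary view becomes a tab container; 'main_tab' is rendered
        # by the PrimaryTab view defined in the module removed above
        __select__ = TabbedPrimaryView.__select__ & is_instance('Project')
        tabs = [_('main_tab'), _('projectscreenshots')]
        default_tab = 'main_tab'

    class ProjectScreenshotsTab(EntityRelationView):
        # lazily loaded tab listing entities related through 'screenshot'
        __regid__ = title = _('projectscreenshots')
        __select__ = EntityRelationView.__select__ & is_instance('Project')
        rtype = 'screenshot'
        role = 'subject'
        vid = 'gallery'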
-"""html timetable views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves import range - -from logilab.mtconverter import xml_escape -from logilab.common.date import ONEDAY, date_range, todatetime - -from cubicweb.predicates import adaptable -from cubicweb.view import EntityView - - -class _TaskEntry(object): - def __init__(self, task, color, column): - self.task = task - self.color = color - self.column = column - self.lines = 1 - -MIN_COLS = 3 # minimum number of task columns for a single user -ALL_USERS = object() - -class TimeTableView(EntityView): - __regid__ = 'timetable' - title = _('timetable') - __select__ = adaptable('ICalendarable') - paginable = False - - def call(self, title=None): - """Dumps a timetable from a resultset composed of a note (anything - with start/stop) and a user (anything)""" - self._cw.add_css('cubicweb.timetable.css') - dates = {} - users = [] - users_max = {} - # XXX: try refactoring with calendar.py:OneMonthCal - for row in range(self.cw_rset.rowcount): - task = self.cw_rset.get_entity(row, 0) - icalendarable = task.cw_adapt_to('ICalendarable') - if len(self.cw_rset[row]) > 1 and self.cw_rset.description[row][1] == 'CWUser': - user = self.cw_rset.get_entity(row, 1) - else: - user = ALL_USERS - the_dates = [] - if icalendarable.start and icalendarable.stop: - if icalendarable.start.toordinal() == icalendarable.stop.toordinal(): - the_dates.append(icalendarable.start) - else: - the_dates += date_range(icalendarable.start, - icalendarable.stop + ONEDAY) - elif icalendarable.start: - the_dates.append(icalendarable.start) - elif icalendarable.stop: - the_dates.append(icalendarable.stop) - for d in the_dates: - d = todatetime(d) - d_users = dates.setdefault(d, {}) - u_tasks = d_users.setdefault(user, set()) - u_tasks.add( task ) - task_max = users_max.setdefault(user, 0) - if len(u_tasks)>task_max: - users_max[user] = len(u_tasks) - if user not in users: - # keep original ordering - users.append(user) - if not dates: - return - date_min = min(dates) - date_max = max(dates) - #users = list(sorted(users, key=lambda u:u.login)) - - rows = [] - # colors here are class names defined in cubicweb.css - colors = ["col%x" % i for i in range(12)] - next_color_index = 0 - - visited_tasks = {} # holds a description of a task for a user - task_colors = {} # remember a color assigned to a task - for date in date_range(date_min, date_max + ONEDAY): - columns = [date] - d_users = dates.get(date, {}) - for user in users: - # every user has its column "splitted" in at least MIN_COLS - # sub-columns (for overlapping tasks) - user_columns = [None] * max(MIN_COLS, users_max[user]) - # every task that is "visited" for the first time - # require a special treatment, so we put them in - # 'postpone' - postpone = [] - for task in d_users.get(user, []): - key = (task, user) - if key in visited_tasks: - task_descr = visited_tasks[ key ] - user_columns[task_descr.column] = task_descr, False - task_descr.lines += 1 - else: - postpone.append(key) - for key in postpone: - # to every 'new' task we must affect a color - # (which must be the same for every user concerned - # by the task) - task, user = key - for i, t in enumerate(user_columns): - if t is None: - if task in task_colors: - color = task_colors[task] - else: - color = colors[next_color_index] - next_color_index = (next_color_index+1)%len(colors) - task_colors[task] = color - task_descr = _TaskEntry(task, color, i) - user_columns[i] = task_descr, True - visited_tasks[key] = task_descr - break - 
else: - raise RuntimeError("is it possible we got it wrong?") - - columns.append( user_columns ) - rows.append( columns ) - - widths = [ len(col) for col in rows[0][1:] ] - self.w(u'
              ') - if title: - self.w(u'
%s
              \n' % title) - self.w(u'') - self.render_col_headers(users, widths) - self.render_rows(rows) - self.w(u'
              ') - self.w(u'
              \n') - - def render_col_headers(self, users, widths): - """ render column headers """ - self.w(u'\n') - - self.w(u' \n') - columns = [] - for user, width in zip(users, widths): - self.w(u'' % max(MIN_COLS, width)) - if user is ALL_USERS: - self.w(u'*') - else: - user.view('oneline', w=self.w) - self.w(u'') - self.w(u'\n') - return columns - - def render_rows(self, rows): - """ render table content (row headers and central content) """ - odd = False - previous_is_empty = False - for row in rows: - date = row[0] - empty_line = True - for group in row[1:]: - for value in group: - if value: - empty_line = False - break - else: - continue - break - if empty_line and previous_is_empty: - continue - previous_is_empty = False - - klass = "even" - if date.weekday() in (5, 6) and not empty_line: - klass = "odd" - self.w(u'' % klass) - odd = not odd - - if not empty_line: - self.w(u'%s' % self._cw.format_date(date) ) - else: - self.w(u'...' ) - previous_is_empty = True - - empty_klasses = [ "ttle", "ttme", "ttre" ] - filled_klasses = [ "ttlf", "ttmf", "ttrf" ] - kj = 0 # 0: left, 1: mid, 2: right - for uid, group in enumerate(row[1:]): - for i, value in enumerate(group): - if i == 0: - kj = 0 - elif i == len(group): - kj = 2 - else: - kj = 1 - if value: - task_descr, first_row = value - if first_row: - url = xml_escape(task_descr.task.absolute_url(vid="edition")) - self.w(u' 
              ' % ( - task_descr.lines, task_descr.color, filled_klasses[kj], url)) - task_descr.task.view('tooltip', w=self.w) - self.w(u'
              ') - else: - if empty_line: - self.w(u' ') - else: - self.w(u' ' % empty_klasses[kj] ) - self.w(u'\n') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/treeview.py --- a/web/views/treeview.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,315 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Set of tree views / tree-building widgets, some based on jQuery treeview -plugin. -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from warnings import warn - -from logilab.mtconverter import xml_escape - -from cubicweb.utils import make_uid, json -from cubicweb.predicates import adaptable -from cubicweb.view import EntityView -from cubicweb.web.views import baseviews -from cubicweb.web.views.ajaxcontroller import ajaxfunc - -def treecookiename(treeid): - return str('%s-treestate' % treeid) - -def _done_init(done, view, row, col): - """handle an infinite recursion safety belt""" - if done is None: - done = set() - entity = view.cw_rset.get_entity(row, col) - if entity.eid in done: - msg = entity._cw._('loop in %(rel)s relation (%(eid)s)') % { - 'rel': entity.cw_adapt_to('ITree').tree_relation, - 'eid': entity.eid - } - return None, msg - done.add(entity.eid) - return done, entity - - -class BaseTreeView(baseviews.ListView): - """base tree view""" - __regid__ = 'tree' - __select__ = adaptable('ITree') - item_vid = 'treeitem' - - def call(self, done=None, **kwargs): - if done is None: - done = set() - super(BaseTreeView, self).call(done=done, **kwargs) - - def cell_call(self, row, col=0, vid=None, done=None, maxlevel=None, klass=None, **kwargs): - assert maxlevel is None or maxlevel > 0 - done, entity = _done_init(done, self, row, col) - if done is None: - # entity is actually an error message - self.w(u'
%s
' % entity) - return - self.open_item(entity) - entity.view(vid or self.item_vid, w=self.w, **kwargs) - if maxlevel is not None: - maxlevel -= 1 - if maxlevel == 0: - self.close_item(entity) - return - relatedrset = entity.cw_adapt_to('ITree').children(entities=False) - self.wview(self.__regid__, relatedrset, 'null', done=done, - maxlevel=maxlevel, klass=klass, **kwargs) - self.close_item(entity) - - def open_item(self, entity): - self.w(u'
\n' % entity.cw_etype.lower()) - def close_item(self, entity): - self.w(u'
\n') - - class TreePathView(EntityView): - """a recursive path view""" - __regid__ = 'path' - __select__ = adaptable('ITree') - item_vid = 'oneline' - separator = u' > ' - - def call(self, **kwargs): - self.w(u'
              ') - super(TreePathView, self).call(**kwargs) - self.w(u'
              ') - - def cell_call(self, row, col=0, vid=None, done=None, **kwargs): - done, entity = _done_init(done, self, row, col) - if done is None: - # entity is actually an error message - self.w(u'%s' % entity) - return - parent = entity.cw_adapt_to('ITree').parent() - if parent: - parent.view(self.__regid__, w=self.w, done=done) - self.w(self.separator) - entity.view(vid or self.item_vid, w=self.w) - - -class TreeComboBoxView(TreePathView): - """display folder in edition's combobox""" - __regid__ = 'combobox' - item_vid = 'text' - separator = u' > ' - -# XXX rename regid to ajaxtree/foldabletree or something like that (same for -# treeitemview) -class TreeView(EntityView): - """ajax tree view, click to expand folder""" - - __regid__ = 'treeview' - itemvid = 'treeitemview' - subvid = 'oneline' - cssclass = 'treeview widget' - title = _('tree view') - - def _init_params(self, subvid, treeid, initial_load, initial_thru_ajax, morekwargs): - form = self._cw.form - if subvid is None: - subvid = form.pop('treesubvid', self.subvid) # consume it - if treeid is None: - treeid = form.pop('treeid', None) - if treeid is None: - treeid = 'throw_away' + make_uid('uid') - if 'morekwargs' in self._cw.form: - ajaxargs = json.loads(form.pop('morekwargs')) - # got unicode & python keywords must be strings - morekwargs.update(dict((str(k), v) - for k, v in ajaxargs.items())) - toplevel_thru_ajax = form.pop('treeview_top', False) or initial_thru_ajax - toplevel = toplevel_thru_ajax or (initial_load and not form.get('fname')) - return subvid, treeid, toplevel_thru_ajax, toplevel - - def _init_headers(self, treeid): - self._cw.add_css(('jquery-treeview/jquery.treeview.css', 'cubicweb.treeview.css')) - self._cw.add_js(('cubicweb.ajax.js', 'cubicweb.widgets.js', 'jquery-treeview/jquery.treeview.js')) - self._cw.html_headers.add_onload(u""" -jQuery("#tree-%s").treeview({toggle: toggleTree, prerendered: true});""" % treeid) - - def call(self, subvid=None, treeid=None, - initial_load=True, initial_thru_ajax=False, **morekwargs): - subvid, treeid, toplevel_thru_ajax, toplevel = self._init_params( - subvid, treeid, initial_load, initial_thru_ajax, morekwargs) - ulid = ' ' - if toplevel: - self._init_headers(treeid) - ulid = ' id="tree-%s"' % treeid - self.w(u'' % (ulid, self.cssclass)) - # XXX force sorting on x.sortvalue() (which return dc_title by default) - # we need proper ITree & co specification to avoid this. - # (pb when type ambiguity at the other side of the tree relation, - # unability to provide generic implementation on eg Folder...) - for i, entity in enumerate(sorted(self.cw_rset.entities(), - key=lambda x: x.sortvalue())): - if i+1 < len(self.cw_rset): - morekwargs['is_last'] = False - else: - morekwargs['is_last'] = True - entity.view(self.itemvid, vid=subvid, parentvid=self.__regid__, - treeid=treeid, w=self.w, **morekwargs) - self.w(u'
      ') - - def cell_call(self, *args, **allargs): - """ does not makes much sense until you have to invoke - somentity.view('treeview') """ - allargs.pop('row') - allargs.pop('col') - self.call(*args, **allargs) - - -class FileTreeView(TreeView): - """specific version of the treeview to display file trees - """ - __regid__ = 'filetree' - cssclass = 'treeview widget filetree' - title = _('file tree view') - - def call(self, subvid=None, treeid=None, initial_load=True, **kwargs): - super(FileTreeView, self).call(treeid=treeid, subvid='filetree-oneline', - initial_load=initial_load, **kwargs) - -class FileItemInnerView(EntityView): - """inner view used by the TreeItemView instead of oneline view - - This view adds an enclosing with some specific CSS classes - around the oneline view. This is needed by the jquery treeview plugin. - """ - __regid__ = 'filetree-oneline' - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - itree = entity.cw_adapt_to('ITree') - if itree and not itree.is_leaf(): - self.w(u'
      %s
      \n' % entity.view('oneline')) - else: - # XXX define specific CSS classes according to mime types - self.w(u'
      %s
      \n' % entity.view('oneline')) - - -class DefaultTreeViewItemView(EntityView): - """default treeitem view for entities which don't adapt to ITree""" - __regid__ = 'treeitemview' - - def cell_call(self, row, col, vid='oneline', treeid=None, **morekwargs): - assert treeid is not None - itemview = self._cw.view(vid, self.cw_rset, row=row, col=col) - last_class = morekwargs['is_last'] and ' class="last"' or '' - self.w(u'%s' % (last_class, itemview)) - - -class TreeViewItemView(EntityView): - """specific treeitem view for entities which adapt to ITree - - (each item should be expandable if it's not a tree leaf) - """ - __regid__ = 'treeitemview' - __select__ = adaptable('ITree') - default_branch_state_is_open = False - - def open_state(self, eeid, treeid): - cookies = self._cw.get_cookie() - treestate = cookies.get(treecookiename(treeid)) - if treestate: - return str(eeid) in treestate.value.split(':') - return self.default_branch_state_is_open - - def cell_call(self, row, col, treeid, vid='oneline', parentvid='treeview', - is_last=False, **morekwargs): - w = self.w - entity = self.cw_rset.get_entity(row, col) - itree = entity.cw_adapt_to('ITree') - liclasses = [] - if self._cw.url(includeparams=False) == entity.absolute_url(): - liclasses.append(u'selected') - is_open = self.open_state(entity.eid, treeid) - is_leaf = itree is None or itree.is_leaf() - if is_leaf: - if is_last: - liclasses.append('last') - w(u'
    • ' % u' '.join(liclasses)) - else: - rql = itree.children_rql() % {'x': entity.eid} - url = xml_escape(self._cw.build_url('ajax', rql=rql, vid=parentvid, - pageid=self._cw.pageid, - treeid=treeid, - fname='view', - treesubvid=vid, - morekwargs=json.dumps(morekwargs))) - divclasses = ['hitarea'] - if is_open: - liclasses.append('collapsable') - divclasses.append('collapsable-hitarea') - else: - liclasses.append('expandable') - divclasses.append('expandable-hitarea') - if is_last: - if is_open: - liclasses.append('lastCollapsable') - divclasses.append('lastCollapsable-hitarea') - else: - liclasses.append('lastExpandable') - divclasses.append('lastExpandable-hitarea') - if is_open: - w(u'
' % u' '.join(liclasses)) - else: - w(u'
' % (url, u' '.join(liclasses))) - if treeid.startswith('throw_away'): - divtail = '' - else: - divtail = """ onclick="asyncRemoteExec('node_clicked', '%s', '%s')" """ % ( - treeid, entity.eid) - w(u'
      ' % (u' '.join(divclasses), divtail)) - - # add empty
        because jquery's treeview plugin checks for - # sublists presence - if not is_open: - w(u'
place holder
        ') - # the local node info - self.wview(vid, self.cw_rset, row=row, col=col, **morekwargs) - if is_open and not is_leaf: # => rql is defined - self.wview(parentvid, itree.children(entities=False), subvid=vid, - treeid=treeid, initial_load=False, **morekwargs) - w(u'') - - - -@ajaxfunc -def node_clicked(self, treeid, nodeeid): - """add/remove eid in treestate cookie""" - cookies = self._cw.get_cookie() - statename = treecookiename(treeid) - treestate = cookies.get(statename) - if treestate is None: - self._cw.set_cookie(statename, nodeeid) - else: - marked = set(filter(None, treestate.value.split(':'))) - if nodeeid in marked: - marked.remove(nodeeid) - else: - marked.add(nodeeid) - self._cw.set_cookie(statename, ':'.join(marked)) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/uicfg.py --- a/web/views/uicfg.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,693 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""This module (``cubicweb.web.views.uicfg``) regroups a set of structures that may be -used to configure various options of the generated web interface. - -To configure the interface generation, we use ``RelationTag`` objects. - -Index view configuration -```````````````````````` -:indexview_etype_section: - entity type category in the index/manage page. May be one of: - - * ``application`` - * ``system`` - * ``schema`` - * ``subobject`` (not displayed by default) - - By default only entities on the ``application`` category are shown. - -.. sourcecode:: python - - from cubicweb.web.views import uicfg - # force hiding - uicfg.indexview_etype_section['HideMe'] = 'subobject' - # force display - uicfg.indexview_etype_section['ShowMe'] = 'application' - - -Actions box configuration -````````````````````````` -:actionbox_appearsin_addmenu: - simple boolean relation tags used to control the "add entity" submenu. - Relations whose rtag is True will appears, other won't. - -.. 
sourcecode:: python - - # Adds all subjects of the entry_of relation in the add menu of the ``Blog`` - # primary view - uicfg.actionbox_appearsin_addmenu.tag_object_of(('*', 'entry_of', 'Blog'), True) -""" -__docformat__ = "restructuredtext en" - -from warnings import warn - -from six import string_types - -from cubicweb import neg_role -from cubicweb.rtags import (RelationTags, RelationTagsBool, RelationTagsSet, - RelationTagsDict, NoTargetRelationTagsDict, - _ensure_str_key) -from cubicweb.schema import META_RTYPES, INTERNAL_TYPES, WORKFLOW_TYPES - - -# primary view configuration ################################################## - -class PrimaryViewSectionRelationTags(RelationTags): - """primary view section configuration""" - __regid__ = 'primaryview_section' - - _allowed_values = frozenset(('attributes', 'relations', - 'sideboxes', 'hidden')) - - def _init(self, sschema, rschema, oschema, role): - if self.get(sschema, rschema, oschema, role) is None: - rdef = rschema.rdef(sschema, oschema) - if rschema.final: - if rschema.meta or sschema.is_metadata(rschema) \ - or oschema.type in ('Password', 'Bytes'): - section = 'hidden' - else: - section = 'attributes' - else: - if rdef.role_cardinality(role) in '1+': - section = 'attributes' - elif rdef.composite == neg_role(role): - section = 'relations' - else: - section = 'sideboxes' - self.tag_relation((sschema, rschema, oschema, role), section) - -primaryview_section = PrimaryViewSectionRelationTags() - - -class DisplayCtrlRelationTags(NoTargetRelationTagsDict): - """primary view display controller configuration""" - __regid__ = 'primaryview_display_ctrl' - - def __init__(self, *args, **kwargs): - super(DisplayCtrlRelationTags, self).__init__(*args, **kwargs) - self.counter = 0 - - def _init(self, sschema, rschema, oschema, role): - if role == 'subject': - oschema = '*' - else: - sschema = '*' - self.counter += 1 - self.setdefault((sschema, rschema, oschema, role), - 'order', - self.counter) - - def set_fields_order(self, etype, relations): - """specify the field order in `etype` primary view. - - :param etype: the entity type as a string - :param attrs: the ordered list of attribute names (or relations) - - `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_rel) - - Unspecified fields will be displayed after specified ones, their - order being consistent with the schema definition. - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import primaryview_display_ctrl as pvdc - pvdc.set_fields_order('CWUser', ('firstname', ('in_group', 'subject'), - 'surname', 'login')) - - """ - for index, relation in enumerate(relations): - if not isinstance(relation, tuple): - relation = (relation, 'subject') - rtype, role = relation - if role == 'subject': - self.tag_subject_of((etype, rtype, '*'), {'order': index}) - else: - self.tag_object_of((etype, rtype, '*'), {'order': index}) - - -primaryview_display_ctrl = DisplayCtrlRelationTags() - - -# index view configuration #################################################### -# entity type section in the index/manage page. May be one of -# * 'application' -# * 'system' -# * 'schema' -# * 'hidden' -# * 'subobject' (not displayed by default) - -class InitializableDict(dict): # XXX not a rtag. Turn into an appobject? 
- def __init__(self, *args, **kwargs): - super(InitializableDict, self).__init__(*args, **kwargs) - self.__defaults = dict(self) - - def init(self, schema, check=True): - self.update(self.__defaults) - for eschema in schema.entities(): - if eschema.final: - continue - if eschema.schema_entity(): - self.setdefault(eschema, 'schema') - elif eschema in INTERNAL_TYPES or eschema in WORKFLOW_TYPES: - self.setdefault(eschema, 'system') - elif eschema.is_subobject(strict=True): - self.setdefault(eschema, 'subobject') - else: - self.setdefault(eschema, 'application') - -indexview_etype_section = InitializableDict( - EmailAddress='subobject', - Bookmark='system', - # entity types in the 'system' table by default (managers only) - CWUser='system', CWGroup='system', - ) - - -# autoform.AutomaticEntityForm configuration ################################## - -def _formsections_as_dict(formsections): - result = {} - for formsection in formsections: - formtype, section = formsection.split('_', 1) - result[formtype] = section - return result - -def _card_and_comp(sschema, rschema, oschema, role): - rdef = rschema.rdef(sschema, oschema) - if role == 'subject': - card = rdef.cardinality[0] - composed = not rschema.final and rdef.composite == 'object' - else: - card = rdef.cardinality[1] - composed = not rschema.final and rdef.composite == 'subject' - return card, composed - -class AutoformSectionRelationTags(RelationTagsSet): - """autoform relations'section""" - __regid__ = 'autoform_section' - - _allowed_form_types = ('main', 'inlined', 'muledit') - _allowed_values = {'main': ('attributes', 'inlined', 'relations', - 'metadata', 'hidden'), - 'inlined': ('attributes', 'inlined', 'hidden'), - 'muledit': ('attributes', 'hidden'), - } - - def init(self, schema, check=True): - super(AutoformSectionRelationTags, self).init(schema, check) - self.apply(schema, self._initfunc_step2) - - def _init(self, sschema, rschema, oschema, role): - formsections = self.init_get(sschema, rschema, oschema, role) - if formsections is None: - formsections = self.tag_container_cls() - if not any(tag.startswith('inlined') for tag in formsections): - if not rschema.final: - negsects = self.init_get(sschema, rschema, oschema, neg_role(role)) - if 'main_inlined' in negsects: - formsections.add('inlined_hidden') - key = _ensure_str_key( (sschema, rschema, oschema, role) ) - self._tagdefs[key] = formsections - - def _initfunc_step2(self, sschema, rschema, oschema, role): - formsections = self.get(sschema, rschema, oschema, role) - sectdict = _formsections_as_dict(formsections) - if rschema in META_RTYPES: - sectdict.setdefault('main', 'hidden') - sectdict.setdefault('muledit', 'hidden') - sectdict.setdefault('inlined', 'hidden') - elif role == 'subject' and rschema in sschema.meta_attributes(): - # meta attribute, usually embeded by the described attribute's field - # (eg RichTextField, FileField...) 
- sectdict.setdefault('main', 'hidden') - sectdict.setdefault('muledit', 'hidden') - sectdict.setdefault('inlined', 'hidden') - # ensure we have a tag for each form type - if not 'main' in sectdict: - if not rschema.final and ( - sectdict.get('inlined') == 'attributes' or - 'inlined_attributes' in self.init_get(sschema, rschema, oschema, - neg_role(role))): - sectdict['main'] = 'hidden' - elif sschema.is_metadata(rschema): - sectdict['main'] = 'metadata' - else: - card, composed = _card_and_comp(sschema, rschema, oschema, role) - if card in '1+': - sectdict['main'] = 'attributes' - if not 'muledit' in sectdict: - sectdict['muledit'] = 'attributes' - elif rschema.final: - sectdict['main'] = 'attributes' - else: - sectdict['main'] = 'relations' - if not 'muledit' in sectdict: - sectdict['muledit'] = 'hidden' - if sectdict['main'] == 'attributes': - card, composed = _card_and_comp(sschema, rschema, oschema, role) - if card in '1+' and not composed: - sectdict['muledit'] = 'attributes' - if not 'inlined' in sectdict: - sectdict['inlined'] = sectdict['main'] - # recompute formsections and set it to avoid recomputing - for formtype, section in sectdict.items(): - formsections.add('%s_%s' % (formtype, section)) - - def tag_relation(self, key, formtype, section): - if isinstance(formtype, tuple): - for ftype in formtype: - self.tag_relation(key, ftype, section) - return - assert formtype in self._allowed_form_types, \ - 'formtype should be in (%s), not %s' % ( - ','.join(self._allowed_form_types), formtype) - assert section in self._allowed_values[formtype], \ - 'section for %s should be in (%s), not %s' % ( - formtype, ','.join(self._allowed_values[formtype]), section) - rtags = self._tagdefs.setdefault(_ensure_str_key(key), - self.tag_container_cls()) - # remove previous section for this form type if any - if rtags: - for tag in rtags.copy(): - if tag.startswith(formtype): - rtags.remove(tag) - rtags.add('%s_%s' % (formtype, section)) - return rtags - - def init_get(self, stype, rtype, otype, tagged): - key = (stype, rtype, otype, tagged) - rtags = {} - for key in self._get_keys(stype, rtype, otype, tagged): - tags = self._tagdefs.get(key, ()) - for tag in tags: - assert '_' in tag, (tag, tags) - section, value = tag.split('_', 1) - rtags[section] = value - cls = self.tag_container_cls - rtags = cls('_'.join([section,value]) - for section,value in rtags.items()) - return rtags - - def get(self, *key): - # overriden to avoid recomputing done in parent classes - return self._tagdefs.get(key, ()) - - def relations_by_section(self, entity, formtype, section, permission, - strict=False): - """return a list of (relation schema, target schemas, role) for the - given entity matching categories and permission. 
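As an illustration only (not code from the original module), a caller could consume this generator roughly as follows, assuming ``entity`` is any entity instance being edited:

.. sourcecode:: python

    # list relations that would land in the 'attributes' section of the
    # main edition form, using the uicfg registry the same way
    # 'autoform_permissions_overrides' is looked up above
    afs = entity._cw.vreg['uicfg'].select('autoform_section', entity._cw,
                                          entity=entity)
    for rschema, targetschemas, role in afs.relations_by_section(
            entity, 'main', 'attributes', 'update'):
        print('%s (%s)' % (rschema, role))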
- - `strict`: - bool telling if having local role is enough (strict = False) or not - """ - tag = '%s_%s' % (formtype, section) - eschema = entity.e_schema - cw = entity._cw - permsoverrides = cw.vreg['uicfg'].select('autoform_permissions_overrides', cw, entity=entity) - if entity.has_eid(): - eid = entity.eid - else: - eid = None - strict = False - if permission == 'update': - assert section in ('attributes', 'metadata', 'hidden') - relpermission = 'add' - else: - assert section not in ('metadata', 'hidden') - relpermission = permission - for rschema, targetschemas, role in eschema.relation_definitions(True): - _targetschemas = [] - for tschema in targetschemas: - # check section's tag first, potentially lower cost than - # checking permission which may imply rql queries - if not tag in self.etype_get(eschema, rschema, role, tschema): - continue - rdef = rschema.role_rdef(eschema, tschema, role) - if rschema.final: - if not rdef.has_perm(cw, permission, eid=eid, - creating=eid is None): - continue - elif strict or not rdef.has_local_role(relpermission): - if role == 'subject': - if not rdef.has_perm(cw, relpermission, fromeid=eid): - continue - elif role == 'object': - if not rdef.has_perm(cw, relpermission, toeid=eid): - continue - _targetschemas.append(tschema) - if not _targetschemas: - continue - targetschemas = _targetschemas - rdef = eschema.rdef(rschema, role=role, targettype=targetschemas[0]) - # XXX tag allowing to hijack the permission machinery when - # permission is not verifiable until the entity is actually - # created... - if eid is None and '%s_on_new' % permission in permsoverrides.etype_get(eschema, rschema, role): - yield (rschema, targetschemas, role) - continue - if not rschema.final and role == 'subject': - # on relation with cardinality 1 or ?, we need delete perm as well - # if the relation is already set - if (relpermission == 'add' - and rdef.role_cardinality(role) in '1?' - and eid and entity.related(rschema.type, role) - and not rdef.has_perm(cw, 'delete', fromeid=eid, - toeid=entity.related(rschema.type, role)[0][0])): - continue - elif role == 'object': - # on relation with cardinality 1 or ?, we need delete perm as well - # if the relation is already set - if (relpermission == 'add' - and rdef.role_cardinality(role) in '1?' - and eid and entity.related(rschema.type, role) - and not rdef.has_perm(cw, 'delete', toeid=eid, - fromeid=entity.related(rschema.type, role)[0][0])): - continue - yield (rschema, targetschemas, role) - - def hide_field(self, etype, attr, desttype='*', formtype='main'): - """hide `attr` in `etype` forms. - - :param etype: the entity type as a string - :param attr: the name of the attribute or relation to hide - :param formtype: which form will be affected ('main', 'inlined', etc.), - *main* by default. - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_rel) - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_section as afs - afs.hide_field('CWUser', 'login') - afs.hide_field('*', 'name') - afs.hide_field('CWUser', 'use_email', formtype='inlined') - - """ - self._tag_etype_attr(etype, attr, desttype, - formtype=formtype, section='hidden') - - def hide_fields(self, etype, attrs, formtype='main'): - """simple for-loop wrapper around :func:`hide_field`. - - :param etype: the entity type as a string - :param attrs: the ordered list of attribute names (or relations) - :param formtype: which form will be affected ('main', 'inlined', etc.), - *main* by default. 
- - `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_rel) - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_section as afs - afs.hide_fields('CWUser', ('login', ('use_email', 'subject')), - formtype='inlined') - """ - for attr in attrs: - self.hide_field(etype, attr, formtype=formtype) - - def edit_inline(self, etype, attr, desttype='*', formtype=('main', 'inlined')): - """edit `attr` with and inlined form. - - :param etype: the entity type as a string - :param attr: the name of the attribute or relation - :param desttype: the destination type(s) concerned, default is everything - :param formtype: which form will be affected ('main', 'inlined', etc.), - *main* and *inlined* by default. - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_section as afs - - afs.edit_inline('*', 'use_email') - """ - self._tag_etype_attr(etype, attr, desttype, formtype=formtype, - section='inlined') - - def edit_as_attr(self, etype, attr, desttype='*', formtype=('main', 'muledit')): - """make `attr` appear in the *attributes* section of `etype` form. - - :param etype: the entity type as a string - :param attr: the name of the attribute or relation - :param desttype: the destination type(s) concerned, default is everything - :param formtype: which form will be affected ('main', 'inlined', etc.), - *main* and *muledit* by default. - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_section as afs - - afs.edit_as_attr('CWUser', 'in_group') - """ - self._tag_etype_attr(etype, attr, desttype, - formtype=formtype, section='attributes') - - def set_muledit_editable(self, etype, attrs): - """make `attrs` appear in muledit form of `etype`. - - :param etype: the entity type as a string - :param attrs: the ordered list of attribute names (or relations) - - `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_relation) - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_section as afs - - afs.set_muledit_editable('CWUser', ('firstname', 'surname', 'in_group')) - """ - for attr in attrs: - self.edit_as_attr(self, etype, attr, formtype='muledit') - -autoform_section = AutoformSectionRelationTags() - - -# relations'field class - -class AutoformFieldTags(RelationTags): - __regid__ = 'autoform_field' - - def set_field(self, etype, attr, field): - """sets the `attr` field of `etype`. - - :param etype: the entity type as a string - :param attr: the name of the attribute or relation - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) - - """ - self._tag_etype_attr(etype, attr, '*', field) - -autoform_field = AutoformFieldTags() - - -# relations'field explicit kwargs (given to field's __init__) - -class AutoformFieldKwargsTags(RelationTagsDict): - __regid__ = 'autoform_field_kwargs' - - def set_fields_order(self, etype, attrs): - """specify the field order in `etype` main edition form. - - :param etype: the entity type as a string - :param attrs: the ordered list of attribute names (or relations) - - `attrs` can be strings or 2-tuples (relname, role_of_etype_in_the_rel) - - Unspecified fields will be displayed after specified ones, their - order being consistent with the schema definition. - - Examples: - - .. 
sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_field_kwargs as affk - affk.set_fields_order('CWUser', ('firstname', 'surname', 'login')) - affk.set_fields_order('CWUser', ('firstname', ('in_group', 'subject'), - 'surname', 'login')) - - """ - for index, attr in enumerate(attrs): - self._tag_etype_attr(etype, attr, '*', {'order': index}) - - def set_field_kwargs(self, etype, attr, **kwargs): - """tag `attr` field of `etype` with additional named paremeters. - - :param etype: the entity type as a string - :param attr: the name of the attribute or relation - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) - - Examples: - - .. sourcecode:: python - - from cubicweb.web.views.uicfg import autoform_field_kwargs as affk - affk.set_field_kwargs('Person', 'works_for', widget=fwdgs.AutoCompletionWidget()) - affk.set_field_kwargs('CWUser', 'login', label=_('login or email address'), - widget=fwdgs.TextInput(attrs={'size': 30})) - """ - self._tag_etype_attr(etype, attr, '*', kwargs) - - -autoform_field_kwargs = AutoformFieldKwargsTags() - - -# set of tags of the form _on_new on relations. is a -# schema action (add/update/delete/read), and when such a tag is found -# permissions checking is by-passed and supposed to be ok -class AutoFormPermissionsOverrides(RelationTagsSet): - __regid__ = 'autoform_permissions_overrides' - -autoform_permissions_overrides = AutoFormPermissionsOverrides() - - -class ReleditTags(NoTargetRelationTagsDict): - """Associate to relation a dictionary to control `reledit` (e.g. edition of - attributes / relations from within views). - - Possible keys and associated values are: - - * `novalue_label`, alternative default value (shown when there is no value). - - * `novalue_include_rtype`, when `novalue_label` is not specified, this boolean - flag control wether the generated default value should contains the - relation label or not. Will be the opposite of the `showlabel` value found - in the `primaryview_display_ctrl` rtag by default. - - * `reload`, boolean, eid (to reload to) or function taking subject and - returning bool/eid. This is useful when editing a relation (or attribute) - that impacts the url or another parts of the current displayed - page. Defaults to False. - - * `rvid`, alternative view id (as str) for relation or composite edition. - Default is 'autolimited'. - - * `edit_target`, may be either 'rtype' (to edit the relation) or 'related' - (to edit the related entity). This controls whether to edit the relation - or the target entity of the relation. Currently only one-to-one relations - support target entity edition. By default, the 'related' option is taken - whenever the relation is composite. 
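A possible configuration, shown here only as a sketch (the ``Talk`` entity type, the ``in_conference`` relation and the label below are hypothetical):

.. sourcecode:: python

    from cubicweb.web.views.uicfg import reledit_ctrl
    # edit the relation in place, show a custom label when it is unset and
    # reload the page once the value changes
    reledit_ctrl.tag_subject_of(('Talk', 'in_conference', '*'),
                                {'novalue_label': u'not scheduled yet',
                                 'reload': True,
                                 'rvid': 'autolimited'})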
- """ - __regid__ = 'reledit' - _keys = frozenset('novalue_label novalue_include_rtype reload rvid edit_target'.split()) - - def tag_relation(self, key, tag): - for tagkey in tag: - assert tagkey in self._keys, 'tag %r not in accepted tags: %r' % (tag, self._keys) - return super(ReleditTags, self).tag_relation(key, tag) - - def _init(self, sschema, rschema, oschema, role): - values = self.get(sschema, rschema, oschema, role) - if not rschema.final: - composite = rschema.rdef(sschema, oschema).composite == role - if role == 'subject': - oschema = '*' - else: - sschema = '*' - edittarget = values.get('edit_target') - if edittarget not in (None, 'rtype', 'related'): - self.warning('reledit: wrong value for edit_target on relation %s: %s', - rschema, edittarget) - edittarget = None - if not edittarget: - edittarget = 'related' if composite else 'rtype' - self.tag_relation((sschema, rschema, oschema, role), - {'edit_target': edittarget}) - if not 'novalue_include_rtype' in values: - showlabel = primaryview_display_ctrl.get( - sschema, rschema, oschema, role).get('showlabel', True) - self.tag_relation((sschema, rschema, oschema, role), - {'novalue_include_rtype': not showlabel}) - -reledit_ctrl = ReleditTags() - - -# boxes.EditBox configuration ################################################# - -# 'link' / 'create' relation tags, used to control the "add entity" submenu - -class ActionBoxUicfg(RelationTagsBool): - __regid__ = 'actionbox_appearsin_addmenu' - - def _init(self, sschema, rschema, oschema, role): - if self.get(sschema, rschema, oschema, role) is None: - if rschema in META_RTYPES: - self.tag_relation((sschema, rschema, oschema, role), False) - return - rdef = rschema.rdef(sschema, oschema) - if not rdef.role_cardinality(role) in '?1' and rdef.composite == role: - self.tag_relation((sschema, rschema, oschema, role), True) - - def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs): - if isinstance(attr, string_types): - attr, role = attr, 'subject' - else: - attr, role = attr - if role == 'subject': - self.tag_subject_of((etype, attr, desttype), *args, **kwargs) - else: - self.tag_object_of((desttype, attr, etype), *args, **kwargs) - - def append_to_addmenu(self, etype, attr, createdtype='*'): - """adds `attr` in the actions box *addrelated* submenu of `etype`. - - :param etype: the entity type as a string - :param attr: the name of the attribute or relation to hide - :param createdtype: the target type of the relation (optional, defaults to '*' (all possible types)) - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) - - """ - self._tag_etype_attr(etype, attr, createdtype, True) - - def remove_from_addmenu(self, etype, attr, createdtype='*'): - """removes `attr` from the actions box *addrelated* submenu of `etype`. 
- - :param etype: the entity type as a string - :param attr: the name of the attribute or relation to hide - :param createdtype: the target type of the relation (optional, defaults to '*' (all possible types)) - - `attr` can be a string or 2-tuple (relname, role_of_etype_in_the_relation) - """ - self._tag_etype_attr(etype, attr, createdtype, False) - -actionbox_appearsin_addmenu = ActionBoxUicfg() - - - -def registration_callback(vreg): - vreg.register_all(globals().values(), __name__) - indexview_etype_section.init(vreg.schema) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/undohistory.py --- a/web/views/undohistory.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,224 +0,0 @@ -# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . - -__docformat__ = "restructuredtext en" -from cubicweb import _ - - -from logilab.common.registry import Predicate - -from cubicweb import UnknownEid, tags, transaction as tx -from cubicweb.view import View, StartupView -from cubicweb.predicates import match_kwargs, ExpectedValuePredicate -from cubicweb.schema import display_name - - -class undoable_action(Predicate): - """Select only undoable actions depending on filters provided. Undo Action - is expected to be specified by the `tx_action` argument. - - Currently the only implemented filter is: - - :param action_type: chars among CUDAR (standing for Create, Update, Delete, - Add, Remove) - """ - - # XXX FIXME : this selector should be completed to allow selection on the - # entity or relation types and public / private. - def __init__(self, action_type='CUDAR'): - assert not set(action_type) - set('CUDAR') - self.action_type = action_type - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, ', '.join( - "%s=%v" % (str(k), str(v)) for k, v in kwargs.items() )) - - def __call__(self, cls, req, tx_action=None, **kwargs): - # tx_action is expected to be a transaction.AbstractAction - if not isinstance(tx_action, tx.AbstractAction): - return 0 - # Filter according to action type - return int(tx_action.action in self.action_type) - - -class UndoHistoryView(StartupView): - __regid__ = 'undohistory' - title = _('Undoing') - item_vid = 'undoable-transaction-view' - cache_max_age = 0 - - redirect_path = 'view' #TODO - redirect_params = dict(vid='undohistory') #TODO - public_actions_only = True - - # TODO Allow to choose if if want all actions or only the public ones - # (default) - - def call(self, **kwargs): - txs = self._cw.cnx.undoable_transactions() - if txs : - self.w(u"
          ") - for tx in txs: - self.cell_call(tx) - self.w(u"
        ") - - def cell_call(self, tx): - self.w(u'
') - self.wview(self.item_vid, None, txuuid=tx.uuid, - public=self.public_actions_only, - redirect_path=self.redirect_path, - redirect_params=self.redirect_params) - self.w(u'
      • \n') - - -class UndoableTransactionView(View): - __regid__ = 'undoable-transaction-view' - __select__ = View.__select__ & match_kwargs('txuuid') - - item_vid = 'undoable-action-list-view' - cache_max_age = 0 - - def build_undo_link(self, txuuid, - redirect_path=None, redirect_params=None): - """ the kwargs are passed to build_url""" - _ = self._cw._ - redirect = {} - if redirect_path: - redirect['__redirectpath'] = redirect_path - if redirect_params: - if isinstance(redirect_params, dict): - redirect['__redirectparams'] = self._cw.build_url_params(**redirect_params) - else: - redirect['__redirectparams'] = redirect_params - link_url = self._cw.build_url('undo', txuuid=txuuid, **redirect) - msg = u"%s" % tags.a( _('undo'), href=link_url) - return msg - - def call(self, txuuid, public=True, - redirect_path=None, redirect_params=None): - _ = self._cw._ - txinfo = self._cw.cnx.transaction_info(txuuid) - try: - #XXX Under some unknown circumstances txinfo.user_eid=-1 - user = self._cw.entity_from_eid(txinfo.user_eid) - except UnknownEid: - user = None - undo_url = self.build_undo_link(txuuid, - redirect_path=redirect_path, - redirect_params=redirect_params) - txinfo_dict = dict( dt = self._cw.format_date(txinfo.datetime, time=True), - user_eid = txinfo.user_eid, - user = user and user.view('outofcontext') or _("undefined user"), - txuuid = txuuid, - undo_link = undo_url) - self.w( _("By %(user)s on %(dt)s [%(undo_link)s]") % txinfo_dict) - - tx_actions = txinfo.actions_list(public=public) - if tx_actions : - self.wview(self.item_vid, None, tx_actions=tx_actions) - - -class UndoableActionListView(View): - __regid__ = 'undoable-action-list-view' - __select__ = View.__select__ & match_kwargs('tx_actions') - title = _('Undoable actions') - item_vid = 'undoable-action-view' - cache_max_age = 0 - - def call(self, tx_actions): - if tx_actions : - self.w(u"
          ") - for action in tx_actions: - self.cell_call(action) - self.w(u"
        ") - - def cell_call(self, action): - self.w(u'
') - self.wview(self.item_vid, None, tx_action=action) - self.w(u'
      • \n') - - -class UndoableActionBaseView(View): - __regid__ = 'undoable-action-view' - __abstract__ = True - - def call(self, tx_action): - raise NotImplementedError(self) - - def _build_entity_link(self, eid): - try: - entity = self._cw.entity_from_eid(eid) - return entity.view('outofcontext') - except UnknownEid: - return _("(suppressed) entity #%d") % eid - - def _build_relation_info(self, rtype, eid_from, eid_to): - return dict( rtype=display_name(self._cw, rtype), - entity_from=self._build_entity_link(eid_from), - entity_to=self._build_entity_link(eid_to) ) - - def _build_entity_info(self, etype, eid, changes): - return dict( etype=display_name(self._cw, etype), - entity=self._build_entity_link(eid), - eid=eid, - changes=changes) - - -class UndoableAddActionView(UndoableActionBaseView): - __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='A') - - def call(self, tx_action): - _ = self._cw._ - self.w(_("Added relation : %(entity_from)s %(rtype)s %(entity_to)s") % - self._build_relation_info(tx_action.rtype, tx_action.eid_from, tx_action.eid_to)) - - -class UndoableRemoveActionView(UndoableActionBaseView): - __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='R') - - def call(self, tx_action): - _ = self._cw._ - self.w(_("Delete relation : %(entity_from)s %(rtype)s %(entity_to)s") % - self._build_relation_info(tx_action.rtype, tx_action.eid_from, tx_action.eid_to)) - - -class UndoableCreateActionView(UndoableActionBaseView): - __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='C') - - def call(self, tx_action): - _ = self._cw._ - self.w(_("Created %(etype)s : %(entity)s") % # : %(changes)s - self._build_entity_info( tx_action.etype, tx_action.eid, tx_action.changes) ) - - -class UndoableDeleteActionView(UndoableActionBaseView): - __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='D') - - def call(self, tx_action): - _ = self._cw._ - self.w(_("Deleted %(etype)s : %(entity)s") % - self._build_entity_info( tx_action.etype, tx_action.eid, tx_action.changes)) - - -class UndoableUpdateActionView(UndoableActionBaseView): - __select__ = UndoableActionBaseView.__select__ & undoable_action(action_type='U') - - def call(self, tx_action): - _ = self._cw._ - self.w(_("Updated %(etype)s : %(entity)s") % - self._build_entity_info( tx_action.etype, tx_action.eid, tx_action.changes)) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/urlpublishing.py --- a/web/views/urlpublishing.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,289 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Associate url's path to view identifier / rql queries. 
- -CubicWeb finds all registered URLPathEvaluators, orders them according -to their ``priority`` attribute and calls their ``evaluate_path()`` -method. The first that returns something and doesn't raise a -``PathDontMatch`` exception wins. - -Here is the default evaluator chain: - -1. :class:`cubicweb.web.views.urlpublishing.RawPathEvaluator` handles - unique url segments that match exactly one of the registered - controller's *__regid__*. Urls such as */view?*, */edit?*, */json?* - fall in that category; - -2. :class:`cubicweb.web.views.urlpublishing.EidPathEvaluator` handles - unique url segments that are eids (e.g. */1234*); - -3. :class:`cubicweb.web.views.urlpublishing.URLRewriteEvaluator` - selects all urlrewriter components, sorts them according to their - priority, call their ``rewrite()`` method, the first one that - doesn't raise a ``KeyError`` wins. This is where the - :mod:`cubicweb.web.views.urlrewrite` and - :class:`cubicweb.web.views.urlrewrite.SimpleReqRewriter` comes into - play; - -4. :class:`cubicweb.web.views.urlpublishing.RestPathEvaluator` handles - urls based on entity types and attributes : ((/])?/)? This is why ``cwuser/carlos`` works; - -5. :class:`cubicweb.web.views.urlpublishing.ActionPathEvaluator` - handles any of the previous paths with an additional trailing - "/" segment, being one of the registered actions' - __regid__. - - -.. note:: - - Actionpath executes a query whose results is lost - because of redirecting instead of direct traversal. -""" -__docformat__ = "restructuredtext en" - -from rql import TypeResolverException - -from cubicweb import RegistryException -from cubicweb.web import NotFound, Redirect, component, views - - -class PathDontMatch(Exception): - """exception used by url evaluators to notify they can't evaluate - a path - """ - -class URLPublisherComponent(component.Component): - """Associate url path to view identifier / rql queries, by - applying a chain of urlpathevaluator components. - - An evaluator is a URLPathEvaluator subclass with an .evaluate_path - method taking the request object and the path to publish as - argument. It will either return a publishing method identifier - and an rql query on success or raise a `PathDontMatch` exception - on failure. URL evaluators are called according to their - `priority` attribute, with 0 as the greatest priority and greater - values as lower priority. The first evaluator returning a result - or raising something else than `PathDontMatch` will stop the - handlers chain. - """ - __regid__ = 'urlpublisher' - vreg = None # XXX necessary until property for deprecation warning is on appobject - - def __init__(self, vreg, default_method='view'): - super(URLPublisherComponent, self).__init__() - self.vreg = vreg - self.default_method = default_method - evaluators = [] - for evaluatorcls in vreg['components']['urlpathevaluator']: - # instantiation needed - evaluator = evaluatorcls(self) - evaluators.append(evaluator) - self.evaluators = sorted(evaluators, key=lambda x: x.priority) - - def process(self, req, path): - """Given a URL (essentially characterized by a path on the - server, but additional information may be found in the request - object), return a publishing method identifier - (e.g. controller) and an optional result set. - - :type req: `cubicweb.web.request.CubicWebRequestBase` - :param req: the request object - - :type path: str - :param path: the path of the resource to publish. If empty, None or "/" - "view" is used as the default path. 
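To illustrate the evaluator chain described in the module docstring above, here is a sketch of a custom evaluator a cube could register (the ``/latest`` path segment and its RQL are made up):

.. sourcecode:: python

    from cubicweb.web.views.urlpublishing import (URLPathEvaluator,
                                                  PathDontMatch)

    class LatestChangesEvaluator(URLPathEvaluator):
        """map the made-up /latest path to the ten most recently
        modified entities, rendered by the default controller"""
        priority = 2  # tried before RestPathEvaluator (priority 3)

        def evaluate_path(self, req, parts):
            if parts != ['latest']:
                raise PathDontMatch()
            rset = req.execute('Any X ORDERBY MD DESC LIMIT 10 '
                               'WHERE X modification_date MD')
            return None, rset  # None means "use the default method"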
- - :rtype: tuple(str, `cubicweb.rset.ResultSet` or None) - :return: the publishing method identifier and an optional result set - - :raise NotFound: if no handler is able to decode the given path - """ - parts = [part for part in path.split('/') - if part != ''] or (self.default_method,) - if req.form.get('rql'): - if parts[0] in self.vreg['controllers']: - return parts[0], None - return 'view', None - for evaluator in self.evaluators: - try: - pmid, rset = evaluator.evaluate_path(req, parts[:]) - break - except PathDontMatch: - continue - else: - raise NotFound(path) - if pmid is None: - pmid = self.default_method - return pmid, rset - - -class URLPathEvaluator(component.Component): - __abstract__ = True - __regid__ = 'urlpathevaluator' - vreg = None # XXX necessary until property for deprecation warning is on appobject - - def __init__(self, urlpublisher): - self.urlpublisher = urlpublisher - self.vreg = urlpublisher.vreg - - -class RawPathEvaluator(URLPathEvaluator): - """handle path of the form:: - - ?parameters... - """ - priority = 0 - def evaluate_path(self, req, parts): - if len(parts) == 1 and parts[0] in self.vreg['controllers']: - return parts[0], None - raise PathDontMatch() - - -class EidPathEvaluator(URLPathEvaluator): - """handle path with the form:: - - - """ - priority = 1 - def evaluate_path(self, req, parts): - if len(parts) != 1: - raise PathDontMatch() - try: - rset = req.execute('Any X WHERE X eid %(x)s', {'x': int(parts[0])}) - except ValueError: - raise PathDontMatch() - if rset.rowcount == 0: - raise NotFound() - return None, rset - - -class RestPathEvaluator(URLPathEvaluator): - """handle path with the form:: - - [[/]/]* - """ - priority = 3 - - def evaluate_path(self, req, parts): - if not (0 < len(parts) < 4): - raise PathDontMatch() - try: - etype = self.vreg.case_insensitive_etypes[parts.pop(0).lower()] - except KeyError: - raise PathDontMatch() - cls = self.vreg['etypes'].etype_class(etype) - if parts: - if len(parts) == 2: - attrname = parts.pop(0).lower() - try: - cls.e_schema.subjrels[attrname] - except KeyError: - raise PathDontMatch() - else: - attrname = cls.cw_rest_attr_info()[0] - value = req.url_unquote(parts.pop(0)) - return self.handle_etype_attr(req, cls, attrname, value) - return self.handle_etype(req, cls) - - def set_vid_for_rset(self, req, cls, rset): # cls is there to ease overriding - if rset.rowcount == 0: - raise NotFound() - if 'vid' not in req.form: - # check_table=False tells vid_from_rset not to try to use a table view if fetch_rql - # include some non final relation - req.form['vid'] = views.vid_from_rset(req, rset, req.vreg.schema, - check_table=False) - - def handle_etype(self, req, cls): - rset = req.execute(cls.fetch_rql(req.user)) - self.set_vid_for_rset(req, cls, rset) - return None, rset - - def handle_etype_attr(self, req, cls, attrname, value): - st = cls.fetch_rqlst(req.user, ordermethod=None) - st.add_constant_restriction(st.get_variable('X'), attrname, - 'x', 'Substitute') - if attrname == 'eid': - try: - rset = req.execute(st.as_string(), {'x': int(value)}) - except (ValueError, TypeResolverException): - # conflicting eid/type - raise PathDontMatch() - else: - rset = req.execute(st.as_string(), {'x': value}) - self.set_vid_for_rset(req, cls, rset) - return None, rset - - -class URLRewriteEvaluator(URLPathEvaluator): - """tries to find a rewrite rule to apply - - URL rewrite rule definitions are stored in URLRewriter objects - """ - priority = 2 - - def evaluate_path(self, req, parts): - # uri <=> req._twreq.path or 
req._twreq.uri - uri = req.url_unquote('/' + '/'.join(parts)) - evaluators = sorted(self.vreg['urlrewriting'].all_objects(), - key=lambda x: x.priority, reverse=True) - for rewritercls in evaluators: - rewriter = rewritercls(req) - try: - # XXX we might want to chain url rewrites - return rewriter.rewrite(req, uri) - except KeyError: - continue - raise PathDontMatch() - - -class ActionPathEvaluator(URLPathEvaluator): - """handle path with the form:: - - / - """ - priority = 4 - - def evaluate_path(self, req, parts): - if len(parts) < 2: - raise PathDontMatch() - # remove last part and see if this is something like an actions - # if so, call - # XXX bad smell: refactor to simpler code - try: - actionsreg = self.vreg['actions'] - requested = parts.pop(-1) - actions = actionsreg[requested] - except RegistryException: - raise PathDontMatch() - for evaluator in self.urlpublisher.evaluators: - if evaluator is self or evaluator.priority == 0: - continue - try: - pmid, rset = evaluator.evaluate_path(req, parts[:]) - except PathDontMatch: - continue - else: - try: - action = actionsreg._select_best(actions, req, rset=rset) - if action is not None: - raise Redirect(action.url()) - except RegistryException: - pass # continue searching - raise PathDontMatch() diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/urlrewrite.py --- a/web/views/urlrewrite.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,234 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Rules based url rewriter component, to get configurable RESTful urls""" - -import re - -from six import string_types, add_metaclass - -from cubicweb.uilib import domid -from cubicweb.appobject import AppObject - - -def rgx(pattern, flags=0): - """this is just a convenient shortcut to add the $ sign""" - return re.compile(pattern+'$', flags) - -class metarewriter(type): - """auto-extend rules dictionary""" - def __new__(mcs, name, bases, classdict): - # collect baseclass' rules - rules = [] - ignore_baseclass_rules = classdict.get('ignore_baseclass_rules', False) - if not ignore_baseclass_rules: - for base in bases: - rules[0:0] = getattr(base, 'rules', []) - rules[0:0] = classdict.get('rules', []) - inputs = set() - for data in rules[:]: - try: - input, output, groups = data - except ValueError: - input, output = data - if input in inputs: - rules.remove( (input, output) ) - else: - inputs.add(input) - classdict['rules'] = rules - return super(metarewriter, mcs).__new__(mcs, name, bases, classdict) - - -@add_metaclass(metarewriter) -class URLRewriter(AppObject): - """Base class for URL rewriters. - - Url rewriters should have a `rules` dict that maps an input URI - to something that should be used for rewriting. 
- - The actual logic that defines how the rules dict is used is implemented - in the `rewrite` method. - - A `priority` attribute might be used to indicate which rewriter - should be tried first. The higher the priority is, the earlier the - rewriter will be tried. - """ - __registry__ = 'urlrewriting' - __abstract__ = True - priority = 1 - - def rewrite(self, req, uri): - raise NotImplementedError - - -class SimpleReqRewriter(URLRewriter): - """The SimpleReqRewriters uses a `rules` dict that maps input URI - (regexp or plain string) to a dictionary to update the request's - form. - - If the input uri is a regexp, group substitution is allowed. - """ - __regid__ = 'simple' - - rules = [ - ('/_', dict(vid='manage')), - ('/_registry', dict(vid='registry')), -# (rgx('/_([^/]+?)/?'), dict(vid=r'\1')), - ('/schema', dict(vid='schema')), - ('/index', dict(vid='index')), - ('/myprefs', dict(vid='propertiesform')), - ('/siteconfig', dict(vid='systempropertiesform')), - ('/siteinfo', dict(vid='siteinfo')), - ('/manage', dict(vid='manage')), - ('/notfound', dict(vid='404')), - ('/error', dict(vid='error')), - ('/sparql', dict(vid='sparql')), - ('/processinfo', dict(vid='processinfo')), - (rgx('/cwuser', re.I), dict(vid='cw.users-and-groups-management', - tab=domid('cw.users-management'))), - (rgx('/cwgroup', re.I), dict(vid='cw.users-and-groups-management', - tab=domid('cw.groups-management'))), - (rgx('/cwsource', re.I), dict(vid='cw.sources-management')), - # XXX should be case insensitive as 'create', but I would like to find another way than - # relying on the etype_selector - (rgx('/schema/([^/]+?)/?'), dict(vid='primary', rql=r'Any X WHERE X is CWEType, X name "\1"')), - (rgx('/add/([^/]+?)/?'), dict(vid='creation', etype=r'\1')), - (rgx('/doc/images/(.+?)/?'), dict(vid='wdocimages', fid=r'\1')), - (rgx('/doc/?'), dict(vid='wdoc', fid=r'main')), - (rgx('/doc/(.+?)/?'), dict(vid='wdoc', fid=r'\1')), - ] - - def rewrite(self, req, uri): - """for each `input`, `output `in rules, if `uri` matches `input`, - req's form is updated with `output` - """ - for data in self.rules: - try: - inputurl, infos, required_groups = data - except ValueError: - inputurl, infos = data - required_groups = None - if required_groups and not req.user.matching_groups(required_groups): - continue - if isinstance(inputurl, string_types): - if inputurl == uri: - req.form.update(infos) - break - elif inputurl.match(uri): # it's a regexp - # XXX what about i18n? 
(vtitle for instance) - for param, value in infos.items(): - if isinstance(value, string_types): - req.form[param] = inputurl.sub(value, uri) - else: - req.form[param] = value - break - else: - self.debug("no simple rewrite rule found for %s", uri) - raise KeyError(uri) - return None, None - - -def build_rset(rql, rgxgroups=None, setuser=False, - vid=None, vtitle=None, form={}, **kwargs): - - def do_build_rset(inputurl, uri, req, schema, kwargs=kwargs): - kwargs = kwargs.copy() - if rgxgroups: - match = inputurl.match(uri) - for arg, group in rgxgroups: - kwargs[arg] = match.group(group) - req.form.update(form) - if setuser: - kwargs['u'] = req.user.eid - if vid: - req.form['vid'] = vid - if vtitle: - req.form['vtitle'] = req._(vtitle) % kwargs - return None, req.execute(rql, kwargs) - return do_build_rset - -def update_form(**kwargs): - def do_build_rset(inputurl, uri, req, schema): - match = inputurl.match(uri) - kwargs.update(match.groupdict()) - req.form.update(kwargs) - return None, None - return do_build_rset - -def rgx_action(rql=None, args=None, argsgroups=(), setuser=False, - form=None, formgroups=(), transforms={}, rqlformparams=(), controller=None): - def do_build_rset(inputurl, uri, req, schema, - ): - if rql: - kwargs = args and args.copy() or {} - if argsgroups: - match = inputurl.match(uri) - for key in argsgroups: - value = match.group(key) - try: - kwargs[key] = transforms[key](value) - except KeyError: - kwargs[key] = value - if setuser: - kwargs['u'] = req.user.eid - for param in rqlformparams: - kwargs.setdefault(param, req.form.get(param)) - rset = req.execute(rql, kwargs) - else: - rset = None - form2 = form and form.copy() or {} - if formgroups: - match = inputurl.match(uri) - for key in formgroups: - form2[key] = match.group(key) - if "vtitle" in form2: - form2['vtitle'] = req.__(form2['vtitle']) - if form2: - req.form.update(form2) - return controller, rset - return do_build_rset - - -class SchemaBasedRewriter(URLRewriter): - """Here, the rules dict maps regexps or plain strings to callbacks - that will be called with inputurl, uri, req, schema as parameters. - """ - __regid__ = 'schemabased' - rules = [ - # rgxp : callback - (rgx('/search/(.+)'), build_rset(rql=r'Any X ORDERBY FTIRANK(X) DESC WHERE X has_text %(text)s', - rgxgroups=[('text', 1)])), - ] - - def rewrite(self, req, uri): - # XXX this could be refacted with SimpleReqRewriter - for data in self.rules: - try: - inputurl, callback, required_groups = data - except ValueError: - inputurl, callback = data - required_groups = None - if required_groups and not req.user.matching_groups(required_groups): - continue - if isinstance(inputurl, string_types): - if inputurl == uri: - return callback(inputurl, uri, req, self._cw.vreg.schema) - elif inputurl.match(uri): # it's a regexp - return callback(inputurl, uri, req, self._cw.vreg.schema) - else: - self.debug("no schemabased rewrite rule found for %s", uri) - raise KeyError(uri) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/vcard.py --- a/web/views/vcard.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,66 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""vcard import / export - -""" -__docformat__ = "restructuredtext en" - -from cubicweb.predicates import is_instance -from cubicweb.view import EntityView - -from cubicweb import _ - -VCARD_PHONE_TYPES = {'home': 'HOME', 'office': 'WORK', 'mobile': 'CELL', 'fax': 'FAX'} - -class VCardCWUserView(EntityView): - """export a person information as a vcard""" - __regid__ = 'vcard' - title = _('vcard') - templatable = False - content_type = 'text/x-vcard' - __select__ = is_instance('CWUser') - - def set_request_content_type(self): - """overriden to set a .vcf filename""" - self._cw.set_content_type(self.content_type, filename='vcard.vcf') - - def cell_call(self, row, col): - self.vcard_header() - self.vcard_content(self.cw_rset.complete_entity(row, col)) - self.vcard_footer() - - def vcard_header(self): - self.w(u'BEGIN:vcard\n') - self.w(u'VERSION:3.0\n') - - def vcard_footer(self): - self.w(u'NOTE:this card has been generated by CubicWeb\n') - self.w(u'END:vcard\n') - - def vcard_content(self, entity): - who = u'%s %s' % (entity.surname or '', - entity.firstname or '') - w = self.w - w(u'FN:%s\n' % who) - w(u'N:%s;;;;\n' % entity.login) - w(u'TITLE:%s\n' % who) - for email in entity.use_email: - w(u'EMAIL;TYPE=INTERNET:%s\n' % email.address) - -from logilab.common.deprecation import class_renamed -VCardEuserView = VCardEUserView = class_renamed('VCardEuserView', VCardCWUserView) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/wdoc.py --- a/web/views/wdoc.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,231 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
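The `rules` format documented on SimpleReqRewriter above is the usual extension point for cubes. Below is a minimal, purely illustrative sketch, not part of this changeset: the class name, the Project entity type and the URLs are invented, while the 'index' and 'primary' view ids and the group-substitution behaviour come straight from the built-in rules shown above::

    # illustrative only, not part of this changeset
    from cubicweb.web.views.urlrewrite import SimpleReqRewriter, rgx

    class MyCubeReqRewriter(SimpleReqRewriter):
        rules = [
            # plain string: exact match, req.form is updated with the dict
            ('/reports', dict(vid='index')),
            # regexp (rgx() appends the trailing $): group substitution applies
            # to string values, as in the built-in '/schema/...' rule
            (rgx('/project/([^/]+?)/?'),
             dict(vid='primary', rql=r'Any P WHERE P is Project, P name "\1"')),
        ]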
-"""inline help system, rendering ReST files in the `wdoc` subdirectory of -CubicWeb and cubes - -""" -__docformat__ = "restructuredtext en" - -from itertools import chain -from os.path import join -from bisect import bisect_right -from datetime import date - -from logilab.common.changelog import ChangeLog -from logilab.common.date import strptime, todate -from logilab.common.registry import yes -from logilab.mtconverter import CHARSET_DECL_RGX - -from cubicweb.predicates import match_form_params -from cubicweb.view import StartupView -from cubicweb.uilib import rest_publish -from cubicweb.web import NotFound, action -from cubicweb import _ - -# table of content management ################################################# - -try: - from xml.etree.ElementTree import parse -except ImportError: - from elementtree.ElementTree import parse - -def build_toc_index(node, index): - try: - nodeidx = node.attrib['resource'] - assert not nodeidx in index, nodeidx - index[nodeidx] = node - except KeyError: - pass - for child in node: - build_toc_index(child, index) - child.parent = node - -def get_insertion_point(section, index): - if section.attrib.get('insertafter'): - snode = index[section.attrib['insertafter']] - node = snode.parent - idx = node.getchildren().index(snode) + 1 - elif section.attrib.get('insertbefore'): - snode = index[section.attrib['insertbefore']] - node = snode.parent - idx = node.getchildren().index(snode) - elif 'appendto' in section.attrib: - node = index[section.attrib['appendto']] - idx = None - else: - node, idx = None, None - return node, idx - -def build_toc(config): - alltocfiles = reversed(tuple(config.locate_all_files('toc.xml'))) - maintoc = parse(next(alltocfiles)).getroot() - maintoc.parent = None - index = {} - build_toc_index(maintoc, index) - # insert component documentation into the tree according to their toc.xml - # file - for fpath in alltocfiles: - toc = parse(fpath).getroot() - for section in toc: - node, idx = get_insertion_point(section, index) - if node is None: - continue - if idx is None: - node.append(section) - else: - node.insert(idx, section) - section.parent = node - build_toc_index(section, index) - return index - -def title_for_lang(node, lang): - fallback_title = None - for title in node.findall('title'): - title_lang = title.attrib['{http://www.w3.org/XML/1998/namespace}lang'] - if title_lang == lang: - return unicode(title.text) - if title_lang == 'en': - fallback_title = unicode(title.text) - return fallback_title - -def subsections(node): - return [child for child in node if child.tag == 'section'] - -# help views ################################################################## - -class InlineHelpView(StartupView): - __select__ = match_form_params('fid') - __regid__ = 'wdoc' - title = _('site documentation') - - def call(self): - fid = self._cw.form['fid'] - vreg = self._cw.vreg - for lang in chain((self._cw.lang, vreg.property_value('ui.language')), - vreg.config.available_languages()): - rid = '%s_%s.rst' % (fid, lang) - resourcedir = vreg.config.locate_doc_file(rid) - if resourcedir: - break - else: - raise NotFound - self.tocindex = build_toc(vreg.config) - try: - node = self.tocindex[fid] - except KeyError: - node = None - else: - self.navigation_links(node) - self.w(u'
        ') - self.w(u'

        %s

        ' % (title_for_lang(node, self._cw.lang))) - data = open(join(resourcedir, rid)).read() - self.w(rest_publish(self, data)) - if node is not None: - self.subsections_links(node) - self.w(u'
        ') - self.navigation_links(node) - - def navigation_links(self, node): - req = self._cw - parent = node.parent - if parent is None: - return - brothers = subsections(parent) - self.w(u'
        \n') - previousidx = brothers.index(node) - 1 - if previousidx >= 0: - self.navsection(brothers[previousidx], 'prev') - self.navsection(parent, 'up') - nextidx = brothers.index(node) + 1 - if nextidx < len(brothers): - self.navsection(brothers[nextidx], 'next') - self.w(u'
        \n') - - navinfo = {'prev': ('', 'data/previous.png', _('i18nprevnext_previous')), - 'next': ('', 'data/next.png', _('i18nprevnext_next')), - 'up': ('', 'data/up.png', _('i18nprevnext_up'))} - - def navsection(self, node, navtype): - htmlclass, imgpath, msgid = self.navinfo[navtype] - self.w(u'' % htmlclass) - self.w(u'%s : ' % self._cw._(msgid)) - self.w(u'%s' % ( - self._cw.build_url('doc/'+node.attrib['resource']), - title_for_lang(node, self._cw.lang))) - self.w(u'\n') - - def subsections_links(self, node, first=True): - sub = subsections(node) - if not sub: - return - if first: - self.w(u'
        ') - self.w(u'
          ') - for child in sub: - self.w(u'
        • %s' % ( - self._cw.build_url('doc/'+child.attrib['resource']), - title_for_lang(child, self._cw.lang))) - self.subsections_links(child, False) - self.w(u'
        • ') - self.w(u'
        \n') - - - -class InlineHelpImageView(StartupView): - __regid__ = 'wdocimages' - __select__ = match_form_params('fid') - binary = True - templatable = False - content_type = 'image/png' - - def call(self): - fid = self._cw.form['fid'] - for lang in chain((self._cw.lang, self._cw.vreg.property_value('ui.language')), - self._cw.vreg.config.available_languages()): - rid = join('images', '%s_%s.png' % (fid, lang)) - resourcedir = self._cw.vreg.config.locate_doc_file(rid) - if resourcedir: - break - else: - raise NotFound - self.w(open(join(resourcedir, rid)).read()) - - - -class HelpAction(action.Action): - __regid__ = 'help' - __select__ = yes() - - category = 'footer' - order = 0 - title = _('Help') - - def url(self): - return self._cw.build_url('doc/main') - - -class AboutAction(action.Action): - __regid__ = 'about' - __select__ = yes() - - category = 'footer' - order = 2 - title = _('About this site') - - def url(self): - return self._cw.build_url('doc/about') diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/workflow.py --- a/web/views/workflow.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,452 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
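The table-of-contents code above (build_toc_index(), get_insertion_point(), title_for_lang()) consumes small toc.xml files shipped by CubicWeb and by cubes. The following sketch is not part of this changeset; the element and attribute names (section, resource, insertafter, title with xml:lang) are the ones that code reads, but the resource ids themselves are invented::

    # illustrative only, not part of this changeset
    from xml.etree.ElementTree import fromstring

    cube_toc = fromstring(
        '<toc>'
        '<section resource="mycube_intro" insertafter="main">'
        '<title xml:lang="en">My cube</title>'
        '<title xml:lang="fr">Mon cube</title>'
        '</section>'
        '</toc>')
    for section in cube_toc:
        # ElementTree expands xml:lang into the namespaced key title_for_lang() reads
        titles = dict((t.attrib['{http://www.w3.org/XML/1998/namespace}lang'], t.text)
                      for t in section.findall('title'))
        print(section.attrib['resource'], section.attrib.get('insertafter'), titles)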
-"""workflow views: - -* IWorkflowable views and forms -* workflow entities views (State, Transition, TrInfo) -""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import os -from warnings import warn - -from six import add_metaclass - -from logilab.mtconverter import xml_escape -from logilab.common.graph import escape -from logilab.common.deprecation import class_deprecated - -from cubicweb import Unauthorized -from cubicweb.predicates import (has_related_entities, one_line_rset, - relation_possible, match_form_params, - score_entity, is_instance, adaptable) -from cubicweb.view import EntityView -from cubicweb.schema import display_name -from cubicweb.web import stdmsgs, action, component, form, action -from cubicweb.web import formfields as ff, formwidgets as fwdgs -from cubicweb.web.views import TmpFileViewMixin -from cubicweb.web.views import uicfg, forms, primary, ibreadcrumbs -from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab -from cubicweb.web.views.dotgraphview import DotGraphView, DotPropsHandler - -_pvs = uicfg.primaryview_section -_pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden') -_pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden') -_pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden') -_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden') -for rtype in ('in_state', 'by_transition', 'from_state', 'to_state'): - _pvs.tag_subject_of(('*', rtype, '*'), 'hidden') - _pvs.tag_object_of(('*', rtype, '*'), 'hidden') -_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden') - -_abaa = uicfg.actionbox_appearsin_addmenu -_abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False) -_abaa.tag_subject_of(('State', 'allowed_transition', 'BaseTransition'), False) -_abaa.tag_object_of(('SubWorkflowExitPoint', 'destination_state', 'State'), - False) -_abaa.tag_subject_of(('*', 'wf_info_for', '*'), False) -_abaa.tag_object_of(('*', 'wf_info_for', '*'), False) - -_abaa.tag_object_of(('*', 'state_of', 'CWEType'), True) -_abaa.tag_object_of(('*', 'transition_of', 'CWEType'), True) -_abaa.tag_subject_of(('Transition', 'destination_state', '*'), True) -_abaa.tag_object_of(('*', 'allowed_transition', 'Transition'), True) -_abaa.tag_object_of(('*', 'destination_state', 'State'), True) -_abaa.tag_subject_of(('State', 'allowed_transition', '*'), True) -_abaa.tag_object_of(('State', 'state_of', 'Workflow'), True) -_abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True) -_abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True) - -_afs = uicfg.autoform_section -_affk = uicfg.autoform_field_kwargs - -# IWorkflowable views ######################################################### - -class ChangeStateForm(forms.CompositeEntityForm): - # set dom id to ensure there is no conflict with edition form (see - # session_key() implementation) - __regid__ = domid = 'changestate' - - form_renderer_id = 'base' # don't want EntityFormRenderer - form_buttons = [fwdgs.SubmitButton(), - fwdgs.Button(stdmsgs.BUTTON_CANCEL, cwaction='cancel')] - - -class ChangeStateFormView(form.FormViewMixIn, EntityView): - __regid__ = 'statuschange' - title = _('status change') - __select__ = (one_line_rset() - & match_form_params('treid') - & adaptable('IWorkflowable')) - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - transition = self._cw.entity_from_eid(self._cw.form['treid']) - form = self.get_form(entity, transition) - self.w(u'

        %s %s

        \n' % (self._cw._(transition.name), - entity.view('oneline'))) - msg = self._cw._('status will change from %(st1)s to %(st2)s') % { - 'st1': entity.cw_adapt_to('IWorkflowable').printable_state, - 'st2': self._cw._(transition.destination(entity).name)} - self.w(u'

        %s

        \n' % msg) - form.render(w=self.w) - - def redirectpath(self, entity): - return entity.rest_path() - - def get_form(self, entity, transition, **kwargs): - # XXX used to specify both rset/row/col and entity in case implements - # selector (and not is_instance) is used on custom form - form = self._cw.vreg['forms'].select( - 'changestate', self._cw, entity=entity, transition=transition, - redirect_path=self.redirectpath(entity), **kwargs) - trinfo = self._cw.vreg['etypes'].etype_class('TrInfo')(self._cw) - trinfo.eid = next(self._cw.varmaker) - subform = self._cw.vreg['forms'].select('edition', self._cw, entity=trinfo, - mainform=False) - subform.field_by_name('wf_info_for', 'subject').value = entity.eid - trfield = subform.field_by_name('by_transition', 'subject') - trfield.widget = fwdgs.HiddenInput() - trfield.value = transition.eid - form.add_subform(subform) - return form - - -class WFHistoryView(EntityView): - __regid__ = 'wfhistory' - __select__ = relation_possible('wf_info_for', role='object') & \ - score_entity(lambda x: x.cw_adapt_to('IWorkflowable').workflow_history) - - title = _('Workflow history') - - def cell_call(self, row, col, view=None, title=title): - _ = self._cw._ - eid = self.cw_rset[row][col] - sel = 'Any FS,TS,C,D' - rql = ' ORDERBY D DESC WHERE WF wf_info_for X,'\ - 'WF from_state FS, WF to_state TS, WF comment C,'\ - 'WF creation_date D' - if self._cw.vreg.schema.eschema('CWUser').has_perm(self._cw, 'read'): - sel += ',U,WF' - rql += ', WF owned_by U?' - headers = (_('from_state'), _('to_state'), _('comment'), _('date'), - _('CWUser')) - else: - sel += ',WF' - headers = (_('from_state'), _('to_state'), _('comment'), _('date')) - rql = '%s %s, X eid %%(x)s' % (sel, rql) - try: - rset = self._cw.execute(rql, {'x': eid}) - except Unauthorized: - return - if rset: - if title: - self.w(u'

        %s

        \n' % _(title)) - self.wview('table', rset, headers=headers, - cellvids={2: 'editable-final'}) - - -class WFHistoryVComponent(component.EntityCtxComponent): - """display the workflow history for entities supporting it""" - __regid__ = 'wfhistory' - __select__ = component.EntityCtxComponent.__select__ & WFHistoryView.__select__ - context = 'navcontentbottom' - title = _('Workflow history') - - def render_body(self, w): - self.entity.view('wfhistory', w=w, title=None) - - -class InContextWithStateView(EntityView): - """display incontext view for an entity as well as its current state""" - __regid__ = 'incontext-state' - __select__ = adaptable('IWorkflowable') - def entity_call(self, entity): - iwf = entity.cw_adapt_to('IWorkflowable') - self.w(u'%s [%s]' % (entity.view('incontext'), iwf.printable_state)) - - -# workflow actions ############################################################# - -class WorkflowActions(action.Action): - """fill 'workflow' sub-menu of the actions box""" - __regid__ = 'workflow' - __select__ = (action.Action.__select__ & one_line_rset() & - relation_possible('in_state')) - - submenu = _('workflow') - order = 10 - - def fill_menu(self, box, menu): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - menu.label = u'%s: %s' % (self._cw._('state'), - entity.cw_adapt_to('IWorkflowable').printable_state) - menu.append_anyway = True - super(WorkflowActions, self).fill_menu(box, menu) - - def actual_actions(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - iworkflowable = entity.cw_adapt_to('IWorkflowable') - hastr = False - for tr in iworkflowable.possible_transitions(): - url = entity.absolute_url(vid='statuschange', treid=tr.eid) - yield self.build_action(self._cw._(tr.name), url) - hastr = True - # don't propose to see wf if user can't pass any transition - if hastr: - wfurl = iworkflowable.current_workflow.absolute_url() - yield self.build_action(self._cw._('view workflow'), wfurl) - if iworkflowable.workflow_history: - wfurl = entity.absolute_url(vid='wfhistory') - yield self.build_action(self._cw._('view history'), wfurl) - - -# workflow entity types views ################################################## - -_pvs = uicfg.primaryview_section -_pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden') -_pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden') -_pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden') -_pvs.tag_object_of(('*', 'default_workflow', 'Workflow'), 'hidden') - -_abaa = uicfg.actionbox_appearsin_addmenu -_abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False) -_abaa.tag_subject_of(('State', 'allowed_transition', 'BaseTransition'), False) -_abaa.tag_object_of(('SubWorkflowExitPoint', 'destination_state', 'State'), - False) -_abaa.tag_object_of(('State', 'state_of', 'Workflow'), True) -_abaa.tag_object_of(('BaseTransition', 'transition_of', 'Workflow'), False) -_abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True) -_abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True) - -class WorkflowPrimaryView(TabbedPrimaryView): - __select__ = is_instance('Workflow') - tabs = [ _('wf_tab_info'), _('wfgraph'),] - default_tab = 'wf_tab_info' - - -class StateInContextView(EntityView): - """convenience trick, State's incontext view should not be clickable""" - __regid__ = 'incontext' - __select__ = is_instance('State') - - def cell_call(self, row, col): - self.w(xml_escape(self._cw.view('textincontext', self.cw_rset, - 
row=row, col=col))) - -class WorkflowTabTextView(PrimaryTab): - __regid__ = 'wf_tab_info' - __select__ = PrimaryTab.__select__ & one_line_rset() & is_instance('Workflow') - - def render_entity_attributes(self, entity): - _ = self._cw._ - self.w(u'
        %s
        ' % (entity.printable_value('description'))) - self.w(u'%s%s' % (_("workflow_of").capitalize(), _(" :"))) - html = [] - for e in entity.workflow_of: - view = e.view('outofcontext') - if entity.eid == e.default_workflow[0].eid: - view += u' [%s]' % _('default_workflow') - html.append(view) - self.w(', '.join(v for v in html)) - self.w(u'

        %s

        ' % _("Transition_plural")) - rset = self._cw.execute( - 'Any T,T,DS,T,TT ORDERBY TN WHERE T transition_of WF, WF eid %(x)s,' - 'T type TT, T name TN, T destination_state DS?', {'x': entity.eid}) - self.wview('table', rset, 'null', - cellvids={ 1: 'trfromstates', 2: 'outofcontext', 3:'trsecurity',}, - headers = (_('Transition'), _('from_state'), - _('to_state'), _('permissions'), _('type') ), - ) - - -class TransitionSecurityTextView(EntityView): - __regid__ = 'trsecurity' - __select__ = is_instance('Transition') - - def cell_call(self, row, col): - _ = self._cw._ - entity = self.cw_rset.get_entity(self.cw_row, self.cw_col) - if entity.require_group: - self.w(u'
        %s%s %s
        ' % - (_('groups'), _(" :"), - u', '.join((g.view('incontext') for g - in entity.require_group)))) - if entity.condition: - self.w(u'
        %s%s %s
        ' % - ( _('conditions'), _(" :"), - u'
        '.join((e.dc_title() for e - in entity.condition)))) - -class TransitionAllowedTextView(EntityView): - __regid__ = 'trfromstates' - __select__ = is_instance('Transition') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(self.cw_row, self.cw_col) - self.w(u', '.join((e.view('outofcontext') for e - in entity.reverse_allowed_transition))) - - -# workflow entity types edition ################################################ - -def _wf_items_for_relation(req, wfeid, wfrelation, field): - wf = req.entity_from_eid(wfeid) - rschema = req.vreg.schema[field.name] - param = 'toeid' if field.role == 'subject' else 'fromeid' - return sorted((e.view('combobox'), unicode(e.eid)) - for e in getattr(wf, 'reverse_%s' % wfrelation) - if rschema.has_perm(req, 'add', **{param: e.eid})) - -# TrInfo -_afs.tag_subject_of(('TrInfo', 'to_state', '*'), 'main', 'hidden') -_afs.tag_subject_of(('TrInfo', 'from_state', '*'), 'main', 'hidden') -_afs.tag_attribute(('TrInfo', 'tr_count'), 'main', 'hidden') - -# BaseTransition -# XXX * allowed_transition BaseTransition -# XXX BaseTransition destination_state * - -def transition_states_vocabulary(form, field): - entity = form.edited_entity - if entity.has_eid(): - wfeid = entity.transition_of[0].eid - else: - eids = form.linked_to.get(('transition_of', 'subject')) - if not eids: - return [] - wfeid = eids[0] - return _wf_items_for_relation(form._cw, wfeid, 'state_of', field) - -_afs.tag_subject_of(('*', 'destination_state', '*'), 'main', 'attributes') -_affk.tag_subject_of(('*', 'destination_state', '*'), - {'choices': transition_states_vocabulary}) -_afs.tag_object_of(('*', 'allowed_transition', '*'), 'main', 'attributes') -_affk.tag_object_of(('*', 'allowed_transition', '*'), - {'choices': transition_states_vocabulary}) - -# State - -def state_transitions_vocabulary(form, field): - entity = form.edited_entity - if entity.has_eid(): - wfeid = entity.state_of[0].eid - else : - eids = form.linked_to.get(('state_of', 'subject')) - if not eids: - return [] - wfeid = eids[0] - return _wf_items_for_relation(form._cw, wfeid, 'transition_of', field) - -_afs.tag_subject_of(('State', 'allowed_transition', '*'), 'main', 'attributes') -_affk.tag_subject_of(('State', 'allowed_transition', '*'), - {'choices': state_transitions_vocabulary}) - - -# adaptaters ################################################################### - -class WorkflowIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('Workflow') - # XXX what if workflow of multiple types? 
- def parent_entity(self): - return self.entity.workflow_of and self.entity.workflow_of[0] or None - -class WorkflowItemIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('BaseTransition', 'State') - def parent_entity(self): - return self.entity.workflow - -class TransitionItemIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('SubWorkflowExitPoint') - def parent_entity(self): - return self.entity.reverse_subworkflow_exit[0] - -class TrInfoIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter): - __select__ = is_instance('TrInfo') - def parent_entity(self): - return self.entity.for_entity - - -# workflow images ############################################################## - -class WorkflowDotPropsHandler(DotPropsHandler): - - def node_properties(self, stateortransition): - """return default DOT drawing options for a state or transition""" - props = super(WorkflowDotPropsHandler, self).node_properties(stateortransition) - if hasattr(stateortransition, 'state_of'): - props['shape'] = 'box' - props['style'] = 'filled' - if stateortransition.reverse_initial_state: - props['fillcolor'] = '#88CC88' - else: - props['shape'] = 'ellipse' - return props - - -class WorkflowVisitor(object): - def __init__(self, entity): - self.entity = entity - - def nodes(self): - for state in self.entity.reverse_state_of: - state.complete() - yield state.eid, state - for transition in self.entity.reverse_transition_of: - transition.complete() - yield transition.eid, transition - - def edges(self): - for transition in self.entity.reverse_transition_of: - for incomingstate in transition.reverse_allowed_transition: - yield incomingstate.eid, transition.eid, transition - for outgoingstate in transition.potential_destinations(): - yield transition.eid, outgoingstate.eid, transition - -class WorkflowGraphView(DotGraphView): - __regid__ = 'wfgraph' - __select__ = EntityView.__select__ & one_line_rset() & is_instance('Workflow') - - def build_visitor(self, entity): - return WorkflowVisitor(entity) - - def build_dotpropshandler(self): - return WorkflowDotPropsHandler(self._cw) - - -@add_metaclass(class_deprecated) -class TmpPngView(TmpFileViewMixin, EntityView): - __deprecation_warning__ = '[3.18] %(cls)s is deprecated' - __regid__ = 'tmppng' - __select__ = match_form_params('tmpfile') - content_type = 'image/png' - binary = True - - def cell_call(self, row=0, col=0): - key = self._cw.form['tmpfile'] - if key not in self._cw.session.data: - # the temp file is gone and there's nothing - # we can do about it - # we should probably write it to some well - # behaved place and serve it - return - tmpfile = self._cw.session.data.pop(key) - self.w(open(tmpfile, 'rb').read()) - os.unlink(tmpfile) diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/xbel.py --- a/web/views/xbel.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,69 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""xbel views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from six.moves import range - -from logilab.mtconverter import xml_escape - -from cubicweb.predicates import is_instance -from cubicweb.view import EntityView -from cubicweb.web.views.xmlrss import XMLView - - -class XbelView(XMLView): - __regid__ = 'xbel' - title = _('xbel export') - templatable = False - content_type = 'text/xml' #application/xbel+xml - - def cell_call(self, row, col): - self.wview('xbelitem', self.cw_rset, row=row, col=col) - - def call(self): - """display a list of entities by calling their view""" - self.w(u'\n' % self._cw.encoding) - self.w(u'') - self.w(u'') - self.w(u'%s' % self._cw._('bookmarks')) - for i in range(self.cw_rset.rowcount): - self.cell_call(i, 0) - self.w(u"") - - -class XbelItemView(EntityView): - __regid__ = 'xbelitem' - - def cell_call(self, row, col): - entity = self.cw_rset.complete_entity(row, col) - self.w(u'' % xml_escape(self.url(entity))) - self.w(u' %s' % xml_escape(entity.dc_title())) - self.w(u'') - - def url(self, entity): - return entity.absolute_url() - - -class XbelItemBookmarkView(XbelItemView): - __select__ = is_instance('Bookmark') - - def url(self, entity): - return entity.actual_url() diff -r 058bb3dc685f -r 0b59724cb3f2 web/views/xmlrss.py --- a/web/views/xmlrss.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,298 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
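The workflow views above (WorkflowActions, ChangeStateFormView, WFHistoryView) all go through the IWorkflowable adapter. The sketch below is not part of this changeset and only uses attributes and methods visible in the deleted code; `entity` is assumed to be any entity whose type has a workflow::

    # illustrative only, not part of this changeset
    def show_workflow_actions(entity):
        """print what the 'workflow' actions submenu would propose for entity"""
        iwf = entity.cw_adapt_to('IWorkflowable')
        print(iwf.printable_state)             # state label shown in the actions box
        for tr in iwf.possible_transitions():  # transitions the user may fire
            # same URL the submenu builds for each transition
            print(tr.name, entity.absolute_url(vid='statuschange', treid=tr.eid))
        if iwf.workflow_history:               # guards the 'view history' entry
            print(entity.absolute_url(vid='wfhistory'))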
-"""base xml and rss views""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -from base64 import b64encode -from time import timezone - -from six.moves import range - -from logilab.mtconverter import xml_escape - -from cubicweb.predicates import (is_instance, non_final_entity, one_line_rset, - appobject_selectable, adaptable) -from cubicweb.view import EntityView, EntityAdapter, AnyRsetView, Component -from cubicweb.uilib import simple_sgml_tag -from cubicweb.web import httpcache, component - -def encode_bytes(value): - return '' % b64encode(value.getvalue()) - -# see cubicweb.sobjects.parser.DEFAULT_CONVERTERS -SERIALIZERS = { - 'String': xml_escape, - 'Bytes': encode_bytes, - 'Date': lambda x: x.strftime('%Y-%m-%d'), - 'Datetime': lambda x: x.strftime('%Y-%m-%d %H:%M:%S'), - 'Time': lambda x: x.strftime('%H:%M:%S'), - 'TZDatetime': lambda x: x.strftime('%Y-%m-%d %H:%M:%S'), # XXX TZ - 'TZTime': lambda x: x.strftime('%H:%M:%S'), - 'Interval': lambda x: x.days * 60*60*24 + x.seconds, - } - -# base xml views ############################################################## - -class XMLView(EntityView): - """xml view for entities""" - __regid__ = 'xml' - title = _('xml export (entities)') - templatable = False - content_type = 'text/xml' - xml_root = 'rset' - item_vid = 'xmlitem' - - def cell_call(self, row, col): - self.wview(self.item_vid, self.cw_rset, row=row, col=col) - - def call(self): - """display a list of entities by calling their view""" - self.w(u'\n' % self._cw.encoding) - self.w(u'<%s size="%s">\n' % (self.xml_root, len(self.cw_rset))) - for i in range(self.cw_rset.rowcount): - self.cell_call(i, 0) - self.w(u'\n' % self.xml_root) - - -class XMLItemView(EntityView): - __regid__ = 'xmlitem' - - def entity_call(self, entity): - """element as an item for an xml feed""" - entity.complete() - source = entity.cw_metainformation()['source']['uri'] - self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n' - % (entity.cw_etype, entity.eid, xml_escape(entity.cwuri), - xml_escape(source))) - for rschema, attrschema in sorted(entity.e_schema.attribute_definitions()): - attr = rschema.type - if attr in ('eid', 'cwuri'): - continue - else: - try: - value = entity.cw_attr_cache[attr] - except KeyError: - # Bytes - continue - if value is None: - self.w(u' <%s/>\n' % attr) - else: - try: - value = SERIALIZERS[attrschema](value) - except KeyError: - pass - self.w(u' <%s>%s\n' % (attr, value, attr)) - for relstr in self._cw.list_form_param('relation'): - try: - rtype, role = relstr.split('-') - except ValueError: - self.error('badly formated relation name %r', relstr) - continue - if role == 'subject': - getrschema = entity.e_schema.subjrels - elif role == 'object': - getrschema = entity.e_schema.objrels - else: - self.error('badly formated relation name %r', relstr) - continue - if not rtype in getrschema: - self.error('unexisting relation %r', relstr) - continue - self.w(u' <%s role="%s">\n' % (rtype, role)) - self.wview('xmlrelateditem', entity.related(rtype, role, safe=True), 'null') - self.w(u' \n' % rtype) - self.w(u'\n' % (entity.e_schema)) - - -class XMLRelatedItemView(EntityView): - __regid__ = 'xmlrelateditem' - add_div_section = False - - def entity_call(self, entity): - # XXX put unique attributes as xml attribute, they are much probably - # used to search existing entities in client data feed, and putting it - # here may avoid an extra request to get those attributes values - self.w(u' <%s eid="%s" cwuri="%s"/>\n' - % (entity.e_schema, entity.eid, 
xml_escape(entity.cwuri))) - - -class XMLRelatedItemStateView(XMLRelatedItemView): - __select__ = is_instance('State') - - def entity_call(self, entity): - self.w(u' <%s eid="%s" cwuri="%s" name="%s"/>\n' - % (entity.e_schema, entity.eid, xml_escape(entity.cwuri), - xml_escape(entity.name))) - - -class XMLRsetView(AnyRsetView): - """dumps raw rset as xml""" - __regid__ = 'rsetxml' - title = _('xml export') - templatable = False - content_type = 'text/xml' - xml_root = 'rset' - - def call(self): - w = self.w - rset, descr = self.cw_rset, self.cw_rset.description - eschema = self._cw.vreg.schema.eschema - labels = self.columns_labels(tr=False) - w(u'\n' % self._cw.encoding) - w(u'<%s query="%s">\n' % (self.xml_root, xml_escape(rset.printable_rql()))) - for rowindex, row in enumerate(self.cw_rset): - w(u' \n') - for colindex, val in enumerate(row): - etype = descr[rowindex][colindex] - tag = labels[colindex] - attrs = {} - if '(' in tag: - attrs['expr'] = tag - tag = 'funccall' - if val is not None and not eschema(etype).final: - attrs['eid'] = val - # csvrow.append(val) # val is eid in that case - val = self._cw.view('textincontext', rset, - row=rowindex, col=colindex) - else: - val = self._cw.view('final', rset, row=rowindex, - col=colindex, format='text/plain') - w(simple_sgml_tag(tag, val, **attrs)) - w(u' \n') - w(u'\n' % self.xml_root) - - -# RSS stuff ################################################################### - -class IFeedAdapter(EntityAdapter): - __needs_bw_compat__ = True - __regid__ = 'IFeed' - __select__ = is_instance('Any') - - def rss_feed_url(self): - """return a URL to the rss feed for this entity""" - return self.entity.absolute_url(vid='rss') - - -class RSSFeedURL(Component): - __regid__ = 'rss_feed_url' - __select__ = non_final_entity() - - def feed_url(self): - return self._cw.build_url(rql=self.cw_rset.limited_rql(), vid='rss') - - -class RSSEntityFeedURL(Component): - __regid__ = 'rss_feed_url' - __select__ = one_line_rset() & adaptable('IFeed') - - def feed_url(self): - entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - return entity.cw_adapt_to('IFeed').rss_feed_url() - - -class RSSIconBox(component.CtxComponent): - """just display the RSS icon on uniform result set""" - __regid__ = 'rss' - __select__ = (component.CtxComponent.__select__ - & appobject_selectable('components', 'rss_feed_url')) - - visible = False - order = 999 - - def render(self, w, **kwargs): - try: - rss = self._cw.uiprops['RSS_LOGO'] - except KeyError: - self.error('missing RSS_LOGO external resource') - return - urlgetter = self._cw.vreg['components'].select('rss_feed_url', self._cw, - rset=self.cw_rset) - url = urlgetter.feed_url() - w(u'rss\n' % (xml_escape(url), rss)) - - -class RSSView(XMLView): - __regid__ = 'rss' - title = _('rss export') - templatable = False - content_type = 'text/xml' - http_cache_manager = httpcache.MaxAgeHTTPCacheManager - cache_max_age = 60*60*2 # stay in http cache for 2 hours by default - item_vid = 'rssitem' - - def _open(self): - req = self._cw - self.w(u'\n' % req.encoding) - self.w(u'\n') - self.w(u' \n') - self.w(u' %s RSS Feed\n' - % xml_escape(self.page_title())) - self.w(u' %s\n' - % xml_escape(req.form.get('vtitle', ''))) - params = req.form.copy() - params.pop('vid', None) - self.w(u' %s\n' % xml_escape(self._cw.build_url(**params))) - - def _close(self): - self.w(u' \n') - self.w(u'') - - def call(self): - """display a list of entities by calling their view""" - self._open() - for i in range(self.cw_rset.rowcount): - 
self.cell_call(i, 0) - self._close() - - def cell_call(self, row, col): - self.wview(self.item_vid, self.cw_rset, row=row, col=col) - - -class RSSItemView(EntityView): - __regid__ = 'rssitem' - date_format = '%%Y-%%m-%%dT%%H:%%M%+03i:00' % (timezone / 3600) - add_div_section = False - - def cell_call(self, row, col): - entity = self.cw_rset.complete_entity(row, col) - self.w(u'\n') - self.w(u'%s\n' - % xml_escape(entity.absolute_url())) - self.render_title_link(entity) - self.render_description(entity) - self._marker('dc:date', entity.dc_date(self.date_format)) - self.render_entity_creator(entity) - self.w(u'\n') - - def render_description(self, entity): - self._marker('description', entity.dc_description(format='text/html')) - - def render_title_link(self, entity): - self._marker('title', entity.dc_long_title()) - self._marker('link', entity.absolute_url()) - - def render_entity_creator(self, entity): - if entity.creator: - self._marker('dc:creator', entity.dc_creator()) - - def _marker(self, marker, value): - if value: - self.w(u' <%s>%s\n' % (marker, xml_escape(value), marker)) diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/about_en.rst --- a/web/wdoc/about_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,25 +0,0 @@ -About this site -=============== - -This web application is based on the CubicWeb knowledge management system -from Logilab_. CubicWeb is made of an object database and a web -framework. It allows to develop an application by defining a set of -entities and their relationships, plus a set of views that present the -data selected from the object database using a specific query language. - -This application has a precise schema_ and can be considered an -example of a semantic web database, as it can export the data to XML -and others formats and is not limited to publishing HTML. - -Supported formats: |microformats|_ - JSON_ - |rss|_ - |dublincore|_ - -.. |microformats| image:: /data/microformats-button.png -.. _microformats: http://microformats.org -.. _JSON: http://www.json.org/ -.. |rss| image:: /data/rss-button.png -.. _rss: http://www.rssboard.org -.. |dublincore| image:: /data/dublincore-button.png -.. _dublincore: http://dublincore.org - -.. _Logilab: http://www.logilab.fr/ -.. _schema: ../schema diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/about_fr.rst --- a/web/wdoc/about_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,28 +0,0 @@ -.. -*- coding: utf-8 -*- - -A propos de ce site -=================== - -Cette application web est basée sur le système de gestion de connaissance CubicWeb de -Logilab_. CubicWeb est composé d'une base de données objet et d'un framework web. Il -permet de développer une application en définissant un ensemble d'entités et de -relations entre ces entités, ainsi que des vues présentant les données -sélectionnées depuis la base de données en utilisant un langage de requête -spécifique. - -Cette application possède un schéma_ précis et peut être considérée comme un -exemple de site web sémantique, car elle n'est pas limitée à publier du HTML -mais peut également exporter les données en XML et d'autres formats. - -Formats supportés: |microformats|_ - JSON_ - |rss|_ - |dublincore|_ - -.. |microformats| image:: /data/microformats-button.png -.. _microformats: http://microformats.org -.. _JSON: http://www.json.org/ -.. |rss| image:: /data/rss-button.png -.. _rss: http://www.rssboard.org -.. |dublincore| image:: /data/dublincore-button.png -.. 
_dublincore: http://dublincore.org - -.. _Logilab: http://www.logilab.fr/ -.. _schéma: ../schema diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/add_content_en.rst --- a/web/wdoc/add_content_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -Add some entities ------------------ -As manager, you can access to entity creation forms by using the `site management`_ page. - -.. _`site management`: ../manage - -As regular user, the index page or the action box may propose some links to create entities according to the context. - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/add_content_fr.rst --- a/web/wdoc/add_content_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -Ajouter des entités -------------------- -Pour un administrateur, la création des objets est toujours possible directement dans la `page de gestion de site`_. - -.. _`page de gestion de site`: ../manage - -Pour les utilisateurs, la page principale ou la boîte d'action des entités vous permettra la création de nouveaux contenus. -L'intérêt de la dernière méthode est de faciliter l'édition de la relation entre les objets. - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/advanced_usage_en.rst --- a/web/wdoc/advanced_usage_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -.. winclude:: advanced_usage_schema - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/advanced_usage_schema_en.rst --- a/web/wdoc/advanced_usage_schema_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -Schema of the data ------------------- - -First take a look at the data schema_ then try to remember that you are browsing -through a heap of data by applying stylesheets to the results of queries. This -site is not a content management system with items placed in folders. It is an -interface to a database which applies a view to retreived data. - -.. _schema: ../schema diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/advanced_usage_schema_fr.rst --- a/web/wdoc/advanced_usage_schema_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -.. -*- coding: utf-8 -*- - -Schéma des données ------------------- - -Commencez d'abord par jeter un œil au schéma_ de l'application, puis essayez de vous rappeler que vous naviguez dans un ensemble de données en appliquant des vues aux résultats de requête. Ce site n'est pas un système de gestion de contenu avec des objets dans des répertoires. C'est une interface vers une base de données qui vous permet d'appliquer une vue aux données récupérées. - -.. _schéma: ../schema - - -Relation entre les objets -------------------------- - -Une relation est définie pour donner un sens précis entre les éléments. Les relations peuvent être facilement ajoutées ou détruites selon le schéma_ de votre application. diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/bookmarks_en.rst --- a/web/wdoc/bookmarks_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -Any authenticated user can defines its own bookmarks in the application. They -are similar to bookmarks in a browser but are dedicated to link into the -application and they are stored in the database (and so you'll get them from any -browser you use to connect to the application. They are available through the -bookmarks box, on the left of the screen. 
- -To add a bookmark on the page you're seeing, simply click on the "bookmark this -page" link in the "manage bookmarks" dropdown-menu. You'll then be asked to give -it a title. Once created, it will appear in the bookmarks box. - -In this dropdown-menu, the "pick existing bookmarks" link will show you a list -of bookmarks already defined by the other users of the application. If you are -interested by one of them, simply click the `[+]` sign to the left of it to add it -to your bookmarks. - -The "edit bookmarks" link will show you a form to edit your current bookmarks, -for instance to change their title. If you are using share bookmarks (picked -from another user), those bookmarks won't appear in the form unless you're -allowed to modify them. - -To remove a bookmark, simply click the `[-]` sign to the left of it. diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/bookmarks_fr.rst --- a/web/wdoc/bookmarks_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -.. -*- coding: utf-8 -*- - -Chaque utilisateur authentifié peut définir ses propres signets dans -l'application. Ils fonctionnent comme des signets dans un navigateur, mais sont -restreints aux liens internes au site et sont conservés dans la base de données, -ce qui vous permet de les retrouver quelque soit le navigateur que vous utilisez -pour vous connecter au site. Ils sont disponibles dans la boîte des signets, à -gauche de l'écran. - -Pour ajouter un signet sur la page que vous être en train de visualiser, vous -devez cliquer sur le menu déroulant `gérer des signets`; puis sur `poser un -signet ici` de la boîte pour faire apparaître le champ d'ajout. Il est alors -demandé de lui donner un titre. Une fois créé, le signet apparaît dans la boîte -des signets. - -Le signe `[-]` en face du signet permet sa suppression immédiate. - -Le lien `récupérer un signet` du menu déroulant permet de voir une liste des -signets déja définis par les autres utilisateurs de l'application. Si l'un de -ces signets vous intéresse, vous pouvez l'ajouter à vos signet en cliquant sur -le signe `[+]` à gauche de celui-ci. - -Le lien `éditer les signets` du menu déroulant permet de modifier vos signets -(par exemple changer leur titre) et le chemin relatif du signet. Si vous -utilisez des signets partagés (que vous avez récupéré d'un autre utilisateur), -ceux-ci n'apparaîtront pas dans le formulaire de modification à moins que vous -ayez le droit de les modifier. - - -.. _`préférences utilisateurs`: ../myprefs diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/custom_view_en.rst --- a/web/wdoc/custom_view_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -Once you have read the `RQL tutorial`_ and you know the application'`schema`_, -you're ready to define your own views. Just type the RQL query retreiving data -you're interested in in the search field. If the view used to display them -doesn't fit your need, choose another one in the possible views box (this box -isn't displayed by default, but you can activate it in your `user -preferences`_). Once you're satified, add a `bookmark`_ if you want to reuse -this view later. - -.. _`RQL tutorial`: tut_rql -.. _`schema`: ../schema -.. _`user preferences`: ../myprefs -.. _`bookmark`: bookmarks - - -Below are some example of what can be acheived... - -.. winclude:: custom_view_last_update -.. 
winclude:: custom_view_rss diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/custom_view_fr.rst --- a/web/wdoc/custom_view_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -.. -*- coding: utf-8 -*- - -Une fois que vous avez lu le `tutoriel RQL`_ et que vous connaissez le `schéma`_ -de l'application, vous êtes prêt à définir vos propres vues. Tapez simplement -dans le champ de recherche la requête qui vous permet de récupérer les données -qui vous intéressent. Si la vue appliquée par défaut ne correspond pas à vos -besoins, sélectionnez en une autre dans la boîte des vues possibles (celle-ci -n'est pas affichée par défaut, vous pouvez l'activer dans vos `préférences -utilisateur`_). Une fois que vous êtes satisfait, ajoutez un `signet`_ si vous -voulez réutiliser votre vue plus tard. - -.. _`tutoriel RQL`: tut_rql -.. _`schéma`: ../schema -.. _`préférences utilisateur`: ../myprefs -.. _`signet`: bookmarks - - -Ci-dessous quelques exemples de ce que l'on peut faire... - -.. winclude:: custom_view_last_update -.. winclude:: custom_view_rss diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/custom_view_last_update_en.rst --- a/web/wdoc/custom_view_last_update_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -Latest changes --------------- - -* table of `all latest changes`_ - -Links below is providing useful RQL query example. - -.. _all latest changes: ../view?rql=Any+M%2CX+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30 diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/custom_view_last_update_fr.rst --- a/web/wdoc/custom_view_last_update_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -.. -*- coding: utf-8 -*- - -Dernières modifications ------------------------ - -* la table des `derniers changements`_ fournit un exemple d'utilisation de RQL - pour récupérer les derniers changements ayant eu lieu sur ce site. - -.. _`derniers changements`: ../view?rql=Any+M%2CX+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30 - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/custom_view_rss_en.rst --- a/web/wdoc/custom_view_rss_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ - -.. role:: raw-html(raw) - :format: html - -RSS driven ----------- - -RSS is a pretty useful technology that can be widely used on this -site. Any set of data can be presented as RSS. You can then plug in -an RSS reader into that and follow the site activity. For example : - -:raw-html:`

        latest changes

        ` diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/custom_view_rss_fr.rst --- a/web/wdoc/custom_view_rss_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -.. -*- coding: utf-8 -*- - -.. role:: raw-html(raw) - :format: html - -Flux RSS --------- - -RSS est une technologie très utile qui peut être utilisée de manière très -générique sur ce site. N'importe quel résultat de requête peut-être présenté -comme un flux RSS. Vous pouvez donc ensuite connecter ce flux à n'importe quel -lecteur RSS pour suivre l'activité de ce cite. Par exemple pour avoir les -derniers changements sous forme de flux RSS: - -:raw-html:`

        latest changes

        ` - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/glossary_en.rst --- a/web/wdoc/glossary_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -action box - Area visible in the upper left column. You have a list of available actions on the entity. The most frequently used entry is `modify`. - -object - All element created in the application - Example: project, ticket, user, ... - -relation editing module - HTML widget that let you define new relations amongst objects. - -relation - It's a kind of 'smart' link between 2 objets of the application. It has a specific sense that determine dynamic behaviour and add a new logic of the content. - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/glossary_fr.rst --- a/web/wdoc/glossary_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -boîte des actions - boîte visible dans la colonne de gauche en haut à gauche de l'écran. Cette boîte vous permet d'accéder aux actions disponibles pour cette entité. L'entrée la plus utilisée est `modifier`. - -module d'édition des relations entre objets - module HTML qui permet l'édition des relations entre objects. - -objet - Tout élement qui peut être créé au sein de l'application - Exemple: projet, ticket, étiquette, utilisateur, ... - -relation - Une relation est un lien 'intelligent' et bi-directionnel entre 2 objets de l'application. Il est intelligent au sens où il porte un sens et permet de définir des comportements dynamiques à l'application et ajouter une logique métier au contenu. - - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/images/userprefs_en.png Binary file web/wdoc/images/userprefs_en.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/images/userprefs_fr.png Binary file web/wdoc/images/userprefs_fr.png has changed diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/main_en.rst --- a/web/wdoc/main_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -.. winclude:: about diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/search_en.rst --- a/web/wdoc/search_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ - -.. winclude:: search_sample_queries - -You can as well type complex queries using the RQL_ query language, -used every where to build dynamic pages of this site. - -You can use one of the following prefixes to specify which kind of search you -want: - -* `rql` : RQL query -* `text` : full text search - -.. _RQL: tut_rql diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/search_fr.rst --- a/web/wdoc/search_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,15 +0,0 @@ -.. -*- coding: utf-8 -*- - -.. winclude:: search_sample_queries - -Vous pouvez également taper des requêtes complexes en utilisant le langage de -requête RQL_, base sur laquelle ce site est construit. - -Vous pouvez préfixer votre recherche des mots clés suivants pour indiquer le -type de recherche que vous désirez : - -* `rql` : requête RQL -* `text` : recherche plein texte - -.. _RQL: tut_rql - diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/search_sample_queries_en.rst --- a/web/wdoc/search_sample_queries_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -Experiment with the search bar. Try queries like "card sometextualcontent" or -"card wikiid index" or "345". 
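The searches documented above can also be replayed programmatically. A minimal sketch using the stock ``cubicweb.devtools.testlib`` helpers (the class and test names are illustrative only; ``has_text`` is the standard full-text relation) could look like::

    from cubicweb.devtools.testlib import CubicWebTC

    class SearchQueriesTC(CubicWebTC):
        """replay, against a test instance, the searches shown above"""

        def test_fulltext_and_rql_search(self):
            with self.admin_access.repo_cnx() as cnx:
                # typing "card sometextualcontent" in the search box boils down
                # to a full-text query on the has_text relation
                rset = cnx.execute('Any X WHERE X has_text %(words)s',
                                   {'words': u'card'})
                self.assertIsNotNone(rset)
                # prefixing the search with "rql" sends a raw RQL query, here
                # the "latest changes" query used by the custom view pages
                rset = cnx.execute('Any M,X WHERE X modification_date M'
                                   ' ORDERBY M DESC LIMIT 30')
                self.assertLessEqual(len(rset), 30)

Running raw RQL this way is a convenient check on a query before bookmarking its ``../view?rql=...`` URL.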
diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/search_sample_queries_fr.rst --- a/web/wdoc/search_sample_queries_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -Essayer le champ de recherche. Essayer des recherches comme "fiche -unmotachercher", ou encore "fiche wikiid index" ou "345". diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/standard_usage_en.rst --- a/web/wdoc/standard_usage_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -First of all, you can use this site as any web site by clicking on the -different links. The Logo on the top left of this page will lead you -to a start page from which you will be able to navigate to all the -data hosted on this site. - -The bookmarks box on the left hand side provides some useful -shortcuts. - -Most text is indexed and you can search all the content by typing -words in the search box. diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/standard_usage_fr.rst --- a/web/wdoc/standard_usage_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -.. -*- coding: utf-8 -*- - -Plusieurs éléments par défaut sont proposés pour faciliter la navigation: - -- le logo en haut de la page vous ramène à la page d'accueil du site qui fournit un point de départ pour la navigation vers les données de ce site. - -- la boîte de signet à gauche fournit des raccourcis utiles. - -- la notion d'étiquette vous permet de marquer de manière subjective les pages à se souvenir - -- le contenu textuel des entités est indexé et vous pouvez donc rechercher des entités en tapant simplement les mots à rechercher dans la boîte de recherche. diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/toc.xml --- a/web/wdoc/toc.xml Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,54 +0,0 @@ - - Site documentation - Documentation du site - -
        - Standard use - Utilisation standard
        - Using bookmarks - Utilisation des signets
        - Search - Recherche
        - User preferences - Préférences utilisateur
        - Add content to the site - Ajouter du contenu au site
        - Advanced use - Utilisation avancée
        - "Relation Query Language" tutorial - Tutoriel "Relation Query Language"
        - Defining your own views - Définir ses propres vues
        - Glossary - Glossaire
        - A propos de ce site - About this site
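The ``../view?rql=...&vid=rss`` URLs mentioned in the custom view pages above can likewise be exercised through a simulated web request. A minimal sketch (class and test names are illustrative only; ``req.view`` and the ``rss`` view id are standard CubicWeb APIs)::

    from cubicweb.devtools.testlib import CubicWebTC

    class RSSViewTC(CubicWebTC):
        """render the "latest changes" result set with the rss view"""

        def test_rss_latest_changes(self):
            with self.admin_access.web_request() as req:
                rset = req.execute('Any M,X WHERE X modification_date M'
                                   ' ORDERBY M DESC LIMIT 30')
                # same rendering as the ../view?rql=...&vid=rss URL
                xml = req.view('rss', rset=rset)
                self.assertIn('<rss', xml)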
        diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/tut_rql_en.rst --- a/web/wdoc/tut_rql_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,258 +0,0 @@ -.. -*- coding: utf-8 -*- - -Let's learn RQL by practice... - -.. contents:: - -Introduction ------------- - -RQL has a syntax and principle inspirated from the SQL query language, though -it's at a higher level in order to be more intuitive and suitable to easily make -advanced queries on a schema based database. - -* the main idea in RQL is that we'are following relations between entities -* attributes are a special case of relations -* RQL has been inspirated from SQL but is at a higher level -* a knowledge of the application'schema is necessary to build rql queries - -To use RQL, you'll have to know the basis of the language as well as a good -knowledge of the application'schema. You can always view it using the "schema" -link in user's dropdown menu (on the top-right of the screen) or by clicking here_. - -.. _here: ../schema - - -Some bits of theory -------------------- - -Variables et types -~~~~~~~~~~~~~~~~~~ -Entities and attributes'value to follow and / or select are represented by the -query by *variables* which must be written upper-case. - -Possible types for each variable are deducted from the schema according to -constraints in the query. - -You can explicitly constrain a variable's type using the special relation "is". - -Base types -~~~~~~~~~~ -* `String` (literal: between double or simple quotes) -* `Int`, `Float` (using '.' as separator) -* `Date`, `Datetime`, `Time` (literal: string YYYY/MM/DD[ hh:mm] or - `TODAY` and `NOW` keywords) -* `Boolean` (keywords `TRUE` et `FALSE`) -* keyword `NULL` - -Opérateurs -~~~~~~~~~~ -* Logical operators : `AND`, `OR`, `,` -* Mathematical operators: `+`, `-`, `*`, `/` -* Comparisons operators: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN` - - * `=` is the default comparison operator - - * `LIKE` / `~=` permits use of the special character `%` in a string to tell - the string must begin or end with a prefix or suffix (as SQL LIKE operator) :: - - Any X WHERE X name ~= 'Th%' - Any X WHERE X name LIKE '%lt' - - * `IN` permits to give a list of possible values :: - - Any X WHERE X name IN ('chauvat', 'fayolle', 'di mascio', 'thenault') - -Grammaire des requêtes de recherche -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -:: - - [DISTINCT] V1(, V2)* - [GROUPBY V1(, V2)*] [ORDERBY ] - [WHERE ] - [LIMIT ] [OFFSET ] - -:entity type: - Type of the selected variable(s). You'll usually use `Any` type to not specify - any type. -:restriction: - List of relations to follow, in the form - `V1 relation V2|` -:orderterms: - Define a selection order : variable or column number, followed by the sort method - (`ASC`, `DESC`), with ASC as default when not specified - -notice about grouped query (e.g using a `GROUPBY` clause): every selected -variable should be either grouped or used in an aggregat function. - - -Example schema --------------- - -In this document, we will suppose the application's schema is the one described -here. 
Available entity types are : - -:Person: - :: - - name (String, required) - birthday (Date) - - -:Company: - :: - - name (String) - - -:Note: - :: - - diem (Date) - type (String) - - -And relations between those entities: :: - - Person works_for Company - Person evaluated_by Note - Company evaluated_by Note - - -Meta-data -~~~~~~~~~ -Every entity type has the following meta-data: - -* `eid (Int)`, a unique identifier -* `creation_date (Datetime)`, date on which the entity has been created -* `modification_date (Datetime)`, latest date on which the entity has been modified - -* `created_by (CWUser)`, relation to the user who has created this entity - -* `owned_by (CWUser)`, relation to the user(s) considered as owner of this - entity, the entity's creator by default - -* `is (Eetype)`, special relation to specify a variable type. - -A user's entity has the following schema: - -:CWUser: - :: - - login (String) not null - password (Password) - firstname (String) - surname (String) - - -Basic queries ------------- -0. *Every person* :: - - Person X - - or :: - - Any X WHERE X is Person - - -1. *The company named Logilab* :: - - Company S WHERE S name 'Logilab' - - -2. *Every entity with a "name" attribute whose value starts with 'Log'* :: - - Any S WHERE S name LIKE 'Log%' - - or :: - - Any S WHERE S name ~= 'Log%' - - This query may return Person or Company entities. - - -3. *Every person working for the Logilab company* :: - - Person P WHERE P works_for S, S name "Logilab" - - or :: - - Person P WHERE P works_for S AND S name "Logilab" - - -4. *Companies named Caesium or Logilab* :: - - Company S WHERE S name IN ('Logilab','Caesium') - - or :: - - Company S WHERE S name 'Logilab' OR S name 'Caesium' - - -5. *Every company except those named Caesium or Logilab* :: - - Company S WHERE NOT S name IN ('Logilab','Caesium') - - or :: - - Company S WHERE NOT S name 'Logilab' AND NOT S name 'Caesium' - - -6. *Entities evaluated by the note with eid 43* :: - - Any X WHERE X evaluated_by N, N eid 43 - - -7. *Every person ordered by birthday from the youngest to the oldest* :: - - Person X ORDERBY D DESC WHERE X birthday D - - Notice you have to define a variable using the birthday relation to use it in the - sort term. - - -8. *Number of persons working for each known company* :: - - Any S, COUNT(X) GROUPBY S WHERE X works_for S - - Notice that since you're writing a grouped query on S, X has to be - either grouped as well or used in an aggregate function (as in this example). - - - -Advanced -------- -0. *Person with no name specified (i.e. NULL)* :: - - Person P WHERE P name NULL - - -1. *Persons who are not working for any company* :: - - Person P WHERE NOT P works_for S - - -2. *Every company where the person named toto isn't working* :: - - Company S WHERE NOT P works_for S, P name 'toto' - - -3. *Every entity which has been modified between today and yesterday* :: - - Any X WHERE X modification_date <= TODAY, X modification_date >= TODAY - 1 - - -4. *Every note without a type, to be done in the next 7 days, ordered by date* :: - - Any N, D where N is Note, N type NULL, N diem D, N diem >= TODAY, - N diem < today + 7 ORDERBY D - - -5. *Persons with a homonym (without duplicates)* :: - - DISTINCT Person X,Y where X name NX, Y name NX - - or even better (e.g.
without both (Xeid, Yeid) and (Yeid, Xeid) in the results) :: - - Person X,Y where X name NX, Y name NX, X eid XE, Y eid > XE diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/tut_rql_fr.rst --- a/web/wdoc/tut_rql_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,268 +0,0 @@ -.. -*- coding: utf-8 -*- - -Apprenons RQL par la pratique... - -.. contents:: - - -Introduction ------------- - -RQL est assez proche par sa syntaxe et ses principes du langage de requête des -bases de données relationnelles SQL. Il est cependant plus intuitif et mieux -adapté pour faire des recherches avancées sur des bases de données structurées -par un schéma de données. On retiendra les points suivants : - -* RQL est un langage mettant l'accent sur le parcours de relations. -* Les attributs sont considérés comme des cas particuliers de relations. -* RQL s'inspire de SQL mais se veut plus haut niveau. -* Une connaissance du schéma définissant l'application est nécessaire. - -Pour s'en servir, il convient de connaître les règles de base du langage RQL, -mais surtout d'avoir une bonne vision du schéma de données de l'application. Ce -schéma est toujours disponible dans l'application par le lien `schéma`, dans la -boîte affichée en cliquant sur le lien de l'utilisateur connectée (en haut à droite). -Vous pouvez également le voir en cliquant ici_. - -.. _ici: ../schema - - -Un peu de théorie ------------------ - -Variables et typage -~~~~~~~~~~~~~~~~~~~ -Les entités et valeurs à parcourir et / ou séléctionner sont représentées dans -la requête par des *variables* qui doivent être écrites en majuscule - -Les types possibles pour chaque variable sont déduits à partir du schéma en -fonction des contraintes présentes dans la requête. - -On peut contraindre les types possibles pour une variable à l'aide de la -relation spéciale `is`. - -Types de bases -~~~~~~~~~~~~~~ -* `String` (litéral: entre doubles ou simples quotes) -* `Int`, `Float` (le séparateur étant le '.') -* `Date`, `Datetime`, `Time` (litéral: chaîne YYYY/MM/DD[ hh:mm] ou mots-clés - `TODAY` et `NOW`) -* `Boolean` (mots-clés `TRUE` et `FALSE`) -* mot-clé `NULL` - -Opérateurs -~~~~~~~~~~ -* Opérateurs logiques : `AND`, `OR`, `,` -* Opérateurs mathématiques: `+`, `-`, `*`, `/` -* Operateur de comparaisons: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN` - - * L'opérateur `=` est l'opérateur par défaut - - * L'opérateur `LIKE` / `~=` permet d'utiliser le caractère `%` dans une chaine - de caractère pour indiquer que la chaîne doit commencer ou terminer par un - préfix/suffixe :: - - Any X WHERE X nom ~= 'Th%' - Any X WHERE X nom LIKE '%lt' - - * L'opérateur `IN` permet de donner une liste de valeurs possibles :: - - Any X WHERE X nom IN ('chauvat', 'fayolle', 'di mascio', 'thenault') - -Grammaire des requêtes de recherche -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -:: - - [DISTINCT] V1(, V2)* - [GROUPBY V1(, V2)*] [ORDERBY ] - [WHERE ] - [LIMIT ] [OFFSET ] - -:type d'entité: - Type de la ou des variables séléctionnées. - Le type spécial `Any`, revient à ne pas spécifier de type. -:restriction: - liste des relations à parcourir sous la forme - `V1 relation V2|` -:orderterms: - Définition de l'ordre de sélection : variable ou n° de colonne suivie de la - méthode de tri (`ASC`, `DESC`), ASC étant la valeur par défaut - -note pour les requêtes groupées (i.e. 
avec une clause `GROUPBY`) : -toutes les variables sélectionnées doivent être soit groupée soit -aggrégée - - -Schéma ------- - -Nous supposerons dans la suite de ce document que le schéma de l'application est -le suivant. Les différentes entités disponibles sont : - -:Personne: - :: - - nom (String, obligatoire) - datenaiss (Date) - - -:Societe: - :: - - nom (String) - - -:Note: - :: - - diem (Date) - type (String) - - -Et les relations entre elles : :: - - Person travaille_pour Societe - Person evaluee_par Note - Societe evaluee_par Note - - -Méta-données -~~~~~~~~~~~~ -Tous les types d'entités ont les métadonnées suivantes : - -* `eid (Int)`, permettant d'identifier chaque instance de manière unique -* `creation_date (Datetime)`, date de création de l'entité -* `modification_date (Datetime)`, date de dernière modification de l'entité - -* `created_by (CWUser)`, relation vers l'utilisateur ayant créé l'entité - -* `owned_by (CWUser)`, relation vers le où les utilisateurs considérés comme - propriétaire de l'entité, par défaut le créateur de l'entité - -* `is (Eetype)`, relation spéciale permettant de spécifier le - type d'une variable. - -Enfin, le schéma standard d'un utilisateur est le suivant : - -:CWUser: - :: - - login (String, obligatoire) - password (Password) - firstname (String) - surname (String) - - -L'essentiel ------------ -0. *Toutes les personnes* :: - - Personne X - - ou :: - - Any X WHERE X is Personne - - -1. *La societé nommé Logilab* :: - - Societe S WHERE S nom 'Logilab' - - -2. *Toutes les entités ayant un attribut nom commençant par 'Log'* :: - - Any S WHERE S nom LIKE 'Log%' - - ou :: - - Any S WHERE S nom ~= 'Log%' - - Cette requête peut renvoyer des entités de type personne et de type - société. - - -3. *Toutes les personnes travaillant pour la société nommé Logilab* :: - - Personne P WHERE P travaille_pour S, S nom "Logilab" - - ou :: - - Personne P WHERE P travaille_pour S AND S nom "Logilab" - - -4. *Les societés nommées Caesium ou Logilab* :: - - Societe S WHERE S nom IN ('Logilab','Caesium') - - ou :: - - Societe S WHERE S nom 'Logilab' OR S nom 'Caesium' - - -5. *Toutes les societés sauf celles nommées Caesium ou Logilab* :: - - Societe S WHERE NOT S nom IN ('Logilab','Caesium') - - ou :: - - Societe S WHERE NOT S nom 'Logilab' AND NOT S nom 'Caesium' - - -6. *Les entités évalués par la note d'identifiant 43* :: - - Any X WHERE X evaluee_par N, N eid 43 - - -7. *Toutes les personnes triés par date de naissance dans l'ordre antechronologique* :: - - Personne X ORDERBY D DESC WHERE X datenaiss D - - On note qu'il faut définir une variable et la séléctionner pour s'en - servir pour le tri. - - -8. *Nombre de personne travaillant pour chaque société* :: - - Any S, COUNT(X) GROUPBY S WHERE X travaille_pour S - - On note qu'il faut définir une variable pour s'en servir pour le - groupage. De plus les variables séléctionnée doivent être groupée - (mais les variables groupées ne doivent pas forcément être sélectionnées). - - - -Exemples avancés ----------------- -0. *Toutes les personnes dont le champ nom n'est pas spécifié (i.e NULL)* :: - - Personne P WHERE P nom NULL - - -1. *Toutes les personnes ne travaillant pour aucune société* :: - - Personne P WHERE NOT p travaille_pour S - - -2. *Toutes les sociétés où la personne nommée toto ne travaille pas* :: - - Societe S WHERE NOT P travaille_pour S , P nom 'toto' - - -3. 
*Toutes les entités ayant été modifiées entre aujourd'hui et hier* :: - - Any X WHERE X modification_date <= TODAY, X modification_date >= TODAY - 1 - - -4. *Toutes les notes n'ayant pas de type et à effectuer dans les 7 jours, triées par date* :: - - Any N, D where N is Note, N type NULL, N diem D, N diem >= TODAY, - N diem < today + 7 ORDERBY D - - -5. *Les personnes ayant un homonyme (sans doublons)* :: - - DISTINCT Personne X,Y where X nom NX, Y nom NX - - ou mieux (sans avoir (Xeid, Yeid) et (Yeid, Xeid) dans les résultats) :: - - Personne X,Y where X nom NX, Y nom NX, X eid XE, Y eid > XE diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/userprefs_en.rst --- a/web/wdoc/userprefs_en.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -The personal information describing a User can be modified using the edit form -of the user. You can access it through the dropdown-menu under the link on the -top-right of the window, labeled by your login. In this menu, click the -"profile" link to go to this form. - -Each user can as well customize the site appearance using the "user's -preferences" link in this menu. This will show you a form to configure which -boxes are displayed, in which order, etc... - -.. image:: images/userprefs diff -r 058bb3dc685f -r 0b59724cb3f2 web/wdoc/userprefs_fr.rst --- a/web/wdoc/userprefs_fr.rst Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -Les données concernant l'utilisateur sont paramétrables par la page -d'édition de l'utilisateur. Vous pouvez accéder à celle ci par le menu -déroulant apparaissant en cliquant sur le lien en haut à droite de la -fenêtre de l'application, dont l'intitulé est votre login. Dans ce -menu, cliquez sur "information personnelles" pour modifier vos données -personnelles (y compris le mot de passe d'accès à l'application). - -Chaque utilisateur peut également personaliser l'apparence du site via le lien -`préférences utilisateur`_. Ce formulaire permet notamment de configurer les -boîtes qui seront affichées, leur ordre, etc... - -L'administrateur possède quant à lui un menu "configuration du site" qui reprend l'ensemble des préférences utilisateurs mais les applique par défaut au site. - - -Les types de préférences -======================== - -- navigation: détermine des caractériques plus personnelles pour l'ergonomie liée à la taille de votre écran (taille des champs d'entrées, nombre d'éléments à afficher dans des listes, ...) -- propriétés génériques de l'interface: détermine essentiellement la localisation de l'application avec des formats d'affichages particulier (champ date et heure). -- boîtes: éléments dynamiques et optionnels installés par les composants disponibles au sein de l'application. -- composants: éléments permettant l'usage d'une navigation plus évoluée -- composants contextuels: possibilité d'agir sur les comportements par défaut de l'application. - -Changement de la langue ------------------------ -Dans l'onglet **ui -> ui.language**, choisissez la langue voulue - -Changement de l'outil d'édition en ligne ----------------------------------------- -Il est possible de choisir le format de balisage par défaut pour les notes. Par défaut, le format html est proposé pour les débutants avec la possibilité d'utiliser un éditeur en ligne. 
- -Si vous êtes dans ce cas, vérifiez les deux entrées suivantes: - -- **ui -> ui.default-text-format** à HTML -- **ui -> ui.fckeditor** à 'yes' - -Usage avancé de RQL -------------------- -Il est possible d'afficher les requêtes RQL_ en jeu pour l'affichage d'une page en activant une barre d'entrée spécifique: - -- **components -> rql input box** à 'yes' - -Il est alors possible d'éditer et de relancer toute requête - -.. _`préférences utilisateur`: ../myprefs -.. _RQL: tut_rql -.. image:: images/userprefs diff -r 058bb3dc685f -r 0b59724cb3f2 web/webconfig.py --- a/web/webconfig.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,475 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""web ui configuration for cubicweb instances""" - -__docformat__ = "restructuredtext en" -from cubicweb import _ - -import os -import hmac -from uuid import uuid4 -from os.path import join, exists, split, isdir -from warnings import warn - -from six import text_type - -from logilab.common.decorators import cached, cachedproperty -from logilab.common.deprecation import deprecated -from logilab.common.configuration import merge_options - -from cubicweb import ConfigurationError -from cubicweb.toolsutils import read_config -from cubicweb.cwconfig import CubicWebConfiguration, register_persistent_options - - -register_persistent_options( ( - # site-wide only web ui configuration - ('site-title', - {'type' : 'string', 'default': 'unset title', - 'help': _('site title'), - 'sitewide': True, 'group': 'ui', - }), - ('main-template', - {'type' : 'string', 'default': 'main-template', - 'help': _('id of main template used to render pages'), - 'sitewide': True, 'group': 'ui', - }), - # user web ui configuration - ('fckeditor', - {'type' : 'yn', 'default': False, - 'help': _('should html fields being edited using fckeditor (a HTML ' - 'WYSIWYG editor). 
You should also select text/html as default ' - 'text format to actually get fckeditor.'), - 'group': 'ui', - }), - # navigation configuration - ('page-size', - {'type' : 'int', 'default': 40, - 'help': _('maximum number of objects displayed by page of results'), - 'group': 'navigation', - }), - ('related-limit', - {'type' : 'int', 'default': 8, - 'help': _('maximum number of related entities to display in the primary ' - 'view'), - 'group': 'navigation', - }), - ('combobox-limit', - {'type' : 'int', 'default': 20, - 'help': _('maximum number of entities to display in related combo box'), - 'group': 'navigation', - }), - - )) - - -class WebConfiguration(CubicWebConfiguration): - """the WebConfiguration is a singleton object handling instance's - configuration and preferences - """ - cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set([join('web', 'views')]) - cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['views']) - - options = merge_options(CubicWebConfiguration.options + ( - ('repository-uri', - {'type' : 'string', - 'default': 'inmemory://', - 'help': 'see `cubicweb.dbapi.connect` documentation for possible value', - 'group': 'web', 'level': 2, - }), - - ('anonymous-user', - {'type' : 'string', - 'default': None, - 'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)', - 'group': 'web', 'level': 1, - }), - ('anonymous-password', - {'type' : 'string', - 'default': None, - 'help': 'password of the CubicWeb user account to use for anonymous user, ' - 'if anonymous-user is set', - 'group': 'web', 'level': 1, - }), - ('query-log-file', - {'type' : 'string', - 'default': None, - 'help': 'web instance query log file', - 'group': 'web', 'level': 3, - }), - # web configuration - ('https-url', - {'type' : 'string', - 'default': None, - 'help': 'web server root url on https. By specifying this option your '\ - 'site can be available as an http and https site. Authenticated users '\ - 'will in this case be authenticated and once done navigate through the '\ - 'https site. IMPORTANTE NOTE: to do this work, you should have your '\ - 'apache redirection include "https" as base url path so cubicweb can '\ - 'differentiate between http vs https access. For instance: \n'\ - 'RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]\n'\ - 'where the cubicweb web server is listening on port 8080.', - 'group': 'main', 'level': 3, - }), - ('datadir-url', - {'type': 'string', 'default': None, - 'help': ('base url for static data, if different from "${base-url}/data/". ' - 'If served from a different domain, that domain should allow ' - 'cross-origin requests.'), - 'group': 'web', - }), - ('auth-mode', - {'type' : 'choice', - 'choices' : ('cookie', 'http'), - 'default': 'cookie', - 'help': 'authentication mode (cookie / http)', - 'group': 'web', 'level': 3, - }), - ('realm', - {'type' : 'string', - 'default': 'cubicweb', - 'help': 'realm to use on HTTP authentication mode', - 'group': 'web', 'level': 3, - }), - ('http-session-time', - {'type' : 'time', - 'default': 0, - 'help': "duration of the cookie used to store session identifier. " - "If 0, the cookie will expire when the user exist its browser. " - "Should be 0 or greater than repository\'s session-time.", - 'group': 'web', 'level': 2, - }), - ('cleanup-anonymous-session-time', - {'type' : 'time', - 'default': '5min', - 'help': 'Same as cleanup-session-time but specific to anonymous ' - 'sessions. 
You can have a much smaller timeout here since it will be ' - 'transparent to the user. Default to 5min.', - 'group': 'web', 'level': 3, - }), - ('embed-allowed', - {'type' : 'regexp', - 'default': None, - 'help': 'regular expression matching URLs that may be embeded. \ -leave it blank if you don\'t want the embedding feature, or set it to ".*" \ -if you want to allow everything', - 'group': 'web', 'level': 3, - }), - ('submit-mail', - {'type' : 'string', - 'default': None, - 'help': ('Mail used as recipient to report bug in this instance, ' - 'if you want this feature on'), - 'group': 'web', 'level': 2, - }), - - ('language-negociation', - {'type' : 'yn', - 'default': True, - 'help': 'use Accept-Language http header to try to set user '\ - 'interface\'s language according to browser defined preferences', - 'group': 'web', 'level': 2, - }), - - ('print-traceback', - {'type' : 'yn', - 'default': CubicWebConfiguration.mode != 'system', - 'help': 'print the traceback on the error page when an error occurred', - 'group': 'web', 'level': 2, - }), - - ('captcha-font-file', - {'type' : 'string', - 'default': join(CubicWebConfiguration.shared_dir(), 'data', 'porkys.ttf'), - 'help': 'True type font to use for captcha image generation (you \ -must have the python imaging library installed to use captcha)', - 'group': 'web', 'level': 3, - }), - ('captcha-font-size', - {'type' : 'int', - 'default': 25, - 'help': 'Font size to use for captcha image generation (you must \ -have the python imaging library installed to use captcha)', - 'group': 'web', 'level': 3, - }), - - ('concat-resources', - {'type' : 'yn', - 'default': False, - 'help': 'use modconcat-like URLS to concat and serve JS / CSS files', - 'group': 'web', 'level': 2, - }), - ('anonymize-jsonp-queries', - {'type': 'yn', - 'default': True, - 'help': 'anonymize the connection before executing any jsonp query.', - 'group': 'web', 'level': 1 - }), - ('generate-staticdir', - {'type': 'yn', - 'default': True, - 'help': 'Generate the static data resource directory on upgrade.', - 'group': 'web', 'level': 2, - }), - ('staticdir-path', - {'type': 'string', - 'default': None, - 'help': 'The static data resource directory path.', - 'group': 'web', 'level': 2, - }), - ('access-control-allow-origin', - {'type' : 'csv', - 'default': (), - 'help':('comma-separated list of allowed origin domains or "*" for any domain'), - 'group': 'web', 'level': 2, - }), - ('access-control-allow-methods', - {'type' : 'csv', - 'default': (), - 'help': ('comma-separated list of allowed HTTP methods'), - 'group': 'web', 'level': 2, - }), - ('access-control-max-age', - {'type' : 'int', - 'default': None, - 'help': ('maximum age of cross-origin resource sharing (in seconds)'), - 'group': 'web', 'level': 2, - }), - ('access-control-expose-headers', - {'type' : 'csv', - 'default': (), - 'help':('comma-separated list of HTTP headers the application declare in response to a preflight request'), - 'group': 'web', 'level': 2, - }), - ('access-control-allow-headers', - {'type' : 'csv', - 'default': (), - 'help':('comma-separated list of HTTP headers the application may set in the response'), - 'group': 'web', 'level': 2, - }), - )) - - def __init__(self, *args, **kwargs): - super(WebConfiguration, self).__init__(*args, **kwargs) - self.uiprops = None - self.https_uiprops = None - self.datadir_url = None - self.https_datadir_url = None - - def fckeditor_installed(self): - if self.uiprops is None: - return False - return exists(self.uiprops.get('FCKEDITOR_PATH', '')) - - def 
cwproperty_definitions(self): - for key, pdef in super(WebConfiguration, self).cwproperty_definitions(): - if key == 'ui.fckeditor' and not self.fckeditor_installed(): - continue - yield key, pdef - - @deprecated('[3.22] call req.cnx.repo.get_versions() directly') - def vc_config(self): - return self.repository().get_versions() - - def anonymous_user(self): - """return a login and password to use for anonymous users. - - None may be returned for both if anonymous connection is not - allowed or if an empty login is used in configuration - """ - try: - user = self['anonymous-user'] or None - passwd = self['anonymous-password'] - if user: - user = text_type(user) - except KeyError: - user, passwd = None, None - except UnicodeDecodeError: - raise ConfigurationError("anonymous information should only contains ascii") - return user, passwd - - @cachedproperty - def _instance_salt(self): - """This random key/salt is used to sign content to be sent back by - browsers, eg. in the error report form. - """ - return str(uuid4()).encode('ascii') - - def sign_text(self, text): - """sign some text for later checking""" - # hmac.new expect bytes - if isinstance(text, text_type): - text = text.encode('utf-8') - # replace \r\n so we do not depend on whether a browser "reencode" - # original message using \r\n or not - return hmac.new(self._instance_salt, - text.strip().replace(b'\r\n', b'\n')).hexdigest() - - def check_text_sign(self, text, signature): - """check the text signature is equal to the given signature""" - return self.sign_text(text) == signature - - def locate_resource(self, rid): - """return the (directory, filename) where the given resource - may be found - """ - return self._fs_locate(rid, 'data') - - def locate_doc_file(self, fname): - """return the directory where the given resource may be found""" - return self._fs_locate(fname, 'wdoc')[0] - - @cached - def _fs_path_locate(self, rid, rdirectory): - """return the directory where the given resource may be found""" - path = [self.apphome] + self.cubes_path() + [join(self.shared_dir())] - for directory in path: - if exists(join(directory, rdirectory, rid)): - return directory - - def _fs_locate(self, rid, rdirectory): - """return the (directory, filename) where the given resource - may be found - """ - directory = self._fs_path_locate(rid, rdirectory) - if directory is None: - return None, None - if rdirectory == 'data' and rid.endswith('.css'): - if rid == 'cubicweb.old.css': - # @import('cubicweb.css') in css - warn('[3.20] cubicweb.old.css has been renamed back to cubicweb.css', - DeprecationWarning) - rid = 'cubicweb.css' - return self.uiprops.process_resource(join(directory, rdirectory), rid), rid - return join(directory, rdirectory), rid - - def locate_all_files(self, rid, rdirectory='wdoc'): - """return all files corresponding to the given resource""" - path = [self.apphome] + self.cubes_path() + [join(self.shared_dir())] - for directory in path: - fpath = join(directory, rdirectory, rid) - if exists(fpath): - yield join(fpath) - - def load_configuration(self, **kw): - """load instance's configuration files""" - super(WebConfiguration, self).load_configuration(**kw) - # load external resources definition - self._init_base_url() - self._build_ui_properties() - - def _init_base_url(self): - # normalize base url(s) - baseurl = self['base-url'] or self.default_base_url() - if baseurl and baseurl[-1] != '/': - baseurl += '/' - if not (self.repairing or self.creating): - self.global_set_option('base-url', baseurl) - self.datadir_url = 
self['datadir-url'] - if self.datadir_url: - if self.datadir_url[-1] != '/': - self.datadir_url += '/' - if self.mode != 'test': - self.datadir_url += '%s/' % self.instance_md5_version() - self.https_datadir_url = self.datadir_url - return - httpsurl = self['https-url'] - data_relpath = self.data_relpath() - if httpsurl: - if httpsurl[-1] != '/': - httpsurl += '/' - if not self.repairing: - self.global_set_option('https-url', httpsurl) - self.https_datadir_url = httpsurl + data_relpath - self.datadir_url = baseurl + data_relpath - - def data_relpath(self): - if self.mode == 'test': - return 'data/' - return 'data/%s/' % self.instance_md5_version() - - def _build_ui_properties(self): - # self.datadir_url[:-1] to remove trailing / - from cubicweb.web.propertysheet import PropertySheet - cachedir = join(self.appdatahome, 'uicache') - self.check_writeable_uid_directory(cachedir) - self.uiprops = PropertySheet( - cachedir, - data=lambda x: self.datadir_url + x, - datadir_url=self.datadir_url[:-1]) - self._init_uiprops(self.uiprops) - if self['https-url']: - cachedir = join(self.appdatahome, 'uicachehttps') - self.check_writeable_uid_directory(cachedir) - self.https_uiprops = PropertySheet( - cachedir, - data=lambda x: self.https_datadir_url + x, - datadir_url=self.https_datadir_url[:-1]) - self._init_uiprops(self.https_uiprops) - - def _init_uiprops(self, uiprops): - libuiprops = join(self.shared_dir(), 'data', 'uiprops.py') - uiprops.load(libuiprops) - for path in reversed([self.apphome] + self.cubes_path()): - self._load_ui_properties_file(uiprops, path) - self._load_ui_properties_file(uiprops, self.apphome) - datadir_url = uiprops.context['datadir_url'] - if (datadir_url+'/cubicweb.old.css') in uiprops['STYLESHEETS']: - warn('[3.20] cubicweb.old.css has been renamed back to cubicweb.css', - DeprecationWarning) - idx = uiprops['STYLESHEETS'].index(datadir_url+'/cubicweb.old.css') - uiprops['STYLESHEETS'][idx] = datadir_url+'/cubicweb.css' - if datadir_url+'/cubicweb.reset.css' in uiprops['STYLESHEETS']: - warn('[3.20] cubicweb.reset.css is obsolete', DeprecationWarning) - uiprops['STYLESHEETS'].remove(datadir_url+'/cubicweb.reset.css') - cubicweb_js_url = datadir_url + '/cubicweb.js' - if cubicweb_js_url not in uiprops['JAVASCRIPTS']: - uiprops['JAVASCRIPTS'].insert(0, cubicweb_js_url) - - def _load_ui_properties_file(self, uiprops, path): - uipropsfile = join(path, 'uiprops.py') - if exists(uipropsfile): - self.debug('loading %s', uipropsfile) - uiprops.load(uipropsfile) - - # static files handling ################################################### - - @property - def static_directory(self): - return join(self.appdatahome, 'static') - - def static_file_exists(self, rpath): - return exists(join(self.static_directory, rpath)) - - def static_file_open(self, rpath, mode='wb'): - staticdir = self.static_directory - rdir, filename = split(rpath) - if rdir: - staticdir = join(staticdir, rdir) - if not isdir(staticdir) and 'w' in mode: - os.makedirs(staticdir) - return open(join(staticdir, filename), mode) - - def static_file_add(self, rpath, data): - stream = self.static_file_open(rpath) - stream.write(data) - stream.close() - - def static_file_del(self, rpath): - if self.static_file_exists(rpath): - os.remove(join(self.static_directory, rpath)) diff -r 058bb3dc685f -r 0b59724cb3f2 web/webctl.py --- a/web/webctl.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,136 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb-ctl commands and command handlers common to twisted/modpython -web configuration -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import os, os.path as osp -from shutil import copy, rmtree - -from logilab.common.shellutils import ASK - -from cubicweb import ExecutionError -from cubicweb.cwctl import CWCTL -from cubicweb.cwconfig import CubicWebConfiguration as cwcfg -from cubicweb.toolsutils import Command, CommandHandler, underline_title - - -try: - from os import symlink as linkdir -except ImportError: - from shutil import copytree as linkdir - - -class WebCreateHandler(CommandHandler): - cmdname = 'create' - - def bootstrap(self, cubes, automatic=False, inputlevel=0): - """bootstrap this configuration""" - if not automatic: - print('\n' + underline_title('Generic web configuration')) - config = self.config - config.input_config('web', inputlevel) - if ASK.confirm('Allow anonymous access ?', False): - config.global_set_option('anonymous-user', 'anon') - config.global_set_option('anonymous-password', 'anon') - - def postcreate(self, *args, **kwargs): - """hooks called once instance's initialization has been completed""" - - -class GenStaticDataDirMixIn(object): - """Create a directory merging all data directory content from cubes and CW. - """ - def generate_static_dir(self, config, dest=None, ask_clean=False, repo=None): - if not dest: - dest = config['staticdir-path'] - if not dest: - dest = osp.join(config.appdatahome, 'data') - if osp.exists(dest): - if (not ask_clean or - not (config.verbosity and - ASK.confirm('Remove existing data directory %s?' % dest))): - raise ExecutionError('Directory %s already exists. ' - 'Remove it first.' 
% dest) - rmtree(dest) - config.quick_start = True # notify this is not a regular start - # list all resources (no matter their order) - resources = set() - for datadir in self._datadirs(config, repo=repo): - for dirpath, dirnames, filenames in os.walk(datadir): - rel_dirpath = dirpath[len(datadir)+1:] - resources.update(osp.join(rel_dirpath, f) for f in filenames) - # locate resources and copy them to destination - for resource in resources: - dest_resource = osp.join(dest, resource) - dirname = osp.dirname(dest_resource) - if not osp.isdir(dirname): - os.makedirs(dirname) - resource_dir, resource_path = config.locate_resource(resource) - copy(osp.join(resource_dir, resource_path), dest_resource) - # handle md5 version subdirectory - linkdir(dest, osp.join(dest, config.instance_md5_version())) - print('You can use apache rewrite rule below :\n' - 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest) - - def _datadirs(self, config, repo=None): - if repo is None: - repo = config.repository() - if config._cubes is None: - # web only config - config.init_cubes(repo.get_cubes()) - for cube in repo.get_cubes(): - cube_datadir = osp.join(cwcfg.cube_dir(cube), 'data') - if osp.isdir(cube_datadir): - yield cube_datadir - yield osp.join(config.shared_dir(), 'data') - - -class WebUpgradeHandler(CommandHandler, GenStaticDataDirMixIn): - cmdname = 'upgrade' - - def postupgrade(self, repo): - config = self.config - if not config['generate-staticdir']: - return - self.generate_static_dir(config, ask_clean=True, repo=repo) - - -class GenStaticDataDir(Command, GenStaticDataDirMixIn): - """Create a directory merging all data directory content from cubes and CW. - """ - name = 'gen-static-datadir' - arguments = ' [dirpath]' - min_args = 1 - max_args = 2 - - options = () - - def run(self, args): - appid = args.pop(0) - config = cwcfg.config_for(appid) - dest = None - if args: - dest = args[0] - self.generate_static_dir(config, dest) - - -CWCTL.register(GenStaticDataDir) diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/__init__.py --- a/wsgi/__init__.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,63 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""This package contains all WSGI specific code for cubicweb - -NOTE: this package borrows a lot of code to Django - (http://www.djangoproject.com) and to the wsgiref module - of the python2.5's stdlib. 
- -WSGI corresponding PEP: http://www.python.org/dev/peps/pep-0333/ - -""" -__docformat__ = "restructuredtext en" - -from email import message, message_from_string -from pprint import pformat as _pformat - -from six.moves.http_cookies import SimpleCookie - -def pformat(obj): - """pretty prints `obj` if possible""" - try: - return _pformat(obj) - except Exception: - return u'' - -def normalize_header(header): - """returns a normalized header name - - >>> normalize_header('User_Agent') - 'User-agent' - """ - return header.replace('_', '-').capitalize() - -def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0): - """ - THIS COMES FROM DJANGO - A version of shutil.copyfileobj that will not read more than 'size' bytes. - This makes it safe from clients sending more than CONTENT_LENGTH bytes of - data in the body. - """ - if not size: - return - while size > 0: - buf = fsrc.read(min(length, size)) - if not buf: - break - fdst.write(buf) - size -= len(buf) diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/handler.py --- a/wsgi/handler.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,134 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""WSGI request handler for cubicweb""" - -__docformat__ = "restructuredtext en" - -from itertools import chain, repeat - -from six.moves import zip - -from cubicweb import AuthenticationError -from cubicweb.web import DirectResponse -from cubicweb.web.application import CubicWebPublisher -from cubicweb.wsgi.request import CubicWebWsgiRequest - -# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html -STATUS_CODE_TEXT = { - 100: 'CONTINUE', - 101: 'SWITCHING PROTOCOLS', - 200: 'OK', - 201: 'CREATED', - 202: 'ACCEPTED', - 203: 'NON-AUTHORITATIVE INFORMATION', - 204: 'NO CONTENT', - 205: 'RESET CONTENT', - 206: 'PARTIAL CONTENT', - 300: 'MULTIPLE CHOICES', - 301: 'MOVED PERMANENTLY', - 302: 'FOUND', - 303: 'SEE OTHER', - 304: 'NOT MODIFIED', - 305: 'USE PROXY', - 306: 'RESERVED', - 307: 'TEMPORARY REDIRECT', - 400: 'BAD REQUEST', - 401: 'UNAUTHORIZED', - 402: 'PAYMENT REQUIRED', - 403: 'FORBIDDEN', - 404: 'NOT FOUND', - 405: 'METHOD NOT ALLOWED', - 406: 'NOT ACCEPTABLE', - 407: 'PROXY AUTHENTICATION REQUIRED', - 408: 'REQUEST TIMEOUT', - 409: 'CONFLICT', - 410: 'GONE', - 411: 'LENGTH REQUIRED', - 412: 'PRECONDITION FAILED', - 413: 'REQUEST ENTITY TOO LARGE', - 414: 'REQUEST-URI TOO LONG', - 415: 'UNSUPPORTED MEDIA TYPE', - 416: 'REQUESTED RANGE NOT SATISFIABLE', - 417: 'EXPECTATION FAILED', - 500: 'INTERNAL SERVER ERROR', - 501: 'NOT IMPLEMENTED', - 502: 'BAD GATEWAY', - 503: 'SERVICE UNAVAILABLE', - 504: 'GATEWAY TIMEOUT', - 505: 'HTTP VERSION NOT SUPPORTED', -} - -class WSGIResponse(object): - """encapsulates the wsgi response parameters - (code, headers and body if there is one) - """ - def __init__(self, code, req, body=None): - text = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE') - self.status = '%s %s' % (code, text) - self.headers = list(chain(*[zip(repeat(k), v) - for k, v in req.headers_out.getAllRawHeaders()])) - self.headers = [(str(k), str(v)) for k, v in self.headers] - if body: - self.body = [body] - else: - self.body = [] - - def __iter__(self): - return iter(self.body) - - -class CubicWebWSGIApplication(object): - """This is the wsgi application which will be called by the - wsgi server with the WSGI ``environ`` and ``start_response`` - parameters. - """ - - def __init__(self, config): - self.appli = CubicWebPublisher(config.repository(), config) - self.config = config - self.base_url = self.config['base-url'] - self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter') - - def _render(self, req): - """this function performs the actual rendering - """ - try: - path = req.path - result = self.appli.handle_request(req, path) - except DirectResponse as ex: - return ex.response - return WSGIResponse(req.status_out, req, result) - - - def __call__(self, environ, start_response): - """WSGI protocol entry point""" - req = CubicWebWsgiRequest(environ, self.appli.vreg) - response = self._render(req) - start_response(response.status, response.headers) - return response.body - - - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(CubicWebWSGIApplication, getLogger('cubicweb.wsgi')) diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/request.py --- a/wsgi/request.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,160 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""WSGI request adapter for cubicweb - -NOTE: each docstring tagged with ``COME FROM DJANGO`` means that -the code has been taken (or adapted) from Djanco source code : - http://www.djangoproject.com/ - -""" - -__docformat__ = "restructuredtext en" - -import tempfile - -from io import BytesIO - -from six.moves.urllib.parse import parse_qs - -from cubicweb.multipart import ( - copy_file, parse_form_data, parse_options_header) -from cubicweb.web import RequestError -from cubicweb.web.request import CubicWebRequestBase -from cubicweb.wsgi import pformat, normalize_header - - -class CubicWebWsgiRequest(CubicWebRequestBase): - """most of this code COMES FROM DJANGO - """ - - def __init__(self, environ, vreg): - # self.vreg is used in get_posted_data, which is called before the - # parent constructor. - self.vreg = vreg - - self.environ = environ - self.path = environ['PATH_INFO'] - self.method = environ['REQUEST_METHOD'].upper() - - # content_length "may be empty or absent" - try: - length = int(environ['CONTENT_LENGTH']) - except (KeyError, ValueError): - length = 0 - # wsgi.input is not seekable, so copy the request contents to a temporary file - if length < 100000: - self.content = BytesIO() - else: - self.content = tempfile.TemporaryFile() - copy_file(environ['wsgi.input'], self.content, maxread=length) - self.content.seek(0, 0) - environ['wsgi.input'] = self.content - - headers_in = dict((normalize_header(k[5:]), v) for k, v in self.environ.items() - if k.startswith('HTTP_')) - if 'CONTENT_TYPE' in environ: - headers_in['Content-Type'] = environ['CONTENT_TYPE'] - https = self.is_secure() - if self.path.startswith('/https/'): - self.path = self.path[6:] - self.environ['PATH_INFO'] = self.path - https = True - - post, files = self.get_posted_data() - - super(CubicWebWsgiRequest, self).__init__(vreg, https, post, - headers= headers_in) - self.content = environ['wsgi.input'] - if files is not None: - for key, part in files.items(): - self.form[key] = (part.filename, part.file) - - def __repr__(self): - # Since this is called as part of error handling, we need to be very - # robust against potentially malformed input. 
- form = pformat(self.form) - meta = pformat(self.environ) - return '' % \ - (form, meta) - - ## cubicweb request interface ################################################ - - def http_method(self): - """returns 'POST', 'GET', 'HEAD', etc.""" - return self.method - - def relative_path(self, includeparams=True): - """return the normalized path of the request (ie at least relative - to the instance's root, but some other normalization may be needed - so that the returned path may be used to compare to generated urls - - :param includeparams: - boolean indicating if GET form parameters should be kept in the path - """ - path = self.environ['PATH_INFO'] - path = path[1:] # remove leading '/' - if includeparams: - qs = self.environ.get('QUERY_STRING') - if qs: - return '%s?%s' % (path, qs) - - return path - - ## wsgi request helpers ################################################### - - def is_secure(self): - return self.environ['wsgi.url_scheme'] == 'https' - - def get_posted_data(self): - # The WSGI spec says 'QUERY_STRING' may be absent. - post = parse_qs(self.environ.get('QUERY_STRING', '')) - files = None - if self.method == 'POST': - content_type = self.environ.get('CONTENT_TYPE') - if not content_type: - raise RequestError("Missing Content-Type") - content_type, options = parse_options_header(content_type) - if content_type in ( - 'multipart/form-data', - 'application/x-www-form-urlencoded', - 'application/x-url-encoded'): - forms, files = parse_form_data( - self.environ, strict=True, - mem_limit=self.vreg.config['max-post-length']) - post.update(forms.dict) - self.content.seek(0, 0) - return post, files - - def setup_params(self, params): - # This is a copy of CubicWebRequestBase.setup_params, but without - # converting unicode strings because it is partially done by - # get_posted_data - self.form = {} - if params is None: - return - encoding = self.encoding - for param, val in params.items(): - if isinstance(val, (tuple, list)): - if len(val) == 1: - val = val[0] - if param in self.no_script_form_params and val: - val = self.no_script_form_param(param, val) - if param == '_cwmsgid': - self.set_message_id(val) - else: - self.form[param] = val diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/server.py --- a/wsgi/server.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""dummy wsgi server for CubicWeb web instances""" - -__docformat__ = "restructuredtext en" - -from cubicweb.wsgi.handler import CubicWebWSGIApplication -from cubicweb import ConfigurationError -from wsgiref import simple_server - -from logging import getLogger -LOGGER = getLogger('cubicweb') - - -def run(config): - config.check_writeable_uid_directory(config.appdatahome) - - port = config['port'] or 8080 - interface = config['interface'] - - app = CubicWebWSGIApplication(config) - handler_cls = simple_server.WSGIRequestHandler - httpd = simple_server.WSGIServer((interface, port), handler_cls) - httpd.set_app(app) - repo = app.appli.repo - try: - repo.start_looping_tasks() - LOGGER.info('starting http server on %s', config['base-url']) - httpd.serve_forever() - finally: - repo.shutdown() diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/test/requirements.txt --- a/wsgi/test/requirements.txt Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -webtest diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/test/unittest_wsgi.py --- a/wsgi/test/unittest_wsgi.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,116 +0,0 @@ -# encoding=utf-8 - -import webtest.app -from io import BytesIO - -from cubicweb.devtools.webtest import CubicWebTestTC - -from cubicweb.wsgi.request import CubicWebWsgiRequest -from cubicweb.multipart import MultipartError - - -class WSGIAppTC(CubicWebTestTC): - def test_content_type(self): - r = webtest.app.TestRequest.blank('/', {'CONTENT_TYPE': 'text/plain'}) - - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertEqual('text/plain', req.get_header('Content-Type')) - - def test_content_body(self): - r = webtest.app.TestRequest.blank('/', { - 'CONTENT_LENGTH': 12, - 'CONTENT_TYPE': 'text/plain', - 'wsgi.input': BytesIO(b'some content')}) - - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertEqual(b'some content', req.content.read()) - - def test_http_scheme(self): - r = webtest.app.TestRequest.blank('/', { - 'wsgi.url_scheme': 'http'}) - - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertFalse(req.https) - - def test_https_scheme(self): - r = webtest.app.TestRequest.blank('/', { - 'wsgi.url_scheme': 'https'}) - - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertTrue(req.https) - - def test_https_prefix(self): - r = webtest.app.TestRequest.blank('/https/', { - 'wsgi.url_scheme': 'http'}) - - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertTrue(req.https) - - def test_big_content(self): - content = b'x'*100001 - r = webtest.app.TestRequest.blank('/', { - 'CONTENT_LENGTH': len(content), - 'CONTENT_TYPE': 'text/plain', - 'wsgi.input': BytesIO(content)}) - - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertEqual(content, req.content.read()) - - def test_post(self): - self.webapp.post( - '/', - params={'__login': self.admlogin, '__password': self.admpassword}) - - def test_post_bad_form(self): - with self.assertRaises(MultipartError): - self.webapp.post( - '/', - params='badcontent', - headers={'Content-Type': 'multipart/form-data'}) - - def test_post_non_form(self): - self.webapp.post( - '/', - params='{}', - headers={'Content-Type': 'application/json'}) - - def test_get_multiple_variables(self): - r = webtest.app.TestRequest.blank('/?arg=1&arg=2') - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertEqual([u'1', u'2'], req.form['arg']) - - def test_post_multiple_variables(self): - r = webtest.app.TestRequest.blank('/', 
POST='arg=1&arg=2') - req = CubicWebWsgiRequest(r.environ, self.vreg) - - self.assertEqual([u'1', u'2'], req.form['arg']) - - def test_post_files(self): - content_type, params = self.webapp.encode_multipart( - (), (('filefield', 'aname', b'acontent'),)) - r = webtest.app.TestRequest.blank( - '/', POST=params, content_type=content_type) - req = CubicWebWsgiRequest(r.environ, self.vreg) - self.assertIn('filefield', req.form) - fieldvalue = req.form['filefield'] - self.assertEqual(u'aname', fieldvalue[0]) - self.assertEqual(b'acontent', fieldvalue[1].read()) - - def test_post_unicode_urlencoded(self): - params = 'arg=%C3%A9' - r = webtest.app.TestRequest.blank( - '/', POST=params, content_type='application/x-www-form-urlencoded') - req = CubicWebWsgiRequest(r.environ, self.vreg) - self.assertEqual(u"é", req.form['arg']) - - -if __name__ == '__main__': - import unittest - unittest.main() diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/tnd.py --- a/wsgi/tnd.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""tornado wsgi server for CubicWeb web instances""" - -__docformat__ = "restructuredtext en" - - -from cubicweb.wsgi.handler import CubicWebWSGIApplication -from cubicweb import ConfigurationError -from tornado import wsgi, httpserver, ioloop - -from logging import getLogger -LOGGER = getLogger('cubicweb') - - -def run(config): - config.check_writeable_uid_directory(config.appdatahome) - - port = config['port'] or 8080 - interface = config['interface'] - - app = CubicWebWSGIApplication(config) - container = wsgi.WSGIContainer(app) - http_server = httpserver.HTTPServer(container) - http_server.listen(port, interface) - repo = app.appli.repo - try: - repo.start_looping_tasks() - LOGGER.info('starting http server on %s', config['base-url']) - ioloop.IOLoop.instance().start() - finally: - repo.shutdown() diff -r 058bb3dc685f -r 0b59724cb3f2 wsgi/wz.py --- a/wsgi/wz.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""dummy wsgi server for CubicWeb web instances""" - -__docformat__ = "restructuredtext en" - -import socket - -from cubicweb.wsgi.handler import CubicWebWSGIApplication -from cubicweb import ConfigurationError -from werkzeug.serving import run_simple - -from logging import getLogger -LOGGER = getLogger('cubicweb') - - -def run(config): - config.check_writeable_uid_directory(config.appdatahome) - - port = config['port'] or 8080 - interface = config['interface'] - - app = CubicWebWSGIApplication(config) - repo = app.appli.repo - try: - repo.start_looping_tasks() - LOGGER.info('starting http server on %s', config['base-url']) - run_simple(interface, port, app, - threaded=True, - use_debugger=True, - processes=1) # more processes yield weird errors - finally: - repo.shutdown() diff -r 058bb3dc685f -r 0b59724cb3f2 xy.py --- a/xy.py Mon Jan 04 18:40:30 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""map standard cubicweb schema to xml vocabularies""" - -from yams import xy - -xy.register_prefix('rdf', 'http://www.w3.org/1999/02/22-rdf-syntax-ns#') -xy.register_prefix('dc', 'http://purl.org/dc/elements/1.1/') -xy.register_prefix('foaf', 'http://xmlns.com/foaf/0.1/') -xy.register_prefix('doap', 'http://usefulinc.com/ns/doap#') -xy.register_prefix('owl', 'http://www.w3.org/2002/07/owl#') -xy.register_prefix('dcterms', 'http://purl.org/dc/terms/') - -xy.add_equivalence('creation_date', 'dc:date') -xy.add_equivalence('created_by', 'dc:creator') -xy.add_equivalence('description', 'dc:description') -xy.add_equivalence('CWUser', 'foaf:Person') -xy.add_equivalence('CWUser login', 'foaf:Person dc:title') -xy.add_equivalence('CWUser surname', 'foaf:Person foaf:name')
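The mapping module removed above is re-added as cubicweb/xy.py by this changeset, and application cubes can extend the vocabulary with the same two helpers it uses itself. A minimal sketch, assuming application-level code (the 'skos' prefix and the 'title' equivalence are hypothetical examples, not part of CubicWeb's default mapping):

from yams import xy

# hypothetical additions on top of the defaults registered by cubicweb/xy.py
xy.register_prefix('skos', 'http://www.w3.org/2004/02/skos/core#')
xy.add_equivalence('title', 'dc:title')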
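Similarly, the WSGI request adapter deleted earlier in this changeset is now importable from the cubicweb package. A minimal sketch of how it is exercised after the move, mirroring the deleted wsgi/test/unittest_wsgi.py and assuming a 'vreg' such as the one provided by cubicweb.devtools.webtest.CubicWebTestTC (the make_request helper is illustrative only):

import webtest.app

from cubicweb.wsgi.request import CubicWebWsgiRequest


def make_request(vreg):
    """Build a CubicWebWsgiRequest from a synthetic WSGI environ."""
    r = webtest.app.TestRequest.blank('/?arg=1&arg=2')
    req = CubicWebWsgiRequest(r.environ, vreg)
    # repeated GET parameters are kept as a list (see get_posted_data and
    # setup_params in the removed wsgi/request.py)
    assert req.form['arg'] == [u'1', u'2']
    return req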